+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.mA3uXbWMcz --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [7914/7914 modules configured] [2305/5251 modules rendered]
[2 ymakes processing] [7914/7914 modules configured] [5229/5251 modules rendered]
[2 ymakes processing] [7914/7914 modules configured] [5251/5251 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7920/7920 modules configured] [5251/5251 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 0.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/libydb-core-base.a | 0.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a | 1.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a | 1.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test | 2.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a | 2.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a | 3.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a | 3.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a | 4.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/tablet_killer.cpp | 5.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a | 5.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a | 6.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a | 6.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/backtrace.cpp | 6.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/actor_activity_names.cpp | 6.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a | 6.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob.cpp | 6.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a | 6.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a | 6.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a | 7.0%| PREPARE $(VCS) - 0 bytes | 7.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a | 7.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a | 7.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a | 7.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a | 7.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a | 8.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a | 7.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a | 8.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a | 8.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a | 8.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a | 8.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a | 8.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a | 8.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a | 8.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCParser/liblib-MC-MCParser.a | 8.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a | 9.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a | 9.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a | 9.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a | 9.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a | 9.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a | 9.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a | 9.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |10.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |10.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |10.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |10.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |10.5%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |10.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |10.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |10.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |11.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |10.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |11.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/version/libversion_definition.a |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |12.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |12.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |12.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |12.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |13.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.global.a |13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |14.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |14.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |14.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |14.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |14.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/sql_parser.cpp |14.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |15.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/net_classifier.h_serialized.cpp |15.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |15.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |15.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/libydb-core-audit.a |15.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_driver/libcpp-client-ydb_driver.a |15.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_export/libcpp-client-ydb_export.a |15.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |15.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_discovery/libcpp-client-ydb_discovery.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |16.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/libcore-base-generated.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |16.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |16.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |17.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.global.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a 
|17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.a |17.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |18.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |18.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |19.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |20.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |20.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |20.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |20.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |20.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |20.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |21.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |21.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |22.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_settings.cpp |22.1%| PREPARE $(YMAKE_PYTHON3-4256832079) - 0 bytes |22.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |21.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_yandex.cpp |21.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_nebius.cpp |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |22.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_handler.cpp |22.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_yandex.cpp |22.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create.cpp |22.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_handler.cpp |22.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |22.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |23.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |23.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |23.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_nebius.cpp |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_datastreams/libcpp-client-ydb_datastreams.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |23.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/trace.cpp |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.a |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |24.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |24.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |25.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_publish.cpp |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |25.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |26.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/managed_executor.cpp |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |27.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.a |28.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.a |28.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/filelock/libpy3library-python-filelock.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |28.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |28.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/insert_table/broken_dedup.cpp |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_client.cpp |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.a |29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |30.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a 
|30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |33.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |34.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_util.cpp |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |34.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/context.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page.cpp |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |35.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/pytest/libpy3library-python-pytest.global.a |36.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.a |35.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |35.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a |35.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |35.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |35.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |36.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.a |36.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/libydb-services-metadata.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |36.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/topic_sdk_test_setup.cpp |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a 
|37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/operation/libclient-ydb_types-operation.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/libcpp-client-ydb_types.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/common/libclient-ydb_topic-common.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/fatal_error_handlers/libclient-ydb_types-fatal_error_handlers.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/exceptions/libclient-ydb_types-exceptions.a |37.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |37.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |37.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/base/libpublic-lib-base.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/libcpp-client-ydb_topic.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/libclient-ydb_types-credentials.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_value/libcpp-client-ydb_value.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/query_stats/libclient-ydb_table-query_stats.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/status/libclient-ydb_types-status.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/login/libydb_types-credentials-login.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_ss_tasks/libcpp-client-ydb_ss_tasks.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bsconfig/libydb-services-bsconfig.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/oauth2_token_exchange/libydb_types-credentials-oauth2_token_exchange.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/impl/libclient-ydb_persqueue_core-impl.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_params/libcpp-client-ydb_params.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_stats/libclient-impl-ydb_stats.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_import/libcpp-client-ydb_import.a |38.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |38.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/jwt/libpublic-lib-jwt.a |38.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/liblib-operation_id-protos.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |38.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_operation/libcpp-client-ydb_operation.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam_private/libcpp-client-iam_private.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/libcpp-client-ydb_table.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam/common/libclient-iam-common.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |39.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/libpublic-lib-operation_id.a |39.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/proto/libproviders-yt-proto.a |39.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |40.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |40.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/job/libproviders-yt-job.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/opt/libproviders-yt-opt.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |40.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/comp_nodes/llvm14/libyt-comp_nodes-llvm14.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |41.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/async_io/libproviders-solomon-async_io.a |41.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/impl/libclient-ydb_topic-impl.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/draft/libcpp-client-draft.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/common/libproviders-yt-common.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |42.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/generic/connector/api/common/libconnector-api-common.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |42.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/comp_nodes/dq/libyt-comp_nodes-dq.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |42.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/obfuscate/liblibrary-persqueue-obfuscate.a |43.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |43.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |43.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |43.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |44.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |44.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |44.0%| PREPARE $(LLD_ROOT-2644097164) - 0 bytes |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/native/libyt-gateway-native.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |44.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |43.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/uploader/libproviders-stat-uploader.a |44.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/hash/libyt-lib-hash.a |44.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |44.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/log/libyt-lib-log.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/schema/libyt-lib-schema.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/libproviders-yt-codec.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/transactions/libdata_sharing-common-transactions.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |45.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_context.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_table_desc.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/columns_set.h_serialized.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_wide_flow.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_impl.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/columns_set.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_key.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_gateway.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_optimize.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_reorder.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/providers/yt/provider/yql_yt_table.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_helpers.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input_filter.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_hash.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_epoch.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_intent_determination.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_integration.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_peephole.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |48.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_extension/libcpp-client-ydb_extension.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/impl/libclient-ydb_common_client-impl.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/libcpp-client-ydb_common_client.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/include/libclient-ydb_persqueue_public-include.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_proto/libcpp-client-ydb_proto.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_result/libcpp-client-ydb_result.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_impl.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_scheme/libcpp-client-ydb_scheme.a |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/impl/libclient-ydb_query-impl.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_rate_limiter/libcpp-client-ydb_rate_limiter.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_coordination/libcpp-client-ydb_coordination.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |49.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/misc/libcpp-yt-misc.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |49.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/libcpp-client-ydb_query.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |51.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/impl/libclient-ydb_persqueue_public-impl.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/impl/libclient-ydb_table-impl.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/container/librestricted-boost-container.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IRReader/libllvm14-lib-IRReader.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm14/lib/Linker/libllvm14-lib-Linker.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/libllvm14-lib-ExecutionEngine.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ProfileData/libllvm14-lib-ProfileData.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/libllvm14-lib-MC.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Object/libllvm14-lib-Object.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx512/liblibs-hyperscan-runtime_avx512.a |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm14/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |53.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_context.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_initialize.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_tablet.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |53.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/expat/libcontrib-libs-expat.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_state.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/BinaryFormat/libllvm14-lib-BinaryFormat.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Utils/liblib-Transforms-Utils.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IR/libllvm14-lib-IR.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/scan.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_write.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/AsmParser/libllvm14-lib-AsmParser.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Remarks/libllvm14-lib-Remarks.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Demangle/libllvm14-lib-Demangle.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/libllvm14-lib-Target.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/state_server_interface.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/TextAPI/libllvm14-lib-TextAPI.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/test_tablet/test_shard_mon.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Support/libllvm14-lib-Support.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |54.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |53.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/liblib-Target-X86.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/IPO/liblib-Transforms-IPO.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/auditlog_helpers.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |53.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Analysis/libllvm14-lib-Analysis.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |53.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/libcore-config-init.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |53.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/json2_udf.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/re2_udf.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |54.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |54.2%| PREPARE $(CLANG_FORMAT-2313326005) - 0 bytes |54.2%| PREPARE $(PYTHON) - 0 bytes |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/string_udf.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/datetime2_udf.cpp |54.3%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/init/dummy.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_hugeblobctx.cpp |54.2%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |54.3%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histograms.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histogram_latency.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_handle_class.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/group_stat_aggregator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_delayed_cost_loop.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_defs.h_serialized.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_drivemodel_db.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |54.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_params.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |54.4%| PREPARE $(FLAKE8_PY2-2255386470) - 0 bytes |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/init/init_noop.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_signal_event.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_flightcontrol.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_atomicblockcounter.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_performance_params.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/drivedata_serializer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |54.5%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) - 0 bytes |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/libllvm14-lib-CodeGen.a |54.5%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter |54.5%| PREPARE $(FLAKE8_PY3-1472545107) - 0 bytes |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_vdiskid.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |54.4%| PREPARE $(TEST_TOOL_HOST-sbr:7480276291) - 0 bytes |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/html.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_syncstate.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/libydb-core-cms.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hulloptlsn.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/librun.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blob_depot.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |54.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 0 bytes |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_load.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_gc.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/coro_tx.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_decommit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/audit_log.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/assimilator.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/status.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_mon.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_resolve.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/event.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init.cpp 
|54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/services.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/services.h_serialized.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blocks.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |54.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_trash.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blocks.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/query.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/agent.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_init_schema.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/metrics.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_uncertain.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/read.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.global.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |54.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/comm.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/request.cpp |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/discovery/libydb-core-discovery.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/garbage_collection.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/libydb-core-control.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_load.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_audit.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/libcore-cms-console.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/testing.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/garbage.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/grpc_library_helper.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/libcore-client-server.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/proxy.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/mon_main.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/tx_processor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_configdummy.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/main.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/ic_nodes_cache_service.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/node_checkers.cpp |54.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_parser.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |54.7%| PREPARE $(GDB) - 0 bytes |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/info_collector.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_apply_config.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/factories.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/registry.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/core_validators.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/space_monitor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/logger.cpp 
|54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/api_adapters.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/http_ping.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/http.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_load_state.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_api_handler.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogformat.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_context.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogneighbors.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/db_key_resolver.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/erasure_checkers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_result.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/downtime.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_config.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |54.8%| PREPARE $(WITH_JDK11-sbr:6936090488) - 0 bytes |54.8%| PREPARE $(JDK11-1325468316) - 0 bytes |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_impl.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_control.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_sqs.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_provider.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/logger.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_manager.cpp |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/cli_utils/melancholic_gopher.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_types.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_keyvalue.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_http_server.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_server.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |55.1%| PREPARE $(CLANG-1735056821) - 0 bytes |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util.cpp |55.0%| PREPARE $(CLANG18-390461695) - 0 bytes |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/minikql/minikql_engine_host.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_kill.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |55.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |55.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/discovery/discovery.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/util/libcms-console-util.a |55.1%| [LD] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |55.2%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_local_scheme_tx.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |55.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |55.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |55.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a >> collection.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a >> test_stats_mode.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> 
test_kill_tablets.py::flake8 [GOOD] |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_local_enumerate_tablets.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |55.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_config_with_metadata.py::flake8 [GOOD] |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |55.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |55.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |55.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/proxy.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/clusters_from_connections.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_health_check.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |55.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/config_helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_manager.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__load_state.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/database_resolver.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter_resources.cpp |55.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__configure.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_result_write.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/table_bindings_from_bindings.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_get.cpp |55.6%| PREPARE $(CLANG14-1922233694) - 0 bytes |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/result_writer.cpp |55.6%| PREPARE $(CLANG-1922233694) - 0 bytes |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_factory.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_local_minikql.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/validation_functions.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_console.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_databases_cache.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/cms_grpc_client_actor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/compute/ydb/control_plane/monitoring_rest_client_actor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/ydbcp_grpc_client_actor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/monitoring_grpc_client_actor.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a >> http_client.py::flake8 [GOOD] >> query_results.py::flake8 [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_database_control_plane_service.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/http.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/database_monitoring.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/probes.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/probes.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/control_plane_storage_counters.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/http_api_client/flake8 >> query_results.py::flake8 [GOOD] |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/request_validators.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/util.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_kv_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_dynumber.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> tpc_tests.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_stability.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__init_scheme.cpp >> collection.py::flake8 [GOOD] >> compare.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__create_tenant.cpp |55.9%| [TS] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/dynumber/flake8 >> test_dynumber.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kv_workload/flake8 >> test_kv_workload.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/request_features.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/parsing.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/kqp_common.h_serialized.cpp 
|55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/events.cpp |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/events.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/decoder.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/local_discovery/grpc_service.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |56.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/probes.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/counters.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/json_filter.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/actors_factory.cpp |56.2%| PREPARE $(CLANG16-1380963495) - 0 bytes |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/common.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/json_parser.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/libydb-services-ydb.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/backends.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_export.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/executer_actor.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/ds_table/config.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/error.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scheme.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/private_client/loopback_service.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_operation.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__set_config.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ymq/libydb-services-ymq.a |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/private_client/private_client.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |55.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/libydb-core-health_check.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ymq/utils.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/libcore-io_formats-arrow.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/run_actor_params.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table.cpp |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_request.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util/config_index.cpp |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/timeout.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/counters/counters.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_sessions_describe.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kesus/tablet/tx_quoter_resource_describe.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/manager.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_destroy.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_detach.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_impl.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_html.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet.cpp >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_db.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/ss_dialog.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/object.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/pinger.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_minikql_compile_and_exec.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/schema.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/events.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_runtime.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_records.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kesus/tablet/rate_accounting.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/test_connection/test_monitoring.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_config_get.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_dummy.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_init_schema.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |56.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_protocol.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/kafka.h_serialized.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages_int.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_release.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_describe.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_update.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_add.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_delete.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_delete.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_create.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_acquire.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_attach.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_update.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/test_connection/test_data_streams.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/private_client/internal_service.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/probes.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kafka_proxy/kafka_messages.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/test_connection/test_object_storage.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/health/health.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/metrics_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/common.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_dummy.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/initializer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp 
|56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/initializer.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/behaviour.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/snapshot.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/init/init.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/fetcher.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/initializer.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/fetcher.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/object.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/manager.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/registration.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_access.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp >> test.py::py2_flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/snapshot.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/initialization.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/manager.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_init.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/fetcher.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/common.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_secret.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp 
|56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/exceptions_mapping.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/grpc_service.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_export.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/logs/log.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_startup.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp >> __main__.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/grpc_service.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_stored_state_data.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/shard_impl.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_data.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collect_operation.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_simple_db_flat.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/ymq_proxy.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_helpers.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/services/metadata/ds_table/service.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a >> gen-report.py::flake8 [GOOD] |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_replication.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_service.cpp |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_stat.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/operation_helpers.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_backup.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_view.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_bsconfig.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/events.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_login.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp >> test.py::py2_flake8 [GOOD] >> main.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/http_proxy/auth_factory.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_req.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cms.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/flake8 >> main.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp >> test.py::py2_flake8 [GOOD] |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy.cpp |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/physical/predicate_collector.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/discovery_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue.cpp |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp 
|56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp >> kikimr_config.py::flake8 [GOOD] |56.9%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/minikql/computation/llvm14/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/table_settings.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/libydb-core-quoter.a |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp.cpp |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/libydb-core-load_test.a |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> integrations_test.py::flake8 [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |56.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/query_data/kqp_query_data.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/yql_single_query.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/pdisk_log.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |56.9%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/debug_info.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/probes.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/archive.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_acors.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ycsb/info_collector.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/aggregated_result.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/config_examples.cpp >> test_postgres.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/percentile.h_serialized.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/pdisk_write.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_read.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp >> test_ttl.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/libydb-core-security.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/libydb-core-mind.a >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/group_write.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection.cpp >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |56.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/common.cpp |56.8%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/memory.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/lease_holder.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/keyvalue_write.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/get_group.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/vdisk_write.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp >> test.py::py2_flake8 [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/kqp.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |57.1%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bsc.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_pool.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/register_node.cpp >> __main__.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/node_report.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/migrate.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/service_actor.cpp |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/libydb-core-mon.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/slot_indexes_pool.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/memory_info.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/profiler.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |56.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/crossref.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/stats.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> 
test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/blob.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/microseconds_sliding_window.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/utils.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/heartbeat.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/type_codecs_defs.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_page.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/header.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common_app.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/scrub.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_balancer.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pq_database.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/boot_queue.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/key.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/offload_actor.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/monitor.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_info.cpp 
|57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/percentile_counter.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_shared_func.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pq_rl_helpers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/fill.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/quota_tracker.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/sourceid_info.h_serialized.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/drain.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/metering_sink.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/write_id.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/write_meta.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/libydb-core-public_http.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_domains.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/out/libcore-protos-out.a |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |57.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/_18cb347d3165b0d493858ab581.yasm |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__register_node.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> 
test_serializable.py::flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/grpc_request_context_wrapper.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_long_tx_service.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_sequenceshard.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_cms.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |56.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__status.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/domain_info.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc >> test.py::py2_flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |56.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |56.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> 
test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |56.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |56.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_log.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_compatibility.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/scheme_board.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/schema.cpp |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/flake8 >> __main__.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp >> test_transform.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_move_info.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.3%| 
[CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |57.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc >> __main__.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/hive/tx__load_everything.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend_build.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |57.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/hive/tx__start_tablet.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/node_info.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__load_state.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_statics.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |57.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/balancer.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/sync_http_mon.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/labels_maintainer.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/async_http_mon.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/index_builder.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/writer.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/cluster_tracker.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/change_exchange.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc >> test.py::py2_flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_group_info.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/local.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |57.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_datashard.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm14/libminikql-codegen-llvm14.a |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/event_helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm14/libminikql-computation-llvm14.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_apply.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_processor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/monitoring.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_check_args.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_heap.cpp >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |58.0%| 
[CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lookup.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_logical.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_group.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flow.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_dictitems.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_compress.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_scale_request.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_coalesce.cpp >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mul.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_contains.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain_map.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_func.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mod.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_getelem.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_container.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_factory.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense1.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_addmember.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_count.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/http_request.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_aggrcount.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_exists.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/account_read_quoter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_append.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_skiptake.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_just.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain1_map.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_coalesce.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_callable.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_some.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_decimal.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_collect.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_logical.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_blocks.cpp 
|58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chopper.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_if.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_exists.cpp |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ensure.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_sum.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_combine.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_div.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_map_join.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_element.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_top.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flatmap.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_minmax.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_discard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_enumerate.cpp |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_filter.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_factory.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold1.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_extend.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_guess.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromyson.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromstring.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_frombytes.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_length.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_filter.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hasitems.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hopping.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterable.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_if.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join_imp.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_invoke.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lazy_list.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ifpresent.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_withcontext.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join_dict.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_removemember.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterator.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_take.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_pickle.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_listfromrange.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multihopping.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_mapnext.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/sourceid.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_nop.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multimap.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_next_value.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reduce.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_scale_manager.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_prepend.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_null.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_range.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_now.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_size.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_random.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_match_recognize.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_queue.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map_join.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_rh_hash.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reverse.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/mirrorer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_replicate.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_seq.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_safe_circular_buffer.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_switch.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_to_list.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_round.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_skip.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_sourcemanager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_init.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_time_order_recover.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_sort.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/user_info.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_state.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_source.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/host/libcore-kqp-host.a >> test.py::py2_flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__register_node.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_scalar_apply.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_condense.cpp |58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_toindexdict.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_unwrap.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_timezone.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tobytes.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_req.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/subscriber.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/protos/out/out.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_combine.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tostring.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_udf.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tooptional.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chopper.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_weakmember.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_visitall.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_way.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_read.cpp |58.4%| 
[CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_varitem.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chain_map.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_zip.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/fetch_request_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/write_quoter.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_map.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_while.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_top_sort.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/events.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a >> test_query_cache.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_write.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/libydb-core-tablet.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |58.0%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/libydb-core-testlib.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/read_balancer__balancing_app.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/read_balancer.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tracing/libydb-core-tracing.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/partition_monitoring.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor/service/worker.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |58.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/read_balancer_app.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/probes.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/aggregated_counters.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_observer.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_tracing_signals.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions.cpp >> conftest.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_part_group_iter_create.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_counters.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/http.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace_collection.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_create.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_borrowlogic.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_seat.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_gen.cpp |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_tenants.py::flake8 [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |58.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/read_quoter.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_client_cache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_database.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_create.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_counters_merger.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_scheme.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_apply.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_broker.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_db_counters.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_app.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_page_label.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_server.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_compaction_logic.h_serialized.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_snapshot.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq_l2_cache.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_tx_env.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_mem_warm.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_fwd_misc.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_dump.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_range.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_index_iter_create.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_outset.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_overlay.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_row_eggs.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_part_loader.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausage_meta.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_committed.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actors/block_events.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actors/wait_events.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_todict.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_misc.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index_histogram.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_store_hotdog.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actor_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausagecache.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/database.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq_impl_app.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/processor/schema.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_service.cpp >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_slice.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_comp_gen.h_serialized.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/libydb-services-fq.a >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ownerinfo.cpp |58.4%| PREPARE $(BLACK_LINTER-sbr:6648883615) - 0 bytes |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm14/libminikql-invoke_builtins-llvm14.a |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/common/events.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/common/service.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/flake8 >> test_init.py::flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/transaction.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq_impl.cpp |58.6%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/black_linter/black_linter |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/address.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/read.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/private_grpc.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/read.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/common/schema.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tracing/tablet_info.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/common.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/remove.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/read.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_whiteboard.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/background_controller.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service_impl.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/scan.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/column_tables.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/splitter.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/req_tracer.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/insert_table.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common/client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common/agent.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common/object_counter.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common/private.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common_data.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common/histogram.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_metrics.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a >> test.py::py2_flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/db_counters.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/kesus/libydb-services-kesus.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a >> __main__.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/read_balancer__balancing.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp >> test.py::py2_flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |58.6%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp >> test_base.py::flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp >> test_http_api.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp >> test.py::py2_flake8 [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp 
|58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_reset.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/tablets/tablets.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/service/service.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/config.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/runtime.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/common/owner.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |58.7%| PREPARE $(JDK_DEFAULT-4020545899) - 0 bytes |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_translate.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/appdata.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |58.8%| PREPARE $(WITH_JDK-sbr:6941855347) - 0 bytes |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_host.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/services.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_transform.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |58.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver.cpp |58.8%| PREPARE $(JDK17-4020545899) - 0 bytes |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ydb_over_fq.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/groups.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |58.9%| PREPARE $(WITH_JDK17-sbr:6941855347) - 0 bytes |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/pdisks.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/fake_coordinator.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |59.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_delete.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tx_helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tenant_runtime.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/nodes/nodes.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/vslots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |59.1%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_runner.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_sys.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/sessions/sessions.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/cs_helper.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/common_helper.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/grpc_service.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/test_client.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_insert_table.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_index.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/executor.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/deleting.cpp >> test_quoting.py::flake8 [GOOD] |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/activation.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/kesus/grpc_service.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_data.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp >> tstool.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/collector.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |58.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/committed.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/user_data.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/libydb-core-tx.a |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/flake8 >> tstool.py::flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/public/types/libpy3essentials-public-types.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/defs.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/filter.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/stats.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/queue.cpp |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/resolver.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/settings.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/compaction_info.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/base_with_blobs.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/common.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/counters.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/meta.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/inserted.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |57.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/interaction.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/abstract.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/raw_client/libcpp-mapreduce-raw_client.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/data_events/write_data.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |57.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/program/libcore-tx-program.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |57.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/abstract.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |57.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/_a5874f235d39dc6d1df389245e.yasm |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/insert_table/rt_insertion.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chunk_client/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/insert_table/path_info.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/backup_restore_common.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/insert_table/insert_table.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_scan.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |58.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/registry.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_read_table.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/client/cpp/client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_lookup_table.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shard_writer.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp 
|58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/follower_edge.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/backup_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/metadata.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_zstd.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_raw.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/local_kmeans.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/indexation.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/extstorage_usage_config.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/kmeans_helper.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_locks.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp 
|58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/policy.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/granule_view.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/service/counters.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/filler.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/iterator.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/program/program.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |58.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/events.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |58.8%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__write.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/message.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execution_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/events.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |59.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__init.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/upload_stats.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/restore_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/key_bound.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/import_s3.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/operation.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_common.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |58.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/requests.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |59.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/packet.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/protocol.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/time_counters.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |58.7%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap.cpp |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/lwtrace_probes.cpp |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/locks_db.cpp |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_hooks.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/service.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/message_seqno.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy_schemereq.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__init.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |58.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/re2/libyt-library-re2.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/sample_k.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |59.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/replication.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_remover.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/event_util.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/two_part_description.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/private_events.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/lag_provider.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/subscriber.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/events.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/execute_queue.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/key_validator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record.cpp |59.2%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/manager.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/locks/locks.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/service.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/config.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/logging.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/external_data.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/long_tx_write.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/controller.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |59.1%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/load_test.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/status_channel.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/task.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/initializer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/common.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/hash.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/unboxed_reader.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/scheme_cache.h_serialized.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/worker.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/tenant_resolver.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/session_info.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/sys_params.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_table.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/replication.h_serialized.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_with_stream.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_base.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/manager.cpp |59.2%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |59.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/libydb-core-util.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/client.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/global.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/mon.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/backoff.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/gen_step.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/format.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/random.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/text.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/tx_proxy/read_table_impl.h_serialized.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/source_location.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/ss_checker.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/object.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier.cpp |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/libydb-core-viewer.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/console.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/checker.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_slider.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_aggregate.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/sharding.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_merge.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_filter.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |59.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/random.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |59.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/response_tasks.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/message_delay_stats.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/attributes_md5.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/auth_mocks.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/describe.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/memory_tracker.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/initializer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |59.2%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/infly.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/local_rate_limiter_allocator.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/log.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/service.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/user_settings_names.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/sha256.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/cloud_enums.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/helpers.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/events_writer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/acl.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |59.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/query_id.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/queue_id.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/probes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/queue_attributes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/failure_injection.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/xml_builder.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/parser.rl6.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/behaviour.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |59.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |59.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/libydb-mvp-core.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/common.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/manager.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_mem_profiler.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/reducer.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/filter.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/parser.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/cache_policy.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/merger.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/counters.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mapper.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/abstract/liblibrary-formats-arrow-accessor-abstract.a |59.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/client/liblibrary-grpc-client.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/ss_fetcher.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/checker.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/rule/object.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.a |59.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser_public/liblibrary-persqueue-topic_parser_public.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |59.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |59.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/column_families.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |59.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |59.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/base/subdomain.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/row_version.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/group_stat.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/storage_pools.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/services_assert.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |59.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/pool_stats_collector.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/path.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_68875c7d34f9bbe09248b5ec55.yasm |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/event_filter.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_2549b9c50b780e2386d838ff17.yasm |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/68875c7d34f9bbe09248b5ec55.auxcpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_583eccaec03903a04e0516e9bb.yasm |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/domain.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_schema.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_b74ebee90bb7903d84da5b42f7.yasm |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/ut/ydb-core-blobstorage-crypto-ut |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_ae6accdc802b0e073e8d19156b.yasm |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.a |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_b74ebee90bb7903d84da5b42f7.yasm |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_browse.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/109168012f4665542dd2bafba9.auxcpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |59.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/_60cfd0c71f99697efa7d884ea6.yasm |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_factory.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/behaviour.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_109168012f4665542dd2bafba9.yasm |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/table_index.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_user.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/45bf9e1d124d3a4ab8f9f012d8.auxcpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/traceid.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_portion_from_chunks.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_b74ebee90bb7903d84da5b42f7.yasm |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_storage.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_request.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/b74ebee90bb7903d84da5b42f7.auxcpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/mkql_proto/mkql_proto_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/error.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/change_visibility.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_operation.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |59.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/join_bf2c9ed2f082df133ad2524c35.yasm |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_message.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/join_774f9df586ad2dd13cb3b71a58.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_profiles.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/count_queues.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/result_compare |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_settings.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp >> TBlobStorageCrypto::TestMixedStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher >> TBlobStorageCrypto::TestOffsetStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher [GOOD] >> TBlobStorageCrypto::PerfTestStreamCypher [GOOD] >> TBlobStorageCrypto::UnalignedTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/snapshot_from_chunks.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/http_client.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cfg.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pq.cpp 
|59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator/ydb-library-yaml_config-validator-ut-validator |59.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_query.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_queue.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_permissions.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/abstract.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_leader.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_wb_req.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/composite.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_queue.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |59.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |59.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/feature_flags_service.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/c36460c1f3f976caa23b5bd087.auxcpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queues.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/types.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/metering.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/get_value.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/action.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.a |59.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/actor.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/counters.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_users.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/counters.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge_queue.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/index_events_processor.cpp >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/receive_message.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/node_tracker.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/b74ebee90bb7903d84da5b42f7.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.a |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/service.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/http.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |59.7%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/ydb_cli/_b74ebee90bb7903d84da5b42f7.yasm |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |59.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_guardian.cpp >> Validator::MapValidation [GOOD] >> Validator::BoolValidation [GOOD] >> Validator::IntArrayValidation [GOOD] >> Validator::Enums [GOOD] >> Validator::StringValidation [GOOD] >> Validator::MultitypeNodeValidation [GOOD] >> Validator::IntValidation [GOOD] >> Validator::OpaqueMaps [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/schema.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/60cfd0c71f99697efa7d884ea6.auxcpp |58.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_6b2f2f7191f2fb9fffba30b043.yasm |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |58.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/retention.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/send_message.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/executor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydb.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_monitoring.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_user.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/appdata.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_service.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_pipe_req.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/library/yaml_config/validator/ut/validator/unittest >> Validator::OpaqueMaps [GOOD] |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/manager.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/mvp.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_event_filter.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/snapshot.cpp |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/config_units.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/monitoring.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_lookup.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |59.5%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydbc.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/run_query.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |59.6%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |59.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/table_service_config.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |59.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc 
|59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/583eccaec03903a04e0516e9bb.auxcpp |59.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/6b2f2f7191f2fb9fffba30b043.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/2549b9c50b780e2386d838ff17.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_45bf9e1d124d3a4ab8f9f012d8.yasm |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet_status_checker.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tx_processing.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/defs.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |59.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_proxy.cpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_c36460c1f3f976caa23b5bd087.yasm >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |59.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ae6accdc802b0e073e8d19156b.auxcpp >> TBlobStorageCryptoRope::TestInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::PerfTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::UnalignedTestStreamCypher [GOOD] >> TChaCha::KeystreamTest1 [GOOD] >> TChaCha::KeystreamTest2 [GOOD] >> TChaCha::KeystreamTest3 [GOOD] >> TChaCha::KeystreamTest4 [GOOD] >> TChaCha::KeystreamTest5 [GOOD] >> TChaCha::KeystreamTest6 [GOOD] >> TChaCha::KeystreamTest7 [GOOD] >> TChaCha::KeystreamTest8 [GOOD] >> TChaCha::MultiEncipherOneDecipher [GOOD] >> TChaCha::SecondBlock [GOOD] >> TChaCha512::KeystreamTest1 [GOOD] >> TChaCha512::KeystreamTest2 [GOOD] >> TChaCha512::KeystreamTest3 [GOOD] >> TChaCha512::KeystreamTest4 [GOOD] >> TChaCha512::KeystreamTest5 [GOOD] >> TChaCha512::KeystreamTest6 [GOOD] >> TChaCha512::KeystreamTest7 [GOOD] >> TChaCha512::KeystreamTest8 [GOOD] >> TChaCha512::MultiEncipherOneDecipher [GOOD] >> TChaCha512::SecondBlock [GOOD] >> TChaCha512::CompatibilityTest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta_versions.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/xml.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a >> TChaCha512::CompatibilityTest [GOOD] >> TChaChaVec::KeystreamTest1 [GOOD] >> TChaChaVec::KeystreamTest2 [GOOD] >> TChaChaVec::KeystreamTest3 [GOOD] >> TChaChaVec::KeystreamTest4 [GOOD] >> TChaChaVec::KeystreamTest5 [GOOD] >> TChaChaVec::KeystreamTest6 [GOOD] >> TChaChaVec::KeystreamTest7 [GOOD] >> TChaChaVec::KeystreamTest8 [GOOD] >> TChaChaVec::MultiEncipherOneDecipher [GOOD] >> TChaChaVec::SecondBlock [GOOD] >> TChaChaVec::CompatibilityTest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/client/bin/main.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |59.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/client/bin/sqs |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators_ut.cpp >> TChaChaVec::CompatibilityTest [GOOD] >> TPoly1305::TestVector1 [GOOD] >> TPoly1305::TestVector2 [GOOD] >> TPoly1305::TestVector3 [GOOD] >> TPoly1305::TestVector4 [GOOD] >> TPoly1305Vec::TestVector1 [GOOD] >> TPoly1305Vec::TestVector2 [GOOD] >> TPoly1305Vec::TestVector3 [GOOD] >> TPoly1305Vec::TestVector4 [GOOD] >> TTest_t1ha::TestZeroInputHashIsNotZero [GOOD] >> TTest_t1ha::PerfTest [GOOD] >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/events.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/58fd777f7f26b92a5c3a65ebdb.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/c3c77022f32ea9f03063c598f4.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_c3c77022f32ea9f03063c598f4.yasm |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/memory_controller_iface.h_serialized.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/a0ac6bff4d1f5e5b56eb56eb04.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [AS] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_0c234afaa407a4418f9cfff531.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_4795fb4850b9d88b2c7b5e8ec2.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_3d2ad5aac03dd48ea1a0a83eb8.yasm |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_833dd80e79c977aa58b8ac97ec.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/833dd80e79c977aa58b8ac97ec.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_7b240b071767564ebe8b43187b.yasm |59.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/crypto/ut/unittest >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/51b73721929f13078ecfb118b8.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_dc40bacd6a1983f1de3e155468.yasm |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/3d2ad5aac03dd48ea1a0a83eb8.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_5689282d9693ccac57318ca874.yasm |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/b8f2779e20208045d5f4aadd3f.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/7b240b071767564ebe8b43187b.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/0c234afaa407a4418f9cfff531.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/5689282d9693ccac57318ca874.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b8f2779e20208045d5f4aadd3f.yasm |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/dc40bacd6a1983f1de3e155468.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/json/json_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_a0ac6bff4d1f5e5b56eb56eb04.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_58fd777f7f26b92a5c3a65ebdb.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_51b73721929f13078ecfb118b8.yasm |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/4795fb4850b9d88b2c7b5e8ec2.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/_d7c26740fdaf6e78003c7f21df.yasm |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.a |59.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/d7c26740fdaf6e78003c7f21df.auxcpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/b74ebee90bb7903d84da5b42f7.auxcpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |59.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/libclient-nc_private-iam.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/129761279a8f635b5cb25be6f6.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/e0190ea6b9626b7936bb01e6fa.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_3a6f4eda1ec5d2bd4b5d7ab909.yasm |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_replica.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/3a6f4eda1ec5d2bd4b5d7ab909.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/csv_arrow_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/9a3d5b70802b945274f285f587.auxcpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_9a3d5b70802b945274f285f587.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/55f03b5c7ff48bc16cc7bbe438.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_129761279a8f635b5cb25be6f6.yasm |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.global.a 
|60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.a |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/_55f03b5c7ff48bc16cc7bbe438.yasm |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_e0190ea6b9626b7936bb01e6fa.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/e4737a1de2b548fed21b6733c7.auxcpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/b74ebee90bb7903d84da5b42f7.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e4737a1de2b548fed21b6733c7.yasm |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e8e379b61234dd7ed260efcc27.yasm |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/e8e379b61234dd7ed260efcc27.auxcpp |60.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.a |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_replica.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/statistics_workload/statistics_workload |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/data_plane_helpers.cpp |60.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/cursor.cpp |59.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/4e6881630bb7d87e9ab9f3d91f.auxcpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |59.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4e6881630bb7d87e9ab9f3d91f.yasm |59.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_c51b3218d1d85449db60fbe731.yasm |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/a0eea144e748338d07d6e2c675.auxcpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database_service.{pb.h ... 
grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/actor.cpp |59.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_b74ebee90bb7903d84da5b42f7.yasm |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4ffb80773cd819c6f64ae3337b.yasm |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/c0f0d29b6c33e7f05e57d209d8.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/_c0f0d29b6c33e7f05e57d209d8.yasm |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/4ffb80773cd819c6f64ae3337b.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/_50e0f2b3b90efa9a1305502b02.yasm |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/c51b3218d1d85449db60fbe731.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/50e0f2b3b90efa9a1305502b02.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/b74ebee90bb7903d84da5b42f7.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_a0eea144e748338d07d6e2c675.yasm |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.a |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/_d328b88e4d44d441b3413acc15.yasm |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/d328b88e4d44d441b3413acc15.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp >> result_compare::import_test [GOOD] |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/collector.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/2df5d6ce0e7b4d20016c681571.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/a38dc35da146e8497390eb9070.auxcpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_a38dc35da146e8497390eb9070.yasm |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_2df5d6ce0e7b4d20016c681571.yasm |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/background_controller.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/b74ebee90bb7903d84da5b42f7.auxcpp |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_b74ebee90bb7903d84da5b42f7.yasm |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/action_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/crypto/tls.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/43bac175e95a3fec996063d2b4.auxcpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_51e39740d1d758840624baee66.yasm |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/b74ebee90bb7903d84da5b42f7.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/51e39740d1d758840624baee66.auxcpp |60.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_43bac175e95a3fec996063d2b4.yasm |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/join_c8aed7aa9a190238ac421a9657.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/join_2e65a3bc8d7db29fed5b5bb7ff.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/0c6a7b86ca2476db99d999e3e2.auxcpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_0c6a7b86ca2476db99d999e3e2.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/b74ebee90bb7903d84da5b42f7.auxcpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/write_actor.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/signal_registry.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/61e39f83bb1c7466cec418c177.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_61e39f83bb1c7466cec418c177.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/join_d00e29c6b73da4719cad82b752.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_da0981963f86194066f883caf8.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/b74ebee90bb7903d84da5b42f7.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/754a90f95994461130feaa1756.auxcpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/da0981963f86194066f883caf8.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/join_9049d840b669fdc5bfce521dda.yasm |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_754a90f95994461130feaa1756.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/15bedb40aa24416f66f53388b9.auxcpp |60.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_475563fb51fb0e7131a897a5c7.yasm |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/b74ebee90bb7903d84da5b42f7.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/script_execution/475563fb51fb0e7131a897a5c7.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_15bedb40aa24416f66f53388b9.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/9320981177f1bb46a5cf7bb627.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/c08ba1db5492c87ddfd8611d8f.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_b74ebee90bb7903d84da5b42f7.yasm |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_b74ebee90bb7903d84da5b42f7.yasm |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/020e2413fa05acf6fcc0b6a0a8.auxcpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.a |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_b74ebee90bb7903d84da5b42f7.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/b74ebee90bb7903d84da5b42f7.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/9725501498f74c7e358c80ca6f.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_c08ba1db5492c87ddfd8611d8f.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/323be4a89ad1864399ea311db4.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b74ebee90bb7903d84da5b42f7.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_323be4a89ad1864399ea311db4.yasm |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_9320981177f1bb46a5cf7bb627.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/f0624cc5f1734a0e6284cc3450.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/b74ebee90bb7903d84da5b42f7.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_77628d0748cc5bf266f6e05741.yasm |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_9725501498f74c7e358c80ca6f.yasm |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_f0624cc5f1734a0e6284cc3450.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/77628d0748cc5bf266f6e05741.auxcpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/_020e2413fa05acf6fcc0b6a0a8.yasm |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_b74ebee90bb7903d84da5b42f7.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |60.1%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_b74ebee90bb7903d84da5b42f7.yasm |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_e9446a953b5a015999d71407a6.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_18e05a10f6ea49dd0f554fa51f.yasm |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/b74ebee90bb7903d84da5b42f7.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/e9446a953b5a015999d71407a6.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/18e05a10f6ea49dd0f554fa51f.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/producer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/1f45adb640e82c46627e2b2d3a.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/77ff0e3be10902817b4214e3df.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_ba4cba5dcefc679d9e6b854354.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ba4cba5dcefc679d9e6b854354.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |60.2%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/encryption/_77ff0e3be10902817b4214e3df.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/b74ebee90bb7903d84da5b42f7.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_b74ebee90bb7903d84da5b42f7.yasm |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/03a7e7319c52b37778aca2325e.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_03a7e7319c52b37778aca2325e.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/b74ebee90bb7903d84da5b42f7.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_2549b9c50b780e2386d838ff17.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/2549b9c50b780e2386d838ff17.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_b74ebee90bb7903d84da5b42f7.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/metadata/ut/functions_metadata_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_1f45adb640e82c46627e2b2d3a.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b5385522105a31f0a0c490bbb8.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/b5385522105a31f0a0c490bbb8.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |60.3%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/dqs.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |60.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.h |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/events.pb.{h, cc} |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/yql_s3_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/initiator.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/persqueue.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scheme_v1.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_auth_v1.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/quota.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/operation.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/resource.{pb.h ... 
grpc.pb.h} |60.3%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/quota_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/quota_internal.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type_service.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/events.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/console_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/token_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |60.4%| [JS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |60.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/_d{1f21923e66a553640c7dc0344.yasm ... 451efb68eb34287316943a5e1.rodata} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/schema.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_bsconfig_v1.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... 
grpc.pb.h} |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/yasm/yasm |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/cloud_user.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/claims.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/session_service.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_persqueue_v1.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |60.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/restore_controller.cpp |60.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py{, i} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage__intpy3___pb2.py{, i} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/_{ae78ceb61eab358f49f5d59550.yasm ... 7a59b4730395b0c51419efa275.rodata} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor__intpy3___pb2.py{, i} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/_{7093756b75f7cfa63c986866ea.yasm ... 4f7ce44b0eed8e70b6a2fb15e5.rodata} |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection__intpy3___pb2.py{, i} |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/_{3488920bb01c29270159c28abb.yasm ... ad770e0ad06dce7f33b2773bf3.rodata} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/_b{eefa2f9fbfd9d00690fe99a79.yasm ... 
9d47a87c55fa809544def169b.rodata} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_query_v1.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_ut.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_dynamic_config_v1.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/fq_v1.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v1.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_operation_v1.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/mvp_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/checker.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/protoc |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/test_server.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/ut_utils.cpp |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... 
defs.inl.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/join_51503b133b03b2766b3fc9f3e4.auxcpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_task_params.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ed7a171e0f8176da271b70841d.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_55a8ee17216c8627b2de1b874d.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/55a8ee17216c8627b2de1b874d.auxcpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/py3cc |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/5c7efee738caa61f00e33b41e9.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_5c7efee738caa61f00e33b41e9.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/b353e86138e866a44fba7693ec.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_ed7a171e0f8176da271b70841d.yasm |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_b353e86138e866a44fba7693ec.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/protoc-gen-mypy |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_maintenance_v1.{pb.h ... 
grpc.pb.h} |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/sensitive.{pb.h ... grpc.pb.h} |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |60.3%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py{, i} |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/e68548efa11d3a00711f021bed.auxcpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/8e7a34ec2df8fda1ade7839923.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_e68548efa11d3a00711f021bed.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/d0bdb20fb3701cab7b1e468fa5.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/4d6f3620ae7a47b656a8b1df88.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_4d6f3620ae7a47b656a8b1df88.yasm |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_d0bdb20fb3701cab7b1e468fa5.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_ef8d28aaeb50572325dd14d9b4.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/2549b9c50b780e2386d838ff17.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/serializable/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ef8d28aaeb50572325dd14d9b4.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_2549b9c50b780e2386d838ff17.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_8e7a34ec2df8fda1ade7839923.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampler_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |60.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/codegen |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler_ut.cpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/events.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_backup_v1.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_ymq_v1.{pb.h ... grpc.pb.h} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/events.pb.{h, cc} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/yql_pq_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/task_command_executor.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_monitoring_v1.{pb.h ... 
grpc.pb.h} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/portions.cpp |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |60.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py.ksfy.yapyc3 |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/33485b935b1565ddeda662d334.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/d1ee025732a583b0ba04814c58.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/61e4b816cf79b7606ca15b5877.auxcpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/persqueue.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/ydb-tests-functional-tpc |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/_d1ee025732a583b0ba04814c58.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/_33485b935b1565ddeda662d334.yasm |60.4%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/_d{1f21923e66a553640c7dc0344.yasm ... 
451efb68eb34287316943a5e1.rodata} |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_61e4b816cf79b7606ca15b5877.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/35c2f7a04f289a9f72763c2025.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_35c2f7a04f289a9f72763c2025.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/ydb_value_operator.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/6f74072898e36b4312ab75a0db.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_6f74072898e36b4312ab75a0db.yasm |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token.{pb.h ... grpc.pb.h} |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_f00d69da9467a4a52da9b22496.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/f00d69da9467a4a52da9b22496.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/d4428c6555fc34a79b567ae531.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_d4428c6555fc34a79b567ae531.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_8e0314ef7ed855a3126c9e5eb6.yasm |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/8e0314ef7ed855a3126c9e5eb6.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/logic.cpp |60.4%| [JS] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/result_set_meta.pb.{h, cc} |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |60.4%| [AS] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/field_behavior.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... 
grpc.pb.h} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/task.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/reference.{pb.h ... grpc.pb.h} |60.4%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/common.cpp |60.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... defs.inl.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |60.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |60.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_b74ebee90bb7903d84da5b42f7.yasm |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/file/libyt-gateway-file.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ymq.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/defaults.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/join_b7c10b4864a820ed988f274a3b.yasm |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/c17932a1c7065b959cf7db2c7d.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/join_faf7f8a83413e98910a74c5e89.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/b74ebee90bb7903d84da5b42f7.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/database_resolver_mock.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_c17932a1c7065b959cf7db2c7d.yasm |60.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/14453aaeaf36a596bef15bc685.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_14453aaeaf36a596bef15bc685.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/48a0e969cc306fdb22d55c035b.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_48a0e969cc306fdb22d55c035b.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter_impl.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_table_v1.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.{pb.h ... grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydbd/main.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... 
grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |60.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/9cde4489f7fa94a76b9b02d638.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ed475535b561d333796c95a705.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/modification.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/86d3e302364c3382a2b168ce57.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_9cde4489f7fa94a76b9b02d638.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/join_3ef875ff1dc5749e1fcb922b21.auxcpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/c00748ee9f4a8df0a497895714.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_3d91683202a822f8cc1b66c627.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/3d91683202a822f8cc1b66c627.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_86d3e302364c3382a2b168ce57.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/optimizer.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_c00748ee9f4a8df0a497895714.yasm |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ee50fbcae5d7c3ae1b7c168722.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_ed475535b561d333796c95a705.yasm |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |60.4%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/yds/_ee50fbcae5d7c3ae1b7c168722.yasm |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/0edefb735db84420d76f6da5ad.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_3094db96f925466f57c2e99df3.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_0edefb735db84420d76f6da5ad.yasm |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/22e0b4e28e344fbe4b14fc4e7f.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_22e0b4e28e344fbe4b14fc4e7f.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/b74ebee90bb7903d84da5b42f7.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/3094db96f925466f57c2e99df3.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/44cfa0611a8f90f9f92d8e831f.auxcpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/_44cfa0611a8f90f9f92d8e831f.yasm |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/join_d158d6388395f7fac32a213c83.yasm |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/counters.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/logic.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/fetch_database.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_maintenance.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.{pb.h ... 
grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/program.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_filter_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_parser_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/bucket.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/terminate_policy/libudf-service-terminate_policy.global.a |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_2549b9c50b780e2386d838ff17.yasm |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/df57a028ba7de3d582f12edff5.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/b74ebee90bb7903d84da5b42f7.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_df57a028ba7de3d582f12edff5.yasm |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/2549b9c50b780e2386d838ff17.auxcpp |60.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_b74ebee90bb7903d84da5b42f7.yasm |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang16/tools/clang-format/clang-format |60.3%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a85123d3cf465cba982424dc08.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/a1a4fef3e58eac5c8cd56e360e.auxcpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/base/msgbus.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ff94c99b3d9492ea47f26af81f.auxcpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/bf91d2c3152cd9f79aee642443.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/b74ebee90bb7903d84da5b42f7.auxcpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_ff94c99b3d9492ea47f26af81f.yasm |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_bf91d2c3152cd9f79aee642443.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/a85123d3cf465cba982424dc08.auxcpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a1a4fef3e58eac5c8cd56e360e.yasm |60.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_b74ebee90bb7903d84da5b42f7.yasm |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |60.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_ut.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/restore.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/access_service.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_configs.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |60.1%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/ut/federated_query/style/ydb-core-kqp-ut-federated_query-style |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker_ut.cpp |60.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/_f876c7e5551ebce27aee411303.yasm |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/f876c7e5551ebce27aee411303.auxcpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp 
|60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_tablet_v1.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/request/common.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.{pb.h ... 
grpc.pb.h} >> test_cpp.py::test_cpp_style[ydb/core/kqp/ut/federated_query/common/common.cpp] [GOOD] >> test_cpp.py::test_cpp_style[ydb/core/kqp/ut/federated_query/common/common.h] [GOOD] |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |59.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/style/py3test >> test_cpp.py::test_cpp_style[ydb/core/kqp/ut/federated_query/common/common.h] [GOOD] |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/next_token.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |59.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_message.pb.{h, cc} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg/libcpp-lfalloc-dbg.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bsconfig.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |60.0%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/kv.h_serialized.{cpp, h} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/dayofweek.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... 
grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |60.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/sessions.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a >> ydb-core-kqp-ut-federated_query-style::import_test [GOOD] |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |59.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/style/import_test >> ydb-core-kqp-ut-federated_query-style::import_test [GOOD] |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/589c15a03e749fa7fc88e46a30.auxcpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.a |59.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/_589c15a03e749fa7fc88e46a30.yasm |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... 
grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tstool/tstool |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |59.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/grpc_service.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/name_service_client_protocol.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... 
grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_cache_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/libcpp-client-ydb_federated_topic.a |59.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/util/fragmented_buffer_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/impl/libclient-ydb_federated_topic-impl.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_stack_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_inplace_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/events.pb.{h, cc} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/connector.{pb.h ... 
grpc.pb.h} |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |59.8%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/6d1763e0cdc6e301e2989d8343.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/4a7a64454c9245b8cfbbd6c568.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_b74ebee90bb7903d84da5b42f7.yasm |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/efe80a67dc5fbde40e7e446fba.auxcpp |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_6d1763e0cdc6e301e2989d8343.yasm |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_5f751080373d4214d525810354.yasm |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/b74ebee90bb7903d84da5b42f7.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5f751080373d4214d525810354.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_efe80a67dc5fbde40e7e446fba.yasm |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_4a7a64454c9245b8cfbbd6c568.yasm |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/2ef1517045ab9cce02fdf81d44.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/4a4d1a0f629769ad18cbbbf6ac.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/b74ebee90bb7903d84da5b42f7.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_b74ebee90bb7903d84da5b42f7.yasm |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_2ef1517045ab9cce02fdf81d44.yasm |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_4a4d1a0f629769ad18cbbbf6ac.yasm |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/mvp.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/data.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/token.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/token_exchange_service.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/transitional/folder_service.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/data_source.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/yandex_passport_cookie.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/http.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/rpc/status.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_interval.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/timeofday.{pb.h ... 
grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_compiler.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_replay.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_object_storage_v1.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/pgproxy.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/device_perf_test.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/example_configs/static_validator-ut-example_configs |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/request/config.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_proccessor.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... 
grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/main.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/trace_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud_service.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |59.8%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/expr_nodes/yql_yt_expr_nodes.{gen.h ... defs.inl.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |59.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} >> StaticConfigExamples::MIRROR_3_DC_NODES_IN_MEMORY [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_federation_discovery_v1.{pb.h ... grpc.pb.h} >> StaticConfigExamples::MIRROR_3_DC_NODES [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} >> StaticConfigExamples::SINGLE_NODE_IN_MEMORY [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} >> StaticConfigExamples::BLOCK42 [GOOD] |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp >> StaticConfigExamples::SingleNodeWithFile [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/cursor.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... 
grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |59.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/example_configs/unittest >> StaticConfigExamples::SingleNodeWithFile [GOOD] |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/sysview_service.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... 
grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/yq_internal.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... 
grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/codegen |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/cms_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/constructor.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/utils/libcore-config-utils.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libconnector-api-common.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |59.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/ut/ydb-core-config-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |59.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |59.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/counters.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/config.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/fq_private.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/generic_manager.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_auth.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/ut/main.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/4b59ef27c4220e585fc22b07d4.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.a |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/_4b59ef27c4220e585fc22b07d4.yasm |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/b74ebee90bb7903d84da5b42f7.auxcpp |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/_53c2f5b0df5fc66a7bcadf28e7.yasm |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/_b74ebee90bb7903d84da5b42f7.yasm |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/53c2f5b0df5fc66a7bcadf28e7.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut_perf/ydb-core-erasure-ut_perf |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/events/events.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/9fce44782e4afe532b919ef481.auxcpp |59.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/_9fce44782e4afe532b919ef481.yasm |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/access.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_local.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.global.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp >> TErasurePerfTest::Split >> TErasurePerfTest::Split [GOOD] >> TErasurePerfTest::Restore |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_labeled.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_counters.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tier_info.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.{pb.h ... grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_pool.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_kqp.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/ut/ydb-core-resource_pools-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scripting.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/events.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_tablet.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/resource.{pb.h ... 
grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.a |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_import_v1.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_logstore_v1.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_parser/enum_parser |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/access_service.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/modification_controller.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... grpc.pb.h} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_rate_limiter.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_coordination_v1.{pb.h ... 
grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/graph_description.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_subscriber_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/sink.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/links.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/mrrun/mrrun.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp >> ResourcePoolTest::SettingsValidation [GOOD] >> ResourcePoolTest::SecondsSettingsParsing [GOOD] >> ResourcePoolClassifierTest::IntSettingsParsing [GOOD] >> ResourcePoolTest::SettingsExtracting [GOOD] >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsExtracting [GOOD] >> ResourcePoolTest::PercentSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsValidation [GOOD] >> ResourcePoolTest::IntSettingsParsing [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/metrics_registry.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/selector.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account.{pb.h ... 
grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/pg_ext.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/blobs.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_clickhouse_internal_v1.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/data.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |59.8%| [PR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/include/llvm/IR/Attributes.inc{, .d} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/actors.cpp >> run_tests::import_test [GOOD] |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/blobsan/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/resource_pools/ut/unittest >> ResourcePoolTest::IntSettingsParsing [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/fq.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/zero_level.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... 
grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |59.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |59.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> simple_json_diff::import_test [GOOD] |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/fq_private_v1.{pb.h ... grpc.pb.h} |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |59.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/long_timer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/abstract.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |59.7%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/stock.h_serialized.{cpp, h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/api.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/optimizer.cpp |59.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.{pb.h ... 
grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |59.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/import_test >> simple_json_diff::import_test [GOOD] |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |59.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/granules.cpp |59.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/table_record.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/operation_id.pb.{h, cc} |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/mrrun/mrrun |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/service.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder_service.{pb.h ... 
grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/utils/actors/http_sender_actor_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/object.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/optimizer.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/1144738eb9e014641c1ecd8edb.auxcpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/_1144738eb9e014641c1ecd8edb.yasm |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_3989ea13006d67e89dd1a8ad12.yasm |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_monitoring/libcpp-client-ydb_monitoring.a |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_b74ebee90bb7903d84da5b42f7.yasm |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/5bb9b4714ab16ef374043b6486.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_5bb9b4714ab16ef374043b6486.yasm |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/b74ebee90bb7903d84da5b42f7.auxcpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/3989ea13006d67e89dd1a8ad12.auxcpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/endpoint.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/checker.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/preparation_controller.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/oauth_request.{pb.h ... 
grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/put_records_actor.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/helpers.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/graph_params.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/read_http_reply_protocol.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.{pb.h ... grpc.pb.h} |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/ut/graph_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/5d85197d778faf9a7a67ead8d8.auxcpp >> ydb-library-benchmarks-runner::import_test [GOOD] |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/b74ebee90bb7903d84da5b42f7.auxcpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_c48f2a4e960700ea8a9826632f.yasm |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_5d85197d778faf9a7a67ead8d8.yasm |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stability/ydb/c48f2a4e960700ea8a9826632f.auxcpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_b74ebee90bb7903d84da5b42f7.yasm |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_b74ebee90bb7903d84da5b42f7.yasm |59.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/6b2d83fc4b34dc0640579a5038.auxcpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/b74ebee90bb7903d84da5b42f7.auxcpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/b74ebee90bb7903d84da5b42f7.auxcpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/_b74ebee90bb7903d84da5b42f7.yasm |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/dc6742774f7f7be07b72a0f255.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/c7dcee7daed3ea80f68bb6b1c8.auxcpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_c7dcee7daed3ea80f68bb6b1c8.yasm |59.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/suite_tests/885d8d9c768559516300f81904.auxcpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/_885d8d9c768559516300f81904.yasm |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/_51b9bdd1ba7a18799d77b948ec.yasm |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_b74ebee90bb7903d84da5b42f7.yasm |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_dc6742774f7f7be07b72a0f255.yasm |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/51b9bdd1ba7a18799d77b948ec.auxcpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_6b2d83fc4b34dc0640579a5038.yasm |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_55acb3440d202d5436c3eebe8d.yasm |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/55acb3440d202d5436c3eebe8d.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/b74ebee90bb7903d84da5b42f7.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_logstore.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/join_b2051df0a060d4ce8159a90fe2.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} >> TErasurePerfTest::Restore [GOOD] >> TErasureSmallBlobSizePerfTest::StringErasureMode [GOOD] >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/data_source.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/endpoint.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/container.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... 
grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/graph.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/dc9abab7075b555a3ef54c0d31.auxcpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/_dc9abab7075b555a3ef54c0d31.yasm |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut_perf/unittest >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/index.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |59.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/flat_table_part.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/access.{pb.h ... grpc.pb.h} |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... 
grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_export_v1.{pb.h ... grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/abstract.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.{pb.h ... grpc.pb.h} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.{pb.h ... grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/checker.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/credentials.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/put_status.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/records.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account_service.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/unittests.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/logger_config.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/chunks.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/_c43757827e03b03f81c937ad5a.yasm |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/c43757827e03b03f81c937ad5a.auxcpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/4a6a74a0ab38f783afd5375054.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/270cc1cc4ad07a20fdc1de7945.auxcpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_b74ebee90bb7903d84da5b42f7.yasm |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/b74ebee90bb7903d84da5b42f7.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_4a6a74a0ab38f783afd5375054.yasm |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_270cc1cc4ad07a20fdc1de7945.yasm |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/common_level.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |59.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/yql_pg_expr_nodes.{gen.h ... defs.inl.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/constructor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |59.5%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/yql_generic_expr_nodes.{gen.h ... defs.inl.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service_subject.{pb.h ... grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/db_counters.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/version/version_definition.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/SQLv1Parser.pb.{code0.cc ... 
main.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/ext_counters.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/kv_workload/ydb-tests-functional-kv_workload |59.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/_52280868a7b2d05c7056d200ff.yasm |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/68fcafbd099dc13b9ccbcacac5.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/_68fcafbd099dc13b9ccbcacac5.yasm |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/b74ebee90bb7903d84da5b42f7.auxcpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/52280868a7b2d05c7056d200ff.auxcpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/_b74ebee90bb7903d84da5b42f7.yasm |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tsserver/tsserver |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/_b8c5754a195bdea98cbf907af0.yasm |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/b8c5754a195bdea98cbf907af0.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/tsserver/main.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_log_impl.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/codecs.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_keyvalue.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/annotations.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup.{pb.h ... grpc.pb.h} |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_replication_v1.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/access_service.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/gateway_spec.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scripting_v1.{pb.h ... grpc.pb.h} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/yqlrun.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/yqlrun/http/libtools-yqlrun-http.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... 
grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/data.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_e93b94c36ea8b5ce684eea2c49.yasm |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_b74ebee90bb7903d84da5b42f7.yasm |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/join_dcd97fcac6a334ca569de2c107.auxcpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/b74ebee90bb7903d84da5b42f7.auxcpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/e93b94c36ea8b5ce684eea2c49.auxcpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/join_487d6f374d6d03f9641be7dbbc.yasm |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/simple.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/8dffc5726b9be6abfc5e0f9557.auxcpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/retry_options.pb.{h, cc} |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_8dffc5726b9be6abfc5e0f9557.yasm |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_b74ebee90bb7903d84da5b42f7.yasm |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/b74ebee90bb7903d84da5b42f7.auxcpp |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/f2c85040cc1290644ebb21b197.auxcpp |59.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_f2c85040cc1290644ebb21b197.yasm |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/counters_shard.pb.{h, cc} |59.3%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account_service.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/issue_id.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_cms_v1.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/update.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{2bb5ed6d8415fe8d25dd74bc98.yasm ... 76e5950e7e5d36e684c6988e3a.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{1b5cc92f0b1e88b914023edf3c.yasm ... 
6a0a0c50c83ed2567678f2d8a8.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{33d66b07c02a3de84c0dabfb99.yasm ... 9513a2ec720ed1a62e7a057d4e.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{06b686260eb9e97e4c0bb006b6.yasm ... f256941771b0e03178ebddeaba.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{30b1de3e2a88367eedf8d6fb4d.yasm ... bc69baae6e12a0af4c99bf0366.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{12c47d4c7a1e734a60369680cb.yasm ... 7fb3046efa74bbe04465c75db1.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{33e2dda42df10bffdaa05768ee.yasm ... 2c990fa2ba755f8b4853025f0f.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/_{70cb4fc9708c18f38ed236e25b.yasm ... ea9739c5637ddd163f7384c82f.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_3{aaacae23efe03bc057e88a2c2.yasm ... 521e0d29e443961068237cc5e.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{40d3e458683d76fa7c6f2e7bfb.yasm ... c348b7c97a307f3dad5abd2613.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{649bf84e5446367cdd96be8761.yasm ... 1b83baf5154a1c0f7d539bd49d.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{4d5b699a3b111b5e1e43c210ba.yasm ... fb1a38ef73237feb19497f0dba.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{705f65f8eccc6fb1406a177cd8.yasm ... f1d3a381ca6d07fed3f25b17a7.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{6b06abc7d6a438241bcb2f4f2a.yasm ... be5f300e7c2f4e694039950190.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{7dc64b34167cf0c6e4a691a5a3.yasm ... 0de2bad8ee0fdf7c9e6ea5d840.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{710aaad0a0a5006304da184415.yasm ... 6c283b268154c7808d8a408ef0.rodata} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{7de42909887ed0fe5a229e5cf7.yasm ... 9f20e642466ed8f0f9ca842273.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{85d3ad37886bfa1996ae1f18b5.yasm ... 50e64878c3c5099106cc738ea5.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{8ed6d08a903484c5df5bfd0fec.yasm ... 51287b68dde3989657b8480563.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{92137233aa6cc00c4fc669575a.yasm ... 229b4e02927e832eadff554781.rodata} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{931072b48a9a87f223f98a0a7f.yasm ... 64a0b28435ee9c1c99bb1853d2.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{93732a31a82927528cdbd63b33.yasm ... 
7cc6d3cc979adc6e35a57eaf72.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py{ ... i} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py{ ... i} |59.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/_{c2b539c0b56a034ffeb9567429.yasm ... 74b7ddc5fa7f12b17d309ed237.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{9b8094d0dd5e66f67c491a0af0.yasm ... 30ec7ae028318f87a399e9a5a9.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{a93e4b7f1d261ac4bbe89edda9.yasm ... 8a505d7b31b694a04fd17c714e.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_a{b2dbb5d90775a57f8e336efb2.yasm ... 53cc2789f59171c431ad268cb.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{aaa1f5009aa080bf14312629b9.yasm ... 02fd22796ffa3aabb954d4fbf0.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{adbfbcea6a0c0d6da50c911235.yasm ... 840b6f5ad66bc93e2032918813.rodata} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_python/grpc_python |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{b4d10a92a303e56367ead23f5e.yasm ... 7714dad988ad0455d7ed9c4e87.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{b6f3c3158108200cbd7a761697.yasm ... 1d076bbec4a2c5545d8b5ac6dd.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{b947375f57cdb433013045701a.yasm ... 87350e5bcc30b25242c9e6d6a2.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{bc045db8b896127d3e6331d8af.yasm ... 5e93abf95f0600b4311b7c9ef4.rodata} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py{ ... i} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{c3566661154e9506768284e1c2.yasm ... d5da74c4ed972a9296569410c5.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{c9fe0095ef04a52135c59ccfd3.yasm ... a06b01f71f5ff17b9df7f1ea55.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2.py{ ... i} |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{d66edeb872b8e17d93f19fb54b.yasm ... fe450f460229744b342426345e.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{d8d7fd5830c73b04464745f687.yasm ... 75858dd37719ddb05254112356.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_d{afff77f7ed2434503076c384d.yasm ... 
c956e7798e007443d9a11842d.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{cad6483b5eed666f6afdd46806.yasm ... 8ec2646845b677e7a2262af97c.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |59.4%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/_{c2b539c0b56a034ffeb9567429.yasm ... 74b7ddc5fa7f12b17d309ed237.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{4dd92a0bd8e48ba580898fc18e.yasm ... 8a365db3d69fd042ddd28d4f67.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{dd876e5de56df0dd7cd9403baf.yasm ... 916e28c587a22955c341281e88.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{e7394836d4e02156f90bea5da1.yasm ... 474c847512298494790b4e2a05.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{eb9b1db40f112752edd02233da.yasm ... 0ac8c665ac6d83aea2cb467abf.rodata} |59.4%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{ee2759c8835aeb5b99221ed6a6.yasm ... b28ff4bdbf298b9ef3f5824482.rodata} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{ff6fcf9659d1db6624a128a52c.yasm ... db15ec0d4ea6e0e0439d41c4a6.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{9bd6fc3b68268492157a9b65e0.yasm ... a8f635f733b39ead9762d18807.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{101a44cd8742b458ba5ecda710.yasm ... 059c4cb9250238a015c37b1cd7.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{1eddeb0de757bce79159d8d622.yasm ... a03cb153c28e30440cc44859c4.rodata} |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{00505e604bb285e1d0d414c421.yasm ... 61b2f14b49736dbd4d5cd2b3ad.rodata} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_rate_limiter_v1.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_view_v1.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... 
grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/dq_solomon_shard.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |59.4%| [JS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/source.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |59.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/storage.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/shard_iterator.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |59.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{1e82d3dbe0fbba9e6ffcac2b64.yasm ... 
e27f4d632478f64952bb596cfc.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/streaming_service.{pb.h ... grpc.pb.h} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{4e52f51de96d7c8899e18e84cf.yasm ... 8f067f7f0b944d0688a4042f05.rodata} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{586d7071f95927b5de5aa11da6.yasm ... 00aabde37e3e8f75b4e15d7c93.rodata} |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/_{ee74027b424058c48eddab5812.yasm ... 6d5489cea3c97f3348012f92cd.rodata} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_ut.cpp |59.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/_d3a4528808d8425466dfec4185.yasm |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/d3a4528808d8425466dfec4185.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/ut_helpers.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/6f43a28f839ac89d335c7ab9b1.auxcpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |59.6%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |59.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/_6f43a28f839ac89d335c7ab9b1.yasm |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |61.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |63.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/_79b13353271c8cfe46ea4b9f1e.yasm |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |63.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/79b13353271c8cfe46ea4b9f1e.auxcpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |63.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/_10c9874010308af47fbf8680a3.yasm |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/10c9874010308af47fbf8680a3.auxcpp |63.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |63.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_26182f71da26956759f0d6a4bc.yasm |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/26182f71da26956759f0d6a4bc.auxcpp |63.0%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/generic/analytics/_36a9c2c404ae886b8a0915297e.yasm |63.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_b74ebee90bb7903d84da5b42f7.yasm |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/b74ebee90bb7903d84da5b42f7.auxcpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/36a9c2c404ae886b8a0915297e.auxcpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/ut_helpers.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tenants_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |62.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_group/main.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |63.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |63.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |63.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |64.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |64.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc.pb.cc |64.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console.grpc.pb.cc |64.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |64.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |64.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |64.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |64.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console.pb.cc |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |65.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |65.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |65.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut_ycsb.cpp |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |65.8%| [AS] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |66.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |66.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |66.0%| [JS] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/join_f433140a78035b6bb1a3bd51d3.yasm |66.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py.ksfy.yapyc3 |66.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher__intpy3___pb2.py{, i} |66.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/audit_log_impl.cpp |66.1%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/modifications_validator.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.pb.cc |66.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/libydb-core-audit.a |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/version/version_definition.cpp |66.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/_d{1f21923e66a553640c7dc0344.yasm ... 451efb68eb34287316943a5e1.rodata} |66.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/_{c2b539c0b56a034ffeb9567429.yasm ... 74b7ddc5fa7f12b17d309ed237.rodata} |66.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |66.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/version/libversion_definition.a |66.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |66.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator.cpp |66.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/version/libversion_definition.a |66.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |66.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |66.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |66.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |67.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |67.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |67.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |67.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |67.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/version/version_definition.cpp |67.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_log_impl.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |68.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_check.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |68.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |68.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |68.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |68.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc.grpc.pb.cc |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |68.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |68.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |68.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |68.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/util/config_index.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_ut.cpp |69.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/util/libcms-console-util.a |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |69.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |69.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |69.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |70.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |70.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_checks/yaml_config-validator-ut-validator_checks |70.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util/config_index.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |70.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config.grpc.pb.cc |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |71.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |71.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_builder/yaml_config-validator-ut-validator_builder |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |72.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |72.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |72.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.grpc.pb.cc |72.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |73.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |73.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |73.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |73.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |73.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |74.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |74.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |74.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |74.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |74.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |74.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |74.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |74.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |74.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |75.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |75.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |75.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |75.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |75.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |75.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |75.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |75.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |75.5%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/base/msgbus.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/config.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/public_http/http_req.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/base/libpublic-lib-base.a |76.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config.cpp |76.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |76.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_req.cpp |76.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |76.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |76.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |76.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |76.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |77.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |77.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/base/msgbus.cpp |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |77.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |77.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |77.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |77.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |77.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/protos/out/out.cpp |77.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |77.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |77.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/out/libcore-protos-out.a |77.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |77.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/protos/out/out.cpp |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |77.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |77.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/run_actor_params.cpp |76.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug_tools/ut/ydb-core-debug_tools-ut |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/run_actor_params.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |76.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/node_checkers.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/ut/ydb-core-fq-libs-hmac-ut |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/node_checkers.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |76.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp 
|76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query_ut.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_ut.cpp |76.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ut.cpp |76.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |76.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |76.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |76.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |76.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |76.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |76.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |76.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |76.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |75.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a |75.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.pb.cc |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/core/libyt-yt-core.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_config.cpp |75.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_config.cpp |74.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.a |74.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/discovery_actor.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/error.cpp |74.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/recipe |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/error.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/discovery_actor.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |74.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/cluster_ordering-ut |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/mdb_mock/recipe |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |73.3%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/signer/ut/ydb-core-fq-libs-signer-ut |73.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle |73.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus.pb.cc |72.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/ydb-library-yaml_config-static_validator-ut |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/login_shared_func.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/http.cpp |71.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_shared_func.cpp |71.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/http.cpp |71.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |71.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |71.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |71.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |70.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/e244fbe4e765a00d66150f4ec9.auxcpp |69.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/config_parser.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/console_dumper.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |68.9%| RESOURCE $(sbr:4966407557) - 0 bytes |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/config.pb.cc |68.9%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/http_service.cpp |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp 
|68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/error.cpp |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |68.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |68.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/keyvalue_write.cpp |68.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |68.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |67.9%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |67.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |67.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |67.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |67.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |67.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |67.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |66.9%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_service.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_parser.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/error.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |67.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |67.0%| RESOURCE $(sbr:770480022) - 0 bytes |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |67.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut/ydb-core-erasure-ut |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/keyvalue_write.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |67.0%| [AR] {RESULT} $(B)/ydb/apps/version/libversion_definition.a |67.0%| [AR] {RESULT} $(B)/ydb/core/audit/libydb-core-audit.a |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__configure.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |67.0%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |67.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |66.8%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |66.7%| [AR] {RESULT} 
$(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |66.7%| [AR] {RESULT} $(B)/ydb/core/cms/console/util/libcms-console-util.a |66.7%| [AR] {default-linux-x86_64, relwithdebinfo, pic} $(B)/yt/yt/core/libyt-yt-core.a |66.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |66.7%| [AR] {RESULT} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |66.7%| [AR] {RESULT} $(B)/ydb/core/protos/out/libcore-protos-out.a |66.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |66.7%| [AR] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/core_ydbc.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__configure.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |66.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydbc.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler.cpp |66.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |66.5%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |66.5%| [AR] {RESULT} $(B)/ydb/public/lib/base/libpublic-lib-base.a |66.5%| [AR] {RESULT} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |66.4%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |66.4%| [AR] {RESULT} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |66.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |66.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |66.3%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |66.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |66.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |66.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/ut/ydb-core-config-tools-protobuf_plugin-ut |66.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |66.1%| [AR] {RESULT} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/core_ydb.cpp |66.1%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/metrics/ut/ydb-core-fq-libs-metrics-ut |66.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.cc |66.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |66.0%| [LD] {RESULT} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |65.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |65.7%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |65.7%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/ut/ydb-core-persqueue-codecs-ut |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/kqp.cpp |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |64.9%| [TS] {RESULT} ydb/core/fq/libs/http_api_client/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |64.8%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |64.8%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/monitoring.cpp ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'longjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_detach' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'puts' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined
[identical "version script assignment of 'global' to symbol '<name>' failed: symbol not defined" warnings follow for several hundred more glibc and runtime symbols, and the set repeats as further binaries are linked: the setjmp/longjmp and signal families, stdio and the printf/scanf variants (including __isoc99_*), string, memory and wide-character routines (str*, mem*, wcs*, wmem*), time, locale and environment functions, dlopen/dladdr/dl_iterate_phdr, socket and name-resolution calls (getaddrinfo, gethostby*), epoll/poll, pthread_*, sem_*, the XDR family (xdr_*), and the __xstat/__fxstat/__lxstat wrappers]
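These warnings indicate that ld.lld was handed a linker version script whose 'global:' section names symbols (setlocale, sigaction, the xdr_* family, and so on) that are not defined in the binary being produced; they are only imported from libc, so the version assignment has nothing to attach to, and the linker reports each miss and continues. A minimal sketch that reproduces one such message follows; the file names vers.map and main.c and the use of clang with -fuse-ld=lld are illustrative assumptions, not taken from this build, and depending on the lld version the message may be reported as an error rather than a warning unless --undefined-version is passed.

    # vers.map: hypothetical version script whose 'global' list names a libc symbol
    cat > vers.map <<'EOF'
    {
      global:
        setlocale;
      local:
        *;
    };
    EOF

    # main.c: defines nothing but main, so 'setlocale' is never defined in the output
    cat > main.c <<'EOF'
    int main(void) { return 0; }
    EOF

    # Linking a shared object with this script makes ld.lld print:
    #   ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined
    # (newer lld defaults to --no-undefined-version and reports an error instead;
    #  -Wl,--undefined-version keeps it a warning)
    clang -shared -fPIC -fuse-ld=lld -Wl,--version-script=vers.map main.c -o libdemo.so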
|64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/kqp.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp
|64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/monitoring.cpp |64.5%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |64.5%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |64.5%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |64.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |64.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |64.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 |64.5%| COMPACTING CACHE 12.2GiB |64.5%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |64.5%| [TS] {RESULT} ydb/core/resource_pools/ut/unittest |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |64.5%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 |64.5%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |64.5%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |64.5%| [TS] {RESULT} ydb/tests/functional/api/flake8 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |64.5%| [TS] {RESULT} ydb/tests/library/ut/flake8 |64.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |64.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 |64.5%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |64.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |64.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |64.6%| [DL] $(B)/canondata_storage/1775059/2d8e13650c57e1489831ef0318288079029e2e62/resource.tar.gz{, .log} |64.6%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 |64.6%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |64.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |64.6%| [TS] {RESULT} ydb/tools/statistics_workload/flake8 |64.6%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |64.6%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test |64.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 |64.6%| [DL] $(B)/canondata_storage/1784826/8a1791d883c330f1ed18937b9d7a33af5031d735/resource.tar.gz{, .log} |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb |64.6%| [DL] $(B)/canondata_storage/1937429/8922776cd7638b44d90c03cdd08ebd58640651e4/resource.tar.gz{, .log} |64.6%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |64.6%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |64.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |64.6%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |64.6%| [TS] {RESULT} ydb/core/erasure/ut_perf/unittest |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |64.6%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |64.6%| [DL] $(B)/canondata_storage/1917492/86ab0de654a60bf1e3145a3d8e3d7eae4a9f26b8/resource.tar.gz{, .log} |64.6%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |64.6%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |64.6%| [DL] $(B)/canondata_storage/1871002/6a2014c70e9b412ca99aa36a7e7375d181155757/resource.tar.gz{, .log} |64.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |64.6%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |64.6%| [DL] $(B)/canondata_storage/1871102/7b4b0482d4b48dd41de6cb20a8532e8054ae5f81/resource.tar.gz{, .log} |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |64.6%| [DL] $(B)/canondata_storage/1937424/aec2375680b5e09e454587725abefb1869ba1f0e/resource.tar.gz{, .log} |64.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/core/libydb-mvp-core.a |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |64.6%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |64.6%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |64.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydb.cpp |64.6%| [DL] $(B)/canondata_storage/1903280/12e22760f1793514adf21cca06ce333170f22e8f/resource.tar.gz{, .log} |64.6%| [TS] {RESULT} ydb/tests/functional/kv_workload/flake8 |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |64.6%| [AR] {RESULT} $(B)/ydb/mvp/core/libydb-mvp-core.a |64.7%| [AR] {RESULT} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |64.7%| [DL] $(B)/canondata_storage/1942100/00ac27cb3793ebb6e30aaa3a242eb80980877725/resource.tar.gz{, .log} |64.7%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |64.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 |64.7%| [DL] $(B)/canondata_storage/1899731/ccde90ce5670ebf87e5c6a2c04f40658049b2aee/resource.tar.gz{, .log} |64.7%| [DL] $(B)/canondata_storage/937458/dab17048102e83ee373249c500ad7a1a9718d95c/resource.tar.gz{, .log} |64.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 |64.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |64.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 |64.7%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |64.7%| [LD] {RESULT} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |64.7%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/tpc/flake8 |64.7%| [DL] $(B)/canondata_storage/1775319/3a09dd529e9fa310fb6dffe9fe276926ac1befbd/resource.tar.gz{, .log} |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 |64.7%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/style/import_test |64.7%| [DL] 
$(B)/canondata_storage/1597364/74246415d71e5dc9c8dae1626e184fae0faa778c/resource.tar.gz{, .log} |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |64.7%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/import_test |64.7%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |64.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/common/flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |64.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |64.7%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |64.7%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |64.7%| [DL] $(B)/canondata_storage/212715/8c116101a7f8683fa1e963a6b0079bf9c213f4e3/resource.tar.gz{, .log} |64.7%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test |64.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |64.8%| [DL] $(B)/canondata_storage/1871002/7b1fef4883196ccbe709169b603793723ad3e469/resource.tar.gz{, .log} |64.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |64.8%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |64.8%| [TS] {RESULT} ydb/core/blobstorage/crypto/ut/unittest |64.8%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |64.8%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 |64.8%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |64.8%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |64.8%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 |64.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |64.8%| [DL] $(B)/canondata_storage/1936997/9b38bc90047b0c4770ffeeb948e381476a3c0703/resource.tar.gz{, .log} |64.8%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test |64.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |64.8%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |64.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |64.8%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |64.8%| [DL] $(B)/canondata_storage/1931696/83451cbd5c05baf359743802ffeb9d68445da80c/resource.tar.gz{, .log} |64.8%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |64.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 |64.8%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/flake8 |64.8%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/style/py3test |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |64.8%| [TS] {RESULT} 
ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 |64.8%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |64.8%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |64.8%| [DL] $(B)/canondata_storage/1937001/2391ee3b82c774fbfdf3b5fe09aeba01826624f7/resource.tar.gz{, .log} |64.8%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/example_configs/unittest |64.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |64.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |64.9%| [DL] $(B)/canondata_storage/1931696/221cedac6157fdff4d16e16ac8e9133139de7efd/resource.tar.gz{, .log} |64.9%| [TS] {RESULT} ydb/tests/functional/dynumber/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |64.9%| [DL] $(B)/canondata_storage/1937027/670f34f9f911e780c84d06ea0c4a9f3d52e759cc/resource.tar.gz{, .log} |64.9%| [TS] {RESULT} ydb/tools/tstool/flake8 |64.9%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test |64.9%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |64.9%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |64.9%| [DL] $(B)/canondata_storage/1130705/75c67aa1b52e1003c4244d8776963fa4e2ddd3be/resource.tar.gz{, .log} |64.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 |64.9%| [DL] $(B)/canondata_storage/1936997/7795d91e16dc8934afb9cac9de729a7e77d64422/resource.tar.gz{, .log} |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/functional/config/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |64.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |64.9%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator/unittest |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |64.9%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |64.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |64.9%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |64.9%| [DL] $(B)/canondata_storage/1809005/036501517c77edaf8dfa3239345c24cc4e73081c/resource.tar.gz{, .log} |64.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |64.9%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |64.9%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |65.0%| [DL] $(B)/canondata_storage/1942671/db8798ead58a329e866fa250746caab9ca42a26c/resource.tar.gz{, .log} |65.0%| [DL] $(B)/canondata_storage/1937001/009017e496ccb73a3d6e033003c2ef47734562eb/resource.tar.gz{, .log} |65.0%| [DL] $(B)/canondata_storage/995452/78b37afc56674ddcf0dda847c991261f95c763f2/resource.tar.gz{, .log} |65.0%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |65.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |65.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |65.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |65.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |65.1%| [TS] {RESULT} ydb/apps/dstool/flake8 |65.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |65.1%| [DL] $(B)/canondata_storage/1130705/0b2e5d7ac211de720dc0c4641c32a1cc0d2b67b0/resource.tar.gz{, .log} |65.1%| [DL] $(B)/canondata_storage/1809005/381bdd936adada83f5b48f5d53fe44e3adc4ea7d/resource.tar.gz{, .log} |65.2%| [DL] $(B)/canondata_storage/1599023/3620bf59870617da29d0c99266d709935d1c2b9b/resource.tar.gz{, .log} |65.2%| [DL] $(B)/canondata_storage/1936947/f00fcb6d97bcb608640b679ee786567fc190891c/resource.tar.gz{, .log} |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/test_connection/test_object_storage.cpp |65.2%| [DL] $(B)/canondata_storage/1775059/cd7ea330e6f31e023b144d9e8cd414d6b5bfa5da/resource.tar.gz{, .log} |65.2%| [DL] $(B)/canondata_storage/1784826/6c4e23b08b618ad38a21babd86e439d03aa22777/resource.tar.gz{, .log} |65.2%| [DL] $(B)/canondata_storage/937458/4bf72cbe06e1a5a68a1245f072f61a840c65f346/resource.tar.gz{, .log} 
|65.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |65.2%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |65.2%| [DL] $(B)/canondata_storage/1847551/682469d9195325562bb93194e1d96345f651ca93/resource.tar.gz{, .log} |65.2%| [DL] $(B)/canondata_storage/1031349/5fede2b676e5759a71fc5ee84a5366ea2398c3eb/resource.tar.gz{, .log} |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/test_connection/test_object_storage.cpp |65.2%| [DL] $(B)/canondata_storage/1923547/4a11bf336fd7fb8da5f5162c16271b830cef13e4/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1937492/caaa9a42499278fe8d2abe06fe2b17bceeb09e18/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1775319/1b7c1e5298ad827e3c0e08d1d3f96ba4f42d8217/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1946324/cf38a9e18bcb2d145a9ceedb60a30cd36c433437/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1942173/f8a5d34ee2135f3e8e692d721f4410199915185e/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1942671/2f792d15a60e5018e659b61b46adbe398e438ffb/resource.tar.gz{, .log} |65.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config.pb.cc |65.3%| [DL] $(B)/canondata_storage/1880306/dcc32ce026896abde9b111c8c4cae06611f4a004/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1936947/900ed4b07b3e497bdca6ea0063b227dc2b03c52d/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1925821/e5366a80242cb3e6ad8a288604782493b360be86/resource.tar.gz{, .log} |65.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.3%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.3%| [DL] $(B)/canondata_storage/1942671/4927147e0e1a576d9194a32faf141dd18c8830e8/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1942173/0831c8429f2eb96fec38ae943a6ac1e22d739948/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/995452/8d06d1c638f81a03e22880d706b0ed36b13787b3/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1784826/5acf470cb57912e11b4cd6083ac398f4eb2ce3b5/resource.tar.gz{, .log} |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/util.cpp |65.3%| [DL] $(B)/canondata_storage/1936273/b293975a7642b91c5614f8db12d1bd08a0069400/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1936947/eb45622f6e742230b1613aebb0bac678cc96fb83/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1784117/27be18c4c655d803ac4ad0ec88e5308caa093c37/resource.tar.gz{, .log} |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |65.3%| [DL] $(B)/canondata_storage/1773845/0758bece23c981ada2b0dd6767d862a51f17b041/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1814674/84cbfbe4040a046d449594db65102999cee0bced/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1775319/2fe33d3feab8838bd96c496a2503fc8b7760e1af/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1775059/dfb29f074743c819be85b8e41c2f4f4f5970e492/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1031349/10fd2cab5eaef26b8b41b09e42e0b0458004c724/resource.tar.gz{, .log} 
|65.4%| [DL] $(B)/canondata_storage/1931696/a36bf900a22fac0635ebe830de6bcae1c73133c0/resource.tar.gz{, .log} |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |65.4%| [DL] $(B)/canondata_storage/1871102/680e072d487740a733846c6fb8acae02496a7035/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1784826/01a49f49738931cb8fe58f887acf08cd22b8816f/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1600758/8cab8973f2ea39497a139c994f146f17f194bc88/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1809005/6c0b793ded39fed6215e26ef8284b30340b9dfac/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1937027/260442135926ff6c9957da5c2478f83f49087cc4/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1889210/25bb12516fb50fd6341f375d4bd251cc1316e0aa/resource.tar.gz{, .log} |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |65.4%| [DL] $(B)/canondata_storage/1924537/8552d3077ff10f95a9d6ae208c13d115e0b5bdf8/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1784826/ca2edc7f1a2a5c0080870544a7f83cdc18543a05/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1784117/035863e9d5dcd47a63c8d359ae9a30ba4ceaf67b/resource.tar.gz{, .log} |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |65.4%| [DL] $(B)/canondata_storage/1937367/9e6103f3844abd305fb3ecba5a38bd2939f032ea/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1942415/9b0f428523034738a81372e143ed76e0d1ffdfad/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1937492/63930c59f78bd833253a0a3dd62479c8ad6cb321/resource.tar.gz{, .log} |65.4%| [DL] $(B)/canondata_storage/1881367/55ee657a8e2fea05538badc8317b24fcb3a4115c/resource.tar.gz{, .log} |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/usage/config.cpp |65.4%| [DL] $(B)/canondata_storage/212715/b05f0bb536c8488f970e72a13051cd475b3d6a41/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1781765/62947eb159726b899d4d2af791e768b5990f0b7e/resource.tar.gz{, .log} |65.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/config.cpp |65.4%| [DL] $(B)/canondata_storage/1903885/e5bf03e50274ceb0e0a7794568205a1dbe547554/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1597364/bb2a478f5e9c5bfeb5e6ba7fde27b7879d5d2f67/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1900335/706a47a293cd09905f9fb37c502faaa4d8e51b5e/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1937424/d5d9e5b42a440866dd2b2f9da0c4923a86da8bea/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1900335/6da4e798745eb2a68f5231cd7d5c7f35ec91c905/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/212715/61f0c59354c0aee96d5e21e3fd5f5993b2817ac3/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1773845/0823c71a646fdb00c7b391638ef2b27d82ee864d/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1937001/48471e6d9c7324ace71b9be0fd74072f683de033/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1781765/8061b1bca1dbb79a1a8135dd98452c011059369f/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1597364/674fcbbcaed1c9cfba2b74e3352aab82299cbfca/resource.tar.gz{, .log} |65.5%| [DL] 
$(B)/canondata_storage/995452/e78675a82a4300d32887a13f4b9e86cb1608f590/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1924537/dd93ed409770f0ece7fecd435f3849c139b141e9/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1942525/80cf40971a29683f0f53d3784e5ea988eae1a473/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1946324/4e55210d05969637e5668c50c7fbdc1a61f108f5/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1773845/06fab929582e640fdce3e7cdf48ad02f2a7fe75f/resource.tar.gz{, .log} |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/libydb-core-protos.a |65.5%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |65.5%| [DL] $(B)/canondata_storage/212715/84f834803ff8b4e4bab2716894be9d659edfc198/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1871002/e8ac0c0b88a39f7e5d48369b8a24b2e049e5bf3d/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1889210/c48249ef01b032757b4c9d64577e12744571e6ff/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1900335/773a1202d72424e925be03bb8ba15e6cc71fa3f4/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/937458/623d3bd7a832446925fc7d56bc3639f7411705b4/resource.tar.gz{, .log} |65.5%| [DL] $(B)/canondata_storage/1871102/cedf8264a1905131c6de15c01a397082d1677da3/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1903280/f275d6f071715007b59c0fcf1ce9a3d4eafb9599/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1880306/01f3c1debba8d61b9fa1fa512eb76ffb3cb3838e/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1942525/7de1fbc5f1b7918aec7094b41384bc4c27fc7953/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/937458/3ce9d3f90d17a09aa182a1ae8e08f2f065219fab/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1773845/27425423327af8b10415a6bbb80a5aec7c55b13f/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1781765/79e3b478a7ff22ae5ad7d4f8b04d16d423583c5d/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1773845/58669ef67391607d0bea8c241fd7cfc291b1e4a8/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1903885/e0147df24000ea90170041cff25000baa7559abf/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1925842/1c73675b2ef22d3db833d7bb81e6d092b9398bca/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1931696/3b66a4b8ee9789607df97fd1b710d3ca890dd9b2/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1923547/320f607d9e9c19a93a835d3183938f1fba6dd52c/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1773845/ddb0deeaff9c015244a4452e588027cb4567b1a2/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1923547/3423d2190488ae10821f2c300e70cd1b3ed9fc6d/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/995452/094991b6c0cfd5ed30c429b04d69b8af3c10eec4/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1031349/6c70521322fc43f752ef6b89f8667fefd006af8b/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1031349/44024c82812375f0173f3b2781471a0977b071db/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1784117/bb10ae9ea87fb7aac538ebffcd58fdc507d9f394/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1942525/68adc93267fab0086b1faf825d05122058d5f469/resource.tar.gz{, .log} |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |65.6%| [DL] $(B)/canondata_storage/1937027/bae649e6896209dbfb01462c67cd54c0f971d262/resource.tar.gz{, .log} |65.6%| [DL] 
$(B)/canondata_storage/1937001/7bff8f98ab448f07ac3e80a4af0d2aed91a791f3/resource.tar.gz{, .log} |65.6%| [DL] $(B)/canondata_storage/1937001/cce9b26ef7e344a7dad40ff2fee61fd47fb80a21/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/995452/59cb21feb51bcd4aaf002804abbfbb4a05ffe65f/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1880306/c238261ddd77e921e2cea2a78db88e0431b4cb3e/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1773845/c255bb2163f40b1ca08f81b23e10624ae1969605/resource.tar.gz{, .log} |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |65.7%| [DL] $(B)/canondata_storage/1880306/9e9848effe0d45eb3d4372fca57bf6962d09aeed/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1871182/0dad491c929525221cf344b9f6f54cd14f2ce4f0/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1920236/9f586e65bb81318babf436f4458de22c9257b1d1/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1871182/027431fe7e452cb0fbf3b9f53f15eb69aef793e3/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1599023/5228a2529d3a722c804df96464cc1cfa228876a1/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1809005/4d456bdd41cc8761526df95e5b61e959b1dfc12f/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1942525/471bed5f88a668af45ac44bbcfdc687ddde8df24/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1689644/763d9bd4404423a24deab02585b884f08692c90b/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1925842/c1066aab7478fbe8c5b14337f793c111997cc324/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1775319/2c692d91178bcc9774270c84072137fd625262ae/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1925842/72490ba4ca88a4360df1e1456a0800bc1ada47f5/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1775059/17f0d56cbd3b1817a494481bb24fbafc1bd7be1b/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1781765/e399fc9631f4d96fcb5c717860c01dc3564d9b6c/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1937150/6b5a12edf907ec102b80b96b177733ca6ed1ded6/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1600758/32cfdeb8c6377a2e7e62c6c4adbb95f25af7669b/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1942173/f28a221bc60f2450a1edbf8db6e85651a9c3bd79/resource.tar.gz{, .log} |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |65.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |65.8%| [LD] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |65.8%| [DL] $(B)/canondata_storage/937458/e291908d1acc33f8b839a28a42f73be6affed850/resource.tar.gz{, .log} |65.7%| [DL] $(B)/canondata_storage/1784117/523c2e2d47e3427f1204fa18ca2d5ade41618bc0/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1942525/de296b35a0b1102cd2228744e8e164bffd57d12e/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1130705/d9515e536cf880a45dedae2a41661295b91c6258/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1809005/7e4dc59583cad760822faf30fa4695e365329148/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/212715/3045678bab9ba65eca350a0c5b4618902a97028e/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1923547/eaec090e24c98f865902b418cc92d70420b05c0b/resource.tar.gz{, .log} |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |65.8%| [DL] 
$(B)/canondata_storage/1924537/1ee261cbcafb6071f58372428e9ece77d59ec4fd/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1784117/1f7c2e1c35d03b983fa1a69e594b4cacf4c42d7b/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/937458/77127fceb419592ea59ebf8fd420f0f67121e538/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1937150/bfd37f8371e4e91bdf62bcb8724b428fe27f2206/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1903280/bdce64a3cd13bc28600baabc9b05d8e5728e311e/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1936273/b21d0cac033210604f1cd49a5c524081567ed4e3/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1937367/81cc07436d22bfe9ec8505998487b9ec6016cc03/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1937367/e20c4de8f0db337c5ef869a8ce171ea4e06d80ca/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1942278/fd0ca7f95f7c6343b4a13953597c61288fa071d0/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1937367/ffc26952304424e6a4538295e7d27e30362a4e89/resource.tar.gz{, .log} |65.8%| [DL] $(B)/canondata_storage/1847551/88403ddbf01474ba2e3e37f885d908baa723db7c/resource.tar.gz{, .log} |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/codegen/codegen |65.8%| [DL] $(B)/canondata_storage/1942525/6b25db4f7d87b2343d69fa758c7b0e0d2c385243/resource.tar.gz{, .log} |65.8%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/codegen |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/tsserver/tsserver |65.8%| [LD] {RESULT} $(B)/ydb/tools/tsserver/tsserver |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |65.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |65.9%| [DL] $(B)/canondata_storage/1777230/92358f07848628e912a541ea35cf562f3ca2e131/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1903885/7dffac89ce1ad5b85a289c1c8f6a474e7e3a9362/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1784117/bdd3c0ddc1670802f060fcdb1711e78ede383acf/resource.tar.gz{, .log} |65.9%| [LD] {RESULT} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |65.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |65.9%| [DL] $(B)/canondata_storage/1942173/88bbdf23a1e54cd5c5a8e5a6a9b995056573e90a/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1775059/60aa9c77d2376aa1beb6e616fcbdc82d0b2724be/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1920236/87ffa4c1bdf96124a80c950f8ff630741d28d4cf/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1130705/ac9859ee8d53f34b0483c0f88da3629c3f1f4324/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1937429/3ec353865b88f20c966196a0ce16243c37e12190/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1936273/3aa418c24e77eb510bbef390a883a810189ad7de/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1903280/ec3176815eddcf643d7c668ba3e55b8b28ad99b8/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1937429/e5eaf8d78c61231eab5dfa6a18215af9f922a482/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1916746/2673649875bad3e7ba633862e83892bc57334832/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/212715/b10a3a963ab6644683db33c830058d65ff99d14f/resource.tar.gz{, .log} |65.9%| [DL] 
$(B)/canondata_storage/1847551/682cc73a2d58def116940ca081e758391e0f27cb/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1931696/0cf0f02388f28b9c85e51af557c6c0adc7e3c2e1/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1889210/431569691fa60b20bf9ef4cc94610d8f1b1518e2/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1777230/dd70c380673122cd500d799c70016541eabd320a/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1130705/da7974592864104e97d4cfb7947d82f2379f0266/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1773845/6e61cbdfae8bc6d693a1ad4cc304d9d45edb7242/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1600758/3d0654608172a6c45b1fa51e959c06528202c760/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1925842/ed80ec5985c654eed9c46bc1e3ce1c860557e572/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/212715/536da9610d37d18c947fc7368e3720d62d90036f/resource.tar.gz{, .log} |65.9%| [DL] $(B)/canondata_storage/1773845/57222273edb3e600187ff62653acf03a13f24744/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1925821/4e746178682b9df3b5c4d499b609fbaf2ac5b376/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1814674/a71168cd07531a101c5ea9b29bae77a3ea1d4693/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1925842/698bbd06dcb399988109e3543f9819966d5e9daa/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1936947/25efb9f6eb4d1e76047ae7c2aef5ff59896f5b3c/resource.tar.gz{, .log} |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/init.cpp |66.0%| [DL] $(B)/canondata_storage/1817427/6b9d6900149bd623684788c18b56b70ca178d680/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1871182/1e53ee2b92848bee51fb8b73b6906845db1d0bd7/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1871182/0b81d4e80c80fb4df981caded22cf7246cf56ec0/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1777230/13a939531efb4067a14f4e097b82c407f6c484fc/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1942525/8f1e438ab44695d5d2d07d5aa00ddcc561c38421/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1777230/00c02c2221ad7773f9cfecb5ec1bd067dbaacbc5/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1597364/bf005de2d34f6496206315fd4a9226fb6f90b88a/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1936273/0cce21b284076a33a7d8bf253f8daebd8c196efa/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1936947/960a823a7074bb0a4fc6829f35dc9035ea62bcf1/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1925821/9c3baaef9cf7cf541749b011b756ac9d83887457/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1937027/65db208ba11cd5e90b41bbb5f58baaa54793e4a9/resource.tar.gz{, .log} |66.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |66.0%| [DL] $(B)/canondata_storage/1925821/6132b4b967a7c6d2d9c522d4a344e781b4121793/resource.tar.gz{, .log} |66.0%| [DL] $(B)/canondata_storage/1924537/081e3ea5ef34a4fe33a8e971e47d53ea3a5151a4/resource.tar.gz{, .log} |66.0%| [LD] {RESULT} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |66.0%| [DL] $(B)/canondata_storage/1946324/f0844b7187f1db0315c7ba22b24ff34c0bddf188/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1880306/b7c0983a1c6c9c608654f7a228532df5441ad227/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1784826/6123ea7aa0267741109a77c112e89d06fa728b93/resource.tar.gz{, .log} |66.0%| [DL] 
$(B)/canondata_storage/1936947/d814cd457b003ee9da1e09ee0877a39078a61012/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/995452/916a3abc0188ce4b46268a98f2f6487c53d8a14c/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1871102/bf551d97ceb3ef56f786a233cb690503836fb993/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1942100/7dbb0fabca371736b54699a0459fc74089bb4c57/resource.tar.gz{, .log} |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init.cpp |66.1%| [DL] $(B)/canondata_storage/1809005/7478904042df2a3888a84b6a917dd7cf55a05d66/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1936842/c642aa0e8ba8df646b4ff92018c848b92dcb289d/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1923547/82c5214ca1ac24aea9514c0da9d0fdf3a36d3b61/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1880306/5213fbc312a45950f1152a68258af55d6e4976a2/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1942415/8d010130e1284b2e3b1f4a934fc3768cc8409e69/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1937027/8b932d9e8daf49fb9e777a1b6fa53c785126e3d6/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1936273/8ae056ade64c358b3bb34eda82c0969989c5ee85/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1031349/6832f9241abb81a3c19acf956e8e9e9ed37578ef/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1130705/223d79eda7e49588c54267c8b7c488154ed801c9/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1942671/580c7dcd26532517044d16107182929c3788d099/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1942415/0be95092588fe1a5379e1336687f83ad5f8d20f7/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1936997/9ec8b4b9f89889c3a5dbb346465333d3b7417d16/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1847551/d239124b206d17cdfeda5a30fdaf3832c020b2f2/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1937424/c129022da3a346dda28c53da9edba53b3eb3f07a/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1871182/035cffc2aba7adbed541caae65e59e0ddf49b527/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1130705/eac262cda27c06132baae4257bc3accb70d14812/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1937367/dbee9962f462acf3732a651327b1b87b5361f327/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1942278/5982c0f62a0472a9822a5612ad84d4aae9998491/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1871102/76c86b8b78af73dd74b03bc83dba0b0e32bd1cd6/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1936273/1aa122f73bdfc2d7c7ecc6254b0a10b2df910380/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1937492/7ae37c32b42bb57d4df171a62ced7ab76867a8ea/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1925842/c5c946201c75187c543428fea19a69e208afda3e/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1903280/e8f3ad772a90a9a3975d3f5f482904d0052c1f16/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1936842/557f7ab03608bf231a6bd2276c94b8a7ee4523b0/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1942100/9da380c43b5de6ff69f9fc402c31f0485dedddb1/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1942100/c3d647446edbe752077b3f908285ed4a4a032d7f/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1847551/c04b6845f7d6b8061d0f3bb18348cc2396fe3c4b/resource.tar.gz{, .log} |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp 
|66.2%| [DL] $(B)/canondata_storage/1931696/59c974a5d18c41e65f27bd82416d6f8307cc1616/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1942415/69832751508a31a66677889fced4735a42f62092/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1899731/8371f5cbc66c5d22b5e30e94f49d4c3423336bd2/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1942671/d6da076374b1124e492566e9f81d7f26078203f0/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1777230/b8c638a79c26a4c14c582731ad5b06fe98478bb4/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1130705/9a8e26c0a28d46dba8d12985fe62df3bee2a07d3/resource.tar.gz{, .log} |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |66.2%| [DL] $(B)/canondata_storage/1871102/0805bf7d763724d22f5600786ba3f36973f091d1/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1773845/461e7989a09a65be78c660f6a49d876212096306/resource.tar.gz{, .log} |66.2%| [DL] $(B)/canondata_storage/1942100/da694607ab211ab453c7880d608269737da0e1e0/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1937150/2d475e4bf97968b8ba3ae996beb4334ab9cfac60/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1814674/aff89164540cb0673786555c6e82154c2d8a3ef5/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1871102/8763764d391af8a7276b9788d89479d09d42e9ed/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1942100/ea5d6f04ae9c974212a40f8582f093e394dd64d6/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1871182/2666f20d4e176027b281a7b851dd96e818956ef4/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1923547/e4e818b787fc28bd7492f949b98701f356713fca/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1871002/b21941ba70054720e6cf10accab3a568d92d2d97/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1775319/864d0177d3988207c37d5c5eda7be9164a718f0d/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1880306/b8a146dff266e2b5388e4e9ae22aa20c1b4fbc64/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1871182/e02f7cf2d403eeff46ab74696026e36be4ded9a9/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1942278/a9dafa0c92a2a568391db5df4c8a2a950955314c/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1600758/1260842a548b9eeb101aabd689cd26a911953004/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1773845/4aaca50c52fbfe0fc1a237a3c226e5e498d0a750/resource.tar.gz{, .log} |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |66.3%| [DL] $(B)/canondata_storage/1899731/b9b361b8c242c528f8d095b6a77697b3bd10ec53/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1781765/75774e90f574004e23fc9aacf32e1f561a8c66ec/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1936997/ad7538cf8edf8e81865f7eee42c2de851daf1211/resource.tar.gz{, .log} |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |66.3%| [DL] $(B)/canondata_storage/1920236/3d3a0652b04204362f293bde5bd1db28f8645dd7/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1130705/98bfcb23db43674b07f163a5d89bc355761ccf70/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1942278/bbfba5da727810c5f55bb0961dd52294da024504/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1871102/41439821d7c11fdff3fabc8b28b32c29a0af320c/resource.tar.gz{, .log} |66.3%| [DL] 
$(B)/canondata_storage/1931696/5c3e75d3a93b046b25d923d257b180452f6b217a/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/212715/5139a033a064dcf51fe12f342340ef4c205e977a/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1784826/ded8e4bd34c1c30373c7726abce38eca90ec35ab/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1775059/b354e53d4914595d2de6dddba4f1af4ec0b36621/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1775059/502776df8bd4c104347b692d9cedc4d35048bfbb/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1871102/597d6ee930787f14a7fd3507c37be2e17e206201/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1946324/208a50d83749c76dc119c7025e7f828673e1f366/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1931696/9b1f78d9612780e865306f09a2040d8a0d826732/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1936997/f69902e9df436dbd7c079c9b996bb43c65b9828c/resource.tar.gz{, .log} |66.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |66.4%| [DL] $(B)/canondata_storage/1871182/54497cb67187ae0d3ca73f41cfdfc13334cad2d3/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1936947/59872f3b1f1eddc6f2194f87604a00cc7300d8e1/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1937492/3b472eddc14507ca61231b1a308e847ec2b7b2bf/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1903280/b3ad5a45d76b516f66899551cc71277d7e559aab/resource.tar.gz{, .log} |66.4%| [AR] {RESULT} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |66.4%| [DL] $(B)/canondata_storage/1777230/1be81c0af4d894e438b1e6abeb6641aa0309b29e/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1924537/99b9f14a8cf47c7ed8fe38921521b6e743c7838e/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1937429/5c4bfbf1589eb61d7300d31dac8b0581c1292c14/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1920236/56560fc4eb0991ee6681b0a1b288f62576ec0df7/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1031349/186bffdf663847fce34ef344f3142b3cf148e402/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1871002/ecb1006531e1f9b13e35feb30ba820285342f340/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1847551/fc7297e0cdc0ebe075e27df94088bbf4da4a2595/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1781765/e6ccfc9e44a62c32a107d9b796d30e78c8539094/resource.tar.gz{, .log} |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |66.4%| [DL] $(B)/canondata_storage/1600758/a3c9dd835d113e6cdf30d9e35d9cc95c3a203da3/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1937027/d0866b9a79f4fc7bab36a1975d074abe641f546b/resource.tar.gz{, .log} |66.4%| [DL] $(B)/canondata_storage/1777230/a0620ade18d5d9590309b7ceb4fe7b5f476ce7cb/resource.tar.gz{, .log} |66.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/codegen |66.5%| [DL] $(B)/canondata_storage/1937027/9074da5ec3159ab717d6f0fee0639313448b4579/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1942278/340f722a851e4412d2c35b434f3ee6113a9f7959/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1871182/3ca7d9f793310690733c1f09756d621fb525e562/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1871002/fb6fb37c565974a6f0c497e8b3e58f6b5bf320b2/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1942525/bdcfc13ab61bbc8317caf8a3654b63d189e255fa/resource.tar.gz{, .log} |66.5%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |66.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |66.5%| [DL] $(B)/canondata_storage/1946324/e1f7c67cafa20200008de81571567844ef07755d/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1889210/b3ec54c8ba5425d52cf7fa3db3638fad22de7e87/resource.tar.gz{, .log} |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |66.5%| [DL] $(B)/canondata_storage/1931696/c6aa257a7050331fd824bbdb0d587a5a0f000ab3/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1937424/be6de2a45c0e092d8da0f5c04670601e603a4d75/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1130705/20da23c279246100d1cf6675c98b016c27d78ebb/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1817427/afca08871a12f7dcb17f712cbd2347965f220a40/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1942671/a6ef6234ecec8bdd9b5f7ec30206378c9f7268ef/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1903885/517bc28c121f874aa51bbc3deb7b23a0b3fdeaf2/resource.tar.gz{, .log} |66.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tsserver/tsserver |66.5%| [DL] $(B)/canondata_storage/212715/281225c593b89b14398e3d64718321920556da62/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1936842/11d23d4a39031af80d6dc470ce99f9427771e7d4/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1937367/be35feae41cdf3f87ad7ea8d4ce18ba9629b9c4f/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1773845/c0a795fb831b832e6e40aa0f1b1a1e5b60134f81/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1942415/4efc96736f3d5e3406745ae6daac7330e100c4f4/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1889210/9f0ba7bc92451aa4a498112bc8c2e703011101c2/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1597364/75cbdd585b0656f6fb0390e1698cb16d6290c17c/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1881367/fd6fe303f95983c7923be22740c4aa07b052e199/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1936842/2efaf15f30b906d1247f2ec0553f1f18fd6acee8/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1942525/e1a82d4e7077e073e6175abb7c2d712d0cd08dc4/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1880306/9f93bbb7f8cdbc54330d6e0f905404d0e826ce1a/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1931696/fca86c589326e9bc05817a71a47f8b9d16219dcc/resource.tar.gz{, .log} |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |66.6%| [DL] $(B)/canondata_storage/1936273/9613a7e6a06ad9123b9b3496470bc3108947b98f/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1784826/27f8cd2f641de7f2ee55266beaca6dda7fb6d4cc/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1809005/b9b47ee4a9e9f1d94a493c099f72559fa9f3f498/resource.tar.gz{, .log} |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |66.6%| [DL] $(B)/canondata_storage/1889210/99053ce259e5acf0cd21c3100078510bb975c3cf/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1936273/85968a675c17dd0728c8d7ba5fd43bd0b237dc65/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1936997/93899b3de50fae3f9677baacc98094a7a629590a/resource.tar.gz{, .log} |66.6%| [DL] 
$(B)/canondata_storage/1903280/2010996c42ed76fd6d1e7bedccdf6026ec5a5fdb/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1925842/70942689b7ce63cefca5f7da5343fab5153230a8/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1925821/ecd9dd14fd5c368a780124aeaab181143df1a49c/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1881367/03ce4da085261f32ea1c441399858f72350f0970/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1889210/0a74d27984bc3e33adaacdc7b85618c55673e8d8/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1871182/cf13957d635dc8c77a65ef70797b7c6b8d4646c5/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1903885/020891901aa63873d865b7d859cce18f09b6b3c3/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1942173/1b0498e994a7de827f7d1a300010b8424167a1f2/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1903885/a4d0122d8471ff0ca85352e617bed922d9ad8df1/resource.tar.gz{, .log} |66.6%| [DL] $(B)/canondata_storage/1917492/75449c24a279528381d8f6bec1271caa90cd7a95/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1916746/3cb022be2018b398cd935b68f7b1091e2882ca57/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1871002/87af0e803663459b2fc0b931b22ed73d40f91575/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1937492/280f310029e9135c17fc7143ea31b16e51fad84f/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1899731/d2bc375d62c2739c9466376d2ff97d13069fe91a/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1597364/8c3e86bd1d9a6577c911775a64d51195a61e9b9e/resource.tar.gz{, .log} |66.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |66.7%| [DL] $(B)/canondata_storage/1942100/015c616d2e4af2cc361f357361c829cbfef60e80/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1784826/19728bd1f1bdea5d0605d9a498ec2970c5f3e92a/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1936842/8d9f23542db0c4f13723c24b10a242ee68c61ce3/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1937027/3d8ae7405c87d4dd8f5110c9cafc9e3ec447e435/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1937027/74f1823cd9853da5a0b0d77e4281e13574c3c11f/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1923547/b752d090106a138d94de874b3e8b115ee6aa3c61/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1889210/a392a54ec359804e59b33f48a3c5e8f3c7765cda/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1925821/301359f830853bb29d8dc6bedf12ccc575fd3fd8/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1942415/c4bc3e1ce2d8446b0a147e62db95c2cae64947db/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1775059/34e3c4f18af78c5b08d1779bfd2babe42d60869d/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1924537/67045fc137b7aae73c1137f6fa60894b964dec45/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1775319/171cd905ac5402be308349251477ae77481201b4/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1784826/8e073b9fd058f3f074a4656c14602ccbd76303e2/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1942525/4fe45c70cbf0c28bef09a91de22cc41d1fc13153/resource.tar.gz{, .log} |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/client/cpp/client.cpp |66.7%| [DL] $(B)/canondata_storage/1942173/4c1cefaf51bea7c00d33359856fe5c9bd35e17a5/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1809005/407ce5051ca928115d8bf49186af1531f3ab285d/resource.tar.gz{, .log} |66.8%| [LD] 
{BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |66.7%| [DL] $(B)/canondata_storage/1777230/2768c5271266d2bfc16d534dcba9a9afad3910bf/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1784826/ca2b5c92f6c48a734cb3bc782f744c31b81d1837/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1773845/d6fe26bbfec70ff5239a20763ffbdc27ad4a01c0/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1925821/8a81c7c999d37062d42776827b3f982555d3bdd4/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1889210/e2ed52d5150d0296ed106dcece44652f0506ca42/resource.tar.gz{, .log} |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/usage/service.cpp |66.8%| [DL] $(B)/canondata_storage/1924537/36fe336db7de347902767b13c1e0d63cf42757cd/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1777230/65685e9d54d416f54450defb84f83fe3b04456b0/resource.tar.gz{, .log} |66.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |66.8%| [DL] $(B)/canondata_storage/1936842/0049c952a1bcb0ee8c00f8d262e8ccbc9a964444/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1871102/3fcf32ea5c486527b20a5dea1db1e9ccf2e36a61/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1880306/25bda7bb5f356755a0d73916af1171e59aa33ace/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1923547/45484b99c033020b648870c9707d8e325a2db399/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1130705/6c54f70b6ca10a02b9f318b370b9fd95ba01421a/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1942173/7a7dc71e67e8e32cfc358509ee0600a7789a62ce/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1777230/5929a5f68e30c5cd123eff09f1b1487815a8578e/resource.tar.gz{, .log} |66.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |66.8%| [DL] $(B)/canondata_storage/1946324/6416045a0bb9d6e8e5b0b141a708474cc016eb51/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1777230/c67090a00be45ad908b79606c276697ef3208c16/resource.tar.gz{, .log} |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/service/manager.cpp |66.8%| [DL] $(B)/canondata_storage/1937027/24b357f9aec37ec57afebd08a4612bbe97bcf4ca/resource.tar.gz{, .log} |66.8%| [AR] {RESULT} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |66.9%| [DL] $(B)/canondata_storage/1599023/af4de9c2015a8dbd6c450ba09edc50e553a0c403/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1937367/1c67f3b284e4254e921806deed7356235d09b497/resource.tar.gz{, .log} |66.8%| [DL] $(B)/canondata_storage/1880306/fe6ed74453624d8e9abdbfac070631bc503097dd/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1689644/3e68881a70015247fc9201a3e5f029c5770414d0/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1599023/28c053adbf3f9551c4abe79e7a68fce4419330c3/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1784826/2a831b2d73099cc758734ddc249903ecf064810d/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1781765/db7f9f8ef3e4bbba649bbea05cce38a1eb4d74a1/resource.tar.gz{, .log} |66.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/manager.cpp |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |66.9%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |66.9%| [DL] 
$(B)/canondata_storage/1942525/54e70dcd0201d7d7770d670aadf55f64af334a0c/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1936273/a62235705daacff938053a0c1726e0c527b8307a/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1931696/baaf97fef703e42cdaefa73847e109a1450b5b99/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1847551/6cbebf34318d9de1322f2f5dba4ee3de59c62096/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1889210/d428a31e20bca6fc83066589a64f342c0bce07ea/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/212715/05112758aa31c86216a47b30fa10eee1e52db258/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1871102/fc5ed8103fd812712bf2a97977898a961a27156e/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1942173/4d0058e024fc0472a561e1e5fbadce0ca3ce8a5c/resource.tar.gz{, .log} |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/client/cpp/client.cpp |66.9%| [DL] $(B)/canondata_storage/1924537/994204c85c8f656606cca064cdae9e3d22058188/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1775319/63afeae1357fd3b8b5336934dd3e288ccc05a9e3/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1775059/8b34c49761913e81a10df2ad921137b17e044791/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1923547/7c8c70a333bc60c54015f8d05a34f8e2b252b396/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1599023/ec28b7e0cb376a1e45f470b7991522c343aa2f7e/resource.tar.gz{, .log} |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |66.9%| [DL] $(B)/canondata_storage/1920236/35006d56f02bf6830f30b607dded3342ab6fe947/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1937001/96df220872bbb62db85fbbf2896ad6c42e1ea831/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1937429/b2e019e5c80a384dae2cb46b81e53ad9800ec6e1/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1925842/78a06865e4e9078622d4dcf470d14e1b733b818e/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1923547/0012db846c37fd32ba35beed8bb1867e995553e6/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1881367/a251b5ef11aa21f92a7b9ccbb7bd43cb18874df3/resource.tar.gz{, .log} |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/service.cpp |67.0%| [DL] $(B)/canondata_storage/1817427/540457314503b1dd9fa4c7f6a441cd55328f0f12/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1924537/2285eda3e0498786ce5e558b2150b7c3df203fa2/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1923547/dbb09fc9d877657acef8a7d59b3a1cdfa4706056/resource.tar.gz{, .log} |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |67.0%| [DL] $(B)/canondata_storage/1871182/3f2a613c9c9875708b545bc50785cef27e54013e/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1937027/d3548eb0e12456df7d78d12789a5f6e6325fd027/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1773845/77f8b008f626911d8af361315777d5aab3bab090/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1942100/0aeaf9869b0ddd879ea5b962964545b2e8d4ee29/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1775319/3d397bae2149eca31d2d1d70d1f869cf3ef06a27/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1889210/39a2e296a18100a101286d021c3bbaf0dd7c910d/resource.tar.gz{, .log} |67.0%| [DL] 
$(B)/canondata_storage/1931696/82ea795a06df7c3a825b7d991893b35cf6220bf5/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1937001/b50cbf33819e30c3dbd3f1d7aa3988fe10d7563d/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1871002/761cff393d72758da30485c6468fa0482f26f7e9/resource.tar.gz{, .log} |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |67.0%| [DL] $(B)/canondata_storage/1936842/4c93d91b791c8afc87ba8ddb554fd8416c95c5a3/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1936842/8cd90dfd54f3010a76d89484812059b56b435e61/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1925842/cbf83d6f077878c7a82321913f58b44bec270ff4/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1899731/9a6c84379323b592de124cd879525cc20bc7dae9/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1937027/591a1ceca790d81eaf524a7a3e730722b0d7bdb7/resource.tar.gz{, .log} |67.0%| [DL] $(B)/canondata_storage/1881367/e42a6d3bf5f7cfd5174c33ae18a047e043b3972e/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1599023/c740047c261980b1e01d1f5aa2d5ef7442556a50/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1942173/b8a648bc4016a8f5127fa55fb15319a716377608/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/995452/cd615849d57fceb08a91b5ec6193f9c9b5284bc5/resource.tar.gz{, .log} |67.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |67.0%| [DL] $(B)/canondata_storage/1916746/7215649f3d46d5bcf38f953188bce90d92d5269d/resource.tar.gz{, .log} |67.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |67.1%| [DL] $(B)/canondata_storage/1784826/2b974a6b59b529ca8b3df4eb934abf7f3c449c69/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1946324/c7905148ccc6742256ee4a209186276ffdcd07ac/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1923547/afaeadd931dc450ce4874920f37bf8ac420697e3/resource.tar.gz{, .log} |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |67.1%| [DL] $(B)/canondata_storage/1936997/4ecf9d16af51c71c1bd05cb4c6d3b08f52610ba0/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1942278/fa8d61d23d54178691359d36c79c3aeb38e8d3a9/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1871182/07b7b4e8c24b11ac495e88374330f1c5e22423bd/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1814674/30771d6d06c81affd1305529445a0377b856ae73/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1936947/a51b566b93f106351c93790e8ed778e57174fe45/resource.tar.gz{, .log} |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |67.1%| [DL] $(B)/canondata_storage/1937429/c8fa98e2ff8b4277f546c7744d6a553dc5cce2e0/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1899731/f47ce36b219d8f50f3f3fb1cfb49b6138993e5d3/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1942525/795fb6df3f52b9e4c9442c13b6e21f3c55c8e287/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/937458/8cf48c219a2939bf3e0b54c55a5f53cb19e8be63/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1937150/d999d25ecf89b5fdaab059bd630b998ae547ae2c/resource.tar.gz{, .log} |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |67.2%| [DL] $(B)/canondata_storage/1903885/d90cd049c4b259907eb16c654dc3e99a98aca749/resource.tar.gz{, 
.log} |67.2%| [DL] $(B)/canondata_storage/1923547/a231b4de20366f8952df108d9a4eee6ccfa653a7/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1925821/7d03c02318ec52ae0e54d4999f68346bc093fa47/resource.tar.gz{, .log} |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |67.2%| [DL] $(B)/canondata_storage/1031349/b4f16899a4015d5c80fc4b9db6742f6406940336/resource.tar.gz{, .log} |67.1%| [DL] $(B)/canondata_storage/1689644/90acca9b171ecce398f65b70b237cc25e6006718/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1942415/8c185f02ebd1857df69e888726609303d69b0657/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1600758/9ce2ecd88b57ecedef1530f890d737b6cf95d1ba/resource.tar.gz{, .log} |67.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |67.2%| [DL] $(B)/canondata_storage/1903280/5c2923264d785a87c86dd7095d632b6354624dc5/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1773845/29384e5593c1d8c2c9ee9307be07d1d1504ae89a/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1924537/1b41ca0aa67dd1e0b3321b69cbca20d119f1cde1/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1925842/341fc37dc897d9710faf3e494b064e93d76d8c61/resource.tar.gz{, .log} |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |67.2%| [DL] $(B)/canondata_storage/1937001/cf2822c292da12910b7e5a0fd062f9cafa22374e/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1937367/46e259d63bb269eac924a02dbf163b759073a96a/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1600758/5d223afc08b0c616f7a151a55660aa50e5a078a3/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1937150/c49758d527ec85011ab8f1e29da739cbd14731c8/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1936947/6e1bb4d77970d55a143ad038e277a4ab9866d9a5/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1889210/a6f1d19efb8c2d66757fb3f23bc191e0ff7fca4e/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1871002/fb3dce8e5e8c0a86fa3b3841c5b4dfd00310d4f2/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1903280/e6bf7ee13ef64bc10434d7740c7b7cfcb072066a/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1689644/b1f7674a51f07a84444f5ebf4e68dd7decb2debc/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1942278/a082f831494e5032b97d462b87753ef9c3f0a5df/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1946324/4bc0ddf4ca6b7e4f4e1de76060425e0c30bc65fb/resource.tar.gz{, .log} |67.2%| [DL] $(B)/canondata_storage/1925842/5d65124ba39bfe8e6fec32f860c67c97e48531b0/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1923547/be7e687ea36299e4a042c1495c58b793c69141af/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1946324/e871328b5487b9b2c440f1dd14b427a10459f3e7/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1871182/6f64c40116f70a4d635a7fc8d77b59f1c2902999/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/212715/94c4be0ca75f4b548ec8f83ac182396ab697f86d/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1809005/ad7c074711ee8d1675aebabbf8025a2c8bd317d8/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1900335/510f56d38547b11bc3a5c27da03d6e71466bb828/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1600758/d71e8e715781d39882e1a1876aa775946961dc49/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1936947/44e14ea63b2c348af47a6bfcf39d44d85b07321c/resource.tar.gz{, .log} |67.3%| [DL] 
$(B)/canondata_storage/1871182/fa5250ae9e3c72e63f7ba97c09d348f117270160/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1900335/1c84908d492197ead2c896624a2389b6dc3780ab/resource.tar.gz{, .log} |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |67.3%| [DL] $(B)/canondata_storage/1599023/0e73c4fa67e9c960ff5312b7132f6c7465a2e8d9/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1903885/5bfe97f5876ada641c26fbdc01de3a321c5117a2/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1924537/56ebaf0fac792671b356863555a2c9672be8a889/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1937424/bd676d9ef123703690e03ed87d87e5057ac9f7c7/resource.tar.gz{, .log} |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |67.3%| [DL] $(B)/canondata_storage/1889210/eecb0781dab14320b0f96bfa31a980580d1d7ba2/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1903885/f9d45bc250f07f42a2353007c7f2648896a84384/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1871182/c8ce39b7abe3399c49b5207663c8bb6922411d50/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1903885/c8ed7244a5448efc4a28b5df8fbd77bc4288e041/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1923547/995d9d96bbba94053a60009ae7ba99979f31a5bf/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1773845/151e1e36181dc4f51864bb618bfd0ac1b52111fc/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1871002/baafe386a63dbfebad074ea63e64c14ff9aa7ce2/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1931696/564c43f4aa944aa26e85d54f2d25c16b9ce359f6/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1923547/94f377eaa1d93890e1345ac4940cc6fa07bddd4f/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/1937424/4471ebb7a52881a5c9ab19f8481dc8d0c8b2c21e/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1809005/15480770433f84d119d7fe097c7c74e81f5f1e71/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1031349/ff2d90d606cdc417d573d7d2f32329f10cf0be11/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1847551/a859756d282f251d1600ae15b6c205384047fb83/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1814674/712a09ba024a489ac40fb8f6a036e48974fe809d/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1925821/2978b18b76f4a1f7b0e4690d2015acea4775834c/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1599023/5eeb37b7a60896a1dd87c5c5ea8dea5d33c2134c/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1937001/205481a8623c17e2bed6fe61c2cf8cadb9a35844/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1937429/21da85bae1b4363f9d35ac14bdb3122767615cb5/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1597364/dba0843848a9b6c75e97f9c07beb339bd899e83a/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1936947/0ad6ee1282daf17d09361563e96adcbb4dd32437/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1942100/8fb0a7a6c71d8992f7b5d0fc7d2d03c809f0254b/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1936273/ad5bb5518d18e6806e72772975bede630b68c916/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1903885/bdae153e5f47955d12d65f85a611b12ff6a92b2f/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1937150/c1acae706dd71ce088fc48a032c252e2fac078b9/resource.tar.gz{, .log} |67.4%| [DL] 
$(B)/canondata_storage/1775319/8ac8c87858e0db34f5a3c99b3f4ca1084cccbace/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1937424/71aa4d14c8d66bba5d69f626e865a747fde28f75/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1775319/16e55349c1d8a123c91f7d512b301ac22c034701/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1936273/e97f761c072d3e8f44f6bc8a298df5508572dd64/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1689644/2bd6d3fb78f1d7cb3b8de730f65e151f606e2b42/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1900335/dd59ce09b5b70054bb239659c9dedc5218a4d0cd/resource.tar.gz{, .log} |67.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |67.4%| [DL] $(B)/canondata_storage/1925821/6ac3fd5e5dd20ee6d3841e1231c1129dee1f6a05/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1936997/82e8e136c5cb6c83d4a44aa387dbf64338ed57ae/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1937367/e5d3b4a217429148a8315cf4e228d45b21f861fe/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1923547/9239cf6dc7870b94856822425e617d80d75f9a89/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1937001/352ec8625074d0f91076901e1e506e122e8a0fe6/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1942173/93413c7f437227eab2052810218d1df60ced3a52/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1784826/664a8fd8dece5fbba1057a5f4bdc597c2c7b2e59/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1773845/e1901cc86dc30911ad22db641ffb6fe66c04423b/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1937492/ff27734bcb37c413b13864458b4334e93e0d3308/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1936842/51593b2a750dbb036388d012a30fa937edaab5f0/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1031349/f9e5528e64f4bcdb4154fd10489bc2c93c9230cf/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1031349/0b9bcb16a38e69c55142d62ab5b476d514cf83bf/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1937150/a3ed05ae8ad4fea60a051f6171424c733487f045/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1600758/fa72a23c77bab9a775b9e8e822e0be1a9841d508/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1773845/6c44bb7a3842ecf9adf65f1679c6e8b589fec21a/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1936273/9193d75f8d6b5367c9ef3700c0a94d57ec6a3352/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1689644/76bd2942df187ba04bb9771a46cdadf0d1dbe01c/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1900335/b69eef2db8bbb87629e850b8626d51a7c2f99f1c/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1924537/85d9d2dc5ead7566100ca824520016d0c6b8d113/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1899731/c90b03e90440900b48d7af60d2e03d478d5e354f/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1942100/636230304ba87d5b90b5566e93fadf7c12da6a8e/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1031349/ce2302f84e58b9b15afe3898be33e7dcfaa01063/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1923547/67f6df540c55c53542953e1bf74b7234a7231c48/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1784826/6fb10875fc2d13209580debefd9e32c0586b2ae6/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1777230/c670638311f49020b53d30b7f38b56ace838101e/resource.tar.gz{, .log} |67.6%| [DL] 
$(B)/canondata_storage/1889210/74544419c972160350a9c20f583a6dcc6f5d9b40/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1917492/cf57bdebe9d9af3fecbb7cd419893dd2ae22667e/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1942173/dbfab3fd6b2a084258584e8ee47fd89f14e189da/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1031349/b34d6646a07c5cb3362856012fec19ee3306256d/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1871182/a8f0dda19ece2eb39da3b275b4504de52525ed97/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1871102/a105d0f5f9856af79134cb48c8f21a1b942134a2/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1880306/0c5ef34fdd8425c29c71bb31e0e955648c9186ba/resource.tar.gz{, .log} |67.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/client/bin/sqs |67.6%| [LD] {RESULT} $(B)/ydb/core/ymq/client/bin/sqs |67.6%| [DL] $(B)/canondata_storage/1942100/2d6c7e378366673856333d19c3501c45eed6b4cd/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1931696/34a23b0fbb8cda9112778eff3500850ab0b81fdb/resource.tar.gz{, .log} |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/service/service.cpp |67.6%| [DL] $(B)/canondata_storage/1936842/5461a7f0f4d722c81cba2eff5dd1d41bf3a77f80/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1889210/24a25999f164b13ed263c37581db046794ca3fa6/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1130705/173afc6d44db00f6f42767e88bce00b623a40335/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1871102/fc62e492471256a62165f341a79346abd3d08986/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1600758/e4cf89c10a0c8ed92967210019872e3f5d0ba6b9/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1597364/50d0ff496a786c8f009d7afa268d209155aef6ce/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/212715/b9f267b2022a251b638e7a1f1ebeb788c308ed2f/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1781765/a9bb192df522b281951b02a8ad80c7fbaa8b1717/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1942671/18f32d5eb8ab2aab65012dda63f9cfd635ed3680/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1942100/34bf60ad890ba4690ed9d3377dd96472d59bed69/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1942173/5dda369a5c566435d55e882d65f0212fa3dfb906/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1903280/419b5c18140d44a17c33d80899398c8647846b33/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1784826/c8ce54c6ece9e34ad4006150cfd33aa59537e273/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1880306/ee64d24fc7c0bd8fa221eca8eb309837e5c0fe9d/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1847551/155e040f6efb509114e481612c26cc3259caa89d/resource.tar.gz{, .log} |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/service.cpp |67.7%| [DL] $(B)/canondata_storage/1942173/1055029c046ccc9d6feeae4f468d618044a7fa75/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1925821/2762f8f29ee80e9d69a3fae7ac21750c067803da/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1942525/16208faf1c4299915ddd3945e2017b318594867c/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1920236/b91be0c508f3325775d30c05e1d48d09dbf039fa/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1937424/1f14a59ff195cdb4e3cda1c103baeeeee659e945/resource.tar.gz{, .log} |67.7%| [DL] 
$(B)/canondata_storage/1937424/567d7f4e2a03fd773183d9e7015f2f468ea57566/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1814674/de17576700fc11fc02ec994a616abc5adadd5f40/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1936842/e2f5b27b418549665a04de58de4b4e487f33c292/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1936273/640ea425b9d5a6140c315077f2a83bba387482d8/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1917492/5ce972a5c2e3d600308091645f162df219851507/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1937429/b424baaadf0728e7424d639c06b7246427532e0b/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1889210/a4abb800446905e7d80fe38237bce315efaf5daf/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1936997/26840f7e11cb9eef225eaf1c7e2dc7e15d3b69c3/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1889210/a25c62064c6b3aebb0148e4cc231d4df4bb7bd7c/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1936997/f0d91ddfd51cc8a6414fa05ea0aac05c34813467/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1784117/ed4d4136ae1bf7a366a93d130e4c3e74aa7566cd/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1871182/a09ccd00a4b0358de052d958e9948bdc99497247/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1784117/357d3ccdef7d0372b6d86bbe259ca7f35b60e595/resource.tar.gz{, .log} |67.7%| [DL] $(B)/canondata_storage/1937424/c9a4c8efbcba2c1a1772ede4bf146f439970ae1a/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/212715/7f481604d75f86fddead511124887e2e0fa01e78/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1937492/7f01a8f9ac63e111f95c1b473211464f75350133/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1923547/22c2c509a5434879db9a69ad8b9605c384a0c07b/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1924537/bb09f7f7f49f479d6bdbad2ad3eb185564d33ca0/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1937001/7109df5869c8df84d1eced32a121709a7a6081d6/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1775059/0211445827e77a089557f709a929c720409a58d4/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1881367/164f3c6886439a33c9799bda28227bc201c09eb7/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1937367/e2a772964cf46b8a14a828d48a136378216522b9/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1936842/356668b054049036b6fc6ae585623a4cb1b29102/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1880306/bae66b3e317c04615399cbe68d1b1628bb7a6b67/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1942525/26eb3bd3f3177ba00d382b62045c570f72937d8e/resource.tar.gz{, .log} |67.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |67.8%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |67.8%| [DL] $(B)/canondata_storage/1784826/0e334a6b657f494d6225ebade2ce12411632a8e5/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1871102/272188d52656e4b0f1c180e9407fd7cd898e5045/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1031349/5baef42837a5c7e8f75ff06754ea8ff7be02b259/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1889210/414a59e63d9da4dbc9c919df47879a3079faff08/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1925842/bc33ee76e726ade051594823272684ec4117339d/resource.tar.gz{, .log} |67.8%| [DL] 
$(B)/canondata_storage/1937150/19f3cf1ec3946e665195d75146c9af1ad0df2747/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1871002/31d5df73f869f6fcf8bde774aab16576da3e6aa0/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1936947/ef3e5fbc5fb23bc80e348df0815b2958ed5e589d/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1781765/cf2d7def7c41b9fae02dc0acaab8437909472138/resource.tar.gz{, .log} |67.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |67.8%| [DL] $(B)/canondata_storage/1942671/431d6f4e1a38d9a83c442de2f50cfc3e38e449d6/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/937458/e483a0c8e724beb228563eb224be67227cd805b3/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1903885/71ee43a73f62c943cd2c83ad3cb710bb8b1d9fb0/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937027/7dd93f39b29f9f9faa0d9501189c6cde9f06926b/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1889210/0ae374e78057abad24c02b5788c385884d8c559f/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937027/840ae09a36bdc9a4737f612d0787fa5691189018/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1942671/4db54c8ba9dedccdc8391210d1657c5ca4bd34ec/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1942525/d140db22959141111740879cdd2464012e1a4760/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/212715/7a6807b4e165da760d46e0887816887571bfacbc/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1903885/dc53b4edac607ebf3b277ca9598c7c26218fd737/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1777230/07925bc76f621b8c24d146f499334eda41f5710e/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1936842/fa36495c13878b6808528b4f14deedabaaaf4b52/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1775059/0320e0a444559c89851159b0ca77b3fb930f0227/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1784826/837b0487932600ba51f58ab5300b34e847536f72/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1881367/e98bbd650c45a3f4f6bc628cf8be62baa88c6183/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1871002/2fcd813e80c98be1c3c62b9a854d09ccca8851f8/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937429/7495c8355df97f85fa824cc601aaf3eb891c07d7/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1942173/badfb45c9ff8847ac34b8c1fc73d36f02f754caa/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937424/022b4c4aaf443124c76bb3e388177d9b3de00044/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937150/be16d4af021ec170de66b93263bcd36bcba25641/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937429/8dfaad7a4316e425c9a664520399cae79a4471ab/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/937458/2c8d5c047a2d1f115b2b21f5412518c762d2aa0d/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1777230/96896022731d921e0e3ef80f527dbcadef5d13f9/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1925821/97ab382df374f58dbf4509c69cb8d6f0df937287/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1777230/c6bb3b20e729a321dd2f32060118095ac77f2dba/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1936947/3881735ec94cf6af5bd90b1c7efcaa7c1bad584b/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1937492/b472fd4c22edefd63722fcdafc178d25f35c8edf/resource.tar.gz{, .log} |67.9%| [DL] 
$(B)/canondata_storage/1130705/151a45bbb65479e0367fc50d4fa7085f38b11c36/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1031349/2beab0bd51f525f804474df3adc530a07847479a/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1871182/18e14cc850154a330057b23fc8c6576e30e17147/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1871102/afb11e7450182b29736d5d351c8e22acd046f1d9/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1936273/2dcf3705881ddc62a114cf70453bfa6ad7f7d225/resource.tar.gz{, .log} |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |68.0%| [DL] $(B)/canondata_storage/1924537/40c66e62107c2a9e3733dec809479087bdd8f6d6/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1917492/c506d630588c442847f7a867bd50c315238502e6/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1937001/739f5ccbe8d31f87c515ca8f825e82c32f06f5a1/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1924537/c5db08849456fd743b1ee29541c5e4a60ede833f/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1871002/b59ed2ad938015ca28be6d459030014e4b6ff1ea/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1936273/921006dac2a4100d3f0822b61dc56296f0c6ef83/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1899731/a8e487f28e21f36eb70986f5e3381840f4f35bc0/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1917492/490d2547d88a61b315b5aab0d1f524b08d4d202a/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1937367/43e9598c5fc4f9a8466e4891deda513069430997/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1925842/0dddd8be953c72538d28f43c54fb364cfe8111e7/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1903885/50fd147bcde0799910ee9f4cbb71ec257f43ca4f/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1775319/f824086f9aede9fe69b74b082af09c546782c449/resource.tar.gz{, .log} |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |68.1%| [DL] $(B)/canondata_storage/1903885/e571a3fbea26622636c12f349d5811739c3c6677/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1923547/fea898f087e0f27f17f93176391f1a45065a7fa5/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1775319/53c99957370beeb350847e11554dd471106250fd/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1946324/bb6f9b1bdca4cf325d4b3c175cc7c05431da0bee/resource.tar.gz{, .log} |68.0%| [DL] $(B)/canondata_storage/1775059/e0d64b1cb8ae8bb7052270bb8eeef5ba21a3c131/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1847551/652d7dbdeb88758415d87d3e7c2cc15c983c3ea8/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1880306/a64cbd36324c0aa2db14c1bae670848250f6f405/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1599023/7ca825f9742a1a057b05a268d19a99cd8f57127e/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1917492/b09f81119d6db779ff6e194090c647867842db23/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1689644/34ad75626afa74e843eda917420447310062489d/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1880306/d9c0e7be0cd0986dec7319115d94c6ed554b6ac1/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1889210/5f0f82e4a2bed51403d8667507a43b3b2e40bfb4/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1775059/4ce689cacc1b04e7b955e62a2269c8180fca36bb/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1600758/bba12fda8a5a68a3753c70d51907e240b2e6a66b/resource.tar.gz{, 
.log} |68.1%| [DL] $(B)/canondata_storage/1871002/fd83b9fcca23643110586ceb3cb213cea846db7b/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1597364/26c9cd4ddf7d11c6a72eed900146bed3a8e037de/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1847551/14f613117c7e3a3941ccff240390414bf6219eb7/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1925821/b41ee142eb0ecec97fb696b52ade07057abd9b3c/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1871182/996a8d2d865b3f19f5c68201bf7e2cfe8f1268f4/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1031349/a955c852651ea9f8124bef13bd770d8d15af6c2e/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1942173/2b6d37b434944472410a121082ca65dee724c848/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/995452/7d7eb4b4cb892b72a47c31068e42a9aeef5fa875/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1773845/3e79b21e1668f131709c246df5e9fced8d4bf38f/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1925842/54dd0ebb803d2e27e6086b3d88e35fed569d9a96/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1773845/7b0ece7963d2534b82e6da693f82ff79c8bfc07f/resource.tar.gz{, .log} |68.1%| [DL] $(B)/canondata_storage/1936947/8c234556c048ec2212784e80981b34176fe97cf6/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1784117/498626a06fb2650088930c390bdc96147a0bc505/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1925821/3149e5c24f2e47440679ea6c5e1f6d7e1b2b75ac/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1937492/a2da5ad850b8a2bacde60e0e3cf33053277777c6/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1777230/af30d016cca75b9d11b6ed54e7d270e255deb404/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1936842/50b264d4daedd2dfbc510cffe988c135e4f73a28/resource.tar.gz{, .log} |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |68.2%| [DL] $(B)/canondata_storage/1773845/d2141055cd31d948959dc44b2043b73e0eedb61e/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1936842/97245a35fa0fd90edbac42284ba4ffdc229ef791/resource.tar.gz{, .log} |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/service/service.cpp |68.2%| [DL] $(B)/canondata_storage/1923547/3837386c5673f42d0a262b53fa145c1210e06267/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1936947/2fc43e3b7bf2ac6312b395248938656a7fa50fcc/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1817427/5d304fd6b37c848dfc0dd95f9f02b44991b176c5/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1600758/6d0d27fad1cf46a244c609129a6009834bc45a9a/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1917492/d983c8e69867e7a5af2aad3db8b5eeebdf959284/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/212715/b9d67d9e85a77fd7731aa5719cd4ecc8994a16b9/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1784826/1d88e578beafe01d6bba5ff3a3b2fecf2c6033c8/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1775059/7b52ffc33b8c7dedbb0053fd02466c7710fcea86/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1871182/02c4e8ba45dea18da2d4af195dc4a2de592050d0/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1937001/23c4a86c24169a35556c576ee32ff34654fb44f6/resource.tar.gz{, .log} |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |68.2%| [DL] $(B)/canondata_storage/1130705/bafe275fa937679d2b25012fec947db4686c5a93/resource.tar.gz{, .log} |68.2%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/service/service.cpp |68.2%| [DL] $(B)/canondata_storage/1871002/6b2cac692d2e9ff1dd117cc22a0fd99527bb7e4e/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1942100/0c8472aade15448e1084d8c96bfb0a5ee21afb4e/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1130705/4556d96ceeebd1ca514c0387ef0b269cb852e13b/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1784826/2218e5c9d19235479cfb35f6537a97d87cb1a514/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1942415/7197d6f538e589afc0cd6fe1285c07fd138fb450/resource.tar.gz{, .log} |68.2%| [DL] $(B)/canondata_storage/1871102/5190906b0c0babdbbc337b471790697bf5591d3a/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1925842/ec6e9d018b38ccaf9fc6296a792f1e60022c1c22/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1900335/78b0311d619a60a4d1b9cef34b0261de23138f5f/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1777230/1db1903a6e0dabe5575aead91fb71d857f3f9a30/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1130705/a877e9a38d4cdcd3a3048f1fe39ff52ef1e78652/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1599023/4e9b507a0cbcf5cfc31288de53bbb8560bb1a4bf/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/937458/cc57ea281d0b003d397eca8623f6324d4f1e6ded/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1937001/0a62c6e91e29cdeb1135736130cced7fc45c219d/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1942100/50ef34247500569eb5a4a2bd9f3afffdda4ff62e/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1775059/c78334a5a54c55b78c6157e0006c3af42c43b3aa/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1899731/3bbf0846401e09b064add80d60e61e7654f87412/resource.tar.gz{, .log} |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |68.3%| [DL] $(B)/canondata_storage/1784826/3f48892aaa657aedfc21dffbd75e86e3221b71e3/resource.tar.gz{, .log} |68.3%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |68.3%| [DL] $(B)/canondata_storage/1597364/3d3a2dffac5b64baabad6f932284c93dcb205cd6/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1600758/99308b4324dde12d46c32387dd23cb39768d4365/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1936842/8f78d4e91e4f9982eb78e4a8b888794c6f76e3cf/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1689644/40519f669ee0ff4cd14681f4648f099da23d476a/resource.tar.gz{, .log} |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |68.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |68.3%| [DL] $(B)/canondata_storage/1942525/f7240bfb895abd9165a9251745a77a5737396a6b/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1924537/9d702629c20241b52be3899488a0fbc1c3dc0a5e/resource.tar.gz{, .log} |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |68.4%| [DL] $(B)/canondata_storage/1773845/8df154f4c78ff2cb24f0eca84702e3c40b845284/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/995452/cf615d0761fdf54ff78f8d33100e0f379784db10/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1942415/b35d2514a5150e9f12a175bf916b9aef176e9b54/resource.tar.gz{, .log} |68.3%| [DL] 
$(B)/canondata_storage/1817427/1d09a6a9bd95b3d23b0ad7e5fb8ca247962a9167/resource.tar.gz{, .log} |68.3%| [DL] $(B)/canondata_storage/1942525/71aa87e8531eaa616ea40214f4172330acf1be1c/resource.tar.gz{, .log} |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |68.4%| [DL] $(B)/canondata_storage/1784826/25cbabce687b21eded79fabb140f901221253ab9/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1925821/5c7988bca7ff7631d849ea3fc0177b71ea70a9e8/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1925821/e20a6041e7f58f4d79973b167aed78646db5868f/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1784826/23b1299e7f12d5cf020984a5f7c964801c31ebc5/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1942173/a6fc778ac459c83f8c2a8bab8c997c6c223d9eff/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1942173/e772b9f5e7fdee47a02e467e47e5db2ae21c1ecf/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1936947/e9b2989833eb2cb143a6b33579463fddacfe47db/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1916746/23de079a06c649cbc7ea9c207ee17f83d4a16a8d/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1942278/4753a3574c7d4c4cc4a6ef5262a4559e7e493c80/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1936947/ac258f02a615b46fc7a88b9fef9062f73aca53fa/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1871002/41ae725b67896da2823a1ca29e32600f981785c2/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1942415/5dd4bbc3b4370798b80250a55a4da5d1863033ad/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1847551/98babfb2e71230fbc636c0bf4e21403b16782b74/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1936842/73fe0e78069055b4c244798fc9c15ebb1173a692/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1599023/1b6e8347ca7cf43e4ffb87f89e02cf72c8adfa32/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1600758/743fec0dc57746c777c8f1b3b8fd0c82a29a2914/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1917492/d5eb47a4ac49b0fe0cd9d069e3197c610317bc8c/resource.tar.gz{, .log} |68.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |68.5%| [DL] $(B)/canondata_storage/1942525/b841c1f7e178a6bdcbcc7188f97e9d64098db934/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1924537/de922a973d80db7430ccc36b4b196b0fe3a08ff2/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937429/5bcfa7fa889e048eab4fac33f32363a8c63e5b0b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1600758/4facc3887d7be655ebe11f112eb8a7dc7a544811/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1880306/f04a57e691589def2f527c532c54402c486d5974/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937424/67ccb4bc28f59f5eaedbfe7e4d59615be370bf27/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1784826/ae144ae65f45caf0cf861d9528ef4fc3e1c5e830/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1923547/14c0d60ad63ffaedb974b51b52039901f095b5c5/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1924537/bc0aa6d2dc96c8e2d21b35c367a15ca1ca298c7c/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1814674/3660a2396e26152d8c8f050da9f28116b76739a2/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1599023/227f2914a8aa929f2f2a755dc19dd4060d33f2c5/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937150/69ba5a293eaaa6b3e25fe140a1e593fd4605b88a/resource.tar.gz{, .log} |68.5%| [DL] 
$(B)/canondata_storage/1817427/6ea5274dd6217b2229e46445dc75d3ec401bb15f/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937492/fae2471f79672290055b05939c32d42b13b0819b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1903885/e4adce0662ce21f7571f319c0ebb24df1985df0e/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937027/4608fb8cff903881d29660feb5fbd40491ccea1b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937424/058ac03eb1d0747a8ff4e9834da6c7421cc76622/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1900335/45fa7b62bfc436d95c883178870ebc86b564d87c/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937424/ca0bc12088c1a293fde3df7327441001cc5f0af1/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1777230/1e94bfc4170d2c00272e8b088c9a3c26c6d066d2/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1775319/581989ddfd844cd7fb811fb9f47c5b23d36a9346/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1923547/556a26215837428f8bd2b4b512f313930615d82b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1924537/4ece22823ee95186ecb519415692146559b02395/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1784117/9a53e0c31670253d78108c8a3b4f81fc219a1c68/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1931696/966609c330b2f749a4acb766d57d3b5dcca3d7f5/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1923547/9e635ae8d87d6d91f29e1dd2b0f82d9958ebefe5/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1871182/cdd8fb9ea4ddd53c6670aa1140203ceb50634749/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937492/2bb2455c1ebb5ccab2ce4acc1aa8fb7defa3f4b8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1881367/943a50aaa7841517b3581cb3efc1c4693dfe6c56/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942173/2f9cbf1b9614aff7e11f14fc7938938d0790e3ab/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1899731/0bc935d3f61810d330a6462fb133ddcc4ac126c6/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937001/589144012e0eb6b64ff634e7a287dfd36b22cdfd/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937001/3df1bf80f5738c3f0205526961db8957f75fdaea/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1946324/1db652d5b002ab03b5138ef5dd01126c2deab600/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1600758/8967dbeed4cbcf01ab4f5cf532c0a6b1652e0625/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/0fde09ac1ad0a850ad1ab93edf9eee9e0688dde1/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936273/98e4b41d6221eb1e25a8689e7a8c9e8e9f83c75f/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936842/e733b8969cd6cdf87ea1ec454d4e62279f34b3ac/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1809005/e8a59d866b0d94fc2277cc98140dae6c5e6c1510/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1903885/9ebea4f8b0d9c14e629045992b2a0566b4da0814/resource.tar.gz{, .log} |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |68.6%| [DL] $(B)/canondata_storage/1946324/a73667b195068cad6a1c7af344e8899b2a9f8586/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1924537/8515a8b3787c5105aaf393c0e112ddb75d305ccf/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936947/a5f83e5d38179c14126d53519dc062cef98113ec/resource.tar.gz{, .log} |68.6%| [DL] 
$(B)/canondata_storage/1925842/8b22a63573110228fc6a5e75beb97252b4db0e2a/resource.tar.gz{, .log} |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |68.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/client/bin/sqs |68.6%| [DL] $(B)/canondata_storage/1942100/f94ab3eb2009e356ba2cba2e6a416914ebfc9469/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1923547/d0656f40d6f986668efaffc1efccda90baa2359a/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1600758/0b2ec7f57dbbd2c69f7894fb1ef04f94367de8d9/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1784826/cbc63541f63d78da712c6e11ae70c4ee10dfb428/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1784117/eb116fd507d59419f5df95216e2268a87630509b/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/c187e1509ff19817db0786b643e77f46ea364ba0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/2dbc543e7e2156e1086b7eff9aaab72ade9022c4/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937492/eae233b11f0c715bdde5a31914dc1e293f0a9fbe/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1880306/0dff39a47da7c6db82403c4d9d953892f43da982/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1689644/6a03d147fd8c24ca8f22e2c016de5b0418f13570/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1031349/3fb25bad7a135d8493b2fd4782bc9ca920c7e4e4/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1031349/45aada6c316544e03166fc51527848ab05146f50/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937429/64e39c366e0b462b94fd9e04f579348331e65cd1/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871182/03581f8f43b6630387f93dcffb64efda102a5104/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936842/de8a3d5f5dbc206e6c8aac1877a6c2c6816ea52f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1923547/b6378128d274e5d1ef2e0c1c37e1cdcb2bbd21c4/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/4b5479e2ebed213e8e8d9a64aa0b5a72bb3ea4dd/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/e0ca733858945e7ec95821f93c3af63825d4d919/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/851b827e92b1d2a782f09dc8f909cdc1f88c0a5d/resource.tar.gz{, .log} |68.7%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |68.7%| [DL] $(B)/canondata_storage/1936842/848a979971caeba2efc272b5418157aab954923a/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/3c117824725bda13a89aad6b07b22541746fa215/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/85281113cfa8b551c6ab2fb41421ab9120c1851f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1775319/3515b86fb929979a6751f93bd43a0291eaa01262/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937027/71af45db04c1ee6fdd37f84594c6cfe28ff65598/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1775319/f5d325e9942124752494893299a6edbdfb1a1d2d/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/995452/f3edc5905f3fec9aade63210a7de845a74964f60/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1809005/91364d63a5af40ff53018c04d9aede4888eea14e/resource.tar.gz{, .log} |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/replica.cpp |68.8%| [DL] $(B)/canondata_storage/1924537/23db2f3171675edbfb7d81888413e9e5893c1dcb/resource.tar.gz{, .log} |68.8%| [DL] 
$(B)/canondata_storage/1881367/db84cf65a0fe23688d717b9be3cef15f9249c865/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942100/dea54d4f81130d9797cdfcdac410831269e46559/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937424/ef4272c0e98c55575149317381e8efaa85a26157/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/fe357240ad41d1044e07d94e45c2e6ad7022cddd/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1946324/dc6ee267af5d1b0e264188916e19d0d7a07f9201/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937001/891378d9aa3b40f9bb17b962cd367cec5ae2d3e9/resource.tar.gz{, .log} |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica.cpp |68.8%| [DL] $(B)/canondata_storage/1925842/3bbdcdb1d64d89357b8a4a5a80903c46df42d63e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/ebbc0e7a6553d487ca6f9443345b87dc94e5ba64/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/1e4cdc9374a98062b8e39a6ad511b5fc378113ba/resource.tar.gz{, .log} |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |68.8%| [DL] $(B)/canondata_storage/1937027/b9d8bf5296438b5378e7a452d0f1d00c40561e66/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937367/f0fc4d0046eeecd5aedc367d24e7c146f804556a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1924537/ed5c3cfadad0d4915690e6595935fd0ac4b575d5/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936273/f7ac782bb4f6fe95601764c0efdfb9f8d7bb7d49/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903885/804adfc45546340f178ce737d7f1d1e8feb56e81/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1923547/673d4b5ed96219bc5abbb4d4204d1361da772ae8/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1809005/c97035bd7617563b46a0820134ee4ac1af15af52/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1599023/84d6e9c08e0a2fb91653fb36e754c1950d7f4a7e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942415/2b3301623e3d03377711deea0f47238c2650379b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/7c4439739defd21f99d1592092fc7cbb0e2c282d/resource.tar.gz{, .log} |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |68.9%| [DL] $(B)/canondata_storage/1936273/313b77ac54cb289ac0c886126fe9dfbb6b4d0cf6/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1773845/6676eb441f225906913d6af3ed308493a06ab168/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1847551/2505f7fa026ee9e2d5013e7854c2b1b29ddac476/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1773845/6921a43b8331634020d1aa346b91c56ad87a3ae0/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937492/e9b42bd48624d6b2ad306186fefd6a9293482be2/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937424/6ef1f652a39663221b6531b56b64227e1ee24197/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1880306/2c99349d3c5fcb053d4b6ce0a8557550c848af18/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1936842/5d467f81c7a480afb7bd5525dfdd3f1e79630494/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937150/8a03b22cb41a5d45a74b6bace2f08e86727532d3/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1923547/cc66e2ee27834e5a88500b4e8ce11e850cd2de16/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1871182/f81e0439a02cfc84ec46562f3fada5312be3e21d/resource.tar.gz{, .log} |68.9%| [DL] 
$(B)/canondata_storage/1942671/586e1cad89b59a85e10d70e6019aeefccc0f0382/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937027/31125da8bc31fe5a5232f3a169fa8a2431a89df9/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784826/cbf6ad4c227ab017bb5ebc2f4ab5719247fa9785/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1931696/cc756dc950b218e9f3589a791267d21773207f44/resource.tar.gz{, .log} |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |68.9%| [DL] $(B)/canondata_storage/1871102/40bae405b3e45c05f8213a4d6de07ce04c617d22/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1936842/34f8feb0276ff4ef51cbeb94b6e56bd74f191048/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937429/30d3e476b0604091faf300d00ac05dc03b916b08/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/937458/70ccebe5fbe5864b01d9dd1a04ed7658001b110c/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937027/ca37dc23c4a42a42fb6cfd05c1ad5ae3f4853941/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942671/d37704c7d0acd85d95c86ed2cbc5abdedc6da1fe/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1880306/bb6abca9465bc61a78aff64ccce71d1aa9416680/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1781765/028f42f897160b53900546b39900217bb2eb9fb1/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1946324/b33c6fdfc40b5508ecac58ab31e2d2f4fe0617fd/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784117/3885f0a76b64a32a48487f8866602d3fff1e416a/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942525/79c6b0061ac9b7af78aeb1e98efd080a410caa89/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1775319/4f0c679fa773e90cc66a570ed4a5f0d8cc31f2c2/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1924537/8b609a36ae618dd93dae76d33a498930f0df8908/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1942525/02313b653cd90bd52d23ab748eeb6f19dd31efb9/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1936273/866bbbad6d025c44baaf3df88c5cf7edb202eed7/resource.tar.gz{, .log} |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |69.0%| [DL] $(B)/canondata_storage/937458/55f0812aa779206291ac8c4b283b7a80472fd1fb/resource.tar.gz{, .log} |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |69.0%| [DL] $(B)/canondata_storage/1600758/2bae720d354fef176d7a7ae70957b1a227ff538a/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1880306/553cec1ec75c6f92ca7aa0593ca041b68c096464/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/995452/b8907c48ceee21fc9f22e90025a80d21b7f812d0/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1847551/ccf33d99ba5f1b411488f6139e835241216d9532/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1031349/eb01bd7ff66ea4e3791b69ec15b5a2ac35547ace/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1871002/2a28301cd702f47961195a0e9d71a1a846884662/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1925821/03b39acc689a2972a275bb747421c2da6d4a7ca2/resource.tar.gz{, .log} |69.0%| [DL] 
$(B)/canondata_storage/937458/9a583559753b9ebbe934c023f3a211aa7e017405/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1600758/164a788024a2adf2945e5df7b5b1983ab8de1a1b/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1899731/2ec8224db091f2a7362c5e4ce595bc50329b8311/resource.tar.gz{, .log} |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/test_connection/test_data_streams.cpp |69.0%| [DL] $(B)/canondata_storage/1937027/fb2f5e7f8384ed26549a5b12ab64879613b094df/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1937001/504ba0e32db3717a9a770414a8a028855403e56f/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1931696/e1e81addd8ea3e15863a8ba2a48dd9580611eaa7/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1923547/fd6a07dc80ba28f96de9cc3ede62013c2ff4f35e/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937367/c43db192f475421f2559d93dbe396ac1a811fd89/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1784826/02898379a4c0c69ac2a74e54bad0c911e0b55bc0/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1889210/e26b9c7fc72b580fe82c1126f535456e73306c2c/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1031349/d6f6fbd690e2387ef546b9d231ad34955cbea3f2/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937027/07d1655ee1666f7cc6e979f137fa5a6d3f866455/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1942278/423e9b8a01d3f9d43497e4cb2ee7041e47daf356/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1917492/b1f6880fce0f4bdb598f464d2cc793c782661bc5/resource.tar.gz{, .log} |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |69.1%| [DL] $(B)/canondata_storage/1599023/65f04a32ef767a9d58baa3504831aac82a785d9c/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937027/b16c09e9a13e802c8e9af73cf9508048ead5f2e5/resource.tar.gz{, .log} |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/test_connection/test_data_streams.cpp |69.1%| [DL] $(B)/canondata_storage/1937367/518bbcf510ad7a43c5e77746bafd21ed0e3fdc6e/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1889210/fede666d039e0167053e2c4cfe8623cff4b33d24/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937492/d66e77714299db72286b6a554eb40992ddbc4b9b/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1931696/4ad92bebbe0a55859a86bf8023661b81c90b894e/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1899731/061987f55a4633fbb100deb15792166741b6bddd/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937150/a36e1dbd38eb3982d2ac3bfe25ca0672dc9d7f6d/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1597364/13b42f67efe0808381a2e9549fe4850dbad7a463/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1916746/fc9859eda7833569c636bd5c91d3cefea7eb47fa/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1936947/c075b3a6b857003250f6fcdaddd6e5508fb9d58f/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1917492/bb4a6167e361b901902dd3149427c5029802cf99/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1936273/a60f9999d0698e9bb3fb56c37d5b3b1e5d6d9c95/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1784117/a5ac79faa57763376eaf89f447411b1d96378091/resource.tar.gz{, .log} |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |69.2%| [DL] $(B)/canondata_storage/1942278/c7e94a55443ba1bfb954699e3753bab75896bf89/resource.tar.gz{, .log} |69.2%| [DL] 
$(B)/canondata_storage/1900335/d4e82c318baf1ed34d6266f6481c0a7670c611fe/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1814674/a5c2cbff45dd20fa22702fa4b2539a64145b10f1/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1903280/6ac862756a9225bab7885d29cf2289a6202a5ff1/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1937492/a3be8907a794dd8afc1b0615834f797b64dd9927/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1946324/c73c9eddeafc3ff13ff62d961df837bb6725d750/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1923547/f8815f5c37d8a37467fb21ce650719d84ad34373/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871002/4cb15fb9e597ca755ed7e9f8f31c5eafa9b5a582/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1880306/468b163936a9bc33f15e62d2d0026dcdb00b8520/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1942100/0c1b1bb025932861fb70abad9310240dbe73a50c/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1937150/8facf8f2f4f1dbe0881f83a275f035467ce8f3bd/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1942525/e82383a0853340bf9ac348d8e641aeedc7c24e1b/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1031349/2509d6f19c7d08d2f97888cb86cd1f893cd619db/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1031349/fbbf24f543f1b677d727bca56c1443dfe538b3b6/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1031349/7d5eff370031e75ffe32ec32582d27203420e6b7/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1937027/d467c683292423d8a40039f4803bb61721a8cd03/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1936997/9befa0c3335c1ce55cbe46da2b9eb6fd58679c56/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1903280/cfc00695f60d304a5b897d2cf0fdcda9f6f0bc03/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871102/093ef1237a5eb90e2e1f6670f45824dd7aa652e1/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/212715/1628fc53f3f62f8d00cdc3a2832cfcfea9a015c3/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1936947/709cd8d6c15b98e118e6f2bf0b58e3a7a05592dc/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1942671/136488dd722e833e1c1e1c8bc98c69cae0134648/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1903885/e15ecedc064b62e14bd146feca52d1d6acd0bb64/resource.tar.gz{, .log} |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |69.2%| [DL] $(B)/canondata_storage/1917492/1ed6d08398686e90568735860251083949d84e4e/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1942525/493b103fe236994f6bd102fa072d08bd1a2e5f4a/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1814674/522ed289227f8ca49d5b5d2d75ab25980e8e24b7/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1903280/fbbb08f81e8431c873a84474187acbd073ef4018/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1937429/114f8ad7d2fefa7b1548a3d84a3909986ebb4e65/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1917492/11f230eb792116e595ab03312b67142ea47d20e0/resource.tar.gz{, .log} |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |69.3%| [DL] $(B)/canondata_storage/1936947/2bc3e51a8b9883f1a1d8b98124fe921cba1fca45/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1599023/c48eea35b704ce8968912971b5424c6d295839a1/resource.tar.gz{, .log} |69.3%| [DL] 
$(B)/canondata_storage/1871182/7949078f204bbdbafbcf0efaa8e1e8cbc661f9ab/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1936273/0d6f42ca322b97862bb9eb8744733aceed4802ba/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1946324/da6d46e38db4b05c0745cd0fc3b082c37b3cdbab/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1775319/38406f106ad293bc18eb9f25f8a585b0fc6a7034/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1924537/3ef7ee54911365a79534947d32d1e7c271e9edf3/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1937367/26ffadbe955b9e88125bb0a27831ce1640a50e2a/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1931696/8efbe84ad728243c3e1c1cdb30d3b3f31d345567/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1871102/be2c02bcbeb4e2d0d7243f891f9cd59b613c9586/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1871102/a14717a3c8c558cf8fefe6d46cd5b04ed47ccc80/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1923547/b78c71ff76583fef098d28babd2f10e4e7cbccc9/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1936273/675c4946b21871cc600565c50ebb14d6b242301d/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1920236/e3a40a0f1b28089f5bcc00a85b3176919dc509ac/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1942173/244e633354167f84bd1643b15c8646303e7785de/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1809005/108bbf4b6cd9ab5b73dedcf18b5aa453d5b82a70/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1942525/425976ea7290bcb7451d335640e8599ffb55fa7a/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1889210/9885815b3706cdb615855cdf9706b1ee426b3f80/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1889210/25929c9307ecf4e0bdf1647e711c682147acf305/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1871002/7fafa598e7f20625a7c57887ea10ebeee83ea3a5/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1923547/4f4b5d3dd0bd075220c6a8df315e3056e8ca2ac8/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1775059/b19c8660bf25c5ff6689bee92a8ca8837c638c17/resource.tar.gz{, .log} |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |69.4%| [DL] $(B)/canondata_storage/1775059/f321b7af9d96556e34658539453b2887f1c38930/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1942671/3559361cb2a96fdebafda74938d6d6c5f595f8a5/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1599023/5ebe01f73e8e346a61b42aadb75da1a518ed1660/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1936842/0add181b74256050def99faca33d757d5b5d9213/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1900335/c9d0ca605faf2698cf00f33d61c4609eb54f9408/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1871002/cee2cad2ea8ef95806a2aa2a8821680484bbe3b2/resource.tar.gz{, .log} |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |69.4%| [DL] $(B)/canondata_storage/1031349/9307f22e787d6672ca7ce676234156a5d522b352/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1599023/53262e114e5fb21cb58c259e812c31e2f63afae0/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1903280/7cc9f1de74341758d8f5a97318a2f3f942a54b15/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1936947/77a3a4e86d91d5fe22d4a25bee6ca3f56b15653e/resource.tar.gz{, .log} |69.4%| [DL] 
$(B)/canondata_storage/937458/ea9bb2a5f9f6868f4e251937f810b7466fb20b69/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/995452/716ad95d1f07400d4492e3dea5246cf9ad9ac9e5/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1937424/0a7fa81182305af7b414a8e11e361266a61bc724/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1936947/263a4aa7d0f3b612765b8f33f9c77526e5f5aa78/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1597364/81324d44a583c1e929d9afce72a977f6bafb04dd/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1775059/3015fdb690d45c556ed1066a415637cc49d6ec88/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1889210/10a2f6c7c73e83596767c28aa17294d6794df9f2/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1924537/b026db360d04d5258e7809234e0e43766eca50c6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942671/c9c83131b391b0a13b103155f61dd4f9a78f6ce6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1881367/ee39bf74f87a4d157fac936390f8e3e30882b7ef/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1781765/a90279cc9a67c5059fc23ab2db51011dafb37555/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1871182/4f7621e3e8578e759c3947c8f07cc08181387ffd/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1942173/faa0388e8ff65e27dc14e716b65cbd83441fd698/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1900335/be004cdbc67866ce1cb15f7c85503d8962b948a4/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1599023/fd394c006ab90839bd43e8a0999dbcda754af8eb/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937429/73a35abfefcb4c30cf44393e3335cf2af34209e7/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/937458/b4627e6d6be4f5c698896c8236ab5f6f65070d11/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1936947/79f6f05a619e566dcfd3200df680cadf79a1ceda/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1814674/6222a4327ec3a132645a3145eb274ab71016ac00/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1817427/fe4f0f55fa639e868048e3677847ba676115c30c/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942173/eee032b3354d69a6e319d8f41f249f7e53178373/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937367/24a3ed09a524cab36402a50f39546eeec677142d/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1880306/b2c00ff823e390f0263acf2dbb68c876e0b31abd/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1781765/eaf8b4f54dbd9300a96708f39f699380d90b82a9/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1130705/757d10cb32f3f15562b523da2252a50eeaba7592/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1931696/7e23ac78acb0bf6a43a98f8d07393450402130e6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1936273/c3891df321db69a340c035f6ee3e3b82d4bdbf8e/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1925821/cfff423c8d7238e8abebca9535bd33e932257ec1/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1871182/a433ef127601aa9f74dedeb4efed9b9f728cdc6c/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942671/114b21a6d17bc5be63ccf80717cafe8c74702dff/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1777230/e4dcbc908eebc2925492abf82160d5dc404358d6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1871182/a5143e37b158b01c1abee6d3aa96332d56e08679/resource.tar.gz{, .log} |69.6%| [DL] 
$(B)/canondata_storage/1937027/d6800001b04dd48ddf438b36e325ad1cf97cb1c0/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1936273/16a5945b6fd0dd622c0f5a29d4b7818e35b19526/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1925821/6aa17395400a3fafbb1eacfdd2dd26ec26994660/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1936947/a4dfb10814524145ff4772935d09e5a668f36c7c/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1917492/070995acd2c8a2466496cea9a294777a34e981c8/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1931696/9060622cf11e385e8664f10b6994b4d7cd39f3ee/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936842/baf80494bd9561ab5e7825bd062823ca1bfa64ba/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942100/0c8d8b78baeb89dd0643b1eaf6b779508990e6bb/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1920236/b75e9728ed12152b2d9ddc60dd94c08dfc4796e2/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1597364/e093effc5b67d50506b993781809e66ea91b4bca/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942525/74bf94126143e9723be7f582af97f7e4178aca36/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937027/06ef7ef6dee3ee697013fd133a8e8a843e5f5de9/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903280/afac309a2db105c8b5b6044c02f5c6755972bae8/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1809005/2c57f3023075d58f8075081492e1ebcaa65b94b3/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936842/aecf4970df1ec06496312636476de0e7b19c3ebc/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1773845/df65899dab8a6000128816aea623c99e5f1dd537/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1600758/68f814e1b8c48a7c8c799660d583bea56db0cae3/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903885/ca691fdd45e5fd4ff7f0de337a847a572abfc30b/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1900335/4b60bb5e71999895e5687b055a2f48946e4a072b/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903280/24119f69ce7a44754a5937aa5bfe43a55ebc0544/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1916746/2ce040fc217c9c67c9501b51bd145ed612d2e4f7/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/212715/2e75ebcaab6ea23aae1ed39a602e4ad780c354ab/resource.tar.gz{, .log} |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |69.6%| [DL] $(B)/canondata_storage/1946324/be96ad9cdb7bebb78c68ecc4a7b291982b0e9f1e/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937027/fc46b3cc97880e1d193902512d82ecde372bc654/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903885/1f5c633d9ef5c6b22274dcefd1b823de60aa2a36/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937027/dd79a308e7a46e9ee81ea6630da3b4a111a26336/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903280/ee99ccd66f40b93152bfac693040f9212fb7c86f/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1923547/c233f36376ececec4d4b8799ba0c97c5a0a0b109/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1942173/61b2e01110faecf0f5c4f213e07ca9a795ac4758/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1597364/50672d9edb4f5c65065b9ef1e197812ffea3d4ab/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1942671/caec24c7829bb07b3e5d07ae4de6f86179394486/resource.tar.gz{, .log} |69.7%| [DL] 
$(B)/canondata_storage/1871002/e16c2456d2585dc5260dae8153efad865802ebff/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1130705/6ffd9ee62f7f1ead96b9e0706567eed65aef89a0/resource.tar.gz{, .log} |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |69.7%| [DL] $(B)/canondata_storage/1900335/e1d65d264295642feda5f8ea8dd5531cef75bc40/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1942173/a466b507289bc354fe44bf0c86d9f81646344db8/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1917492/53254226c3bcc22f2f64563cade92bbf7d103511/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1937424/8993c081ff6be9c32469da328d475149e2821dd0/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1031349/f5895ed6e69da88b0fc4924a0a0c6ab4bb0b1724/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1781765/f3b4483a271a53c7042af53dd89e7eaa7933954c/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1881367/550f3b79ddf2520d4c20e67e83a71edceeb0c664/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1871002/5a5d63ea223b3a8a0646cdb551c6e739db3ec1b5/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1937027/0d4e7463a2cca915bc28cdfa63111f875afc615b/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1942100/4ec2a1b49d221a247c90e1d642077630614a2f1c/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1936842/f15786b1a2c120b062d233a3acf6481caeba1cc1/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1880306/e4ccac619cc79d4b07e7e803e386d47da238c793/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1600758/e20b72387b5160905fc6ab9c9b1db35181c0aab8/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1689644/dfcc9726c89ffc827ee88d69c2a34d897f6c9a22/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1937001/6a56dcab007ee7dae62350ff55c93dfb66c55be9/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1889210/a01034cad0321e484aa98a1919f9803e0731c5c8/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1871182/5c4b689d6652e455bd5534102389cc50193dc744/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942100/d8dcb117190dd26bf18428ea90360b8472802f4a/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942278/5eae7303204cecb578258cdbb0730e6f301fe9ed/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1903885/86384881f884d02499a1a1c2d428dbffe5562509/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1775059/f8f056a0190a716df840d5350581b5176f1620e0/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/995452/5cca323a1119285bbfb44f019f5dc9be6361e6b3/resource.tar.gz{, .log} |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/logging.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |69.8%| [DL] $(B)/canondata_storage/937458/f96a7bf5332a169a8ce992c48c7edc92ee1f8d6f/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1775059/e6328418d209e6f2afe65be714175e5a3ade006c/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942671/6d2caf417069cf56304b9a87bee6e46557c6040d/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942100/d9edbf8667aac002ce0c8844e68538839402ad3d/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1871182/8741170d9243172a408ff5d126ef5ae65b3c3de0/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1031349/c3543d8f1cc01d601eeee6bc8f381c30a592c6ad/resource.tar.gz{, .log} |69.8%| 
[DL] $(B)/canondata_storage/1781765/4a8585d94943c333a58ed548ecb65ad34b52755f/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1931696/8382830b676a61af36d1344910d51cd1bf39f3ef/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1775059/4b281f6de1ebcb83a84d5b91cdbf1d4228a88f67/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1775059/8f09ac6b5b3163a6b48a01cc498b9df38d463d0a/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/937458/cb7ada421497d2e974c2fde615e498ee3c1fe8cf/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1881367/ad25b910d63584c57089eae59d027766b4eaa76c/resource.tar.gz{, .log} |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/logging.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |69.8%| [DL] $(B)/canondata_storage/1925842/3826c71d0a906529f5506f86d9d3c6a16d5aef14/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1600758/3e1c972b67010ef976f8710e1ee2f4efbc022be3/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1784117/562608e5eb2c9a9b9076bc8caa84f8c27bb8d804/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942671/174666f6b0943a6e3b50e2853a026a9e5306df50/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1937001/441781d594b64769bedacb579efb911f22209130/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1937027/911c4fb8122dc5d7733b150e068edcf272fa4a83/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1917492/711d3fac29661e54e8d79bda0ff96166d8ae283f/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1599023/cef1288a3a3f22f12b4d2a0b8bd80a0e7250701b/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1942525/81dfcdf279dd45758cc7cc418c3fe9b12f2e6066/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1925842/e57e96bc0849393d23e4c2749e610a2eea073ec0/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1923547/78a9f0a15afb28041ed1c9bf17e22144af9d87d0/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1942525/b9299e5debda55b6ec2d51671d37c03bdf672e59/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1937150/4e9b56e1d1bc0e96e8da0e9d08a0b6ac9492ef3a/resource.tar.gz{, .log} |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |69.9%| [DL] $(B)/canondata_storage/1942525/43a9b27bd71c75014ae789d65d577314e37262fc/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1900335/0fda279e8dbc42dfa916afebb80172e4d6c2bfb9/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1900335/7d71d797a341c73a27d37b4ad44eff7a6300965d/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1031349/f83fa171747712e0fbf290ce2fe9f17cc2679c05/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1937027/a3de41ffd24fbd15ac4a4f974e41beecda0f1147/resource.tar.gz{, .log} |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |69.9%| [DL] $(B)/canondata_storage/1889210/e83d0680db32f18ae0fc05b1d22a19d24deaf2e2/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1937027/b34cd762829e7bec8c91f56a9ebd21a8720c2fb7/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1937001/a40201d9703e1f0a566dafa26fbe3ce28327ccbb/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1936842/118804db24c1cfa3c8dcaa7cee1354cbe5b3d933/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1871102/76ef40ba4c47f7efe65d7bad7234c744fd5830db/resource.tar.gz{, .log} |69.9%| [DL] 
$(B)/canondata_storage/1775319/e518084f9a6f7560212f360435984047c6cdf17e/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1903280/8668619c47aeb76bd072ccb1766ddd8397f57a04/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1773845/67e34a6f8d63716e511a557d7164ba4684e7c32c/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1809005/4f269cc7890e7d43a65cb3699c5c4dfb301b1577/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1031349/8221fc254d5fbec8ed6f97695721bdf1adc20225/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1937150/af1149e4ecbbaf59deead854c81e1ca2a679d76d/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1903885/68bd9a70978575acf2efa3516be7bb1d450b0d4f/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1599023/bc279b2dfef04f4a94e7c85a598af9d954bef4ff/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1931696/12a17fd03ea37900d110696f266c04ad62432625/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1936997/ff8901ef8c0bb5b2132f64a3a6c568591cfc3cc7/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1889210/f053f10d689490bf5100a7fbf8cc00cf1b09e227/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/212715/1d5e3cd59753ff0c77fb4968cc3520790b529523/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1946324/5a0f5c697ea00923466b0cb0991a1c2a5af1384a/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1937424/15437eeaafd0fe50e7d85ae31a223a08a54e09a5/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1600758/85ce7147e6a553c51a5d28db0989bd29a0aa0a8b/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1936997/6a7178ff3312fb6732ca319b267bb1a987c57c73/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1937027/de4be6db5a9d9653a2d7cf00ba5ccfe48c1b3a99/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1942415/eb3b960b9379ed168e3265e38f52b5ecd2264129/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1903885/76dd143f5f10ca68ad8503d2a14f2098d64e2e72/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1031349/4e362e41dd365ac933a1de3f249df5eea8bb185d/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/212715/a907bc0539ff9e52b20ab4615eab35bda7220afb/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1871182/4d40c08aee85fb231923402132a00b2ae47137cc/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1925842/610b04b045139518e5a3bec8a997851c724acae2/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1903280/1a83c50872a07e15c5461f201b027b8b1cf142a1/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1937429/44565291a008d35ab2663966004d6717f2618b42/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1817427/c7633434a5ba4df67f128f604a8708310158a1a6/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1917492/ec43eca86102041177f140bc47a05783a6966105/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1937027/2dab2bc49e185bfd04165d91b8a45f43e85735e2/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1689644/bb7a5ebb839768b3371fdb6466d95c49c7caa5bc/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1937492/ac17fc910522968a2c86c54ef70ff70f08d08871/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1903280/76ac83783dd253263cbbfa647528ead00c7b0238/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1942278/0bea467ec952d32bb910b3d7bd336a8d0e758469/resource.tar.gz{, .log} |70.1%| [DL] 
$(B)/canondata_storage/1946324/6828126e82ac7c9623f8b25fa82fef255c53fba5/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1925842/80a317691e3de1dccaaf65a2e0ac2eda115fe088/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1923547/8ad70f7c12e1ac27e62098253e8dcdce5a61fe2e/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1031349/c13f065489973f70d7c46a11f6ca4ae035ad584f/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1936997/4c96904082a08349976603bc8711f3c66e870d86/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1773845/8dea106443923a91389548a0f46eaff49228c517/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1784826/05ab9e9d8749e041f30f4fb272518bbf7bc091b0/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1936997/0369012b4079b3fe371b0e69a32dd2ddf31664b0/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1946324/c4e3e08799ff2867f35fb0960060a07338ecc49d/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1925821/e6400c81a69303c23d02b835c07822136f1644aa/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1937492/b1c27c23ed7b20add05d293f02d7d6eb09176974/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1937001/1cbaa0990b057cd081f509e8a6410c993209697e/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/937458/c4452645e3437dc640f4297668664a507105b886/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1031349/201452dd8c883b2adcbf46cb075c912d25efe67e/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1936273/d81f255ee5d0712542ca44dabb01842e1a996c48/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1899731/f6b32a6820fc036afae21f367915e7bc82284241/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/937458/ca874ae4a90e1527826d17c1da5f3d3dad325887/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1777230/2492c292c3dcbe5dcfc10230e1b6f79478cb2008/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1903885/85783ac2a357850f457cbdc94d8685f602517f63/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1781765/cf4791e13b24747d9e6fb3bfc11e0fdb45a964c9/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1600758/46c823d9545fa3ae54937e43128b4c7eb42457e8/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1923547/c7c2b8305045ef487bf309f434bfa96167619151/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1936947/5b90602802f2aab592030a044b196ce79f713168/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1871002/f97d407256b2c37a8aa0d391cd742e47a1681d82/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1775059/a930e411af43ce1b309d1de7c970eb2ac2eef1e1/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1925842/aef0e0012573a9964b38282d14d79db58aac0dc6/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1925842/3305983375fd65f2565c804b57aadeec6345c6c4/resource.tar.gz{, .log} |70.1%| [DL] $(B)/canondata_storage/1942671/612b8e5cf42e27086abb71abea474d6f2f73b914/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1924537/24871a42dc8e7365f6316f556fa6616a3c687a13/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/937458/451cb5773bbc54fc1287d09034d5251907c23f31/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1031349/ac3fdb59ac5555bb04f369156daa910ae69f4f8f/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1937027/751768eb2f05a82d5c0ae53923fca610307cec52/resource.tar.gz{, .log} |70.2%| [DL] 
$(B)/canondata_storage/1814674/2001d4056ebb44c911c0d3db631fa11dcd77416c/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1773845/7025c8692c966d9205ab1a92960e3c48ba756e3e/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1937027/486e59249c373752409ccb9df757ae063d64d546/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1937367/c99cfb4b780550a1a456fcf97cd04e4ab58600f3/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1937367/a874490aa85e801e6e6c639be280467bb36af7f1/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1880306/3ec645abf253c80395cd29e55e1be69e2faef49e/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1936273/8ea0dd0bb7dcfcb9060145c85aba7872eea15de5/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1130705/278b00e80d012b1440a24faff121f276542a077b/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1936997/e7ff46e7163e77dd9b23cd9ab89fe7871775680d/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1925842/b328515b1ab9021b09eade67e865074624010fca/resource.tar.gz{, .log} |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |70.2%| [DL] $(B)/canondata_storage/1784826/d2a1b732d518bb4ef49d545b2ddf9c004a5d5c75/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1031349/6aa08de3e733fc9c427c11399a2b1f3449285551/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1937492/7826fe0d6b2cfb712d11a7f0758863664d172cb9/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1931696/76a7bb8a2aaec831535cf7ca5b0ffb62ccdb717e/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/937458/e5719cd256fe3fd898e8ebe6df280521ffd29040/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1946324/933392091e1245b98f15c78824d0d3d0a7b628f4/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1847551/bd3dbf8cccccd7565b8c57bb32bc464524c7d3d9/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1130705/1ce6c7c35a3d1f6575dcd3f9fb981d727082535c/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1784826/bfd74085acfc22cb51377d828fa050c7eec78e01/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1942525/7eaf18e64bfa1d6edd49db8f4631910a91c4c75d/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1031349/5cd6d3a72668cbea9853b86e5da2f0f315f35bc5/resource.tar.gz{, .log} |70.2%| [DL] $(B)/canondata_storage/1925821/779282f32823aab27221fa8bf110b57ff2bbac48/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1889210/052ee36577b14824b146640268a61fb1664c0f7d/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1936997/a36e4ac0da388a8e1ac773455c73c5a459846a00/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1937424/1f3cd125c2d8eafb2ebb1dbc7c974f4f15ef1793/resource.tar.gz{, .log} |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |70.3%| [DL] $(B)/canondata_storage/1889210/9134d9e30423bbc1dffa9f6443fbc36d9fb3203d/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/212715/d60846525976f02b9239763e1005de6db9c8ab32/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/212715/819f960bc7971689bf0f1a064927d4dbbb8d14ae/resource.tar.gz{, .log} |70.3%| [CC] {tool} $(B)/ydb/core/protos/msgbus.pb.cc |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_delete.cpp |70.3%| [DL] $(B)/canondata_storage/1942525/4286c760bf658fe8e2079ca57866e3a8a4db409d/resource.tar.gz{, .log} |70.3%| [DL] 
$(B)/canondata_storage/1937001/4dd27f939fe3c71df6f01e05fa91d84a683d38c8/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1847551/07a1ee212fb783eaad6ea733a57fa549a1b1dc94/resource.tar.gz{, .log} |70.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_delete.cpp |70.3%| [DL] $(B)/canondata_storage/1925842/8a20a06ffe6b76e06a0c1b84ed9c57f5101194a7/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1775059/b57c9040709a7b012953cf170d04a292adc8d3d3/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1814674/77d41c903ddd926a62e64221c321109c23757fa8/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1937027/0366696fd0201dfaa1aecd3d2cb555d7273bb4cf/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1942173/8cb2c995567808fa2edb42fbcac76f18f5beb954/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1942525/b372d3c86a68f2b33a18c3b61b0b7b8f739a0353/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1942100/feb30512dfd66a00939625497bf9f58185f577d7/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1775059/3cb7d014d70b84dbcb84645fa987dd9d47d7fd6c/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1936947/7a33d500072033ae868c5c1e2bf951de3ed26f10/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1942100/3595abeb97126d66b5812bb7d87545f013abf4c7/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1937001/a770b7e950bbaeaf08ef4bbb336b7e3683a914ce/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1936947/1186d579cc7e3197ae0471c3293b328d56f77001/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1031349/d8eb143d414178bd8981d657c74e48bf742cd256/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1942415/ecf45b8d311b13ba55e2de94295cabed9b642863/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1880306/93d030294156d877ed8ba9d30b77c11acb35db50/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1777230/76c87f5149b4cc512c6f2bf26a9c9176f0baa7c3/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/995452/c405e325028ee1281de45ef9165ede08f8eb7da2/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1923547/c9f8df039ef6cb4cc282d85e6cf6b18304fcc59a/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1809005/267ad7c5746fbf2df0910127dee92fe0f2b9e67b/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1903885/c99336662dd85cc4dbf2e30aa3726a822664376a/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1937150/c7bb7eb0808c7675c7bc402cc66327cbbcc95893/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1920236/5181d12d98fbd8d9942aee7bad394d888116e5c2/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1917492/3e26f0a479a954710fee0605dc1c7add0903656f/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1937429/f07b84667ac042b441b980139436c1568397aded/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1936273/0d86ad2b4c27fcc90610fc18283a8b444dba82f9/resource.tar.gz{, .log} |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/test_connection/test_monitoring.cpp |70.4%| [DL] $(B)/canondata_storage/1937429/0252e3c99211f482c7e07e8f941c9d3bc7c92fd0/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/937458/cc91b4ff58ad72a38b85199e20d2e07e805a9a2b/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1031349/076467d819158b21cec57980925415e6cf3dc8e6/resource.tar.gz{, .log} |70.4%| [DL] 
$(B)/canondata_storage/1936842/e15468da5c6a430935df259a2106604daa68ad66/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1937424/ccd563fc2effadff9042f6221e0daf47ca9cf4b8/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1923547/e2d4955e81c4f7962197732fb8ffe5f144012239/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1880306/c2ca13a1907a4ca0d7268cb9eec4bdd9f56fcd83/resource.tar.gz{, .log} |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |70.4%| [DL] $(B)/canondata_storage/1937150/8af84cdae63c27872de09da76cddd708de02e35b/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1871102/69be1cf486f07d50c602df988fd0308f3c43bd08/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1937424/a5ab1e58b34f389c8ed7c2351f8aa1b0172a465c/resource.tar.gz{, .log} |70.4%| [DL] $(B)/canondata_storage/1775059/7880e12b17712e34b83d0a19290f284c039a3892/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1925821/749a27e1f0785a298c980a8c65ff97e6fae386df/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1871182/3b9e2e4f5dbfff6f44c6e90910cd2ae8ae44d6a8/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1871002/7df99048c8549742c36a1795f330427e8ff4cd1c/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/995452/d2b404e58f5d9cea6dc9ba9a54cf102589dc0901/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1942525/a26edc49ac3c9a8155d4006bedb7f8cc9675ee3d/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1809005/eaed2cb21c41d953b2deee98fb8e34f4399b8dbc/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/995452/7ec622aec5d016fd39433889930514dc0a6b650d/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1889210/1f0152f3c0f7d63c8452e9855872f2e930cd4c7e/resource.tar.gz{, .log} |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |70.5%| [DL] $(B)/canondata_storage/1847551/b5271b855258a3785a7443c84e7858d6b86eb228/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1889210/c1b0707098b27716037274ba14a9fc58ec6d54ce/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1917492/b0f4eb54959c42e0495a20239215b517297d9d01/resource.tar.gz{, .log} |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |70.5%| [DL] $(B)/canondata_storage/1931696/8b5249e741c0ac6b5882b8f37eb2fa8ea628e403/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1773845/c2740b835a2bd143c8d121693abd92ce773d1c6b/resource.tar.gz{, .log} |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/test_connection/test_monitoring.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/pinger.cpp |70.5%| [DL] $(B)/canondata_storage/1775059/e9552b9a4d6e86a0dce623a0dcdac76be34f22bb/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1942278/103eb8901f0e15b8dd5b192c5876a19cf4d707c6/resource.tar.gz{, .log} |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |70.5%| [DL] $(B)/canondata_storage/1775319/ca8674d999cbbb16d2b8ee3ea7569f01aeebffef/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1937429/8c415fc988c547984fa23f72063d4859ddd65412/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1942173/65cfefa7d3092976dd84664ea3bae8eced26e317/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1871002/72263a808838fefc0abe1dc2ac9fac8909e4a5a1/resource.tar.gz{, .log} |70.6%| [DL] 
$(B)/canondata_storage/1871102/cd71a11115697d28bc32a43c2288da1af731328c/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1917492/b65198c694e7e35ad3c848d38e764bf62e05535a/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1936947/a99026e839b7e22714c2a9a81971a3b5e3ed1eb4/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1817427/ec2c0e753826fc58a07f9a969c60590ba2b2f7d8/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1942278/e4e1a8efcf36e5a4266b5e39e982a61c7f5ad4d1/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1871182/e1454565fb4deac664d23aea283a15fc31e080a6/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1809005/9c9521692eb4e7097120f2dbbf0ff153301478e9/resource.tar.gz{, .log} |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |70.6%| [DL] $(B)/canondata_storage/1899731/f13813265d02f2bab07ec1fa74995b07aef66427/resource.tar.gz{, .log} |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |70.6%| [DL] $(B)/canondata_storage/1600758/d550f6a68107f128a0baaebaff84ef0c6691b095/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1889210/02c3d838178ec7378a674d9517d94cf16e6f7cb4/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1903885/36b7eb9d918e0ee90b18e7dfac3ec36336c26b5e/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1925821/236e8d3234ab439980869279c30846a1eb87487f/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1923547/6e472e1d38aac703a38a4ec87e075fcfb18210c0/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1936947/bdcead4c91b8f1faeec3f1fbf12ee03ca7392e44/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1809005/d592b29a4027fce87035ae23c7d20e060c12e900/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1937424/4bf3629a378a97c2134d5c9ef82b431269f7812a/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1937150/c7ef5e7ab0593d2cfcd01b9f6de38d47362d86ae/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1031349/506ae7e8d4f20418c9124d112729390d56f60276/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1936842/45561f6cfd09b2c9c24d1d0eb74eb99fd3c0f61b/resource.tar.gz{, .log} |70.6%| [DL] $(B)/canondata_storage/1942525/a6ac4439d79862fb911d22d6b67d06afe7ccdcb2/resource.tar.gz{, .log} |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/pinger.cpp |70.7%| [DL] $(B)/canondata_storage/1942278/a5b73649957467a15d6799bcb6222e175bde02e1/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1937429/6f0b11a050cc6c6080ee22988ff3362313e9fdca/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1781765/51d24e14e139f66d2e27548a413616e7d1e3f90d/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1784117/be18c27ada732fb4f62d659dc78acd8896fe727c/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1871102/b46931b83ca87df6a7e16b1851216c6a79ea251a/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1130705/3deff34d248db1fb5a54ca6f66a2bd921ca5f5fe/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1809005/4aecbb89e2aab10b1cb5ae10988314918fa89ed3/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1899731/d359e310c721425bf92779c6cc495a90085858b1/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1937429/59daf3f7c51f8657273dfe73b2d89a936c71e04c/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1937001/003af8bfc6ea23cbffd44d7103d71819e726e57c/resource.tar.gz{, .log} |70.7%| [DL] 
$(B)/canondata_storage/1689644/577f2591bb739414dd9c9b23b696925de3ea61fa/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1942671/e2fe28e6febee3982bd56745d31a403e581f247f/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1936947/4e75efdf8bb6c4502b7bcfedc52bbdf182bdb39c/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1925821/46adefeb17892d8d0db52b6bd1c5cbf83fc892ff/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1937001/601e94a23ec26980c16840b1ec99d6084037513f/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1936947/e11cf6314c9fd176eeb82eb6187eef2b36985a2d/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1937367/6753adf109c979219bfffa5389a252ae034aa308/resource.tar.gz{, .log} |70.7%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |70.7%| [DL] $(B)/canondata_storage/1937367/1710911e4cee83432c347ca77fc35e2630f78589/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1942415/5f0ec6d9c04156bf00348913fc51614b979e220d/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1942525/1159b122a0dc77fe26cec831747a249913a7783d/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1916746/5a3af070be0e3803da460e9b8077af974f329983/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1899731/35c5b505df7f2be9fcca6f830802312313cb4fc5/resource.tar.gz{, .log} |70.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |70.7%| [DL] $(B)/canondata_storage/1924537/481c71becc4b20198444748993508fe1ded84514/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1903885/816633aaba150966f45785296717d160cf702a05/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1889210/3d889b385570041dbbd2165e00510547b2c1144d/resource.tar.gz{, .log} |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |70.7%| [DL] $(B)/canondata_storage/1871002/1225bd6f3bfe747dac447600fe2c2b276a24bd59/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/937458/818e067fe83fe9b2daba4296b6b1e552d869fd55/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/937458/3d1a9b67a8f957c10751d36ba94e9a0235239a0d/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1937429/6057f80f29e4cbcc11dacefe2338aa3f30c80dc8/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1784826/000103add1e1f60c471b7c43f6b4a44a1a2734bf/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1942173/5b2cc71cf1cf6cdf932029ae85f8889f18d81d77/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1936842/c62861d65748dca3fd75e9393720a48de8395467/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1942173/5421fc36d7b7a0370a706e889dcf62e4f6abf424/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1784117/1ee1d854fe43f6981a4bf1da95e36e8be387e233/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1889210/cdea4d984d293e4c4894b43fbddd80f6768144c4/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1847551/37b6d30f0db871c667c3895752450e72de2125d9/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1903280/d42e99dc4fef588809a37cacbb5855333c1c2edb/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1937429/d03442e328dca2de744539eee34693d8645faba4/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1881367/0038fdd5944649d910caa3afaa1f132a60fb35b8/resource.tar.gz{, .log} |70.8%| [CC] {tool} $(B)/ydb/core/protos/console.grpc.pb.cc |70.8%| 
[DL] $(B)/canondata_storage/1597364/1370803313e5d0237dc90749ce72827c7dd82536/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1942525/bdd139810f50778f152db9396ade27a31f707314/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1936273/1ba42e2c47cd3429011228159c1fdf43dd1881b7/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1903280/45bea04670ac04ec37c2deab5ff4a786ae244430/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1031349/1e1ff3377d9e6463687741aa3509395b92a00445/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1942173/a88e613f98b9308632d7651072259231cab1e791/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1942525/968864b3be3864b00b0f0fb54ee97438202376c6/resource.tar.gz{, .log} |70.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |70.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |70.9%| [DL] $(B)/canondata_storage/1937429/82c91013a516db34237d53cdad4ae5a77a3c568b/resource.tar.gz{, .log} |70.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |70.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |70.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |70.8%| [DL] $(B)/canondata_storage/1773845/e19fed515bf1f2a7f0b738a3e17a516cd55cdf4a/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1936997/fbfc46046cfa3a913150834618e28cd82c05d5b0/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1784826/66abdb7f6ea46c5c7564f94f24f2656a0e2aa349/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1784826/14d74cf07b9bc3ef5bc3a0c5040b886c7cc0007e/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1784826/61f7d79c6b081f267865b1f3c0c8b51fcae1ebaa/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1871102/5b57bbc366ed87ccb54f2fce62c4a3214ac10518/resource.tar.gz{, .log} |70.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |70.9%| [DL] $(B)/canondata_storage/1784826/3ecd87c8ccd1a18f9d4f216f75472f1a834938f2/resource.tar.gz{, .log} |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers.cpp |70.9%| [DL] $(B)/canondata_storage/1777230/166430366a3ee35292b0a0e4ec6ba38e166ec8c5/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1942100/090fa9e99dfe7f43e6470439372ea4a84a495992/resource.tar.gz{, .log} |70.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |70.9%| [DL] $(B)/canondata_storage/1689644/57f5e520abfb96651cc218a0d82eb6ee0fe38907/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1899731/355eddae33a3318d608f8973d7978cafbe97d4cd/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1936947/343756f2ef88ab4d13ae0d8e1780223f838da842/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1942278/f85b5ff273f15c86ee649e6dcb392b4194b897cc/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1784117/392a16b8c9d13c4b4284c3048ca355142cf1aa9e/resource.tar.gz{, .log} |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers.cpp |70.9%| [DL] $(B)/canondata_storage/1903280/55ee056094134146d6b228e0e2827a4a0b1bae59/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1871182/49dbac5c2482cb10e39b8801a9850d0528746393/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1880306/975391d46ff9d241fae3efa496fefe1b49dc5396/resource.tar.gz{, .log} |70.9%| [DL] 
$(B)/canondata_storage/1781765/fd9ac83e51987bc944359ff67a2d8ec5051d37a8/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1599023/6add8cb499cc3b1dca20f22c9b17ae29fbfe727d/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1600758/6a579ce429b85915b6ff135574bc65e433d5b02a/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1599023/9fb10775fd57dc9adafaafe2a658f6533a20dc46/resource.tar.gz{, .log} |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |70.9%| [DL] $(B)/canondata_storage/1889210/2fbf7f68942208b15ab6eb23b14b78640f078541/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1936842/9a35ead5b9e859efd416e87982485153dd2ce155/resource.tar.gz{, .log} |70.9%| [DL] $(B)/canondata_storage/1936947/2bc1f88e24977d85753d38b3cac45a372d34ec2f/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1899731/bf1552ea5a722ea8bc2de463418359c419c09386/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1031349/8ad72b2dd458e088eb333e5553dfd99dbae9e9d7/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1923547/408f17dd7de3f1f4f32904831b08b3c57e38a7c7/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1931696/0e52d5b9778b2943992171dc32150f40daf8bfa5/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1031349/f562047a0458cc3f13d0bd9bc809240f0048d755/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1809005/df0d5940a3b3a38ba468a035aba7ce54440f0891/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1847551/8a02f6b80ca1ec66d793b87dd2cd04bc727861e5/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1809005/f38e3f8d804c3c736510cb1eca690761b713a4b0/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1817427/0c40572784ba0c378f9763d962c3c5e8b7787ec6/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1784826/0c338d21c57ec0e55d25f0c5a7d66d262578559c/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1942100/7cc0999fbc2528b08c47f3289c99f1f628ae5fd4/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1937150/89c1317ed6832f17c721211f8be3858ffaac0d95/resource.tar.gz{, .log} |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |71.0%| [DL] $(B)/canondata_storage/937458/c396c80e8362440af36c99b3904127e9b520ebb6/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1130705/2c54111b512b354592ae02009bd206b6b4bc7a92/resource.tar.gz{, .log} |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |71.0%| [DL] $(B)/canondata_storage/1814674/8156a7ce6ad6eceb82586ac4874de57d87023039/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1031349/a8c086bf83c7b097d941bd5f51b9690bf204f31f/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1871002/01b60ff3bfc2c8aa5cff8ebbe693bdbbfe6a1c1c/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1942415/bc58f4bafc7f7c25e28d8dc76fd80da6616f0b89/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1871182/90cd88ea2a475c617af2c5f379760c2ad7b2e034/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1942278/40ea988eaa18293a322e85a441a68b521e416660/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1942525/7afd327b2333310058f64e7a69fd4186c099b4fa/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1871002/09c7103fe942f664e52ea9943175fcce1c927b80/resource.tar.gz{, .log} |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |71.0%| [DL] 
$(B)/canondata_storage/1917492/f6af24e9333bc438fcad14a4e8bac6e6b0e07d87/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1937027/555a559b5825a201986d8c31f3e51fe1196d9726/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1937492/3cc712c3196f7027398ddcbbf6597f57ccd7dfeb/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1889210/46413869b9a6422f358888eff087d092b7ec0356/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1923547/6bb261b87a2d0ef492e8f1a5fd897369c7118506/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1931696/0a5f01ad7bf7c863b92eab0e8aff7f87ecb60e51/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1881367/3848e32ce807b5f10bb012e51d0ebe5ff6708554/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1817427/dd0485c09a30d742e18a0ec21502ddd405eb8be8/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1775319/57a50169b57016de03af26313596e9e552bfb0b3/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1773845/ab4dbe9a0023541b50ed970cdbec735405c6ed77/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1925842/bac0b87c8df73e5d30ab0b57349de6b672768da8/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1937027/17767c14f451959962681cf3d7fdbcfc98a6f63b/resource.tar.gz{, .log} |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |71.1%| [DL] $(B)/canondata_storage/1942100/1a3ac34e6a22249edacf19f24601f290b692c3cb/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1924537/404644a5cd7e050e8d183aa0c8a5c70a417e4c80/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1942671/5a994316452c786807f2de3ed136ba6ab54f260c/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1031349/11dbfe14d756754bc2571a76f87cd1781da1b241/resource.tar.gz{, .log} |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |71.1%| [DL] $(B)/canondata_storage/1130705/9004be6a07264a5bf29a5f00e7ff1eb4e47458ad/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1937150/33871135ec615a5b145f2dd1de63f51229e4902b/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1899731/2a0d010c88d3668b64d2eceea551d15c4ea643a5/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1942100/2af4e4b4fdba37b11e481ee71a1d7bf5f7f785c0/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1871002/99771a64b313e4ed0d87c6e59cde6bafe069ce35/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1899731/d439cec3e297e3dd031f751fd90f3935129a1094/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1871102/a3f4b73c2abee8d4bf99b0344946712ef43a9193/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1775319/23bb6663a6c40fafcef1fd59c500bcee7e579cdd/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1871102/61453f3b91b2a15fb0e2d2c6c61875e360070851/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1920236/bb5064df2cdae357296347e4d44f50d713e3ae40/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1880306/c56e2bb8307f2239f8acf535a621c40b5a08a363/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1847551/6ea0f0d238a8a57c98cf719da4e87036e3ffdde6/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1925821/76f31386bb8c24a8c6deb4852e43a6b3c032e597/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1942415/6669484e62bb116018d49d82b2e465bc96b01a14/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1784117/7efe22d363ef190706ea7b98ae766129bbe108ed/resource.tar.gz{, .log} 
|71.1%| [DL] $(B)/canondata_storage/1889210/6219b71e060e8fdf87b91b39a594295ea521db49/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1936947/8207550781992515886b573c884a057b16fa83e3/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1773845/ae502fad4bd26f52e20a1f56c06b19e01b6100ad/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1936842/7066d2b363541e6b64af6ab839e11485df1f472d/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1817427/34bb8688e3affd3b54214743d3582c8d1694106f/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1817427/cd1ccdb594fb5c0d7ccd4116b70e61c92e494ff1/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1899731/e198b7ef79a77ed789fe5760772f063e72267539/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1924537/907e79379e1e72f9d09545e57f65dee63f42dbfe/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1937492/50fb7b13e9ba844f85b4d3655e8e191f40eff050/resource.tar.gz{, .log} |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |71.2%| [DL] $(B)/canondata_storage/1925842/9d9b2ad4701e00c2bd94e410ea0b0b6ddc58fd38/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/937458/301394d7110b98554eb68f02df41115069509de8/resource.tar.gz{, .log} |71.2%| [CC] {tool} $(B)/ydb/core/protos/console_config.grpc.pb.cc |71.2%| [DL] $(B)/canondata_storage/1880306/c540459cb4387b8d7c2ab1f6a0dd2118f05c9809/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1942173/c4d7dbc720e57397caf847cd2616b1362110ddd2/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1942100/d22a096de8f9cd6961c70a84e39de8dcd39ce45e/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1130705/c87efbee3d7d51d89c25e9454a95a9e836e53b27/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1881367/207835e4d274fcf7987814492f265b341fdcd02b/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1942278/f9f1f95ac19bf87a37947afe2c4d14ccf42248f1/resource.tar.gz{, .log} |71.2%| [DL] $(B)/canondata_storage/1931696/1afb7fe05c694a846953b38edeceb5eee38c90d9/resource.tar.gz{, .log} |71.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |71.3%| [DL] $(B)/canondata_storage/1784826/fff6c7690453c14e8a0ab17cc1dbdc617c2e4169/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1889210/2d9182e0bd6dcb9ba0a784a38b456240cb386874/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/995452/bdcc1c962e2cb216859f83be46f0797cbc65b816/resource.tar.gz{, .log} |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |71.3%| [DL] $(B)/canondata_storage/1130705/ea4630684a8ca3f006e81d74f06282c0ca946402/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1784826/876422ee5a31dd410c0abb7a3417f21835990576/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1597364/a4a11f25f9a25c3aeb4b614333c373013ce0dbe0/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1600758/eca34ca0feab807128d7b13432dc1134d34cb65c/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1946324/859eeab6372d0950962668f1ee4087cc149a1e7c/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1777230/ad26fa2df1ca85e707984d82325c42cb09d4b3da/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1937001/263d267ee75572162f813e618cad51ab3a452ca0/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1937027/93c656fa9af655b08186ebb07d44445ef304fbc9/resource.tar.gz{, .log} |71.3%| [DL] 
$(B)/canondata_storage/1916746/d19e7aec784b7578ca293f03447ef038017b522b/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1903885/4a384ef3fd6e8cf628d678d9322eef7d381022a7/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1600758/945d8bd5a89c655f23736d13a6248011860cd506/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1597364/d207979f510aaee5943f848bff1aebe434b82dfa/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1942671/03c23723b22c7621c14256361f488cfb15ee75e3/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1773845/38a5e42f094acedb001785c46756100166f2d154/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1130705/875e1c4c3d9395dfda0c99832ed76d7452435a53/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1871102/7f5d56703d2b94fe32fa31a38a20ab8e2a4279f2/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1600758/7b03b4e937e7ce456df18ab698865618521526f1/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1031349/f83156e7d6ffdceda631624d3f808fe3c1b393a8/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1777230/783a2910c4d77e5aa8c5d6ad3e840bc965864783/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/1600758/7854653343bd5226d6b3f3f5fa085f0193656cc7/resource.tar.gz{, .log} |71.3%| [DL] $(B)/canondata_storage/212715/e9c0ac99e18ec4ae27142c9c2577dc676b157d44/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1903280/8bd8bc14ef8f83b16c090d98fff065b7f9ded199/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1130705/9726e618a91dba7201c15a2e45d93b6d8fd96178/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/212715/3f9199021f498ba2943fb7c0535d2ebc21914487/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1809005/1e02e07ca10c72b9e1c4f8d753caa384f345ca23/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1916746/d3717a9cac09b32a4d5ddfdae32677177e3620f2/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1936842/fcb47a0e0def68bc18f22f65f762c31aed818052/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1871002/74160783fcdc3a479258b5a4bea49b3bd257f296/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1925842/3b19aeedb10a29b1dbd9b746d8269c577ec91ea2/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1889210/12a5d5a4658cc8076f89deb914f7f1f0f6ad647f/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1817427/93e2a35960dc2868ae1d1a162c39c2808f9cf84d/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937367/6af906d8e8515951055311e09244912c4095ac7f/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1923547/7035195682d1d389b130d309e647c0e6bae0996a/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1031349/cb7f0d6f26f3c006a7ca4ee3cb2fee451343f519/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937429/e11799bc6b03b95f687825951895ae651115cd1d/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1871102/9387e6320639d466974a3bb05a9090c015e83fc2/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1871182/221936951ba4de2ea4362b03723f3995ef8d3fe7/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937367/ead83488482c124a8c95469b3b45c4c638595905/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1777230/a97ee3227d27cacdc966530fa6d636c72275674d/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1130705/20757ef3a83dd8dafce4369b1fa87cb0a0ae2b28/resource.tar.gz{, .log} |71.4%| [DL] 
$(B)/canondata_storage/1942415/668fd150f695e80692dda505ef688dce77166cc6/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937001/ceb29095b5e35ed4cc1eb104072815b0be7ac715/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937001/4aaa35652a4aadbbe10797f226b61d7445c2e45e/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1936947/fbd52f5ef5dc03aa0434d075e0a1299ea39b26ed/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1871002/7e2607acdc67e4a5b8020e384ddfda98482a3dd6/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1889210/86d0f0a9f5fd231dca8140f5809c568e15366735/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1031349/e7008f224fdaa1b7d924871c4b168a87665db0a9/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1903885/79c2973401eb1c01d914beb88eb4f2fdf68caaee/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1937424/f54290c1c9e8b8c01bdab19c1d6ef1f76de15d9c/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1936842/ef250a2d1fa4278f450bd1ca39ae94b0e4ccec23/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1809005/8d9f81d158800caf319f48dbee6f4ac4868563ac/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1936947/bdfafed4d47cdedd18d3728ba1a3488ab05e7c41/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1942173/cdd1c55686f268ec709b7b06494ceedf8dba76de/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1777230/f087d19aefc64f43c561b1716c8824e128ac8093/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1942100/070d287587bd5d2ed4158069a020e4772af81216/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1900335/94df111ee1e3cf59de6d62f855e8e85690405b51/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1942525/253d2e760c7825837b0d53337823dd5ebf4d6d89/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1880306/4d5d293606e8de46c7ff73818ff99e0fc57be13f/resource.tar.gz{, .log} |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |71.5%| [DL] $(B)/canondata_storage/1784117/b7687d9809a69c860dddf82e25a7baf09d85ad87/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1900335/8db5941a4ed2bc94d6ae42d0eae7b6c741fa5a59/resource.tar.gz{, .log} |71.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |71.5%| [DL] $(B)/canondata_storage/1903280/927f0adda78bba191400c48aa84923902c441ddf/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1781765/a18a0c7b9f1f9ff355c6f36700ff285ecfd77f4b/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1903885/d8ef1e8fb573d41016d2ce617a6eb2b955cadf60/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1871182/dd4b9c2ec136d95ef56e810f40fc95467b04ecae/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1936842/34312fe123c805aa28f5163a73c7f09252f8e0a4/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/1925842/ddbfdd82a4dd25bf18b0261649ab89dcfd67553f/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1942671/157db22ce38fb6cce530ef150bd605411e8ebf46/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1900335/a93c9a2bd84a19d5ed7b813ddf3960f383b3d67c/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1809005/2a59475dc877549ac4197a291aacd77d92f24ab4/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1809005/6cb24cd095b4f0501ec39c6352d23fb4a3ba9958/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1847551/cea98224a0242fa122932bfd335599c5107ce35b/resource.tar.gz{, .log} 
|71.6%| [DL] $(B)/canondata_storage/1937424/6348070f1b3d5f51aed3ecef47d584233aafa986/resource.tar.gz{, .log} |71.5%| [DL] $(B)/canondata_storage/937458/0064e8ff05bc3832d392001e1067a4b8e592ee57/resource.tar.gz{, .log} |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |71.5%| [DL] $(B)/canondata_storage/1777230/915011f8f5c826c23aaf0fd2e67aa8d2fb1f93cd/resource.tar.gz{, .log} |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |71.6%| [DL] $(B)/canondata_storage/1899731/b0129b6b11699859a431a34fdada149de2dbc7ef/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1937367/a0981807726fa8e5aad90985bda23ee6596b1473/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1599023/d714848e02b570470fc7f4a8a1315869ac70b513/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1937429/b87108417827dee5e78de8f3f8c67e6b30765fd1/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1942173/50b4ae48e906d86b27ee0b68ed5a08b5ad6bf50e/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1923547/a709fdeee4741cf8fd3eee54587d1eeb75290aa3/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1599023/0bd57d257eeb1652a68140e9608a6813bf473a94/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1936842/636e5b20baf2cf59d9f38821f96eeb0a152b1897/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1871102/62570278011b2c51fb3ba23cde15a7bb184e27c4/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1942278/37f44c727aef72a7e55462a303dd42938366b6c2/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1777230/9ab8710baf20ae69a72eb29447a9d2bf1039585c/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1942415/4eadae4d7247ca7a82f53f147f81aef7d5caa5dc/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1924537/86b5be80156a54ea70bd90adf255e133305e5fa4/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1931696/b4c3ab5b0044d3419bb02eb27807ba3b9627f831/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1847551/e0a8e24122315ced755797fc5a2fd65992e28ce2/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1924537/8b664aa6c736022fbba07ebb5d7782851aaea5be/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1871002/e9746e2cfbb706bb72321a7abf9a224f0ef61b45/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1871102/0f954067db9c14aae8830105a157009ce2550f6c/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1903885/6dbb180a1fb0bec551465ca4925bd6f374b599c7/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1937001/6a20500553ba8b1dbf218cdb9db234c852b93f8e/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1942173/d348e22e98330a9e377a94d0f46bb524828381c2/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1880306/234eadcde1cd54bffae64f4516628981e02b093d/resource.tar.gz{, .log} |71.6%| [DL] $(B)/canondata_storage/1031349/12c4584507630678ad646e234c8e75078785de4d/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1031349/e4ef3d587c2530bf47672a5783f4b0d0a4b560fa/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1597364/8d657000086cecf224bc72e90af1c7594765fc47/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1775059/ab56aab54b522dc673269d54a69440f25fd25d9d/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1936842/cb5a8c69f3eb77766fab5ac551027f757f0abc05/resource.tar.gz{, .log} |71.7%| [DL] 
$(B)/canondata_storage/1900335/44d8407be2783234f3018d11eeb1589813e73bcf/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1689644/293b19e389f6ac00b10f915f27569a997d91474e/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1946324/eac6b8c7847ce1f23bab0871d4d46d46df2cadf7/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1871182/ddbad7d2ae0c078ae93ebccd9d41a0f24a8479bc/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1889210/485a27a6615ffb9228e2aa44a9e60526928bedc1/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1881367/1778652f0a952156dd3968c21e9af10b6a474237/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1942415/671b55e8616124598c7474d9969f72ec8014eef6/resource.tar.gz{, .log} |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |71.7%| [DL] $(B)/canondata_storage/1775059/79f40817d9be6347f8a0a937bdd3c46c326ab7d3/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1936997/4d6442f2c45d3e77a2ced29c096325d3ced197d8/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1775319/f708187e40ffeec8e975cd1bd21f8ca26d85dbd7/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1775059/af971073e577c583556209f50933dfff4201794c/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1881367/2d7a75f4178eeea3a2e83df99de305b10359458b/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1942671/2b244e8340f9afab99b1136c03a3466d54265dea/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1899731/945b5e7f9461fd64a5afc8ef7e202b25c09868e6/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1600758/14d5560c6b6df65b25a7d0e4e072602b1a2a8743/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1880306/3dc569e22abef14294acdad1d23118654806f3a5/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1889210/1220fbc43e6c9913dd69b912a91a021b32a209aa/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1775319/26ad9e02e51a11466b2f499d792316509ca07d30/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1775059/080d028abc916c9b548accf3f93e07548b901e22/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1031349/cff576920ea1bdea444026e6597e9d9719a47154/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1903885/49cf3745e9db8d8a1887752705c18b066cdcb225/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1130705/91b310c0e1986f35aa28766f3cd6650d916966a7/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1942100/178599a2b23ce6932b343bc5f863a036b0534c1c/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1889210/718540831bd2dba15f12341f6611010d7d655169/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1031349/d86a7eaf6f5bc2cdaedba52c0890601b8cc1d981/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1903280/cd929bf1ad1ba301bcf2976b4845d75f386c6cf5/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/212715/3898a96b3df9e749177ae140aa9739e06b0e4669/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1871182/5e06b08307574a72f79e9da297b863e3e09d864d/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1942100/551d394c490cdd39558aea14297d28dd74804b86/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1942100/4aa9e62b86e4c5c3e9c9dadf048cc0ff6ca8bc67/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1925842/620b4a12b99ac4a4ef526f742b6157cccada6f07/resource.tar.gz{, .log} |71.8%| [DL] 
$(B)/canondata_storage/1784117/cb10fc911ed03589097ad5a3bcbcd64029d4ed63/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1937027/2c2635807b1c895dd72532d28ab483f4c079524f/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1942278/14e1322c424babe14d336d0e30e11ebf1c359af0/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1936842/3070aa36e76249b07e9d35cb263dbb1236806278/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1781765/6df866e48931c0e70847b2260b3eea091b1ffa6c/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1784117/171e9b753021491729fde435744a1c6a7eab11a6/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1946324/8130a6ff70c94e35f3314a41fdd274c67d346f2d/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1936997/cf1a703bf40f5aee609a5f5135a4d554031effca/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1942173/8e89d11f444b9bfa5ce3b1040f7d67aa7d59d029/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1847551/9284ecf3eac8764c279b785b85bb154fba4c8f08/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1599023/2257627623f9ecc02660dc51c2da964bf24e60cf/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1814674/de906208dd4b33d2eb64a39da7463d9fe8dbd4e7/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1937027/3bbc108de55ced7d8ea9dde3d33b539d651376a2/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1775319/74bc7546a3c0c01c11b723666a17da4eca8c2c07/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1942415/f6f9af5cbb20343e1122f9a21a916296a441b2fb/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1899731/5f2ba051437dbbe71df0674617fe1a74e541bb6d/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1942100/d39849f28d468fa88039784446b53f38c2db1da1/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1777230/7bf27f01819182c286a6e9395b8c7d8d18f3fff1/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1937429/231d22d843eec78552d52ff0253bfa29e1a7a389/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1936273/364381182da99c268c68cfaa8dd7a6e282153b85/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1031349/1ad42ee65e67699c7849e156ebaee300f84f8937/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1130705/85069899508bcd3b8be2b6d75961f8852e8ff128/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1942415/0256128ac8ca0ee7db70a045de39aefe7d42898f/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1880306/3b0ac69e754a5bc45f3b426e0cb53bbecdce37e5/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1920236/b71e9d330355acb299c30c8f443a7df178347ab0/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1942525/0c03a9c206fe0b66d4716933185c93b3bcd09103/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1871102/8fb53a3a81ad5d5949727846153c9f6f58a0845e/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1599023/a09dc141000b40ce3e6095c62dea100bc7280e22/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1917492/064a3289ad6eaf99ba9f2a34e99fb15ca8194278/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1903280/e30d5fd7cc30b67d921c737fb0af1ae5c12759b0/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1920236/25167f14d72879f8881f693851d4290f3941dcbf/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1936842/c608d0442aed7541d0f8b13a5b587f4b16587009/resource.tar.gz{, .log} |71.9%| [DL] 
$(B)/canondata_storage/1599023/7241ba5b766444adac2e1b71063e3efa9d1ac6af/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1130705/f62fe18a04b048878dcf5b69770e9d14e5d379e4/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1871002/2eca5232e8e4d20f356cd7e26120cb51b472a03f/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/995452/798b97b59d948b81c59b61c7d9d39e722ebbfcc0/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1599023/b01cb10652577009aa8ecebf7aa8f50a4a8d4236/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1689644/4fd6684657e6d8973170a91936689f26f2b662fc/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1937367/8e6ed09f9acc0e3323bc0508667d24a148793a51/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1871102/487289822b55151d5bfd88d1dcd849a7a02d10b3/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1773845/4844b06fb73477a0ce945a8d2e558a686cbcbf98/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1899731/5f48750839c300c592c921895adce61b6bdd10c7/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1937429/0cd5c4c599538f9f8310cc0a7b67cdd6d3a2333f/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1871102/0f5b062694101798bb8a310f92ed9974c1a845c0/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1936842/08ca4a4a28db24dc522f19e01dabe6125d37fb33/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1942100/285e905e2474369b3f45b547f2afa647f1a81533/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1942278/2479c937ada2e75d2c6653f0cad3c43f5a4f06de/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1031349/593cf731fe784ac5bdeb5744a6013cfa7df8c284/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1599023/892497444bbacbe92ad2c557c09c697b859ad48d/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1899731/85df5ddd78ef06f5b299b58b79881c22b39759af/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1916746/50372851d94a84250091c501f470aca37411751a/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1847551/3a392ffe35b72cb523557617b3ab1c0abb94bee4/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1937429/581a41f498c09b791293b88fcb066123b9531421/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1889210/5799676c8b21a8a6ab9a23a98cfcb11d1e8db6cf/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1937150/0ce3b8d2f4edd9e0ed3b0820a3590fd6124c375e/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/937458/432610f205d490984e1977d219cad674f7aad6a5/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1936997/7e9b92fe2e738b03ed247110d482d009fa289ed5/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1936947/0d1daf9062d6c8b5dd4dc9b1f73c97791c044137/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1923547/63a9bf11f98a47ebc6a355858fc4c8179b67ce82/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1937492/8cdefcd6d0d86a9eaa2af7a5c2ce6fe8014c7ac2/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1031349/6bb24df9da4b98bf8c413d1c1c448000ac90a40d/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1937027/7e92a59557f254d8b58c96118ce2e626b197c0b1/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1773845/5f3eb5f2d67f8810998f3ac0dc6ca4e51a76482e/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1936947/21cc5de50f594b1190d08542a7f262a2327e625b/resource.tar.gz{, .log} |72.1%| [DL] 
$(B)/canondata_storage/1784117/0327cec5b34f60c96db651d3505ec161fd64bb5a/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1599023/1a5ae6170a572008429f35f362ba7a5e8f15d2db/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1817427/e644870a8f51ab795f33a09b1cdec1fcd2063713/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1942100/deb1f289b9c40e713d0d9f614e8c3a720d26b7b2/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1871002/243e3192fd5358940dd64b70c1fba15f8aaa24b7/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1784826/f008aeb63b134f27b8208ef2f9770992b33f3c2d/resource.tar.gz{, .log} |72.0%| [DL] $(B)/canondata_storage/1880306/94e1527dc445194ce3caa514976f24f846cc663f/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1936997/eb320c28d0358741aa9bf1e5ebf6f79c7b769b57/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1600758/4f71a10f67b6e33415a0e548a197be96b9261557/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1937429/8d4678be89a5cdafec6099b5b174e1923a6b1b84/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/937458/e65d62f7141c4085b03b6b3de39e5ba9d5868c49/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1773845/153cf0652cafcb3f7d3b789e3a4ceb4a9f7cfccf/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1899731/d7118ad96c050279cfcfe95a4f9577de9c404054/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1784826/10e350a94dce68db6577b228f7e65934ac1fc85e/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1597364/69d3fa450d1d1d788d13776cdd4c993faeab88d0/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1937001/79c03f49d007d946fea55dca11f80af0a81dd047/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1881367/a20f4e5c3e96e288ca11729296218a60f545a061/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1784826/fb40e5e64a539be6615e07cc5f6e889bcc1e4564/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1871182/1b070eaa6816a28c47f92666b5d664e443ea4c80/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1773845/2d431e7defad911934583a732f9c0713c566bb1c/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1942173/e32f1de19c4f2770a6f215d1dc22bc97e318bf22/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1942173/9e555000605607f149a299077d52b9ef3a944215/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1814674/65f4d58c5b2358f2ab558af11162f9a7adab9578/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1936273/7a32049e7d34640d0891b0eccadb21c671bd9ed5/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1814674/bc826e9b9202032dac82451ba4769076555fbab6/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1931696/bd23f2602b9ebbb7cd7a6085fa771927b4dc81e9/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1689644/21bb382fd3dd5b7958e963a35fdeff43f15acd6b/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1600758/25298d1c532d0c67aa8f12ec2205b095eb24cdb4/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1916746/21c597d64388f6c41b4782746fbaf7f1b842bcda/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/995452/b1f2dabe2f59f069a24ebbb0bcf5d5b69f26631e/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1600758/5edd0ab283f8fdd1cf03c1031ca810ce8554c459/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1917492/73fd38e9ffcd658585f52c248a634ae9046b0ff5/resource.tar.gz{, .log} |72.1%| [DL] 
$(B)/canondata_storage/1889210/796baf28896eb5aaad8828a0b6000e7d17563447/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1937001/2a7ef44323a9583b611e77f9451ecbcf9a39cd8f/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1946324/7382f2c221782186a6e8551e5722de6e9105c16c/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1937027/ec284f0d49e0f6e26fe5e8922028ffb903db3bc7/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1900335/3723346a2da176c5ee65dcf2ea559b19068a6488/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1889210/f7b9171f10cdcd52016b9256bc91d2696630be46/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1814674/6b45e34c475ed17d7555fbe7f3091282279c761a/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1937492/9ba008b22e29bc0b3fc3b0b722d6d7c245775122/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1925842/5ebb3b5676761f341f736480110bd8ab8a78d858/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1899731/a42b3541f94a87ba84e6f819e45d301c084940d9/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1937367/1aaf5198b7497ff892746fdaf479cd906eda5ef0/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1925821/6a1d049e384919d8478e0dc1a9ff789c7f500f07/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1599023/99c2356674b1e20f456cfa1987af5df85eb4bfa3/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1809005/867a928da4f0d2fb398d2c33bb67d6d401827633/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1900335/26b8736381377fc28b74c97ea9e134e6da21c6aa/resource.tar.gz{, .log} |72.2%| [CC] {tool} $(B)/ydb/core/protos/grpc.pb.cc |72.2%| [DL] $(B)/canondata_storage/1600758/48b7b4a69f7ce5991121870c9a2f027f9e2c7041/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1931696/6c4a36931a6a48d4590d231802e33da9ab40cb1e/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1924537/4c8ab803df15749c76bd45c30e057cec19cf79d5/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1600758/a42cbf1b9e8a45c29dfbe8c1c1f3d9fac0eb3d7d/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1936273/7c78e1e45ae282daee686c006624daa21a7c6ca6/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1942415/bce8c45faf79a59c214fe2bf46e33d9cc351ed18/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1942278/d3f67196e7e0096e289743f5dbfd5dc2f990f9e6/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1775319/2835bc5228be8e9c43a55ecd1a258cb2da433197/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1936273/9f3848d6f996b16a08afdfa5e23ec58b8aab3b97/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1773845/9450e6a9e418f128c33ccd34fc163a655ef7efb6/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1814674/8ecd58672b8e77093dcc9d63519f6e20b8155e91/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1942100/43fa07fed3cf8aed32ae0b5fedbb00bd2bab6b27/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1936273/ad3717b195ca16459f341d5ab440a7fc0685a6bb/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1923547/ab32f83f1ebe23caf0a6a3a190fc45da21e11461/resource.tar.gz{, .log} |72.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |72.3%| [DL] $(B)/canondata_storage/1942100/9017d73811f974261c96a9a333f3aaff5abeafff/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1937027/3adc3df76c101683c5032deee2bcc54230c6a1b0/resource.tar.gz{, .log} |72.3%| [DL] 
$(B)/canondata_storage/1031349/596c297595e75709124ce2ef96947a7ecc9a2056/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1597364/8ab87482c625c5d6a6e486201ec940e41b09bc3e/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1900335/8eba31ae2dcfd9245ad9327a1ac3ca89667336e2/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1130705/02b49b8f7de27e2bd653274fbb3967717e0d56f2/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1130705/0ef38e114204c1ac6d0d5ad14792a285fb1413c8/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1916746/6ecba111842446dcf0346e93a4db199bbedbdb10/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/212715/2cb1b5139d83aa48f3466b8892464b93f89797d0/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1937429/af1c6e6e04642438d43d596ae49e1f47c2f9a8bf/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1847551/5836720bd6edb7a20e88f4ea2ae09a4e4b561093/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1600758/72bf03e23e03cc6eea365311492e82f69d27547f/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1936997/1b75842b463219c1de23899c69a6f937818f5efa/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1784826/cd0b26ce347141d51c45b6088135a15077b56483/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1773845/f3e4c472dc37081782e19cd965bd65655fb94de9/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1942671/e5a8e9ea3cc95035ab65c78d12cce22189e05430/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1942525/5034185140ffa064b6ff5f40aec4f177acd3c5a1/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1937492/0916a4c5121c755975bb98db3f6bbff60eb63132/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1917492/cae80a2ca59a3c25b589fb2f7fdc5fafe6beed6a/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1871182/b2146249716fc2f308dff54f809c6f3e5b912d8d/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1936842/1230453fda0206fef63c6f7a723461640d941221/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1773845/3f7cf07086245864aa149e34543610596d0da304/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1880306/f9e06f45c25a1b68f533811cf1a5eba7085a7de4/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1784117/b9574b141cedb261a8563a97d9a372d9277da2ce/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/937458/c423db03e0e9f65962fec3378f160c7d70d32138/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1925842/a4b71373097359ba466e2713f3de746df8a53ab1/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1937367/3b134d615cd12a4e7acd4044dc106653bd43b397/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1942415/b6b41eb77627490bfce387dccb1eea7766e2bb71/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1600758/6536c12aedebd96c5fc71915b4cfc72252cfa630/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1931696/8786785a3fe08451381b624963d1fcdfd92ae069/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1923547/5fccde9e77cd24e0e9edadda2daa7eb56bde0a29/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1942415/1e09e4342cd71819f75a6b9adf843137f6d9a325/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1942671/17c7e87d808f783b60251714ee76f807b99866f5/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1130705/0028f368df473b7ebff2145046017939f0673670/resource.tar.gz{, .log} |72.4%| [DL] 
$(B)/canondata_storage/1946324/c8058686a7decbc255d7e28ecdcab6420a760e15/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1925821/4ae9bde3c1ecde0f833266f025b433a41c077ebf/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1880306/db71d33ed2525c218cc0dde3f5b7b9ce7f9aa317/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1942525/a6d79a71fe1f7b9d4dbe9fc0e8d7f38f783c44cf/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1781765/628d82040b97a4c0c6e6723ca4453171e6143b19/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1937150/5e5d899491feb90365a9064ef20b0b01f17c7419/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/937458/320b06d50641c62738ef0e2f333cec71c14bfc4b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1847551/d84e968fa8a66e33f0268ab656a20e2b84ad109c/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1936842/15d1b251a19a947bc78bcd914d26903ce91d665f/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1814674/5f3c7350d8e72ada6a702e29e5f3bfddaa73df08/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1936273/20339d8b097f50e14eb6d266a222b77abdc42846/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1871102/bc396b8b31a3dc31af3e0918ca66137d03d31eff/resource.tar.gz{, .log} |72.4%| [DL] $(B)/canondata_storage/1942278/712088e94c8a5f29908b7a81c4185b8ab9ebca5f/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1937150/6ed1231d0735e7ff4ac5f603831c10709457ac3b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1924537/7199a3a7eba1a101a7ecd6552b3df25cb9a6ef2b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1809005/d82aaae062254cb5da7fc7348fa82d8f67f6ea93/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1924537/b112d187ebf731abc2b04c974853f91beb3dd74d/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1809005/bace128d842e0e2cef93390c0800c74269352290/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1880306/9f21ecea8b7e24ea53f2ae352301fe1250fdba57/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/212715/01c0c681be6c56b02f31f87454cd0dd3cd0e4ade/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1781765/97c29e53add37e5e221fbc6e22055fd1d8762911/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1130705/acc206ee45aa5b7f7e78d232059c9535f249dda0/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1923547/331b1de1b2a9544651bd249eccea1d8975558c09/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1031349/110747d194be1b3ec565c8629bddeb11bdda85c5/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1942415/5d0460f6a5e883e733ff9b871dd0c0a3717965d3/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1871182/6bfabc62940fdb174a55acbc4fb75e975f32df0b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1903280/1302f1777838aa638bf5151db4710571d26da566/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1903885/4df104aa60634735da6e3543917f736870f3f18b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1599023/b8ad35209f29e3ae308d0f48b10aeffda01d29c8/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1937429/089b4c182c7fc7602e8f7e7a4b535cbf50844bdb/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1936273/5c7aadbc9513617ac94bd8f103a74bc39b4edfe5/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/937458/8c856e93170e875aa0a26dc4a34de068016e4377/resource.tar.gz{, .log} |72.5%| [DL] 
$(B)/canondata_storage/1817427/c76f32a844a68e74a51674b34fdd7e00e417e1e1/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1937367/bc1ddd52a5c80a7e52dc1d8a7570c5b38a712194/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1942525/e7939b4cfb5e85a7bd57688517d44a82bd824253/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1777230/8d6bc20c3c548691ed47463aed0d508dcd185ce3/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1599023/aac9f84343918a59a2e7eb28e31ca1d36cf30297/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1917492/261649fbdcb7f81af5118b7c9c9f7b8353abbfd8/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1917492/ef839f70e5a2f493427f7f92ed00d26a993f6d4a/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1784826/fe2bef548a55eb11e26daaded455ba74fda33a1b/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1903885/f00a3197fa44aa3d49bf7fe1bbf0fed52ce265b9/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1942100/21245e81d28b28ef09d03385075d39472fbb3dba/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1942100/e137fd991c9f3857dfbc144867f53c199fe00560/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1814674/9cc588658d645e8972899f036025c6c4884f598d/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1936947/581aa6d896ffe57e25bdb8006459e912860e61fa/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1937367/c8f509a79779b30b722211cfeb063fe74251b5ea/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1937027/00f4d6f3eee88b3871a02f7dd00f16d78030cdd2/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1903280/e408d73e432cdcbd076f8502cb4502ad1d54ab5a/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1923547/61c7053456cbb3e809e03779a3f7621039603dac/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1599023/e9f8d240b4483477bcadcd3788795f2462724043/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1689644/d939c79f1c25569f7b8f4e5b740e070ad72d7ad7/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1880306/6d87e35267d9e4ac0736e1c4d17b92f0831eaba6/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1916746/8af1fb7747dc5b2dccf47bca5be44479c7ae6621/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1031349/00d772b75437904a4810c1baf5e317bde8a6e2b5/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1781765/42df89988fcd33edfdab26a81def80f0820c3235/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1600758/aad142702907f13e911494c1a7b312bad34f692a/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1784826/885e3ecf7da6faaa6a93df31c27314e793907f63/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1773845/4743168c84575c5ee74764d6369a8a7b6f309d6e/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1597364/5c8f443c3c4d257c5cc9ae09d46f62d2dbcbded8/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1889210/7f3c41a1a9a952dcc8b95a828e079ca0b5c57243/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1817427/46729b354b9b15ea89f67bf14fefd2face8b402b/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1923547/c3f064ea25dafaabdc78d527cb888e8c29c155df/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1903885/e665e50194f0fdfa8d7857ead4c5d407752c80c8/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1871102/fa4f080643cfe96ac90e96595d2e7db77c75a3b7/resource.tar.gz{, .log} |72.7%| [DL] 
$(B)/canondata_storage/1814674/8032c8c75c4a0135917efb7e8a36a553203d3792/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1937367/7890620b546312cfd9cbc4ee46166efc1a36450c/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1936273/b34296023a3ba5080f0236257e86efdd8a89093e/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1942100/b3af8dc6178472cab79e2c61d51edf8f4af32e2f/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1871002/212f691ac7c5e7cfc43a031ca90e23988dc4e5af/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1942671/a089042abddfd51839b75278c8f86236a82e2fbd/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/212715/89a4a534b98f0006d500be0545402722592e483c/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1775059/8eed37259d411fc80649c1b2311ad3abfd9ee15e/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1775059/3040ff3725a2cd9541d62c4c0f59acbf85d6e8e9/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1871102/c16b260c9474b6209b41c05f68145ad16f292a86/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1871182/29596dbacc893115441dfee9a88f44ddb7ebc4a2/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1936273/271f22955fd0fcf6e1856272701535b2e45b32ef/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1599023/6ea95a71ae6e3995d639ef495d263a106e521882/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1880306/51cf42e38aedb850f758a02645c1575dcd57d9e6/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1899731/ac2fe0744925be7f8cc1556d084138437b7eeab5/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1937150/752b46c5b03a79553eede6ce218ca961ba7c10c2/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1689644/4d9701667c235827e22152c557341a6339db2761/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1130705/984266b47af8d517834a10674242c9e900f41724/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1777230/978467ecd66b7473cbb5a78812196988c2151940/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1946324/96d594815fbe2bdfa11f7d8491a7bb58b8738a79/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1936273/e76bbb45fb3007593a65735b5c0016b79db0c798/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1880306/5d2fb97b23cd70975bc5d744391981f9d5595c04/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1777230/231bd09491bc6ae7a605dc4342b8955354e67f2a/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1920236/ea9b79a4af23814e47242a86125bfc9db48e103e/resource.tar.gz{, .log} |72.8%| [DL] $(B)/canondata_storage/1031349/fade0e1ab4ddcf96add4ba75388b76b0ae6970f8/resource.tar.gz{, .log} |72.8%| [DL] $(B)/canondata_storage/1031349/f5278948946380da3d5514360765e6ba76347c46/resource.tar.gz{, .log} |72.8%| [DL] $(B)/canondata_storage/1900335/a5a16b7313d07b162a608c1abeab1e68e6175117/resource.tar.gz{, .log} |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |72.8%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... 
library/recipes/docker_compose/bin/docker-compose} |72.8%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |72.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |72.8%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/types.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/types.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/action.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/action.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/log_backend/log_backend_build.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend_build.cpp |72.8%| [CC] {tool} $(B)/ydb/core/protos/console_config.pb.cc |72.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |72.8%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/xml.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/xml.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/validators_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/ydb_over_fq.cpp |72.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |72.9%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ydb_over_fq.cpp |72.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_monitoring.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_monitoring.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage.cpp |72.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |72.9%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/_{70cb4fc9708c18f38ed236e25b.yasm ... ea9739c5637ddd163f7384c82f.rodata} |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |72.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |72.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |72.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |72.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |72.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/_{70cb4fc9708c18f38ed236e25b.yasm ... 
ea9739c5637ddd163f7384c82f.rodata} |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |72.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |72.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/init.h_serialized.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |72.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |72.9%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |72.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |73.0%| [JS] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |73.0%| [JS] {BAZEL_UPLOAD} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |73.0%| [AS] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |73.0%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/logs/log.cpp |73.0%| [AR] {RESULT} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/logs/log.cpp |73.0%| [AS] {BAZEL_UPLOAD} $(B)/ydb/core/protos/join_c09b118b667044a5f8150e6958.yasm |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/tablet_killer.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/tablet_killer.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp >> PgTest::DumpIntCells >> PgTest::DumpIntCells [GOOD] |73.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp >> PgTest::DumpStringCells |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/private_client/internal_service.cpp |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_ut.cpp >> PgTest::DumpStringCells [GOOD] |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/private_client/internal_service.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp >> TErasureTypeTest::TestBlock42LossOfAllPossible2 |73.0%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] |73.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/log_backend/log_backend.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend.cpp |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe22LossOfAllPossible2 |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step >> ValidationTests::CanCopyTo [GOOD] |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/memory_controller/memory_controller.cpp |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp >> ValidationTests::CanDispatchByTag [GOOD] |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__init_scheme.cpp >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller.cpp >> SamplingControlTests::Simple [GOOD] |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_replica.cpp |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit >> SamplingControlTests::EdgeCaseLower [GOOD] >> ThrottlerControlTests::Overflow_1 [GOOD] |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_config.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_replica.cpp |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |73.0%| [LD] {RESULT} 
$(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanDispatchByTag [GOOD] |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.0%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_event_filter.cpp |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/ydb-tests-functional-tpc |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/ydb-tests-functional-tpc |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.1%| [AR] {RESULT} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a >> ThrottlerControlTests::Simple [GOOD] |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_event_filter.cpp |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common >> ThrottlerControlTests::LongIdle [GOOD] >> ThrottlerControlTests::Overflow_2 [GOOD] |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli 
|73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_epoch.cpp >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] >> TErasureTypeTest::TestStripe42LossOfAllPossible2 >> Scheme::NonEmptyOwnedCellVec [GOOD] >> Scheme::TSerializedCellMatrix [GOOD] >> ValidationTests::MapType [GOOD] |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp >> SchemeBorders::Full [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] >> Scheme::EmptyOwnedCellVec [GOOD] >> Scheme::CellVecTryParse [GOOD] >> Scheme::CompareOrder [GOOD] |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::TSerializedCellMatrix [GOOD] >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] >> Scheme::TSerializedCellVec [GOOD] |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::NonEmptyOwnedCellVec [GOOD] |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> SchemeBorders::Full [GOOD] |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::MapType [GOOD] |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/init_ut.cpp >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::EmptyOwnedCellVec [GOOD] |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |73.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] >> SchemeBorders::Partial [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::TSerializedCellVec [GOOD] Test command err: Serialize: 0.000068s Cells constructor: 
0.000169s Parse: 0.000072s Copy: 0.000055s Move: 0.000017s |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::CompareOrder [GOOD] |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |73.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> SchemeBorders::Partial [GOOD] |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_ut.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |73.2%| [AR] {RESULT} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a >> TErasureTypeTest::TestBlock42PartialRestore0 >> ValidationTests::HasReservedPaths [GOOD] |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::HasReservedPaths [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 >> TErasureTypeTest::TestStripe33LossOfAllPossible3 >> test_generator.py::TestTpchGenerator::test_s1 |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/ut/main.cpp >> test_generator.py::TestTpchGenerator::test_s1_state >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/ut/main.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp >> test_init.py::TestTpchInit::test_s1_column_decimal >> test_init.py::TestTpchInit::test_s1_row |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |73.2%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb >> test_init.py::TestClickbenchInit::test_s1_column |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row >> test_generator.py::TestTpcdsGenerator::test_s1 |73.2%| [TA] $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/grpc_service.cpp >> test_init.py::TestTpcdsInit::test_s1_column_decimal |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.2%| [TA] {RESULT} $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TErasureTypeTest::TestBlock23LossOfAllPossible3 >> SamplingControlTests::EdgeCaseUpper [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb >> test_init.py::TestTpcdsInit::test_s1_s3 |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s1_column >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpchInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.2%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::FromV1Codec [GOOD] |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/meta/meta_versions.cpp |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::FromV1Codec [GOOD] |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |73.3%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp >> PersQueueCodecs::ToV1Codec [GOOD] >> ErasureBrandNew::Block42_encode |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta_versions.cpp |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 >> Metrics::CombineSubItems [GOOD] >> Metrics::SeveralTopItems [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::ToV1Codec [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp >> Metrics::OnlyOneItem [GOOD] |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.4%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq >> Scheme::YqlTypesMustBeDefined [GOOD] >> TErasureTypeTest::TestAllSpecies1of2 |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode >> TErasureTypeTest::TestStripe43LossOfAllPossible3 |73.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::CombineSubItems [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::YqlTypesMustBeDefined [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore1 >> TErasureTypeTest::TestBlock42PartialRestore2 |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp >> Scheme::UnsafeAppend [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::OnlyOneItem [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralTopItems [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |73.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a >> Scheme::CompareUuidCells [GOOD] >> 
TErasureTypeTest::TestBlock22LossOfAllPossible2 |73.4%| [AR] {RESULT} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] >> TErasureTypeTest::TestBlock33LossOfAllPossible3 >> Metrics::SeveralSubItems [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 |73.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/libydb-services-fq.a |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |73.4%| [AR] {RESULT} $(B)/ydb/services/fq/libydb-services-fq.a >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::UnsafeAppend [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::CompareUuidCells [GOOD] >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TErasureTypeTest::TestBlock43LossOfAllPossible3 |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/grpc_service.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] >> TErasureTypeTest::TestEo [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlockByteOrder [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralSubItems [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/kqprun.cpp >> TErasureTypeTest::TestBlock32LossOfAllPossible2 |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/config/init/init.h_serialized.cpp >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestEo [GOOD] |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |73.5%| [AR] {RESULT} $(B)/ydb/core/log_backend/libydb-core-log_backend.a >> Metrics::MoreThanFiveItems [GOOD] >> ErasureBrandNew::Block42_restore |73.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] >> Metrics::EmptyIssuesList [GOOD] >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::MoreThanFiveItems [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |73.5%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> HmacSha::HmacSha1 [GOOD] |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::EmptyIssuesList [GOOD] |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/dstool/ydb-dstool |73.5%| [TA] $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |73.5%| [TA] $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> TWeighedOrderingTest::SimpleSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] >> ErasureBrandNew::Block42_encode [GOOD] >> ErasureBrandNew::Block42_chunked >> OperationLog::Size1 [GOOD] >> OperationLog::Size29 [GOOD] >> OperationLog::Size1000 >> OperationLog::Size8 [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |73.5%| [TA] {RESULT} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |73.5%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |73.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/hmac/ut/unittest >> HmacSha::HmacSha1 [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |73.6%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud >> OperationLog::Size1000 [GOOD] |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/oidc_proxy/mvp.cpp >> OperationLog::ConcurrentWrites |73.6%| [TS] {RESULT} ydb/core/fq/libs/hmac/ut/unittest |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl >> OperationLog::ConcurrentWrites [GOOD] |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/kv_workload/ydb-tests-functional-kv_workload |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/kv_workload/ydb-tests-functional-kv_workload |73.6%| [TS] {RESULT} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |73.6%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api >> result_convert::import_test [GOOD] |73.6%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/debug_tools/ut/unittest >> OperationLog::ConcurrentWrites [GOOD] |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |73.6%| 
[TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> Json::BasicRendering [GOOD] |73.6%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> OldFormat::SameVersion [GOOD] >> OldFormat::DefaultRules [GOOD] >> OldFormat::PrevYear [GOOD] >> OldFormat::Trunk [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> OldFormat::TooOld [GOOD] >> Checks::OpaqueMaps [GOOD] >> OldFormat::OldNbs [GOOD] >> Checks::ErrorInCheck [GOOD] >> VersionParser::Basic [GOOD] >> Checks::BasicIntChecks [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] >> Checks::IntArrayValidation [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> Checks::BasicStringChecks [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] >> Checks::MapValidation [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> YdbVersion::LimitOld [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::StoredWithRules [GOOD] |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_proxy.cpp >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::YDBAndNbs [GOOD] >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp >> runner::import_test [GOOD] |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |73.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/libcore-config-init.a |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> Checks::MapValidation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::PrintCurrentVersionProto [GOOD] Test command err: Application: "ydb" |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/statistics_workload/statistics_workload |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_proxy.cpp >> ErasureBrandNew::Block42_chunked [GOOD] |73.6%| [TS] {RESULT} ydb/core/debug_tools/ut/unittest |73.6%| [AR] {RESULT} $(B)/ydb/core/config/init/libcore-config-init.a |73.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test >> 
TFunctionsMetadataTest::Serialization >> TFunctionsMetadataTest::Serialization [GOOD] |73.6%| [TA] $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_chunked [GOOD] Test command err: totalSize# 497569235 period1# 3.082784s period2# 0.618177s MB/s1# 161.4025618 MB/s2# 804.897683 factor# 4.986895339 |73.6%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_checks/unittest |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |73.6%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test |73.6%| [LD] {RESULT} $(B)/ydb/tools/statistics_workload/statistics_workload |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp >> ydb-tests-functional-dynumber::import_test [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> ydb-dstool::import_test [GOOD] |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/dynumber/import_test >> ydb-tests-functional-dynumber::import_test [GOOD] |73.7%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |73.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/ut/unittest >> ConfigValidation::TooManyVDiskChanged [GOOD] |73.7%| [TS] {RESULT} ydb/tests/functional/dynumber/import_test |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/import_test >> ydb-dstool::import_test [GOOD] |73.7%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |73.7%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.7%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |73.7%| [TS] {RESULT} ydb/core/config/validation/ut/unittest |73.7%| [TS] {RESULT} ydb/apps/dstool/import_test |73.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |73.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> ValidatorBuilder::BuildSimpleValidator [GOOD] >> ValidatorBuilder::CanHaveMultipleType [GOOD] >> ValidatorBuilder::CanHaveDuplicateType [GOOD] >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] >> ValidatorBuilder::CreateMultitypeNode [GOOD] |73.7%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |73.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |73.7%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a >> ydb-tests-functional-clickbench::import_test [GOOD] |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ValidatorBuilder::CreateMultitypeNode [GOOD] |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/local_ydb/local_ydb |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |73.7%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb |73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |73.7%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.7%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |73.7%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |73.7%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_builder/unittest |73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |73.7%| [CC] {tool} $(B)/ydb/core/protos/config.pb.cc |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |73.8%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |73.8%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp >> Signer::Basic [GOOD] |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/core/fq/libs/signer/ut/unittest >> Signer::Basic [GOOD] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/ut/ydb-core-config-ut |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |73.8%| [TS] {RESULT} ydb/core/fq/libs/signer/ut/unittest |73.8%| [LD] {RESULT} $(B)/ydb/core/config/ut/ydb-core-config-ut |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/run_query.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/run_query.cpp >> ydb-tests-functional-tenants::import_test [GOOD] >> ydb-tests-functional-autoconfig::import_test [GOOD] >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] >> ydb-tests-fq-multi_plane::import_test [GOOD] |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/cfg/bin/ydb_configure |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> ydb-tests-functional-scheme_shard::import_test [GOOD] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |73.8%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |73.8%| [LD] {RESULT} $(B)/ydb/tools/cfg/bin/ydb_configure |73.8%| [TS] {RESULT} ydb/tests/functional/tenants/import_test |73.8%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |73.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std >> ydb-tests-functional-audit::import_test [GOOD] >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |73.8%| [AR] {RESULT} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] >> ydb-tests-functional-serverless::import_test [GOOD] |73.8%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test |73.8%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |73.8%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/driver/nemesis >> ydb-tests-functional-tpc::import_test [GOOD] |73.9%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] >> ConfigProto::ForbidNewRequired [GOOD] |73.9%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test |73.9%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/driver/nemesis |73.9%| [TS] {RESULT} ydb/tests/functional/audit/import_test >> ydb-tests-fq-s3::import_test [GOOD] >> ydb-tests-functional-script_execution::import_test [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/import_test >> ydb-tests-functional-tpc::import_test [GOOD] |73.9%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> ydb-tests-functional-suite_tests::import_test [GOOD] >> ydb-tests-functional-sqs-large::import_test [GOOD] >> ydb-tests-fq-yds::import_test [GOOD] >> ydb-tests-functional-ydb_cli::import_test [GOOD] |73.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |73.9%| [TS] {RESULT} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-script_execution::import_test [GOOD] >> ydb-tests-functional-api::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |73.9%| [TS] {RESULT} ydb/tests/functional/tpc/import_test |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] >> ydbd_slice::import_test [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] >> ydb_recipe::import_test [GOOD] |73.9%| [TS] {RESULT} ydb/core/config/ut/unittest |73.9%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |73.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.9%| [TS] {RESULT} 
ydb/tests/functional/script_execution/import_test |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] |73.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test |73.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics >> replay::import_test [GOOD] |73.9%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD] >> ydb-tests-functional-hive::import_test [GOOD] >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |73.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] |73.9%| [TS] {RESULT} ydb/tests/functional/api/import_test |73.9%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test |73.9%| [TS] {RESULT} ydb/tests/fq/s3/import_test |73.9%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/tests/fq/yds/import_test |74.0%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test >> StaticValidator::HostConfigs [GOOD] >> StaticValidator::Hosts [GOOD] >> StaticValidator::DomainsConfig [GOOD] >> ydb-tests-functional-kv_workload::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/tests/functional/hive/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test |74.0%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/unittest >> StaticValidator::DomainsConfig [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kv_workload/import_test >> ydb-tests-functional-kv_workload::import_test [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] >> statistics_workload::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/unittest |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a >> ydb-tests-fq-http_api::import_test [GOOD] >> ydb-tests-functional-cms::import_test [GOOD] |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a >> ydb-tests-functional-ttl::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/tests/functional/kv_workload/import_test |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |74.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] >> integrations_test.py::test_read_jtest_results[o/OK] 
[GOOD] >> ydb-tests-functional-sqs-multinode::import_test [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] |74.0%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |74.0%| [TS] {RESULT} ydb/tools/statistics_workload/import_test >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |74.0%| [TS] {RESULT} ydb/tests/functional/cms/import_test |74.0%| [TS] {RESULT} ydb/tests/functional/ttl/import_test |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/jptk/001b84/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/jptk/001b84/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |74.0%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-rename::import_test [GOOD] >> ydb_configure::import_test [GOOD] |74.0%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |74.0%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/import_test >> ydb_configure::import_test [GOOD] |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp >> ydb-tests-functional-config::import_test [GOOD] |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable >> ydb-tests-functional-scheme_tests::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] >> ydb-tests-functional-sqs-common::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/tests/functional/rename/import_test |74.1%| [LD] {RESULT} 
$(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] |74.1%| [TS] {RESULT} ydb/tools/cfg/bin/import_test |74.1%| [LD] {RESULT} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/tests/functional/config/import_test |74.1%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> kqprun_recipe::import_test [GOOD] |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/tstool/tstool |74.1%| [LD] {RESULT} $(B)/ydb/tools/tstool/tstool |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.1%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test |74.1%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] >> ydb-tests-functional-restarts::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] >> ydb-tests-functional-canonical::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/tests/functional/restarts/import_test |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |74.1%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |74.1%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> local_ydb::import_test [GOOD] |74.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/import_test >> local_ydb::import_test [GOOD] |74.1%| [TS] {RESULT} ydb/public/tools/local_ydb/import_test |74.1%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy >> tstool::import_test [GOOD] |74.1%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/import_test >> tstool::import_test [GOOD] |74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> ydb-tests-functional-wardens::import_test [GOOD] |74.2%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> ydb-tests-fq-plans::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tools/tstool/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |74.2%| [TS] {RESULT} ydb/tests/functional/wardens/import_test |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> ydb-tests-fq-restarts::import_test [GOOD] >> ydb-tests-tools-pq_read-test::import_test [GOOD] |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |74.2%| [TS] {RESULT} ydb/tests/fq/plans/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] >> ydb-tests-functional-encryption::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |74.2%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test >> nemesis::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> ydb-tests-fq-mem_alloc::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/import_test >> nemesis::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/meta/meta.cpp >> ydb-tests-functional-query_cache::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config |74.2%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/import_test |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> 
TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> ydb-tests-tools-nemesis-ut::import_test [GOOD] >> ydb-tests-functional-limits::import_test [GOOD] >> functional-sqs-merge_split_common_table-std::import_test [GOOD] >> ydb-tests-functional-blobstorage::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/library/ut/py3test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/functional/limits/import_test |74.2%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test |74.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |74.2%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |74.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/tpch/tpch |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |74.3%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |74.3%| [AR] {RESULT} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/ydb-tests-functional-tpc >> ydb_serializable::import_test [GOOD] |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] >> ydb-tests-functional-serializable::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/tests/functional/serializable/import_test >> ydb-tests-stability-ydb::import_test [GOOD] >> ydb-tests-functional-compatibility::import_test [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta.cpp |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/tests/stability/ydb/import_test |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |74.3%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |74.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/bin/mvp_meta |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/meta/bin/mvp_meta |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> ydb-tests-fq-common::import_test [GOOD] >> ydb-tests-library-ut::import_test [GOOD] |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |74.3%| [TS] {RESULT} ydb/tests/fq/common/import_test |74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |74.3%| [TS] {RESULT} ydb/tests/library/ut/import_test |74.3%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |74.3%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |74.3%| [AR] {tool} $(B)/ydb/core/protos/libydb-core-protos.a |74.3%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |74.3%| [LD] {tool} $(B)/ydb/core/base/generated/codegen/codegen |74.3%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/codegen |74.3%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.h |74.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |74.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts >> ErasureBrandNew::Block42_restore [GOOD] >> ErasureBrandNew::Block42_restore_benchmark |74.3%| [LD] {tool} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen >> ErasureBrandNew::Block42_restore_benchmark [GOOD] |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/apps/dstool/ydb-dstool |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |74.4%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_restore_benchmark [GOOD] Test command err: totalSize# 502359041 period1# 0.836314s period2# 0.658169s MB/s1# 600.6823286 MB/s2# 763.2675513 factor# 1.270667564 |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/codegen |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/kv_workload/ydb-tests-functional-kv_workload |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tstool/tstool |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/statistics_workload/statistics_workload |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |74.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/local_ydb/local_ydb |74.5%| 
[LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |74.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/pdisk_read.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_read.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |74.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |74.5%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_tablet_monitor.cpp >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/execute_queue.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/execute_queue.cpp |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |74.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |74.6%| [AR] {RESULT} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_sys.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_sys.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_tablet.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_tablet.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |74.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |74.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |74.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |74.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/insert_table/broken_dedup.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |74.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/insert_table/broken_dedup.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |74.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |74.7%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/key_validator.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |74.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/service_impl.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service_impl.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__init.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__init.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/sessions/sessions.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |74.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/sessions/sessions.cpp |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |74.8%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tablet_helpers.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_helpers.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_runner.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_runner.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |74.8%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/test_runtime.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime.cpp |74.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |74.8%| [AR] {RESULT} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/sample_k.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/sample_k.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |74.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |74.9%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/zero_level.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/zero_level.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/vslots.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/vslots.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/cs_helper.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/cs_helper.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tx_helpers.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tx_helpers.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/abstract.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/abstract.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/nodes/nodes.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/nodes/nodes.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |75.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/bootstrapper.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_replication.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/topic_sdk_test_setup.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |75.0%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/topic_sdk_test_setup.cpp |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |75.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |75.0%| [AR] {RESULT} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_import.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_resolver.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tenant_runtime.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tenant_runtime.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/request/request_actor.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/fake_coordinator.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/fake_coordinator.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/common/owner.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/common/owner.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |75.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/export/events/events.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/events/events.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |75.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/pdisks.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/pdisks.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/appdata.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/appdata.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |75.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/generic_manager.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/generic_manager.cpp |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/groups.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/shard_impl.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/groups.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/shard_impl.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |75.2%| [AR] {RESULT} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_update_config.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_config.cpp |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |75.2%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/common/config.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/config.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |75.2%| [AR] {RESULT} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/services.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/services.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/runtime.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/runtime.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_transform.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_transform.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/service.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/service.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/service/sysview_service.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/sysview_service.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |75.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |75.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/run.cpp |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_type_ann.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_translate.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_translate.cpp >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_reset.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_reset.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |75.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |75.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/service.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |75.4%| [AR] {RESULT} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |75.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |75.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |75.4%| [AR] {RESULT} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ymq/ymq_proxy.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/ymq_proxy.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_startup.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_startup.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |75.4%| [AR] {RESULT} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/constructor.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |75.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/constructor.cpp |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |75.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |75.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/erasure_checkers.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/erasure_checkers.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_export.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/helpers.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/helpers.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |75.5%| [AR] {RESULT} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/downtime.cpp |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/query_actor/query_actor.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.5%| [AR] {RESULT} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor.cpp |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |75.5%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |75.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_metrics.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_metrics.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/tablets/tablets.cpp |75.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |75.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/tablets/tablets.cpp |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_host.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_host.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |75.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/db_counters.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/db_counters.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ymq/grpc_service.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |75.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ymq/libydb-services-ymq.a |75.6%| [AR] {RESULT} $(B)/ydb/services/ymq/libydb-services-ymq.a |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/grpc_service.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/processor.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |75.7%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/checker_secret.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_secret.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/secret.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |75.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |75.7%| [AR] {RESULT} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/service/ext_counters.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/ext_counters.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |75.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/alter.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |75.7%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/read_balancer__balancing.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/read_balancer__balancing.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/common.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/common.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/node_whiteboard.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_whiteboard.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp >> TErasureTypeTest::TestAllSpecies1of2 [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |75.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |75.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_init.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_init.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tracing/tablet_info.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tracing/tablet_info.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tracing/libydb-core-tracing.a |75.9%| [AR] {RESULT} $(B)/ydb/core/tracing/libydb-core-tracing.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_api_handler.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/access.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_api_handler.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/initialization.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/initialization.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |75.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_init.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |75.9%| [AR] {RESULT} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/resource_broker.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/libydb-core-tablet.a |75.9%| [AR] {RESULT} $(B)/ydb/core/tablet/libydb-core-tablet.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |76.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/common/schema.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |76.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/common/schema.cpp |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/snapshot.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/snapshot.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |76.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/manager.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |76.0%| [AR] {RESULT} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/manager.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/checker_access.cpp |76.0%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |76.1%| [AR] {RESULT} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_access.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |76.1%| [AR] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_load_state.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_load_state.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/transaction.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/transaction.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/registration.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/registration.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/fetcher.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/fetcher.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/public_http/http_service.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_service.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/libydb-core-public_http.a |76.1%| [AR] {RESULT} $(B)/ydb/core/public_http/libydb-core-public_http.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/object.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/object.cpp |76.1%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/index.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ownerinfo.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/index.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ownerinfo.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |76.2%| [AR] {RESULT} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/restore.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/manager.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/restore.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/manager.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/database/database.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/database.cpp |76.2%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |76.2%| [AR] {RESULT} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/http.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/common_level.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/common_level.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/http.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/constructor.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/constructor.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/initializer.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/initializer.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pq_impl_app.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq_impl_app.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |76.3%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/read_quoter.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/read_quoter.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/chunks.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |76.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/chunks.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/events/events.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/events.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |76.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pq_l2_cache.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq_l2_cache.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/fetcher.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/fetcher.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/modification.cpp |76.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/manager/modification.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/write_quoter.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/write_quoter.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/grpc_service.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/grpc_service.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/snapshot.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/snapshot.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/subscriber.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/subscriber.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |76.4%| [AR] {RESULT} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/read_balancer_app.cpp |76.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/user_info.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/read_balancer_app.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/user_info.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_monitoring.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_monitoring.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/behaviour.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/behaviour.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pq_impl.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq_impl.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/optimizer.cpp |76.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |76.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/optimizer.cpp |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_sourcemanager.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_sourcemanager.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/fetch_request_actor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/fetch_request_actor.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/read_balancer__balancing_app.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/read_balancer__balancing_app.cpp |76.5%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_write.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_write.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/object.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/object.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_read.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_read.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp |76.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |76.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_init.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_init.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/alter_impl.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter_impl.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/put_records_actor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/put_records_actor.cpp |76.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |76.5%| [AR] {RESULT} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |76.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |76.5%| [AR] {RESULT} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/mirrorer.cpp |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/mirrorer.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/read_balancer.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/read_balancer.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/initializer.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/initializer.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_initialize.cpp |76.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/test_tablet/tx_initialize.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/service.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/service.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/libydb-services-metadata.a |76.6%| [AR] {RESULT} $(B)/ydb/services/metadata/libydb-services-metadata.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/bucket.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/bucket.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |76.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition.cpp |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/sourceid.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/sourceid.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |76.6%| [AR] {RESULT} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |76.6%| [AR] {RESULT} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/logic.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/logic.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |76.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |76.6%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/services/metadata/secret/initializer.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/initializer.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |76.6%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/account_read_quoter.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/account_read_quoter.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |76.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |76.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/common.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |76.7%| [AR] {RESULT} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/event_helpers.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/event_helpers.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pq.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_scale_request.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/partition_scale_manager.cpp |76.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/persqueue/partition_scale_request.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/partition_scale_manager.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/cluster_tracker.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/cluster_tracker.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |76.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/optimizer.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |76.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |76.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/optimizer.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/mon.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/api_adapters.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/api_adapters.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/sync_http_mon.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/sync_http_mon.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |76.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |76.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/ext_index/metadata/behaviour.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |76.8%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |76.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cluster_info.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/async_http_mon.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/async_http_mon.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/libydb-core-mon.a |76.8%| [AR] {RESULT} $(B)/ydb/core/mon/libydb-core-mon.a |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |76.8%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |76.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/writer.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/writer.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |76.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |76.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/common.cpp |76.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/initializer/common.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/logic.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/logger.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/granules.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/logger.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/logic.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/granules.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_dummy.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_dummy.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/libydb-services-ydb.a |76.9%| [AR] {RESULT} $(B)/ydb/services/ydb/libydb-services-ydb.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_shard_context.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_context.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |76.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/collector.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/collector.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |77.0%| [AR] {RESULT} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |77.0%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |77.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/portions.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/portions.cpp |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/actor.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/actor.cpp |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/events.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/events.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |77.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |77.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |77.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |77.1%| [AR] {RESULT} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/write_actor.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/write_actor.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp 
|77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/topic.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/background_controller.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/background_controller.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic.cpp |77.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |77.1%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |77.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/tables_manager.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |77.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |77.2%| [AR] {RESULT} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/snapshot.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/snapshot.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/retention.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/retention.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |77.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |77.2%| [AR] {RESULT} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |77.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |77.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |77.3%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/executor.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/executor.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |77.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |77.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon_alloc/monitor.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/monitor.cpp |77.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |77.3%| [AR] {RESULT} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/create_user.cpp |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_user.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |77.3%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |77.3%| [AR] {RESULT} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/node_tracker.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/node_tracker.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_description.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/send_message.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/send_message.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/schema.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/schema.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/metering.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/metering.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_pipe_req.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_pipe_req.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/manager.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/manager.cpp |77.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |77.4%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/factories.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/factories.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/proxy_service.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_service.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] |77.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/library/table_creator/table_creator.cpp |77.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |77.4%| [AR] {RESULT} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator.cpp |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cfg.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cfg.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/common/ss_dialog.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/object.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/ss_dialog.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/object.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |77.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |77.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/purge.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_users.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_users.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/receive_message.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/receive_message.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_settings.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_settings.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/actor.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/actor.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/purge_queue.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/counters/counters.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge_queue.cpp |77.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.5%| [AR] {RESULT} 
$(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/counters/counters.cpp |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_profiles.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_profiles.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/create_queue.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/info_collector.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_queue.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/info_collector.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/service.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/service.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/common/timeout.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/auth_factory.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_permissions.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_factory.cpp |77.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_permissions.cpp |77.5%| [AR] {RESULT} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/timeout.cpp |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_queue.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_queue.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_queues.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queues.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_minikql_compile_and_exec.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_minikql_compile_and_exec.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ticket_parser.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/security/login_page.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_page.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |77.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/libydb-core-security.a |77.6%| [AR] {RESULT} $(B)/ydb/core/security/libydb-core-security.a |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |77.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |77.6%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/libydb-core-security.a |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queue_leader.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_leader.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/count_queues.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/count_queues.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_message.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_message.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer.cpp |77.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/viewer/viewer.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/manager.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/manager.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/change_visibility.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_pq.cpp |77.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |77.7%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pq.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/change_visibility.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/column_families.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/column_families.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_query.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_query.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_user.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_user.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |77.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |77.7%| [AR] {RESULT} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |77.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |77.7%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |77.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |77.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |77.7%| [AR] {RESULT} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_wb_req.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_wb_req.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_operation.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_operation.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/service_actor.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/service_actor.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queue_schema.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_schema.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |77.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |77.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__set_config.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__set_config.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_storage.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_storage.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_browse.cpp |77.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/viewer/json_handlers_browse.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |77.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |77.8%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/populator.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/failure_injection.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/failure_injection.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_request.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_request.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/behaviour.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/behaviour.cpp |77.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |77.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |77.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/main.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/main.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer_request.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_request.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |77.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |78.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |78.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/object.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/object.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/service.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/ss_fetcher.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/service.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/ss_fetcher.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |78.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |78.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |78.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/checker.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/checker.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/memory_tracker.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/memory_tracker.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/libydb-core-util.a |78.1%| [AR] {RESULT} $(B)/ydb/core/util/libydb-core-util.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/libydb-core-util.a |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |78.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/vdisk_write.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/vdisk_write.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/manager.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/manager.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/behaviour.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/behaviour.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a |78.1%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |78.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |78.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/time_cast/time_cast.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |78.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |78.1%| [AR] {RESULT} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/describe.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/describe.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/libydb-core-viewer.a |78.2%| [AR] {RESULT} $(B)/ydb/core/viewer/libydb-core-viewer.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |78.2%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_cache.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |78.2%| [AR] 
{RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/initializer.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/initializer.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |78.2%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |78.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |78.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |78.3%| [AR] {RESULT} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |78.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |78.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |78.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |78.3%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |78.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.3%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |78.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/cache.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/worker.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |78.4%| [AR] {RESULT} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/object_storage.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/random.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |78.4%| [AR] {RESULT} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/random.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |78.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |78.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.4%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |78.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_slider.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_slider.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |78.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.5%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/memory.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/memory.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/sharding.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/sharding.cpp |78.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.5%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.5%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |78.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/state_server_interface.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/state_server_interface.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |78.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |78.6%| [AR] {RESULT} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__create_tenant.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__create_tenant.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/rule/ss_checker.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/rule/ss_checker.cpp |78.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |78.6%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |78.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |78.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |78.6%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__init_scheme.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__init_scheme.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |78.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |78.7%| [AR] {RESULT} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/common.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/common.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |78.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |78.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |78.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |78.7%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |78.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/checker.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/checker.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |78.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |78.7%| [AR] {RESULT} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/object.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/object.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |78.7%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service.cpp |78.8%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/group_write.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/group_write.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |78.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/libydb-core-load_test.a |78.9%| [AR] {RESULT} 
$(B)/ydb/core/load_test/libydb-core-load_test.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/manager.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/manager.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |79.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |79.0%| [AR] {RESULT} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |79.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |79.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |79.0%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |79.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |79.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |79.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |79.0%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |79.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |79.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |79.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/initializer.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/initializer.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |79.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |79.1%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |79.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_local_minikql.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_local_minikql.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.2%| [AR] {RESULT} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_console.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_console.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |79.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |79.2%| [AR] {RESULT} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/load_test.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/load_test.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |79.3%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |79.3%| [AR] {RESULT} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |79.3%| [AR] {RESULT} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |79.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/config_helpers.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/config_helpers.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/controller.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/controller.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |79.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.4%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |79.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |79.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |79.4%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__load_state.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__load_state.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/external_data.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/external_data.cpp |79.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |79.4%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |79.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |79.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |79.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |79.5%| [AR] {RESULT} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |79.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |79.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |79.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |79.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_write.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_write.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |79.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |79.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_local_enumerate_tablets.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_local_enumerate_tablets.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |79.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |79.6%| [AR] 
{BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |79.7%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |79.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |79.7%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |79.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_common.cpp |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |79.7%| [AR] {RESULT} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |79.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |79.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/client/server/msgbus_server.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |79.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |79.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |79.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/upload_stats.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |79.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |79.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |79.9%| [AR] 
{BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/operation.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_local_scheme_tx.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_local_scheme_tx.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/scan.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/scan.cpp |79.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.9%| [AR] {RESULT} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |79.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |79.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/discovery/discovery.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/discovery/discovery.cpp |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/discovery/libydb-core-discovery.a |80.0%| [AR] {RESULT} $(B)/ydb/core/discovery/libydb-core-discovery.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/import_s3.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/restore_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |80.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |80.0%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |80.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tablet_kill.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_kill.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execution_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |80.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |80.1%| [AR] {RESULT} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |80.1%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__init.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_tx.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |80.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_http_server.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_http_server.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |80.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |80.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/table_settings.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |80.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__write.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/grpc_server.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_server.cpp |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |80.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |80.3%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |80.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |80.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.3%| [AR] {RESULT} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |80.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |80.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |80.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp 
|80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |80.4%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_keyvalue.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_keyvalue.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |80.4%| [AR] {RESULT} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |80.4%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue.cpp |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |80.4%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |80.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.5%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a 
|80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_types.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_types.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |80.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/librun.a |80.5%| [AR] {RESULT} $(B)/ydb/core/driver_lib/run/librun.a |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/librun.a |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |80.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/net_classifier_updater.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |80.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |80.6%| [AR] {RESULT} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_state.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_state.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |80.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |80.6%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/immediate_control_board_actor.cpp |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor.cpp |80.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/libydb-core-control.a |80.6%| [AR] {RESULT} $(B)/ydb/core/control/libydb-core-control.a |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/libydb-core-control.a |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |80.6%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |80.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/program/program.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/program/program.cpp |80.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/program/libcore-tx-program.a |80.6%| [AR] {RESULT} $(B)/ydb/core/tx/program/libcore-tx-program.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |80.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |80.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |80.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |80.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |80.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |80.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |80.7%| [AR] {RESULT} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/proxy.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy.cpp |80.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.7%| [AR] {RESULT} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |80.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |80.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructor.cpp |80.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructor.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |80.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/iterator.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/iterator.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |80.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |80.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_locks.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |80.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |80.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/granule_view.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/granule_view.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_manager.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/logger.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/logger.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_manager.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/filler.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/filler.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |80.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |80.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |80.8%| [AR] {RESULT} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |80.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/policy.cpp |80.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |80.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/policy.cpp |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/health_check/health_check.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check.cpp |80.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.9%| [AR] {RESULT} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/shard_writer.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shard_writer.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |80.9%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/auth_factory.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_factory.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_cms.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_provider.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_provider.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |80.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_scan.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/local_kmeans.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/local_kmeans.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/backup_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/follower_edge.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp 
|81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |81.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |81.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/metadata.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/metadata.cpp |81.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |81.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |81.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |81.0%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/client/server/msgbus_server_cms.cpp |81.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |81.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/abstract.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/abstract.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |81.1%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.1%| [AR] {RESULT} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/backup_restore_common.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/backup_restore_common.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_sqs.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_sqs.cpp |81.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |81.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |81.2%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_login.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/add_data.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_data.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/kesus/grpc_service.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/kesus/grpc_service.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/kesus/libydb-services-kesus.a |81.2%| [AR] {RESULT} $(B)/ydb/services/kesus/libydb-services-kesus.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |81.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |81.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |81.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |81.3%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/services/ext_index/service/activation.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/activation.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/deleting.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/deleting.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/executor.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/executor.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |81.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/libcore-cms-console.a |81.3%| [AR] {RESULT} $(B)/ydb/core/cms/console/libcore-cms-console.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/add_index.cpp |81.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_index.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |81.4%| [AR] {RESULT} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |81.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/optimizer.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/optimizer.cpp |81.4%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/libcore-client-server.a |81.4%| [AR] {RESULT} $(B)/ydb/core/client/server/libcore-client-server.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |81.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |81.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_backup.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_view.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/rpc_describe_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/libydb-core-tx.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/libydb-core-tx.a |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx.cpp |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |81.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |81.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_bsconfig.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_bsconfig.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_insert_table.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |81.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_insert_table.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |81.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |81.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |81.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/libydb-core-cms.a |81.6%| [AR] {RESULT} $(B)/ydb/core/cms/libydb-core-cms.a |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.6%| [AR] {RESULT} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |81.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.6%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |81.6%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |81.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/operation_helpers.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |81.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |81.7%| [AR] {RESULT} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/common_helper.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/common_helper.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/blocks.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blocks.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_fq.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |81.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |81.7%| [AR] {RESULT} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_uncertain.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_uncertain.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/coro_tx.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/coro_tx.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_gc.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_gc.cpp |81.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |81.7%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/migrate.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/migrate.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |81.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |81.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |81.8%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/http_req.cpp |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.8%| [AR] {RESULT} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_req.cpp |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_mon.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_mon.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |81.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/insert_table/insert_table.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/insert_table.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_resolve.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_resolve.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/assimilator.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/assimilator.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_decommit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_decommit.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |81.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |81.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__register_node.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__register_node.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_load.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_load.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |81.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |81.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |81.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |81.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |81.9%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |81.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |81.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/init/init.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/init/init.cpp |81.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |81.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_filter_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_filter_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/fq/libs/row_dispatcher/ut/json_parser_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_parser_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/ydbd/main.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydbd/main.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |81.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_group_info.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_group_info.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/local.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/local.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/blob_depot.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blob_depot.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/mvp_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/labels_maintainer.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/labels_maintainer.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/monitoring.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/monitoring.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |82.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |82.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/health/health.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/health/health.cpp |82.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |82.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |82.1%| [AR] {RESULT} 
$(B)/ydb/core/fq/libs/health/libfq-libs-health.a |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__load_state.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__load_state.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |82.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |82.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/space_monitor.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/space_monitor.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |82.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/balancer.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/balancer.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |82.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_statics.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_statics.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |82.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |82.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |82.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/hive/tx__start_tablet.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/test_client.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/test_client.cpp |82.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.2%| [AR] {RESULT} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/domain_info.cpp |82.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |82.2%| [AR] {RESULT} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/domain_info.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/dynamic_nameserver.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver.cpp |82.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/counters.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/counters.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |82.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.3%| [AR] {RESULT} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/test_server.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/test_server.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_log.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_log.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/http.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/http.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |82.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.3%| [AR] {RESULT} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__register_node.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__register_node.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_domains.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_domains.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/fill.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/fill.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/boot_queue.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/boot_queue.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |82.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_apply_config.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_apply_config.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |82.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |82.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |82.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/bscontroller/self_heal.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/ut_utils.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/ut_utils.cpp |82.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |82.4%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tablet_info.cpp |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_info.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |82.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |82.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |82.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/hive/tx__release_tablets.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |82.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |82.4%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |82.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_balancer.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_balancer.cpp |82.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |82.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |82.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |82.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/scrub.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/scrub.cpp |82.5%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/snapshot_from_chunks.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/snapshot_from_chunks.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_impl.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |82.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |82.5%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/node_report.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/node_report.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/lease_holder.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_portion_from_chunks.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/lease_holder.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_portion_from_chunks.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/mon_main.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/mon_main.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |82.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |82.6%| [AR] {RESULT} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |82.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |82.6%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/grouper.cpp |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |82.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/proxy.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/proxy.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/bsc.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp 
|82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bsc.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/garbage.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/garbage.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/get_group.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/get_group.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/testing.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/testing.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp 
|82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |82.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/request.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/request.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/node_info.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/node_info.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/drain.cpp |82.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/hive/drain.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_pool.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_pool.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |82.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/libydb-core-mind.a |82.8%| [AR] {RESULT} $(B)/ydb/core/mind/libydb-core-mind.a |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |82.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/comm.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/read.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/comm.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/garbage_collection.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/read.cpp |82.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/garbage_collection.cpp |82.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |82.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |82.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |82.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |82.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |82.8%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |82.8%| [AR] {RESULT} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config.cpp |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |82.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |82.9%| [AR] {RESULT} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_init_schema.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_init_schema.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/agent.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/agent.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/metrics.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/metrics.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_load.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_load.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/query.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/query.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |82.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |82.9%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/blocks.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blocks.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |82.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |82.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/register_node.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/register_node.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |82.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |82.9%| [AR] {RESULT} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |82.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/event.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/event.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |83.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_trash.cpp |83.0%| [AR] {RESULT} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_trash.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a 
|83.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/locks/locks.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/locks/locks.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/insert_table/path_info.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/path_info.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |83.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/status.cpp |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/status.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |83.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [AR] {RESULT} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/given_id_range.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/insert_table/rt_insertion.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/insert_table/rt_insertion.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |83.1%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/long_tx_write.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/long_tx_write.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/events.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/events.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__status.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__status.cpp |83.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |83.1%| [AR] {RESULT} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |83.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/write.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/indexation.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/indexation.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |83.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |83.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/downtime_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |83.2%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |83.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |83.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_proccessor.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/main.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/main.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |83.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp 
|83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_ut_local.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/table_creator/table_creator_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_import_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |83.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_group/main.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tenants_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |83.5%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cluster_info_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |83.5%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |83.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp |83.6%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/resource_broker_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |83.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |83.6%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |83.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |83.6%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_guardian.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_guardian.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_ut_configs.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |83.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/datastreams_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |83.8%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_lookup.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_lookup.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |83.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |83.8%| [AR] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |83.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_kqp.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/ut_helpers.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |83.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |83.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |83.9%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |83.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |84.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_query_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |84.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_table_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |84.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |84.2%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/health_check/health_check_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_replay.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |84.2%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |84.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |84.2%| [AR] {RESULT} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |84.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_labeled.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/main.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |84.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp 
|84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |84.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/ut_helpers.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |84.4%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/ut/graph_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/appdata.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/appdata.cpp |84.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_replica.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_replica.cpp |84.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp 
|84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/query_actor/query_actor_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/blobsan/main.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/blobsan/main.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |84.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp 
|84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ut_ycsb.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |84.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/utils/actors/http_sender_actor_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/utils/actors/http_sender_actor_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_ut_pool.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_publish.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_publish.cpp |84.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/libydb-core-base.a |84.7%| [AR] {RESULT} $(B)/ydb/core/base/libydb-core-base.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/libydb-core-base.a |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ticket_parser_ut.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_compiler.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |84.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |84.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |84.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |84.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |84.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |84.8%| [TA] $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |84.8%| [TA] {RESULT} $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |84.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |84.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_subscriber_ut.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |84.8%| [TA] $(B)/ydb/tests/functional/tpc/test-results/py3test/{meta.json ... results_accumulator.log} |84.8%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/test-results/py3test/{meta.json ... 
results_accumulator.log} |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp |84.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |84.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.9%| [AR] {RESULT} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |84.9%| [LD] {RESULT} 
$(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |84.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |84.9%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |84.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut/ydb-core-base-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |84.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |84.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |84.9%| [AR] {RESULT} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |84.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |84.9%| [LD] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |84.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |84.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |84.9%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |84.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sequenceshard/public/ut/unittest |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut >> TBlobStorageIngressMatrix::MatrixTest [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |84.9%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |85.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/ut/ydb-core-util-ut 
>> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |85.0%| [LD] {RESULT} $(B)/ydb/core/util/ut/ydb-core-util-ut >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |85.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |85.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |85.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |85.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |85.0%| 
[LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |85.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 >> TBlobStorageCompStrat::Test1 [GOOD] |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] |85.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker_ut.cpp >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularQueueTest::Empty [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> 
TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |85.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TULID::Generate [GOOD] >> TULID::EveryBitOrder [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TWildcardTest::TestWildcards [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TQueueInplaceTests::CleanInDestructor [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp >> TTokenBucketTest::DelayCalculation [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TULID::HeadByteOrder [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TULID::ParseAndFormat [GOOD] >> TULID::TailByteOrder [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TStrongTypeTest::DefaultConstructorValue [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp >> TQueueBackpressureTest::CreateDelete [GOOD] >> TBlobStorageQueueTest::TMessageLost [GOOD] |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_common.cpp >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> 
TFreshAppendixTest::IterateBackwardExcluding [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> StatsFormat::AggregateStat [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |85.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] |85.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> TPDiskTest::TestPDiskActorErrorState >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |85.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp >> TYardTest::TestInit |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] >> TPDiskTest::TestPDiskOwnerRecreation >> TLogCache::Simple [GOOD] >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TPDiskUtil::SectorPrint [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool >> TPDiskRaces::KillOwnerWhileDeletingChunk |85.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp >> 
TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> TBlobStorageHullDecimal::TestToUi64 [GOOD] >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector >> TYardTest::TestInit [GOOD] >> TYardTest::TestInitOnIncompleteFormat >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> TBlobStorageHullStorageRatio::Test [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |85.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TBlobStorageHullDecimal::TestMult [GOOD] >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |85.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 >> TPDiskTest::TestPDiskOwnerRecreation [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> TYardTest::TestInitOwner |85.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestIncorrectRequests >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> 
TBlobStorageHullDecimal::TestMult [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestEmptyLogRead >> TActorTest::TestStateSwitch [GOOD] >> TPDiskUtil::TestBufferPool [GOOD] >> TPDiskUtil::SectorMap |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestLogContinuityPersistence |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |85.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector |85.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector |85.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |85.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |85.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |85.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestStateSwitch [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart >> TYardTest::TestLogContinuityPersistence [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge >> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD] >> TPDiskTest::TestVDiskMock |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] >> TActorTest::TestSendFromAnotherThread |85.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime [GOOD] |85.2%| [LD] 
{RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init >> TActorTest::TestCreateChildActor >> TActorTest::TestWaitFor >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile >> TActorTest::TestCreateChildActor [GOOD] >> TActorTest::TestBlockEvents >> TActorTest::TestWaitFor [GOOD] |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a >> TActorTest::TestBlockEvents [GOOD] >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay >> TActorTest::TestSendAfterDelay [GOOD] |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.2%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |85.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... waiting for value = 42 (done) |85.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> TActorTest::TestWaitForFirstEvent >> TActorTest::TestDie ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... 
blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... waiting for processed 3 more events (done) |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] >> TActorTest::TestDie [GOOD] >> TActorTest::TestFilteredGrab >> TActorTest::TestFilteredGrab [GOOD] |85.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> TActorTest::TestSendFromAnotherThread [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestHttpInfo >> TDelayedResponsesTests::Test [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... 
waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) |85.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |85.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep |85.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |85.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |85.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] |85.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> ReadBatcher::ReadBatcher |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> TYardTest::TestFirstRecordToKeep [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder >> TIntrusiveStackTest::TestPushPop [GOOD] |85.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> TBlobStorageHullCompactDeferredQueueTest::Basic |85.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |85.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TIntervalSetTest::IntervalSetTestAdd >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |85.2%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> TPDiskTest::TestRealFile [GOOD] >> TPDiskTest::TestSIGSEGVInTUndelivered >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |85.2%| [LD] {RESULT} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut >> ReadBatcher::Range >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalSetUnion |85.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut >> TPDiskTest::WrongPDiskKey >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersection >> TPDiskTest::WrongPDiskKey [GOOD] >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] |85.2%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TPDiskUtil::Light [GOOD] >> TPDiskUtil::LightOverflow >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/ut_vdisk2/unittest >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> TYardTest::TestDestroySystem >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifference |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |85.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |85.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a >> ReadBatcher::ReadBatcher [GOOD] |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TYardTest::TestDestroySystem [GOOD] >> TYardTest::TestDestructionWhileWritingChunk >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector >> TQueueBackpressureTest::PerfInFlight |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut >> TYardTest::TestDestructionWhileWritingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingChunk >> VDiskTest::HugeBlobWrite |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingLog >> ReadBatcher::Range [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector >> TActorTest::TestScheduleEvent [GOOD] >> TActorTest::TestScheduleReaction [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> TActorTest::TestWaitFuture >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TYardTest::TestFormatInfo |85.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |85.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp >> TActorTest::TestWaitFuture [GOOD] >> TBlockDeviceTest::WriteReadRestart [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> PDiskCompatibilityInfo::OldCompatible >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk >> TQueueBackpressureTest::PerfInFlight [GOOD] |85.3%| [CC] 
{BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |85.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |85.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] |85.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut >> TYardTest::TestBadDeviceInit |85.3%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] |85.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp >> PDiskCompatibilityInfo::Incompatible [GOOD] >> PDiskCompatibilityInfo::NewIncompatibleWithDefault >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::TestChunkContinuity2 >> TQueueBackpressureTest::PerfTrivial |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> TYardTest::TestChunkContinuity2 [GOOD] >> TYardTest::TestChunkContinuity3000 |85.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] >> PDiskCompatibilityInfo::Trunk |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TYardTest::TestChunkContinuity3000 [GOOD] >> TYardTest::TestChunkContinuity9000 >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] |85.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> PDiskCompatibilityInfo::Trunk [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] >> TYardTest::TestChunkContinuity9000 [GOOD] >> TYardTest::TestChunkLock >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> TQueueBackpressureTest::PerfTrivial [GOOD] >> PDiskCompatibilityInfo::Migration >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestCheckSpace |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestBootingState >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> PDiskCompatibilityInfo::Migration [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TYardTest::TestBootingState [GOOD] >> TYardTest::Test3AsyncLog |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] |85.4%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TYardTest::TestWholeLogRead |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut/ydb-core-base-ut |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] >> TYardTest::TestWholeLogRead [GOOD] >> TYardTest::TestSysLogReordering >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::TestChunkDelete >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TPDiskTest::TestChunkWriteRelease >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> FormatTimes::ParseDuration [GOOD] |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> PDiskCompatibilityInfo::Migration [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 2024-11-21T08:46:53.597893Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:53.621194Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13287933566166075195 MagicNextLogChunkReference: 10300113528406594952 MagicLogChunk: 13195484753818838855 MagicDataChunk: 14092787136941847904 MagicSysLogChunk: 14347161800300050043 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813459835 (2024-11-21T08:46:53.459835Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:53.628324Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:53.640405Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:53.640710Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | 
DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:53.640939Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:53.652271Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:53.660241Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:53.660537Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1266907 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:53.668449Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:46:53.688452Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T08:46:53.688687Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1266907 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:53.702447Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:53.724346Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13287933566166075195 MagicNextLogChunkReference: 10300113528406594952 MagicLogChunk: 13195484753818838855 MagicDataChunk: 14092787136941847904 MagicSysLogChunk: 14347161800300050043 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813459835 (2024-11-21T08:46:53.459835Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:53.744412Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1300213 NonceLog# 1266907 NonceData# 1576718} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:53.752278Z node 1 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:46:53.752319Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags 
LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T08:46:53.752344Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:53.755236Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:53.755427Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:53.755495Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:53.760447Z node 1 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T08:46:53.881617Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:53.904059Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9887682316560469421 MagicNextLogChunkReference: 3185331040234202765 MagicLogChunk: 7754068345317695707 MagicDataChunk: 5789262078702165162 MagicSysLogChunk: 11824026462480238494 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813829297 (2024-11-21T08:46:53.829297Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:53.916376Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:53.928313Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:53.928362Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:53.932399Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:53.932547Z node 2 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:53.932589Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:53.944273Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1734252 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T08:46:53.952311Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:46:53.972439Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T08:46:53.976389Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [1:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1734252 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:53.990315Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:54.016352Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9887682316560469421 MagicNextLogChunkReference: 3185331040234202765 MagicLogChunk: 7754068345317695707 MagicDataChunk: 5789262078702165162 MagicSysLogChunk: 11824026462480238494 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813829297 (2024-11-21T08:46:53.829297Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:54.028321Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1227431 NonceLog# 1734252 NonceData# 1840410} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [1:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:54.038637Z node 2 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl_log.cpp:431} ... 
ge_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T08:46:55.007595Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:55.012344Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.012618Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.012708Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:55.016501Z node 5 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [4:4294967295:0:0:0] OwnerId# 3 OwnerRound# 11 PDiskId# 1 2024-11-21T08:46:55.213691Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:55.240313Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9277276208524320882 MagicNextLogChunkReference: 16289182047447532631 MagicLogChunk: 3226135953157734614 MagicDataChunk: 17624610252586452723 MagicSysLogChunk: 10529758029644976272 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178815059915 (2024-11-21T08:46:55.059915Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:55.252376Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:55.256314Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:55.256350Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:55.260462Z node 6 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:55.264255Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:55.272247Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:55.272386Z node 6 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [5:_:0:0:0] FirstNonceToKeep# 1121885 CutLogId# [0:0:0] ownerRound# 12 PDiskId# 1 2024-11-21T08:46:55.280322Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:46:55.296350Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T08:46:55.296533Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [5:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1121885 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:55.304060Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:55.333563Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9277276208524320882 MagicNextLogChunkReference: 16289182047447532631 MagicLogChunk: 3226135953157734614 MagicDataChunk: 17624610252586452723 MagicSysLogChunk: 10529758029644976272 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178815059915 (2024-11-21T08:46:55.059915Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:55.340834Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1996123 NonceLog# 1121885 NonceData# 1771054} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [5:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:55.353560Z node 6 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:46:55.353617Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T08:46:55.353645Z node 6 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:55.356419Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.360854Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.368248Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:55.368454Z node 6 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [5:4294967295:0:0:0] OwnerId# 3 OwnerRound# 13 PDiskId# 1 2024-11-21T08:46:55.373204Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T08:46:55.380297Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [5:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1121885 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:55.386339Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:55.412318Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9277276208524320882 MagicNextLogChunkReference: 16289182047447532631 MagicLogChunk: 3226135953157734614 MagicDataChunk: 17624610252586452723 MagicSysLogChunk: 10529758029644976272 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178815059915 (2024-11-21T08:46:55.059915Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:55.426840Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3562385 NonceLog# 2801604 NonceData# 3311850} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [5:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:55.428268Z node 6 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:46:55.428308Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 4 OffsetInSector# 0 In ProcessSectorSet 
got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 16384} PDiskId# 1 2024-11-21T08:46:55.428333Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 16384} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:55.432419Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.432784Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:55.432840Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:55.440480Z node 6 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [5:4294967295:0:0:0] OwnerId# 3 OwnerRound# 14 PDiskId# 1 |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |85.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |85.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |85.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a >> TYardTest::TestChunkReadRandomOffset >> TYardTest::TestChunkForget [GOOD] >> TYardTest::Test3HugeAsyncLog |85.4%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |85.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] |85.4%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |85.4%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> FormatTimes::DurationUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 
144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 >> FormatTimes::DurationMs [GOOD] >> Config::IncludeScope [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut >> Config::ExcludeScope [GOOD] >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |85.4%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |85.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |85.4%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump >> StatsFormat::FullStat [GOOD] |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |85.4%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut >> TPDiskTest::TestChunkWriteRelease [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts >> TYardTest::TestLogWriteRead |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |85.4%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |85.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |85.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |85.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a >> TYardTest::Test3HugeAsyncLog [GOOD] >> TYardTest::TestChunkFlushReboot >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap |85.5%| [AR] {RESULT} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> TFlatDatabasePgTest::BasicTypes >> TFlatDatabasePgTest::BasicTypes [GOOD] |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> TYardTest::TestChunkFlushReboot [GOOD] >> TYardTest::TestAllocateAllChunks |85.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] >> TYardTest::TestAllocateAllChunks [GOOD] >> TYardTest::TestChunkDeletionWhileWriting |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |85.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump >> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD] >> TYardTest::TestLogWriteReadLarge |85.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> TYardTest::TestLogWriteReadLarge [GOOD] >> TYardTest::TestLogWriteCutEqual |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |85.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp >> TYardTest::TestChunkDeletionWhileWriting [GOOD] |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> 
TPDiskTest::TestMultipleLogSpliceNonceJump |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/test/tool/surg/surg >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] |85.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |85.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |85.5%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |85.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite >> TBlobStorageHullFreshSegment::PerfAppendix |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkDeletionWhileWriting [GOOD] Test command err: 2024-11-21T08:46:53.948237Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:53.964867Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9411788975582354143 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 1658880 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T08:46:54.182165Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:54.196575Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 14521625233422125021 MagicNextLogChunkReference: 18179660011392815254 MagicLogChunk: 9801549022194585592 MagicDataChunk: 10453532137968478360 MagicSysLogChunk: 16773818990000833689 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178814090004 (2024-11-21T08:46:54.090004Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:54.204633Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:54.216289Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:54.216515Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:54.220336Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.220518Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut 
LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.220588Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:54.272428Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1130061 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:54.359934Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:54.385179Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:46:54.396331Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:46:54.482380Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:54.482401Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:46:54.508307Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 8755563466299103368 MagicLogChunk: 10314688365359521833 MagicDataChunk: 11885646850878623123 MagicSysLogChunk: 15477306511470633765 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178814442353 (2024-11-21T08:46:54.442353Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:54.514624Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:54.520286Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:54.520319Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:54.520510Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.520675Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.520747Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:54.522287Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1298180 CutLogId# [0:0:0] 
ownerRound# 2 PDiskId# 1 2024-11-21T08:46:54.763046Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:54.780325Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 16214957827794521943 MagicNextLogChunkReference: 8399336477291207715 MagicLogChunk: 9801410881527996804 MagicDataChunk: 2732501481968501905 MagicSysLogChunk: 14161116593287557976 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178814635902 (2024-11-21T08:46:54.635902Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:54.792325Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:54.801272Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:54.801306Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:54.804369Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.804668Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:54.804812Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:54.860304Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1933610 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:55.017748Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:55.044303Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 1235842156248960184 MagicNextLogChunkReference: 10851621635335346018 MagicLogChunk: 11374393923491589804 MagicDataChunk: 16358909164415390991 MagicSysLogChunk: 135717642272287550 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178814932368 (2024-11-21T08:46:54.932368Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 
2024-11-21T08:46:55.058503Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:55.059502Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:55.059531Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskI ... -125 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 125-125 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 125-125 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:46:57.848735Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.864533Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:46:57.868171Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 2024-11-21T08:47:00.666082Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.692317Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12529897370539510731 MagicNextLogChunkReference: 15037901654302443536 MagicLogChunk: 10741200906326790059 MagicDataChunk: 10823799245425152036 MagicSysLogChunk: 4743622935212731280 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820550317 (2024-11-21T08:47:00.550317Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:00.697805Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:00.704279Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:00.704314Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:00.708350Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.708608Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.708713Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:00.760348Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1630030 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:00.829851Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.864317Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12529897370539510731 MagicNextLogChunkReference: 15037901654302443536 MagicLogChunk: 10741200906326790059 MagicDataChunk: 10823799245425152036 MagicSysLogChunk: 4743622935212731280 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820550317 (2024-11-21T08:47:00.550317Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:00.884371Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1673355 NonceLog# 1630030 NonceData# 1464810} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:00.900300Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:00.900350Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2024-11-21T08:47:00.900379Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:00.904381Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:00.904692Z 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:00.904744Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:00.912645Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:00.920288Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:919} PDiskId# 1 chunk owned by the system for ownerId# 3 can't read chunkIdx# 2 PDiskId# 1 2024-11-21T08:47:01.037917Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:01.060472Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 2772322828563105933 MagicNextLogChunkReference: 4190552683022055748 MagicLogChunk: 926407462037655720 MagicDataChunk: 11662569799903647013 MagicSysLogChunk: 7698307269611348806 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820973252 (2024-11-21T08:47:00.973252Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:01.062029Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:01.068619Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:01.068664Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:01.072383Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.072640Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.072759Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:01.128365Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1224610 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:01.393901Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:01.420355Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# 
{TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 12823346202040056000 MagicNextLogChunkReference: 16691504345704278998 MagicLogChunk: 8632976481697236955 MagicDataChunk: 3249947270878365455 MagicSysLogChunk: 5346051482427789975 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178821167194 (2024-11-21T08:47:01.167194Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:01.436355Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:01.440326Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:01.440357Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:01.444382Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.444518Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.444566Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:01.484408Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1644116 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |85.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/btree_benchmark/btree_benchmark >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> 
TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead >> SysViewQueryHistory::TopDurationAdd [GOOD] |85.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [LD] {RESULT} $(B)/ydb/core/util/btree_benchmark/btree_benchmark >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] >> TBlobStorageHullFresh::AppendixPerf |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TBlobStorageIngress::IngressPartsWeMustHaveLocally [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |85.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> TIntervalSetTest::IntervalVecTestEmpty |85.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> AddressClassifierTest::TestAddressExtraction [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> TBTreeTest::Basics [GOOD] >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite |85.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> 
TYardTest::TestSysLogReordering [GOOD] >> TYardTest::TestStartingPoints |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> TPDiskTest::PDiskRestart |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> THazardTest::CachedPointers [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestAddRandom >> TPDiskTest::PDiskRestart [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites |85.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve >> DSProxyStrategyTest::Restore_block42 |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut >> TCircleBufTest::SimpleTest [GOOD] >> THyperLogCounterTest::TestAddRandom [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> THyperLogCounterTest::TestAddFixed [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TPDiskErrorStateTests::Basic [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapIntersection >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList |85.6%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut >> SysViewQueryHistory::StableMerge [GOOD] >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe |85.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace |85.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |85.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [AR] {RESULT} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a >> TCacheCacheTest::Random >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardStartingPoints >> TCacheCacheTest::Random [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> TCacheTest::Test2QCache [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::Basic2 [GOOD] |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |85.6%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 >> TYardTest::TestMultiYardStartingPoints [GOOD] >> TYardTest::TestSysLogOverwrite >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] Test command err: 0.27294 |85.6%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |85.6%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] >> TTrackable::TString [GOOD] >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |85.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_counters.cpp |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |85.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> TYardTest::TestSysLogOverwrite [GOOD] >> TYardTest::TestUpsAndDownsAtTheBoundary |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 0 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 291185 us testing erasure mirror-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 512 cases, took 41 us testing erasure block-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 429 us testing erasure mirror-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 64 cases, took 5 us testing erasure block-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 5177 us testing erasure stripe-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 447 us |85.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCowBTreeTest::ClearAndReuse [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TCowBTreeTest::MultipleSnapshots >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> TCowBTreeTest::Basics [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] |85.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |85.7%| [AR] {RESULT} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> DSProxyStrategyTest::Restore_mirror3dc |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |85.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TIncrHugeBasicTest::Recovery [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] 
>> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] >> TPDiskTest::SmallDisk10Gb |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |85.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] >> TPDiskTest::SmallDisk10Gb [GOOD] >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart |85.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] >> TopTest::Test2 [GOOD] |85.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> VarLengthIntCodec::BasicTest64 [GOOD] >> VarLengthIntCodec::Random32 >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> VarLengthIntCodec::Random32 [GOOD] >> VarLengthIntCodec::Random64 |85.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> VarLengthIntCodec::Random64 [GOOD] |85.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] |85.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] |85.7%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/provider/libcore-kqp-provider.a >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TChainLayoutBuilder::TestProdConf [GOOD] >> THugeHeapCtxTests::Basic [GOOD] >> TSyncNeighborsTests::SerDes3 [GOOD] >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TYardTest::TestChunkWriteRead |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> TPageMapTest::TestRandom [GOOD] >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TBsLocalRecovery::StartStopNotEmptyDB >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] |85.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/blobsan/blobsan >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> TYardTest::TestRedZoneSurvivability [GOOD] >> TYardTest::TestSlay >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] |85.7%| [LD] {RESULT} $(B)/ydb/tools/blobsan/blobsan |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |85.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeDefs::FreeRes1 [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |85.7%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> TYardTest::TestSlay [GOOD] >> 
TYardTest::TestSlayRace |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TYardTest::TestChunkWriteRead [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap >> TYardTest::TestSlayRace [GOOD] >> TYardTest::TestSlayRecreate >> HullReplWriteSst::Basic |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeDefs::FreeRes1 [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] >> TYardTest::TestSlayRecreate [GOOD] >> TYardTest::TestSlayLogWriteRaceActor |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] Test command err: 2024-11-21T08:46:57.071198Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.088344Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 582314293000817068 MagicNextLogChunkReference: 10365302281034071403 MagicLogChunk: 10379001150785864597 MagicDataChunk: 10291992429935246325 MagicSysLogChunk: 14355356038934059956 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178816904536 (2024-11-21T08:46:56.904536Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.093187Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.100308Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.100524Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.100676Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo 
startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.108286Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.108520Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.108590Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1246748 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.112309Z node 1 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_RESERVED_DELETE_ON_QUARANTINE ownerId# 3 PDiskId# 1 2024-11-21T08:46:57.230049Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.242167Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 3406601495313023097 MagicNextLogChunkReference: 7777238532113786853 MagicLogChunk: 1652592894346684455 MagicDataChunk: 213330211563244673 MagicSysLogChunk: 2824362893666647572 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178817165240 (2024-11-21T08:46:57.165240Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.247509Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.252279Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.252309Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.252485Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.252655Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.252732Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.260426Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1871142 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 
2024-11-21T08:46:57.284305Z node 2 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2024-11-21T08:46:57.422850Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.443647Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 15645907147690270628 MagicNextLogChunkReference: 6629626738152381928 MagicLogChunk: 14451726975412545245 MagicDataChunk: 12474368960446987334 MagicSysLogChunk: 14729718311454163704 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178817371549 (2024-11-21T08:46:57.371549Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.456260Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.469245Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.469264Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.469388Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.469527Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.469589Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.472086Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1893470 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.491331Z node 3 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2024-11-21T08:46:57.609734Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.632327Z node 4 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 10186872363129661912 MagicNextLogChunkReference: 17688066205143472655 MagicLogChunk: 
8974676061052355217 MagicDataChunk: 13342585033062354597 MagicSysLogChunk: 3447084825498744994 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178817557362 (2024-11-21T08:46:57.557362Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.640285Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.648269Z node 4 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.648297Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.649729Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.656283Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.657404Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.660375Z node 4 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1150188 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.666472Z node 4 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2024-11-21T08:46:57.727135Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.756337Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 13421 ... 
firstLsnToKeep# 0},},{chunkIdx# 94 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 95 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 96 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 97 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 98 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 0},},{chunkIdx# 99 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 100 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 101 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 102 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 103 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 104 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 105 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 106 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 107 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 108 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 109 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 110 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 111 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 112 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 113 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 114 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 115 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 116 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 117 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 118 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 119 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 120 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 121 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 122 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 123 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 124 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 125 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 126 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 127 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 128 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 129 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 130 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 131 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 0},},{chunkIdx# 132 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 133 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 134 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 135 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 136 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 137 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 138 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 
firstLsnToKeep# 0},},{chunkIdx# 139 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 140 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 141 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 142 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 143 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 144 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 145 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 146 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 147 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 148 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 149 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 150 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 151 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 152 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 153 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 154 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 155 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 156 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 157 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 158 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 159 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 160 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 161 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 162 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 163 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:12.458360Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:12.458587Z node 30 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [f:4294967295:0:0:0] OwnerId# 3 OwnerRound# 101 PDiskId# 1 2024-11-21T08:47:12.459914Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# false StatusFlags# IsValid Results.size# 2} PDiskId# 1 2024-11-21T08:47:12.514474Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 1 OffsetInChunk# 12288} nextPosition# { ChunkIdx# 33 OffsetInChunk# 1069056} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:12.551315Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 33 OffsetInChunk# 1069056} nextPosition# { ChunkIdx# 66 OffsetInChunk# 40960} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:12.718361Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 
66 OffsetInChunk# 40960} nextPosition# { ChunkIdx# 98 OffsetInChunk# 1097728} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:12.792268Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 98 OffsetInChunk# 1097728} nextPosition# { ChunkIdx# 131 OffsetInChunk# 69632} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:12.862511Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 131 OffsetInChunk# 69632} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:13.441790Z node 31 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:13.480353Z node 31 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18209192371248112569 MagicNextLogChunkReference: 17767223297904556899 MagicLogChunk: 5191832278781437529 MagicDataChunk: 6995060944392562793 MagicSysLogChunk: 11480748822518502307 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178833304788 (2024-11-21T08:47:13.304788Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:13.500310Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:13.516276Z node 31 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:13.516317Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:13.520386Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:13.528227Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:13.536226Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:13.544452Z node 31 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [10:_:0:0:0] FirstNonceToKeep# 1373652 CutLogId# [0:0:0] ownerRound# 102 
PDiskId# 1 2024-11-21T08:47:13.564278Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:47:13.591326Z node 31 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1373653 CutLogId# [0:0:0] ownerRound# 103 PDiskId# 1 2024-11-21T08:47:13.600295Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:47:13.656417Z node 31 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 3 chunk is owner by another owner. chunk's owner# 4 request's owner# 3 PDiskId# 1 2024-11-21T08:47:13.656570Z node 31 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 chunk is owner by another owner. chunk's owner# 3 request's owner# 4 PDiskId# 1 >> TBsLocalRecovery::WriteRestartReadHuge >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] >> TBsVDiskGC::TGCManyVPutsDelTabletTest >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestStartingPointReboots |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh >> TBsVDiskExtreme::SimpleGetFromEmptyDB |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TBsDbStat::ChaoticParallelWrite_DbStat >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |85.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |85.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |85.8%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> TBsVDiskExtremeHuge::Simple3Put3GetFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction >> TBsVDiskBadBlobId::PutBlobWithBadId >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh >> TBsVDiskManyPutGet::ManyPutGet >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/blobsan/blobsan |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> TBsVDiskRepl3::SyncLogTest >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskGC::GCPutKeepIntoEmptyDB >> TBsVDiskRepl1::ReplProxyKeepBits |85.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |85.8%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |85.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp >> TBsVDiskExtreme::Simple3Put3GetFresh |85.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |85.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |85.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction |85.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath |85.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |85.8%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |85.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |85.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskGC::GCPutBarrierVDisk0NoSync |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a >> TopTest::Test1 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace >> TBsVDiskRepl3::SyncLogTest [GOOD] >> THugeMigration::ExtendMap_HugeBlobs >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] >> TBsVDiskDefrag::DefragEmptyDB >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TSyncNeighborsTests::SerDes2 [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard 
[GOOD] >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent >> TCowBTreeTest::MultipleSnapshots [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutBarrierSync >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> TYardTest::TestStartingPointReboots [GOOD] >> TYardTest::TestRestartAtNonceJump >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk |85.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/mrrun/mrrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: 
symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined
[condensed: ld.lld repeats the warning "version script assignment of 'global' to symbol '<symbol>' failed: symbol not defined" for several hundred glibc, pthread and XDR symbols, then the whole block repeats again with a largely overlapping symbol set. Affected families include: epoll/eventfd/inotify/signalfd wrappers; ether_*; fork/vfork/forkpty and wait*; stdio (fopen*, fprintf/fscanf/fread/fwrite, __isoc99_*); xattr (getxattr/listxattr and f/l variants); sockets and resolver (socket*, accept*, listen, recv*/send*, getaddrinfo, gethostby*, getnameinfo, inet_*); passwd/group lookups (getpw*, getgr*); time and locale (gmtime*, localtime*, mktime, strftime, strptime, setlocale); math helpers (frexp*, lgamma*, modf*, remquo*, sincos*); memory and string routines (mem*, str*, mmap*, mlock*); pthread_* (attr/cond/mutex/rwlock/barrier/spin) and sem_*; rlimit/rusage and prlimit*; dl* and backtrace*; __fxstat*/__lxstat*/__xstat*; malloc introspection (mallinfo, mallopt, malloc_stats); and xdr_*.]
ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed:
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'uname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcslen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh |85.8%| [LD] {RESULT} $(B)/ydb/library/yql/tools/mrrun/mrrun |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/mrrun/mrrun |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet |85.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction >> 
TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TCowBTreeTest::SnapshotCascade [GOOD] >> TCowBTreeTest::SnapshotRollback >> TYardTest::TestRestartAtNonceJump [GOOD] >> TYardTest::TestRestartAtChunkEnd |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] >> TYardTest::TestLogWriteCutEqual [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh |85.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |85.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] |85.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly >> TYardTest::TestRestartAtChunkEnd [GOOD] >> TYardTestRestore::TestRestore15 >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TYardTestRestore::TestRestore15 [GOOD] >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRepl1::ReplProxyData >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] |85.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |85.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheCacheTest::MoveToWarm [GOOD] >> TCacheCacheTest::EvictNext [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates [GOOD] >> StLog::Basic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD] Test command err: 2024-11-21T08:46:56.571940Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:56.592347Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11041641937259605241 MagicNextLogChunkReference: 7595899957437682564 MagicLogChunk: 6171482875507818016 MagicDataChunk: 10715264475099739103 MagicSysLogChunk: 8681504232298436353 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178816422406 (2024-11-21T08:46:56.422406Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:56.600308Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:56.608308Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In 
ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:56.608567Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:56.612376Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:56.612585Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:56.612670Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:56.729577Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:56.756318Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:46:56.765464Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:46:56.930398Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:56.930414Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:46:56.953618Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 9812304739447712211 MagicLogChunk: 17971040055998749788 MagicDataChunk: 15812355199724098689 MagicSysLogChunk: 12168577164641472878 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178816840497 (2024-11-21T08:46:56.840497Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:56.968301Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:56.984324Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:56.984363Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} 
PDiskId# 1 2024-11-21T08:46:56.988361Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:56.988530Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:56.988606Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:56.999280Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1828155 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:56.999454Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1828155 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.008280Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1828155 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.008441Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1828155 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.008572Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1828155 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:59.202562Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:59.236439Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 9812304739447712211 MagicLogChunk: 17971040055998749788 MagicDataChunk: 15812355199724098689 MagicSysLogChunk: 12168577164641472878 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178816840497 (2024-11-21T08:46:56.840497Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:59.260325Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1235733 NonceLog# 1834174 NonceData# 1207283} LogHeadChunkIdx# 106 LogHeadChunkPreviousNonce# 1830703 Owner[3]# [0:4294967295:0:0:0] Owner[4]# [1:4294967295:0:0:0] Owner[5]# [2:4294967295:0:0:0] Owner[6]# [3:4294967295:0:0:0] Owner[7]# [4:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:59.303165Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 112 SectorIdx# 411 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 112 OffsetInChunk# 1683456} PDiskId# 1 2024-11-21T08:46:59.303205Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 112 OffsetInChunk# 1683456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange 
| DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:59.303345Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 106 users# 5 endOfSplice# 0 {owner# 3 lsn# 850-1018 firstLsnToKeep# 1001}, {owner# 4 lsn# 850-1019 firstLsnToKeep# 1001}, {owner# 5 lsn# 850-1018 firstLsnToKeep# 1001}, {owner# 6 lsn# 850-1019 firstLsnToKeep# 1001}, {owner# 7 lsn# 849-1018 firstLsnToKeep# 1001},},{chunkIdx# 107 users# 5 endOfSplice# 0 {owner# 3 lsn# 1019-1188 firstLsnToKeep# 1001}, {owner# 4 lsn# 1020-1188 firstLsnToKeep# 1001}, {owner# 5 lsn# 1019-1188 firstLsnToKeep# 1001}, {owner# 6 lsn# 1020-1188 firstLsnToKeep# 1001}, {owner# 7 lsn# 1019-1188 firstLsnToKeep# 1001},},{chunkIdx# 108 users# 5 endOfSplice# 0 {owner# 3 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 4 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 5 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 6 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 7 lsn# 1189-1357 firstLsnToKeep# 1001},},{chunkIdx# 109 users# 5 endOfSplice# 0 {owner# 3 lsn# 1359-1527 firstLsnToKeep# 1001}, {owner# 4 lsn# 1359-1528 firstLsnToKeep# 1001}, {owner# 5 lsn# 1359-1527 firstLsnToKeep# 1001}, {owner# 6 lsn# 1359-1528 firstLsnToKeep# 1001}, {owner# 7 lsn# 1358-1527 firstLsnToKeep# 1001},},{chunkIdx# 110 users# 5 endOfSplice# 0 {owner# 3 lsn# 1528-1697 firstLsnToKeep# 1001}, {owner# 4 lsn# 1529-1697 firstLsnToKeep# 1001}, {owner# 5 lsn# 1528-1697 firstLsnToKeep# 1001}, {owner# 6 lsn# 1529-1697 firstLsnToKeep# 1001}, {owner# 7 lsn# 1528-1697 firstLsnToKeep# 1001},},{chunkIdx# 111 users# 5 endOfSplice# 0 {owner# 3 lsn# 1698-1867 firstLsnToKeep# 1001}, {owner# 4 lsn# 1698-1867 firstLsnToKeep# 1001}, {owner# 5 lsn# 1698-1867 firstLsnToKeep# 1001}, {owner# 6 lsn# 1698-1867 firstLsnToKeep# 1001}, {owner# 7 lsn# 1698-1866 firstLsnToKeep# 1001},},{chunkIdx# 112 users# 5 endOfSplice# 0 {owner# 3 lsn# 1868-2011 firstLsnToKeep# 1001}, {owner# 4 lsn# 1868-2011 firstLsnToKeep# 1001}, {owner# 5 lsn# 1868-2011 firstLsnToKeep# 1001}, {owner# 6 lsn# 1868-2011 firstLsnToKeep# 1001}, {owner# 7 lsn# 1867-2011 firstLsnToKeep# 1001},},] PDiskId# 1 2024-11-21T08:46:59.303491Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 106 users# 5 endOfSplice# 0 {owner# 3 lsn# 850-1018 firstLsnToKeep# 1001}, {owner# 4 lsn# 850-1019 firstLsnToKeep# 1001}, {owner# 5 lsn# 850-1018 firstLsnToKeep# 1001}, {owner# 6 lsn# 850-1019 firstLsnToKeep# 1001}, {owner# 7 lsn# 849-1018 firstLsnToKeep# 1001},},{chunkIdx# 107 users# 5 endOfSplice# 0 {owner# 3 lsn# 1019-1188 firstLsnToKeep# 1001}, {owner# 4 lsn# 1020-1188 firstLsnToKeep# 1001}, {owner# 5 lsn# 1019-1188 firstLsnToKeep# 1001}, {owner# 6 lsn# 1020-1188 firstLsnToKeep# 1001}, {owner# 7 lsn# 1019-1188 firstLsnToKeep# 1001},},{chunkIdx# 108 users# 5 endOfSplice# 0 {owner# 3 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 4 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 5 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 6 lsn# 1189-1358 firstLsnToKeep# 1001}, {owner# 7 lsn# 1189-1357 firstLsnToKeep# 1001},},{chunkIdx# 109 users# 5 endOfSplice# 0 {owner# 3 lsn# 1359-1527 firstLsnToKeep# 1001}, {owner# 4 lsn# 1359-1528 firstLsnToKeep# 1001}, {owner# 5 lsn# 1359-1527 firstLsnToKeep# 1001}, {owner# 6 lsn# 1359-1528 firstLsnToKeep# 1001}, {owner# 7 lsn# 1358-1527 firstLsnToKeep# 1001},},{chunkIdx# 110 users# 5 endOfSplice# 0 {owner# 3 lsn# 1528-1697 firstLsnToKeep# 1001}, {owner# 4 lsn# 1529-1 
... d# 1 2024-11-21T08:47:21.540305Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:21.540336Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:21.544345Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:21.544514Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:21.544559Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:21.612446Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1701175 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:21.616410Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:47:21.880009Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:21.896361Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 4275926580399689655 MagicNextLogChunkReference: 1550455400852879036 MagicLogChunk: 13683793087049832104 MagicDataChunk: 13374156175593782201 MagicSysLogChunk: 1202719663416213753 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178841451502 (2024-11-21T08:47:21.451502Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:21.908546Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1908438 NonceLog# 1701175 NonceData# 1938225} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:21.916283Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:21.917713Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got 
!restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:21.917752Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:21.917867Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:21.922771Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:21.922859Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:21.982910Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:21.984905Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 508} PDiskId# 1 2024-11-21T08:47:22.199653Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:22.199895Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 4275926580399689655 MagicNextLogChunkReference: 1550455400852879036 MagicLogChunk: 13683793087049832104 MagicDataChunk: 13374156175593782201 MagicSysLogChunk: 1202719663416213753 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178841451502 (2024-11-21T08:47:21.451502Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:22.201938Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3407707 NonceLog# 3047321 NonceData# 3110481} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:22.208290Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:22.220280Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 1701683 " with 
nonceJumpLogPageHeader2->PreviousNonce# "# 1701683 PDiskId# 1 2024-11-21T08:47:22.221913Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 3 SectorIdx# 186 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 3 OffsetInChunk# 761856} PDiskId# 1 2024-11-21T08:47:22.221946Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 3 OffsetInChunk# 761856} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:22.222087Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 509-1016 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1017-1202 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:22.223504Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 509-1016 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1017-1202 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:22.223579Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:22.304525Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:22.314936Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2024-11-21T08:47:22.385768Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:22.403386Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 2030490058090855679 MagicNextLogChunkReference: 13783497537350697405 MagicLogChunk: 14726323152271132470 MagicDataChunk: 12279420885103094235 MagicSysLogChunk: 12051843189808178287 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178842362978 (2024-11-21T08:47:22.362978Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:22.412310Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:22.420279Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 
ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:22.420398Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:22.424352Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:22.432276Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:22.432504Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:22.488356Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1828161 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.7403312661 seconds Producer 1 worked for 0.3266726137 seconds Consumer 0 worked for 2.186444126 seconds Consumer 1 worked for 3.507626254 seconds Consumer 2 worked for 2.353554518 seconds Consumer 3 worked for 3.45574289 seconds >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> NaiveFragmentWriterTest::Long >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction >> TBlobStorageSyncLogDsk::SeveralChunks [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_OnePageIndexed [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_SeveralPagesIndexed [GOOD] >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> NaiveFragmentWriterTest::Long [GOOD] >> ReorderCodecTest::Basic [GOOD] >> RunLengthCodec::BasicTest32 [GOOD] >> RunLengthCodec::BasicTest64 [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::TrimLog [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> RunLengthCodec::BasicTest64 [GOOD] |85.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/read_balancer__balancing.h_serialized.cpp |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> 
TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] |85.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |85.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TMonitoring::ReregisterTest [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc |85.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |85.9%| [AR] {RESULT} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |85.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadWhole >> test.py::test[column_order-insert_with_reorder_cols--Debug] |85.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly >> test.py::test[join-inner_grouped_by_expr--Plan] >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap >> test.py::test[table_range-range_over_regexp--Plan] |85.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |85.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a >> test.py::test[produce-reduce_by_struct-default.txt-Results] >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync >> test.py::test[join-anyjoin_common_nodup-off-Debug] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off-Plan] |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Plan] >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> test.py::test[join-anyjoin_common_nodup-off-Plan] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] >> test.py::test[join-bush_in--Debug] >> test.py::test[join-join_and_distinct_key-off-Debug] |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> TYardTest::TestChunkWrite20Read02 >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkUnlock |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkUnlockRestart >> test.py::test[column_group-hint_anon-single-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_order-select_plain_nosimple-default.txt-Analyze] >> TYardTest::TestChunkUnlockRestart [GOOD] >> TYardTest::TestChunkReserve >> TYardTest::TestChunkReserve [GOOD] >> TYardTest::TestChunkRecommit >> test.py::test[table_range-range_over_regexp--Plan] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test.py::test[select-select_all_from_concat_anon-default.txt-Debug] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Analyze] >> TYardTest::TestChunkRecommit [GOOD] >> TYardTest::TestChunkRestartRecommit >> test.py::test[join-inner_grouped_by_expr--Plan] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] >> test.py::test[column_order-insert--Plan] >> 
TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace >> test.py::test[action-action_eval_cluster_table--Debug] >> test.py::test[expr-minmax_for_complex_types-default.txt-ForceBlocks] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Debug] >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep >> DSProxyStrategyTest::Restore_block42 [GOOD] |85.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |85.9%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a >> test.py::test[column_order-insert_with_reorder_cols--Debug] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Plan] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe >> test.py::test[action-action_eval_cluster_table--Analyze] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] |85.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |85.9%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |85.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp >> test.py::test[action-action_opt_args-default.txt-Analyze] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Debug] [GOOD] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] >> test.py::test[seq_mode-simple1-default.txt-Debug] >> test.py::test[table_range-range_slash--Debug] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_and_distinct_key-off-Plan] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Results] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::ReplPerf >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent >> test.py::test[join-join_and_distinct_key-off-Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Analyze] >> test.py::test[column_order-select_plain_nosimple-default.txt-Analyze] [GOOD] >> test.py::test[insert-replace_inferred--Debug] >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh >> test.py::test[column_order-select_plain_nosimple-default.txt-Debug] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Analyze] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Debug] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] >> TYardTest::TestDamageAtTheBoundary ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] 
Test command err: 2024-11-21T08:47:34.556353Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:47:34.696782Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6179377349905641391] 2024-11-21T08:47:35.800372Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> test.py::test[column_order-union_all_positional-default.txt-Debug] >> test.py::test[select-select_all_from_concat_anon-default.txt-Debug] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction >> test.py::test[blocks-add_int64--Results] >> test.py::test[action-action_eval_cluster_table--Analyze] [GOOD] |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a >> test.py::test[action-action_eval_cluster_table--Debug] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[column_order-join_nosimple--Debug] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Debug] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Analyze] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Debug] >> test.py::test[produce-reduce_multi_in-sorted-Debug] >> test.py::test[column_order-join_nosimple--Debug] [SKIPPED] >> test.py::test[column_order-join_nosimple--Plan] [SKIPPED] >> test.py::test[action-action_eval_cluster_table--Debug] [GOOD] >> test.py::test[action-action_eval_cluster_table--Plan] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Plan] >> test.py::test[seq_mode-simple1-default.txt-Debug] [GOOD] >> test.py::test[column_order-join_nosimple--Results] >> test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[column_order-union_all-default.txt-Debug] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Results] >> test.py::test[action-action_eval_cluster_table--Plan] [GOOD] >> test.py::test[seq_mode-simple1-default.txt-ForceBlocks] >> test.py::test[expr-list_takeskipwhile-default.txt-ForceBlocks] >> test.py::test[action-action_eval_cluster_table--Results] >> test.py::test[join-join_semi_correlation_in_order_by-off-Analyze] [GOOD] >> test.py::test[join-pullup_left-off-Analyze] >> test.py::test[join-bush_in--Debug] [GOOD] >> test.py::test[join-bush_in--Plan] [GOOD] >> test.py::test[join-bush_in--Results] >> test.py::test[join-join_semi_correlation_in_order_by-off-Debug] >> TCowBTreeTest::Concurrent [GOOD] >> TCowBTreeTest::Alignment [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] >> test.py::test[pg-values-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Debug] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] >> test.py::test[table_range-range_slash--Debug] [GOOD] >> test.py::test[table_range-range_slash--Plan] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Analyze] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Plan] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] >> test.py::test[table_range-range_slash--Results] >> 
test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] [GOOD] >> test.py::test[join-starjoin_unused_keys--Analyze] [SKIPPED] >> test.py::test[column_order-union_all_positional-default.txt-Debug] [GOOD] >> test.py::test[column_order-union_all_positional-default.txt-ForceBlocks] >> test.py::test[join-starjoin_unused_keys--Debug] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--ForceBlocks] >> test.py::test[join-starjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--Plan] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Plan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] Test command err: 2024-11-21T08:47:17.332772Z :BS_VDISK_PUT ERROR: VDISK[0:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2024-11-21T08:47:18.446434Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T08:47:18.446439Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T08:47:18.446466Z :BS_SKELETON ERROR: VDISK[0:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T08:47:18.446481Z :BS_SKELETON ERROR: VDISK[0:_:0:2:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T08:47:18.446787Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T08:47:18.447098Z :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? 
Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 >> test.py::test[join-starjoin_unused_keys--Plan] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--Results] [SKIPPED] >> test.py::test[json-json_query/example--Analyze] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp >> test.py::test[insert-replace_inferred--Debug] [GOOD] >> test.py::test[insert-replace_inferred--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.4083266208 seconds Producer 1 worked for 0.3911394801 seconds Consumer 0 worked for 1.097540042 seconds on a snapshot of size 80000 Consumer 1 worked for 1.523798489 seconds on a snapshot of size 160000 Consumer 2 worked for 1.959602269 seconds on a snapshot of size 240000 Consumer 3 worked for 2.361824449 seconds on a snapshot of size 320000 Consumers had 11999997 successful seeks |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Plan] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] >> test.py::test[action-action_opt_args-default.txt-Debug] [GOOD] >> test.py::test[action-action_opt_args-default.txt-ForceBlocks] >> test.py::test[seq_mode-simple1-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-simple1-default.txt-Plan] [GOOD] >> test.py::test[seq_mode-simple1-default.txt-Results] >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Plan] [GOOD] >> test.py::test[expr-minmax_for_complex_types-default.txt-ForceBlocks] [GOOD] >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TYardTest::TestCutMultipleLogChunks >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[expr-minmax_for_complex_types-default.txt-Plan] >> test.py::test[column_order-union_all-default.txt-Debug] [GOOD] >> test.py::test[column_order-union_all-default.txt-Plan] [GOOD] >> test.py::test[column_order-union_all-default.txt-Results] >> test.py::test[expr-minmax_for_complex_types-default.txt-Plan] [GOOD] >> test.py::test[expr-minmax_for_complex_types-default.txt-Results] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-action_eval_cluster_table--Debug] [GOOD] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] >> test.py::test[column_order-select_plain_nosimple-default.txt-Debug] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] >> test.py::test[action-empty_do-default.txt-Debug] >> TYardTest::TestEnormousDisk [GOOD] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestChunkPriorityBlock >> test.py::test[join-pullup_left-off-Analyze] [GOOD] >> test.py::test[column_order-union_all_positional-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-pullup_left-off-Debug] >> test.py::test[join-join_semi_correlation_in_order_by-off-Debug] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] >> test.py::test[column_order-union_all_positional-default.txt-Plan] [GOOD] >> 
test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_semi_correlation_in_order_by-off-Plan] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] >> test.py::test[table_range-range_slash--Results] [GOOD] >> test.py::test[table_range-range_tables_with_view--Debug] >> test.py::test[blocks-combine_hashed_minmax_double--Results] >> TYardTest::TestChunkPriorityBlock [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [GOOD] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--Analyze] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional-default.txt-Results] >> test.py::test[column_order-insert--Plan] [GOOD] >> test.py::test[json-json_query/example--Analyze] [GOOD] >> test.py::test[json-json_query/example--Debug] >> test.py::test[join-left_cast_to_string--Analyze] >> test.py::test[seq_mode-simple1-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_filter_udf--Analyze] [SKIPPED] >> test.py::test[select-select_all_ordered-default.txt-Analyze] >> test.py::test[join-join_semi_correlation_in_order_by--Analyze] [GOOD] >> test.py::test[column_order-insert--Results] >> test.py::test[produce-reduce_multi_in-sorted-Debug] [GOOD] >> test.py::test[pg-pg_corr_sort_limit2-default.txt-Results] [GOOD] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Debug] >> test.py::test[produce-reduce_multi_in-sorted-Plan] >> test.py::test[table_range-range_over_filter_udf--Debug] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--ForceBlocks] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--Plan] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--Results] [SKIPPED] >> test.py::test[join-join_semi_correlation_in_order_by--Debug] >> test.py::test[produce-reduce_multi_in-sorted-Plan] [GOOD] >> test.py::test[action-action_opt_args-default.txt-ForceBlocks] [GOOD] >> test.py::test[table_range-table_funcs_expr--Analyze] >> test.py::test[produce-reduce_multi_in-sorted-Results] >> test.py::test[action-action_opt_args-default.txt-Plan] [GOOD] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] Test command err: 2024-11-21T08:46:45.382076Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.396674Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:46:45.412411Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:46:45.489716Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.489741Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:46:45.508317Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 16032992717246526160 MagicLogChunk: 768534714654957176 MagicDataChunk: 3267483579505710672 MagicSysLogChunk: 4903739858731779101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 
SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805444401 (2024-11-21T08:46:45.444401Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.524342Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:45.536310Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:45.536549Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.544354Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.549923Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.553392Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.564375Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1578253 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:45.588056Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.591233Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 16032992717246526160 MagicLogChunk: 768534714654957176 MagicDataChunk: 3267483579505710672 MagicSysLogChunk: 4903739858731779101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805444401 (2024-11-21T08:46:45.444401Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.592660Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1263759 NonceLog# 1578253 NonceData# 1609773} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:45.593434Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 
2024-11-21T08:46:45.593463Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2024-11-21T08:46:45.593481Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.593621Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.594134Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.594190Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.709730Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.720373Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 16032992717246526160 MagicLogChunk: 768534714654957176 MagicDataChunk: 3267483579505710672 MagicSysLogChunk: 4903739858731779101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805444401 (2024-11-21T08:46:45.444401Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.725205Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 2667800 NonceLog# 2739829 NonceData# 2828578} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:45.730094Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:46:45.730122Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2024-11-21T08:46:45.730143Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 
2024-11-21T08:46:45.730286Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.740260Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.747773Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.800524Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:46:45.898191Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.908732Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:46:45.912366Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:46:45.963266Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.963288Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:46:45.972315Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 10614105984208031209 MagicLogChunk: 16650403490285977767 MagicDataChunk: 16536249883611758994 MagicSysLogChunk: 12154780573998651916 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805932573 (2024-11-21T08:46:45.932573Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.980301Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:45.988358Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:45.988391Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.988588Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.988810Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.988910Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:46.012938Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0 ... k_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:34.015858Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 97 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 397312} PDiskId# 1 2024-11-21T08:47:34.031145Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 397312} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:34.038669Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:34.100286Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:34.115117Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:34.117234Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:36.253645Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:36.272358Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 3745620375307378922 MagicNextLogChunkReference: 18389047709664035584 MagicLogChunk: 4740994638030383286 MagicDataChunk: 7235925899870410214 MagicSysLogChunk: 18186732138329840165 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813522528 (2024-11-21T08:46:53.522528Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:36.577646Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 6658628 NonceLog# 4748188 NonceData# 4247415118} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:36.589300Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 
2024-11-21T08:47:37.408172Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 126331 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} PDiskId# 1 2024-11-21T08:47:37.411935Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:37.417817Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-16 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:37.425664Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-16 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:37.433621Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:37.435101Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:38.389775Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:38.408354Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 3745620375307378922 MagicNextLogChunkReference: 18389047709664035584 MagicLogChunk: 4740994638030383286 MagicDataChunk: 7235925899870410214 MagicSysLogChunk: 18186732138329840165 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813522528 (2024-11-21T08:46:53.522528Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:38.555596Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 7778172 NonceLog# 6654652 NonceData# 4249068299} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:38.567749Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:39.973925Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32000 SectorIdx# 119960 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} PDiskId# 1 2024-11-21T08:47:39.979393Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK 
ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:39.986104Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-17 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:39.996304Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-17 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:40.010845Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:40.013654Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:40.389709Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:40.417912Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 3745620375307378922 MagicNextLogChunkReference: 18389047709664035584 MagicLogChunk: 4740994638030383286 MagicDataChunk: 7235925899870410214 MagicSysLogChunk: 18186732138329840165 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178813522528 (2024-11-21T08:46:53.522528Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:40.712911Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 9120577 NonceLog# 7946326 NonceData# 4250420892} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:40.732192Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:41.768323Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32001 SectorIdx# 18915 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} PDiskId# 1 2024-11-21T08:47:41.772464Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed 
| DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:41.779163Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-18 firstLsnToKeep# 0},},{chunkIdx# 32001 users# 1 endOfSplice# 0 {owner# 3 lsn# 18-18 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:41.788109Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-18 firstLsnToKeep# 0},},{chunkIdx# 32001 users# 1 endOfSplice# 0 {owner# 3 lsn# 18-18 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:41.794624Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:41.795829Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:41.799460Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 132608 Offset# 4294967295 Size# 128 2024-11-21T08:47:41.799469Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:919} PDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 32002 PDiskId# 1 >> test.py::test[column_order-union_all-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Debug] >> test.py::test[action-action_opt_args-default.txt-Results] >> test.py::test[column_order-union_all_positional_columns_count_fail--Debug] [SKIPPED] >> test.py::test[column_order-union_all_positional_columns_count_fail--Plan] [SKIPPED] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] >> test.py::test[join-bush_in--Results] [GOOD] >> test.py::test[join-cbo_4tables--Debug] [SKIPPED] >> test.py::test[join-cbo_4tables--Plan] >> test.py::test[join-cbo_4tables--Plan] [SKIPPED] >> test.py::test[expr-list_takeskipwhile-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-cbo_4tables--Results] [SKIPPED] >> test.py::test[join-flatten_columns2--Debug] >> test.py::test[insert-replace_inferred--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred--Plan] >> test.py::test[expr-list_takeskipwhile-default.txt-Plan] >> test.py::test[insert-replace_inferred--Plan] [GOOD] >> test.py::test[action-empty_do-default.txt-Debug] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test.py::test[action-empty_do-default.txt-Plan] [GOOD] >> test.py::test[action-empty_do-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] Test command err: 2024-11-21T08:46:57.271034Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.292378Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 15161170730562333241 MagicNextLogChunkReference: 10809413521481211714 MagicLogChunk: 13522112012735180945 MagicDataChunk: 18129338930126160176 MagicSysLogChunk: 8521313532167868581 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) 
SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178817205387 (2024-11-21T08:46:57.205387Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.302558Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.308306Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.308550Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.311526Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.312332Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.312482Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.369139Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2012409 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:57.591540Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:57.624409Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 6856779756431084919 MagicNextLogChunkReference: 6825604500892924392 MagicLogChunk: 6492945245472358276 MagicDataChunk: 3089020599173701562 MagicSysLogChunk: 11613702380323306618 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178817463935 (2024-11-21T08:46:57.463935Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:57.635261Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:57.636288Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:57.636318Z :BS_PDISK 
NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:57.640323Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.640510Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:57.640583Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:57.684501Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2053301 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:06.965712Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:07.000314Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 2576712991164597304 MagicNextLogChunkReference: 7875318861620221323 MagicLogChunk: 4796973585388274378 MagicDataChunk: 13913840936571974072 MagicSysLogChunk: 10720459484626423746 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178826882565 (2024-11-21T08:47:06.882565Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:07.016308Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:07.028287Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:07.028341Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:07.032319Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:07.032500Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:07.032576Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have 
successfully started PDiskId# 1 2024-11-21T08:47:07.056343Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1287535 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:08.785937Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:08.800307Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 6080427775941414342 MagicNextLogChunkReference: 6591062205945353386 MagicLogChunk: 14655495673625466044 MagicDataChunk: 13966874352673922012 MagicSysLogChunk: 7938456181399519086 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178828667657 (2024-11-21T08:47:08.667657Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:08.808303Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:08.812463Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:08.812501Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:08.816327Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:08.816459Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:08.816504Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:08.880468Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2014517 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:13.113547Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:13.136371Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 1611773240166507018 MagicNextLogChunkReference: 12760391492064986098 MagicLogChunk: 13776793959733938935 MagicDataChunk: 243328588439254427 MagicSysLogChunk: 4116123972127181204 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 
1168) TimestampUs: 1732178832917460 (2024-11-21T08:47:12.917460Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:13.152330Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:13.160398Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 20 ... 720Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:42.060339Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 14082757412820819401 MagicNextLogChunkReference: 12312375286345788850 MagicLogChunk: 2102854133242285198 MagicDataChunk: 1926404602782545116 MagicSysLogChunk: 839281302864692064 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178861289793 (2024-11-21T08:47:41.289793Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:42.076503Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 5356288 NonceLog# 5880967 NonceData# 4710429} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:42.088266Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:47:42.156563Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1776 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 7274496} PDiskId# 1 2024-11-21T08:47:42.156604Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 7274496} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:42.160355Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 13-15 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:42.160624Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 13-15 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:47:42.160703Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:42.168425Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T08:47:42.345661Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:42.380317Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:47:42.404317Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:47:42.634087Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:42.634108Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:47:42.660338Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 14160427957127748114 MagicLogChunk: 12003116140375474560 MagicDataChunk: 8352685867958133854 MagicSysLogChunk: 3299504146949532134 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178862454467 (2024-11-21T08:47:42.454467Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:42.668596Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:42.672278Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:42.672304Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:42.672435Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:42.680237Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:42.684237Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:42.684405Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1318968 CutLogId# [1:7439651565634329590:2050] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:42.748276Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} 
PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},{chunkIdx# 2 users# 0 endOfSplice# 0},{chunkIdx# 3 users# 0 endOfSplice# 0},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:47:42.748298Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:47:42.813713Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:42.824755Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 14160427957127748114 MagicLogChunk: 12003116140375474560 MagicDataChunk: 8352685867958133854 MagicSysLogChunk: 3299504146949532134 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178862454467 (2024-11-21T08:47:42.454467Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:42.844322Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1432165 NonceLog# 1320769 NonceData# 1405128} LogHeadChunkIdx# 4 LogHeadChunkPreviousNonce# 1320496 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:47:42.864306Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 4 SectorIdx# 273 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 4 OffsetInChunk# 1118208} PDiskId# 1 2024-11-21T08:47:42.864342Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4 OffsetInChunk# 1118208} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:42.865021Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:47:42.865214Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:47:42.865290Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:43.129708Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:43.152357Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4567704769467392802 MagicNextLogChunkReference: 8595333301555061349 MagicLogChunk: 3903295766497381100 MagicDataChunk: 10009787064304810449 MagicSysLogChunk: 14359619691211356480 MagicFormatChunk: 17332287817462050952 
ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178863010853 (2024-11-21T08:47:43.010853Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:43.172302Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:43.178241Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:43.178269Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:43.180274Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:43.186251Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:43.192247Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:43.225848Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1449434 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 >> test.py::test[expr-list_takeskipwhile-default.txt-Plan] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Results] >> test.py::test[pg-values-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-values-default.txt-Plan] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[pg-values-default.txt-Results] >> test.py::test[blocks-top_sort_one_asc--Debug] >> test.py::test[select-select_all_ordered-default.txt-Analyze] [GOOD] >> test.py::test[column_order-union_all_positional-default.txt-Results] [GOOD] >> test.py::test[compute_range-yql-12941-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-yql-12941-default.txt-Debug] [SKIPPED] >> test.py::test[select-select_all_ordered-default.txt-Debug] >> test.py::test[produce-process_with_python_stream--Debug] >> test.py::test[compute_range-yql-12941-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[table_range-range_tables_with_view--Debug] [GOOD] >> test.py::test[produce-process_with_python_stream--Debug] [SKIPPED] >> test.py::test[produce-process_with_python_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_with_python_stream--Plan] >> test.py::test[compute_range-yql-12941-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-yql-12941-default.txt-Results] [SKIPPED] >> test.py::test[count-boolean_count--Analyze] >> test.py::test[table_range-range_tables_with_view--Plan] [GOOD] >> 
test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[expr-minmax_for_complex_types-default.txt-Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Analyze] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Debug] [SKIPPED] >> test.py::test[produce-process_with_python_stream--Plan] [SKIPPED] >> test.py::test[produce-process_with_python_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Debug] >> test.py::test[action-empty_do-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--Debug] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Plan] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::Decommit >> test.py::test[produce-reduce_all_opt-default.txt-Debug] [SKIPPED] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Plan] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Plan] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Plan] [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_star--Debug] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp >> test.py::test[blocks-sort_two_desc--Analyze] [GOOD] >> test.py::test[blocks-sort_two_desc--Debug] >> test.py::test[expr-list_takeskipwhile-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Plan] >> test.py::test[produce-reduce_all_opt-default.txt-Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star--Plan] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--Analyze] [GOOD] >> test.py::test[join-left_cast_to_string--Debug] >> test.py::test[join-pullup_left-off-Debug] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Results] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Analyze] >> test.py::test[produce-reduce_with_presort_diff_order--Analyze] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Results] >> test.py::test[expr-literal_true-default.txt-Analyze] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Analyze] >> test.py::test[aggregate-group_by_hop_only_distinct--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Analyze] >> test.py::test[join-pullup_left-off-ForceBlocks] >> test.py::test[aggregate-group_by_hop_star--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[join-join_semi_correlation_in_order_by--Debug] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] |86.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |86.0%| [TA] 
$(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-pullup_left-off-ForceBlocks] [SKIPPED] >> test.py::test[join-pullup_left-off-Plan] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Debug] >> test.py::test[pg-values-default.txt-Results] [GOOD] >> test.py::test[compute_range-yql-12941-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-yql-12941-default.txt-Plan] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Analyze] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Debug] >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp >> test.py::test[compute_range-yql-12941-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-yql-12941-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-yql-13489-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-yql-13489-default.txt-Plan] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] [GOOD] |86.0%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Debug] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--ForceBlocks] [SKIPPED] >> test.py::test[compute_range-yql-13489-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-yql-13489-default.txt-Results] [SKIPPED] >> test.py::test[csee-closure_in_l1_and_l2-default.txt-Debug] |86.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a >> test.py::test[join-pullup_left-off-Results] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Analyze] [SKIPPED] >> test.py::test[insert_monotonic-non_existing_fail--Debug] >> test.py::test[action-action_eval_cluster_table--Plan] >> test.py::test[produce-reduce_typeinfo--Debug] [SKIPPED] >> test.py::test[produce-reduce_typeinfo--Plan] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Plan] >> test.py::test[join-pullup_left-off-Results] [GOOD] |86.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Debug] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Plan] [SKIPPED] |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a >> test.py::test[produce-reduce_typeinfo--Plan] [SKIPPED] >> test.py::test[produce-reduce_typeinfo--Results] [SKIPPED] >> test.py::test[action-action_eval_cluster_table--Plan] [GOOD] >> test.py::test[action-action_eval_cluster_table--Results] >> test.py::test[join-pullup_renaming--Analyze] >> test.py::test[column_order-select_plain_nosimple-default.txt-Plan] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Results] >> test.py::test[insert_monotonic-non_existing_fail--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--Debug] >> 
test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Debug] >> test.py::test[insert_monotonic-non_existing_fail--Plan] >> test.py::test[select-select_all_ordered-default.txt-Debug] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Plan] [SKIPPED] >> test.py::test[action-eval_folder--Debug] [GOOD] >> test.py::test[action-eval_folder--Plan] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[expr-opt_list_map-default.txt-Analyze] >> test.py::test[insert_monotonic-non_existing_fail--Results] >> test.py::test[action-eval_folder--Plan] [GOOD] >> test.py::test[action-eval_folder--Results] >> test.py::test[expr-literal_true-default.txt-Analyze] [GOOD] >> test.py::test[pg-pg_in_dict_key_with_stable_pickle-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_left_one-default.txt-Results] >> test.py::test[expr-literal_true-default.txt-Debug] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--Analyze] >> test.py::test[expr-lds_empty_compare-default.txt-Analyze] >> test.py::test[pg-pg_like_cast-default.txt-Debug] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Plan] [GOOD] >> test.py::test[json-json_query/example--Debug] [GOOD] >> test.py::test[table_range-range_tables_with_view--Plan] >> test.py::test[json-json_query/example--ForceBlocks] >> test.py::test[table_range-table_funcs_expr--Analyze] [GOOD] >> test.py::test[table_range-table_funcs_expr--Debug] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Analyze] >> test.py::test[blocks-sort_two_desc--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--ForceBlocks] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp >> test.py::test[count-boolean_count--Analyze] [GOOD] >> test.py::test[blocks-combine_hashed_set--Analyze] >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead >> test.py::test[blocks-top_sort_one_asc--Debug] [GOOD] >> test.py::test[csee-closure_in_l1_and_l2-default.txt-Debug] [GOOD] >> test.py::test[csee-closure_in_l1_and_l2-default.txt-Plan] >> test.py::test[join-left_cast_to_string--Debug] [GOOD] >> test.py::test[join-left_cast_to_string--ForceBlocks] >> test.py::test[count-boolean_count--Debug] >> test.py::test[csee-closure_in_l1_and_l2-default.txt-Plan] [GOOD] >> test.py::test[csee-closure_in_l1_and_l2-default.txt-Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Analyze] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Debug] >> test.py::test[action-eval_input_output_table_subquery--Analyze] >> test.py::test[pg-tpcds-q93-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Debug] >> test.py::test[blocks-top_sort_one_asc--Plan] [GOOD] >> test.py::test[blocks-top_sort_one_asc--Results] |86.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp >> 
test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Debug] >> test.py::test[join-pullup_renaming--Analyze] [GOOD] >> test.py::test[action-eval_folder--Results] [GOOD] >> test.py::test[action-eval_like--Debug] >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Plan] >> test.py::test[join-pullup_renaming--Debug] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/cms/cms_ut.cpp >> test.py::test[join-join_semi_correlation_in_order_by--Plan] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-eval_filter--Analyze] >> test.py::test[join-flatten_columns2--Debug] [GOOD] >> test.py::test[join-flatten_columns2--Plan] >> test.py::test[table_range-tablepath_with_non_existing--Debug] [GOOD] |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/cms_ut.cpp >> test.py::test[expr-opt_list_map-default.txt-Analyze] [GOOD] >> test.py::test[expr-opt_list_map-default.txt-Debug] >> test.py::test[insert_monotonic-non_existing_fail--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Analyze] >> test.py::test[table_range-tablepath_with_non_existing--Plan] >> test.py::test[join-flatten_columns2--Plan] [GOOD] >> test.py::test[join-flatten_columns2--Results] >> test.py::test[produce-reduce_with_presort_diff_order--Analyze] [GOOD] >> test.py::test[expr-literal_true-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_like_cast-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_like_cast-default.txt-Plan] >> test.py::test[pg_duplicated-ambigous_order_by_with_duplicates--Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Plan] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Debug] >> test.py::test[produce-reduce_with_presort_diff_order--Debug] >> test.py::test[pg-pg_like_cast-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_like_cast-default.txt-Results] >> test.py::test[expr-literal_true-default.txt-ForceBlocks] >> test.py::test[produce-process_multi_in_trivial_lambda--Analyze] >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite >> test.py::test[table_range-tablepath_with_non_existing--Results] |86.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |86.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test.py::test[aggregate-percentiles_ungrouped--Analyze] [GOOD] >> TBsVDiskRepl3::ReplPerf [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column_as_table--Analyze] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_as_table--Debug] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_as_table--ForceBlocks] |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test.py::test[expr-lds_empty_compare-default.txt-Analyze] [GOOD] >> test.py::test[expr-lds_empty_compare-default.txt-Debug] >> test.py::test[order_by-order_by_missing_project_column_as_table--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-percentiles_ungrouped--Debug] >> test.py::test[order_by-order_by_missing_project_column_as_table--Plan] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_as_table--Results] [SKIPPED] >> 
test.py::test[csee-closure_in_l1_and_l2-default.txt-Results] [GOOD] >> test.py::test[csee-yql-7237--Debug] >> test.py::test[pg-insert--Analyze] |86.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |86.0%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a >> test.py::test[blocks-combine_hashed_set--Analyze] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Plan] [GOOD] >> test.py::test[blocks-combine_hashed_set--Debug] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] [SKIPPED] >> test.py::test[blocks-sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2024-11-21T08:47:29.380695Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:47:29.416436Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13322594143277018442] 2024-11-21T08:47:29.548463Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:47:35.821138Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:47:36.028411Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13480407812211700438] 2024-11-21T08:47:37.048378Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:47:46.051892Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:47:46.428368Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4416479838102472165] 2024-11-21T08:47:47.793320Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> test.py::test[blocks-sort_two_desc--Plan] >> test.py::test[action-eval_input_output_table_subquery--Analyze] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Debug] >> test.py::test[join-bush_dis_in_in-off-Plan] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Results] [GOOD] >> test.py::test[join-left_cast_to_string--ForceBlocks] [GOOD] >> test.py::test[join-left_cast_to_string--Plan] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Analyze] >> test.py::test[blocks-sort_two_desc--Plan] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a >> test.py::test[join-left_cast_to_string--Results] >> test.py::test[json-json_query/example--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Debug] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Debug] [GOOD] >> test.py::test[join-join_comp_map_table-off-Debug] >> test.py::test[json-json_query/example--Plan] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] >> test.py::test[action-eval_like--Debug] [GOOD] >> test.py::test[action-eval_like--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-Analyze] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] >> test.py::test[join-join_comp_map_table-off-Debug] [SKIPPED] >> test.py::test[json-json_query/example--Results] >> test.py::test[action-eval_like--Results] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Analyze] >> test.py::test[join-join_comp_map_table-off-Plan] [SKIPPED] >> test.py::test[join-join_comp_map_table-off-Results] >> test.py::test[column_order-insert_with_new_cols--Analyze] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Debug] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Debug] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Plan] >> test.py::test[pg-pg_like_cast-default.txt-Results] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[pg-record_from_table_row-default.txt-Debug] >> test.py::test[table_range-range_tables_with_view--Plan] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[join-join_table_conflict_fail--Plan] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Results] >> test.py::test[column_order-select_subquery-default.txt-Analyze] >> test.py::test[join-equi_join_three_asterisk--Analyze] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Debug] >> test.py::test[action-eval_filter--Analyze] [GOOD] >> test.py::test[expr-literal_true-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-literal_true-default.txt-Plan] >> test.py::test[action-eval_filter--Debug] >> test.py::test[join-pullup_renaming--Debug] [GOOD] >> test.py::test[join-pullup_renaming--ForceBlocks] >> test.py::test[expr-literal_true-default.txt-Plan] [GOOD] >> test.py::test[expr-literal_true-default.txt-Results] >> test.py::test[blocks-top_sort_one_asc--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Debug] >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[tpch-q4-default.txt-Debug] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Debug] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] >> test.py::test[expr-opt_list_map-default.txt-Debug] [GOOD] >> test.py::test[expr-opt_list_map-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--Debug] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--Analyze] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Plan] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Analyze] >> test.py::test[produce-process_multi_in_trivial_lambda--Debug] >> test.py::test[binding-table_range_binding-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test.py::test[expr-lds_empty_compare-default.txt-Debug] [GOOD] >> test.py::test[expr-lds_empty_compare-default.txt-ForceBlocks] >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Plan] >> test.py::test[join-inmem_with_null_key--Debug] >> test.py::test[binding-table_range_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[join-lookupjoin_inner_2o-off-Analyze] [GOOD] >> 
test.py::test[join-lookupjoin_inner_2o-off-Debug] >> test.py::test[pg-insert--Analyze] [GOOD] >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-eval_python3_ann--Debug] [SKIPPED] >> test.py::test[table_range-table_funcs_expr--Debug] [GOOD] >> test.py::test[expr-literal_true-default.txt-Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] >> test.py::test[pg-insert--Debug] >> test.py::test[action-eval_python3_ann--Plan] [SKIPPED] >> test.py::test[action-eval_python3_ann--Results] >> test.py::test[expr-pickle-default.txt-Analyze] >> test.py::test[blocks-combine_hashed_set--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] >> test.py::test[action-eval_python3_ann--Results] [SKIPPED] >> test.py::test[action-eval_python_signature--Debug] >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--Debug] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_table_conflict_fail-off-Debug] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-off-Plan] >> test.py::test[action-eval_python_signature--Debug] [SKIPPED] >> test.py::test[action-eval_python_signature--Plan] [SKIPPED] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp >> test.py::test[join-join_table_conflict_fail-off-Plan] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-off-Results] >> test.py::test[tpch-q15-default.txt-Analyze] >> TBsLocalRecovery::WriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartRead >> test.py::test[column_order-select_subquery-default.txt-Analyze] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Debug] >> test.py::test[action-eval_python_signature--Results] >> test.py::test[join-force_merge_join-default.txt-Analyze] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Debug] >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_distinct--Analyze] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Debug] |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp >> test.py::test[pg-record_from_table_row-default.txt-Debug] [GOOD] >> test.py::test[action-eval_python_signature--Results] [SKIPPED] >> test.py::test[pg-record_from_table_row-default.txt-Plan] [GOOD] >> test.py::test[action-eval_regexp--Debug] >> test.py::test[pg-record_from_table_row-default.txt-Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Analyze] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Analyze] [GOOD] >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Debug] >> test.py::test[count-boolean_count--Debug] [GOOD] >> test.py::test[count-boolean_count--ForceBlocks] >> test.py::test[action-eval_input_output_table_subquery--Debug] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk--Debug] [GOOD] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] >> test.py::test[join-left_semi_with_other--Analyze] >> 
test.py::test[json-json_query/example--Results] [GOOD] >> test.py::test[json-json_value/on_empty_cast_default_exception--Analyze] [SKIPPED] >> test.py::test[json-json_value/on_empty_cast_default_exception--Debug] [SKIPPED] >> test.py::test[produce-process_multi_in_trivial_lambda--Debug] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--ForceBlocks] >> test.py::test[json-json_value/on_empty_cast_default_exception--ForceBlocks] [SKIPPED] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Plan] [GOOD] >> test.py::test[expr-lds_empty_compare-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-lds_empty_compare-default.txt-Plan] >> test.py::test[json-json_value/on_empty_cast_default_exception--Plan] [SKIPPED] >> test.py::test[json-json_value/on_empty_cast_default_exception--Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test.py::test[expr-lds_empty_compare-default.txt-Plan] [GOOD] >> test.py::test[expr-lds_empty_compare-default.txt-Results] >> test.py::test[compute_range-in3-default.txt-Debug] [SKIPPED] >> test.py::test[join-pullup_renaming--ForceBlocks] [GOOD] >> test.py::test[join-pullup_renaming--Plan] [GOOD] >> test.py::test[join-pullup_renaming--Results] >> test.py::test[compute_range-in3-default.txt-Plan] [SKIPPED] >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[pg-with_rec_trivial-default.txt-Analyze] >> test.py::test[compute_range-in3-default.txt-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Plan] >> test.py::test[blocks-type_and_callable_stats--Debug] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Plan] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Analyze] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Plan] [SKIPPED] >> test.py::test[blocks-type_and_callable_stats--Plan] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] >> test.py::test[join-join_table_conflict_fail-off-Results] [GOOD] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Results] [SKIPPED] >> test.py::test[count-boolean_count--Debug] >> test.py::test[expr-opt_list_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-opt_list_map-default.txt-Plan] [GOOD] >> test.py::test[expr-opt_list_map-default.txt-Results] >> test.py::test[pg-record_from_table_row-default.txt-Results] [GOOD] >> test.py::test[pg-select_case-default.txt-Debug] >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> test.py::test[action-eval_filter--Debug] [GOOD] >> test.py::test[action-eval_filter--ForceBlocks] >> test.py::test[expr-pickle-default.txt-Analyze] [GOOD] >> test.py::test[expr-pickle-default.txt-Debug] >> 
test.py::test[pg-insert--Debug] [GOOD] >> test.py::test[pg-insert--ForceBlocks] >> test.py::test[join-inmem_with_null_key--Debug] [GOOD] >> test.py::test[join-inmem_with_null_key--Plan] [GOOD] >> test.py::test[join-inmem_with_null_key--Results] >> test.py::test[join-join_without_correlation_and_dict_access-off-Debug] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access-off-Plan] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-Debug] [SKIPPED] >> TPDiskUtil::DriveEstimator [GOOD] >> test.py::test[join-join_without_correlation_names-off-Plan] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-Debug] [SKIPPED] >> test.py::test[join-left_all-off-Plan] [SKIPPED] >> test.py::test[join-left_all-off-Results] >> TPDiskUtil::OffsetParsingCorrectness >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[binding-tie_bad_count_fail--Debug] [SKIPPED] >> test.py::test[binding-tie_bad_count_fail--Plan] [SKIPPED] >> test.py::test[binding-tie_bad_count_fail--Results] >> test.py::test[action-eval_regexp--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Analyze] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Debug] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--Debug] >> TPDiskUtil::OffsetParsingCorrectness [GOOD] >> TPDiskUtil::FormatSectorMap [GOOD] >> test.py::test[action-eval_regexp--Plan] [GOOD] >> test.py::test[action-eval_regexp--Results] >> test.py::test[column_order-select_subquery-default.txt-Debug] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] >> test.py::test[json-json_value/on_empty_cast_default_exception--Results] [GOOD] >> test.py::test[json-json_value/on_error-default.txt-Analyze] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_set--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 51 us testing erasure stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 231286 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 803897 us testing erasure stripe-3-1 
main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 42 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 16146 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 560229 us >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] >> test.py::test[tpch-q15-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q15-default.txt-Debug] >> test.py::test[aggregate-percentiles_ungrouped--Plan] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--Results] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX >> test.py::test[column_order-insert_with_new_cols--Debug] [GOOD] >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] >> test.py::test[join-left_semi_with_other--Analyze] [GOOD] >> test.py::test[join-left_semi_with_other--Debug] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Plan] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Results] >> test.py::test[expr-lds_empty_compare-default.txt-Results] [GOOD] >> test.py::test[expr-list_concat-default.txt-Analyze] >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> test.py::test[join-force_merge_join-default.txt-Debug] [GOOD] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] >> test.py::test[pg-select_join_left_one-default.txt-Results] [GOOD] >> test.py::test[pg-select_literals-default.txt-Analyze] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Analyze] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> test.py::test[join-inmem_with_null_key--Results] [GOOD] >> test.py::test[join-inmem_with_set_key--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::FormatSectorMap [GOOD] Test command err: 2024-11-21T08:46:45.071097Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.080244Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4418829173871905230 MagicNextLogChunkReference: 17152675160307150892 MagicLogChunk: 7066501957140801182 MagicDataChunk: 6963338667968279249 MagicSysLogChunk: 11574703938218509254 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805027334 (2024-11-21T08:46:45.027334Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | 
ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.080262Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:543} PDiskId# 1Can't start due to a guid error expected# 4418829173871905229 on-disk# 4418829173871905230 PDiskId# 1 2024-11-21T08:46:45.295889Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.304316Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18017006795195247230 MagicNextLogChunkReference: 15047715531412866926 MagicLogChunk: 2272884290107297408 MagicDataChunk: 4263524765853591011 MagicSysLogChunk: 11401207337441583986 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805133822 (2024-11-21T08:46:45.133822Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.308322Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:45.312472Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:45.312510Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.316404Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.316640Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.316734Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.317517Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1395554 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:45.318451Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1395554 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:45.323450Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.328298Z node 2 :BS_PDISK NOTICE: 
{BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18017006795195247230 MagicNextLogChunkReference: 15047715531412866926 MagicLogChunk: 2272884290107297408 MagicDataChunk: 4263524765853591011 MagicSysLogChunk: 11401207337441583986 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805133822 (2024-11-21T08:46:45.133822Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.332414Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1425750 NonceLog# 1395554 NonceData# 1274400} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:46:45.333667Z node 2 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:46:45.333690Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2024-11-21T08:46:45.333709Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.336430Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.340254Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.343021Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.343316Z node 2 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T08:46:45.404718Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.404869Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16977636104662460929 MagicNextLogChunkReference: 3435969029292240594 MagicLogChunk: 18205734760216905698 MagicDataChunk: 12328912189788066961 MagicSysLogChunk: 12055708303354672037 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 
(current sizeof: 1168) TimestampUs: 1732178805378277 (2024-11-21T08:46:45.378277Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.406348Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:45.407363Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:45.407380Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.407912Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.408057Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.408130Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.409621Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1282147 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:45.412372Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 3 PDiskId# 1 2024-11-21T08:46:45.412385Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.412396Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 3 ownerRound# 2 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T08:46:45.416419Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1282147 CutLogId# [0:0:0] ownerRound# 3 PDiskId# 1 2024-11-21T08:46:45.417500Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 4 PDiskId# 1 2024-11-21T08:46:45.417519Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.417528Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 4 ownerRound# 3 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T08:46:45.418855Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1282147 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T08:46:45.419561Z node 3 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 5 PDiskId# 1 2024-11-21T08:46:45.419575Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.419584Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 5 ownerRound# 4 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T08:46:45.420460Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cp ... e: 1168 (current sizeof: 1168) TimestampUs: 1732178811186107 (2024-11-21T08:46:51.186107Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:51.262626Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:51.276285Z node 8 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:51.276320Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:51.276533Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.276699Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.276763Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:51.281268Z node 8 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1275742 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:51.461669Z node 9 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:51.472758Z node 9 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5731398152785299288 MagicNextLogChunkReference: 4392734476918289939 MagicLogChunk: 9353136994827762402 MagicDataChunk: 12559012015775672739 MagicSysLogChunk: 7572403453768414490 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178811348921 (2024-11-21T08:46:51.348921Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:51.484348Z node 9 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:51.492310Z node 9 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:51.492341Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:51.492767Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.496331Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.504291Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:51.508333Z node 9 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2009318 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:51.516683Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 2009318 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T08:46:51.532681Z node 9 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:998} HandlePoison, PDiskThread stopped PDiskId# 1 2024-11-21T08:46:51.534020Z node 9 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:51.576792Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5731398152785299288 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T08:46:51.685617Z node 10 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:51.687090Z node 10 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 2278259961396017982 MagicNextLogChunkReference: 7079915648495553435 MagicLogChunk: 5023779404856903887 MagicDataChunk: 10261586339246303160 MagicSysLogChunk: 12935844339058567193 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178811621159 (2024-11-21T08:46:51.621159Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:51.692332Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:51.693127Z node 10 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:51.693152Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:51.693871Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.694635Z node 10 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:51.694715Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:51.696165Z node 10 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1164444 CutLogId# [0:0:0] ownerRound# 14 PDiskId# 1 2024-11-21T08:46:51.700470Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T08:46:51.720762Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [2:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1164444 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {2}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {3..982} PDiskId# 1 2024-11-21T08:46:51.726058Z node 10 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:51.745411Z node 10 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2278259961396017982 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 >> test.py::test[tpch-q4-default.txt-Debug] [GOOD] >> test.py::test[tpch-q4-default.txt-Plan] [GOOD] >> test.py::test[tpch-q4-default.txt-Results] >> test.py::test[pg-with_rec_trivial-default.txt-Analyze] [GOOD] >> test.py::test[pg-with_rec_trivial-default.txt-Debug] >> test.py::test[count-boolean_count--ForceBlocks] [GOOD] >> test.py::test[count-boolean_count--Plan] [GOOD] >> test.py::test[count-boolean_count--Results] >> test.py::test[produce-process_multi_in_trivial_lambda--ForceBlocks] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--Plan] >> test.py::test[expr-opt_list_map-default.txt-Results] [GOOD] >> 
test.py::test[expr-struct_builtins-default.txt-Analyze] >> test.py::test[produce-process_multi_in_trivial_lambda--Plan] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] >> test.py::test[action-eval_regexp--Results] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Debug] >> test.py::test[join-lookupjoin_inner_empty_subq--Analyze] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Debug] >> test.py::test[binding-tie_bad_count_fail--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Debug] >> test.py::test[action-eval_input_output_table_subquery--Plan] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> test.py::test[expr-pickle-default.txt-Debug] [GOOD] >> test.py::test[expr-pickle-default.txt-ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_distinct--Debug] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] >> test.py::test[blocks-bitcast_block--Debug] >> test.py::test[pg-select_case-default.txt-Debug] [GOOD] >> test.py::test[pg-select_case-default.txt-Plan] [GOOD] >> test.py::test[pg-select_case-default.txt-Results] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--Debug] [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--ForceBlocks] >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> test.py::test[csee-yql-7237--Debug] [GOOD] >> test.py::test[csee-yql-7237--Plan] [GOOD] >> test.py::test[csee-yql-7237--Results] >> test.py::test[action-eval_filter--ForceBlocks] [GOOD] >> test.py::test[action-eval_filter--Plan] [GOOD] >> test.py::test[action-eval_filter--Results] >> test.py::test[pg-insert--ForceBlocks] [GOOD] >> test.py::test[pg-insert--Plan] [GOOD] >> test.py::test[pg-insert--Results] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] [GOOD] >> test.py::test[table_range-table_funcs_expr--Plan] >> test.py::test[json-json_value/on_error-default.txt-Analyze] [GOOD] >> test.py::test[json-json_value/on_error-default.txt-Debug] >> test.py::test[blocks-top_sort_two_desc--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] >> test.py::test[table_range-table_funcs_expr--Plan] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test.py::test[expr-list_concat-default.txt-Analyze] [GOOD] >> test.py::test[expr-list_concat-default.txt-Debug] >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[blocks-date_top_sort--Analyze] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[join-pullup_renaming-off-Analyze] >> test.py::test[pg-select_literals-default.txt-Analyze] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Plan] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] >> test.py::test[pg-select_literals-default.txt-Debug] >> test.py::test[action-define_simple_action-default.txt-Analyze] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Debug] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] [GOOD] >> test.py::test[produce-reduce_all_list_stream-default.txt-Analyze] [SKIPPED] >> 
test.py::test[produce-reduce_all_list_stream-default.txt-Debug] [SKIPPED] >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart >> test.py::test[join-left_semi_with_other--Debug] [GOOD] >> test.py::test[join-left_semi_with_other--ForceBlocks] >> test.py::test[produce-reduce_all_list_stream-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_keytuple--Analyze] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Plan] >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Debug] >> test.py::test[pg-select_case-default.txt-Results] [GOOD] >> test.py::test[pg-select_common_type_except-default.txt-Debug] >> test.py::test[pg-with_rec_trivial-default.txt-Debug] [GOOD] >> test.py::test[pg-with_rec_trivial-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] >> test.py::test[count-boolean_count--Debug] [GOOD] >> test.py::test[count-boolean_count--Plan] [GOOD] >> test.py::test[count-boolean_count--Results] >> test.py::test[aggregate-percentiles_ungrouped--Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Analyze] >> test.py::test[join-force_merge_join-default.txt-Plan] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Results] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-eval_result_label--Analyze] >> test.py::test[action-eval_taggedtype-default.txt-Debug] [GOOD] >> test.py::test[join-inmem_with_set_key--Debug] [GOOD] >> test.py::test[join-inmem_with_set_key--Plan] [GOOD] >> test.py::test[join-inmem_with_set_key--Results] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-eval_like--Analyze] >> test.py::test[expr-struct_builtins-default.txt-Analyze] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Debug] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-join_using3-default.txt-Analyze] >> test.py::test[expr-pickle-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-pickle-default.txt-Plan] [GOOD] >> test.py::test[expr-pickle-default.txt-Results] >> test.py::test[action-eval_taggedtype-default.txt-Plan] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Results] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_nullable--Analyze] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> test.py::test[join-lookupjoin_inner_empty_subq--Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] >> test.py::test[tpch-q15-default.txt-Debug] [GOOD] >> test.py::test[blocks-bitcast_block--Debug] [GOOD] >> test.py::test[tpch-q15-default.txt-ForceBlocks] >> test.py::test[blocks-bitcast_block--Plan] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Debug] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Plan] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] [GOOD] >> 
test.py::test[column_order-insert_with_new_cols--Plan] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[tpch-q7-default.txt-Debug] >> test.py::test[blocks-bitcast_block--Results] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[compute_range-in-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-in-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-in-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-in-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-in-default.txt-Results] >> test.py::test[json-json_value/on_error-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/on_error-default.txt-ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] >> test.py::test[compute_range-in-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-merge_adjacent-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-merge_adjacent-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-merge_adjacent-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-merge_adjacent-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-merge_adjacent-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Plan] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted_distinct--Plan] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> test.py::test[blocks-date_top_sort--Analyze] [GOOD] >> test.py::test[blocks-date_top_sort--Debug] >> test.py::test[pg-select_literals-default.txt-Debug] [GOOD] >> test.py::test[pg-select_literals-default.txt-ForceBlocks] >> test.py::test[compute_range-norange-default.txt-Results] [SKIPPED] >> test.py::test[count-count_all_grouped-empty-Analyze] >> test.py::test[action-define_simple_action-default.txt-Debug] [GOOD] >> test.py::test[action-define_simple_action-default.txt-ForceBlocks] >> test.py::test[action-eval_taggedtype-default.txt-Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Debug] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Analyze] >> test.py::test[join-pullup_renaming-off-Analyze] [GOOD] >> test.py::test[join-pullup_renaming-off-Debug] >> test.py::test[expr-pickle-default.txt-Results] [GOOD] >> test.py::test[expr-struct_literal_bind-default.txt-Analyze] >> test.py::test[produce-reduce_multi_in_keytuple--Analyze] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Debug] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Analyze] >> test.py::test[pg-select_common_type_except-default.txt-Debug] [GOOD] >> test.py::test[pg-select_common_type_except-default.txt-Plan] [GOOD] >> test.py::test[pg-select_common_type_except-default.txt-Results] >> test.py::test[join-equi_join_three_asterisk--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Analyze] >> test.py::test[expr-list_concat-default.txt-Debug] [GOOD] >> test.py::test[expr-list_concat-default.txt-ForceBlocks] >> 
test.py::test[join-left_join_right_pushdown_nested_left--Debug] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Plan] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 103.7973442 None domains 1 old (ns): 100.0720253 None domains 9 new (ns): 104.2860115 None domains 9 old (ns): 42.92886928 Mirror3 domains 4 new (ns): 44.58895493 Mirror3 domains 4 old (ns): 37.6521791 Mirror3 domains 9 new (ns): 44.05472994 Mirror3 domains 9 old (ns): 36.48384541 4Plus2Block domains 8 new (ns): 47.00860041 4Plus2Block domains 8 old (ns): 42.8242293 4Plus2Block domains 9 new (ns): 51.25858275 4Plus2Block domains 9 old (ns): 50.59638614 ErasureMirror3of4 domains 8 new (ns): 46.52572062 ErasureMirror3of4 domains 8 old (ns): 44.76543548 ErasureMirror3of4 domains 9 new (ns): 43.36217506 ErasureMirror3of4 domains 9 old (ns): 37.44972109 >> test.py::test[pg-with_rec_trivial-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-with_rec_trivial-default.txt-Plan] [GOOD] >> test.py::test[pg-with_rec_trivial-default.txt-Results] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Plan] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] >> test.py::test[action-eval_like--Analyze] [GOOD] >> test.py::test[action-eval_like--Debug] >> test.py::test[join-left_semi_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other--Plan] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[action-eval_result_label--Analyze] [GOOD] >> test.py::test[action-eval_result_label--Debug] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--Debug] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Plan] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Analyze] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Debug] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Debug] >> test.py::test[expr-struct_builtins-default.txt-ForceBlocks] >> test.py::test[pg-join_using3-default.txt-Analyze] [GOOD] >> test.py::test[pg-join_using3-default.txt-Debug] >> test.py::test[blocks-block_input_various_types-v3-Debug] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Plan] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--Debug] >> test.py::test[column_group-hint_anon-single-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Debug] [SKIPPED] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_by_nulls--Debug] >> test.py::test[join-force_merge_join-default.txt-Results] [GOOD] >> test.py::test[join-join_comp_common_table-off-Analyze] >> test.py::test[column_group-hint_non_yson_fail--Plan] [SKIPPED] >> 
test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-length-single-Debug] [SKIPPED] >> test.py::test[column_group-length-single-Plan] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-Debug] >> test.py::test[count-count_nullable--Analyze] [GOOD] >> test.py::test[count-count_nullable--Debug] >> test.py::test[json-json_value/on_error-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_value/on_error-default.txt-Plan] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Plan] >> test.py::test[pg-with_rec_trivial-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Analyze] >> test.py::test[json-json_value/on_error-default.txt-Results] |86.0%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[pg-select_literals-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_literals-default.txt-Plan] [GOOD] >> test.py::test[pg-select_literals-default.txt-Results] >> test.py::test[count-count_all_grouped-empty-Analyze] [GOOD] >> test.py::test[count-count_all_grouped-empty-Debug] >> test.py::test[action-define_simple_action-default.txt-ForceBlocks] [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync >> test.py::test[type_v3-ignore_v3_hint-opt-Analyze] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Debug] >> test.py::test[pg-select_common_type_except-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Debug] >> test.py::test[action-define_simple_action-default.txt-Plan] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Results] >> test.py::test[expr-struct_literal_bind-default.txt-Analyze] [GOOD] >> test.py::test[expr-struct_literal_bind-default.txt-Debug] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Analyze] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Debug] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] >> test.py::test[csee-yql-7237--Results] [GOOD] >> test.py::test[datetime-date_cast-default.txt-Debug] >> test.py::test[join-equi_join_three_asterisk-off-Analyze] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Debug] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[aggregate-null_type-default.txt-Analyze] >> test.py::test[tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q15-default.txt-Plan] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> test.py::test[column_group-groups-lookup-Analyze] [SKIPPED] >> test.py::test[column_group-groups-lookup-Debug] [SKIPPED] >> test.py::test[column_group-groups-lookup-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-lookup-Plan] >> test.py::test[blocks-date_top_sort--Debug] [GOOD] >> test.py::test[blocks-date_top_sort--ForceBlocks] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Debug] [GOOD] >> test.py::test[column_group-groups-lookup-Plan] [SKIPPED] >> test.py::test[column_group-groups-lookup-Results] [SKIPPED] >> 
test.py::test[column_group-insert_diff_groups1_fail--Analyze] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Debug] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Plan] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Results] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Plan] [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[compute_range-in2-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-in2-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-in2-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-in2-default.txt-Plan] [SKIPPED] >> test.py::test[join-pullup_renaming-off-Debug] [GOOD] >> test.py::test[join-pullup_renaming-off-ForceBlocks] [SKIPPED] >> test.py::test[join-pullup_renaming-off-Plan] [GOOD] >> test.py::test[join-pullup_renaming-off-Results] [GOOD] >> test.py::test[join-right_trivial--Analyze] >> test.py::test[pg-select_literals-default.txt-Results] [GOOD] >> test.py::test[action-eval_result_label--Debug] [GOOD] >> test.py::test[action-eval_result_label--ForceBlocks] >> test.py::test[expr-list_concat-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_concat-default.txt-Plan] [GOOD] >> test.py::test[expr-list_concat-default.txt-Results] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Analyze] >> test.py::test[action-eval_like--Debug] [GOOD] >> test.py::test[action-eval_like--ForceBlocks] >> test.py::test[expr-struct_builtins-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Results] >> test.py::test[compute_range-in2-default.txt-Results] [SKIPPED] >> test.py::test[count-count--Analyze] >> test.py::test[pg-select_qstarref1-default.txt-Analyze] >> test.py::test[json-json_value/on_error-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Analyze] >> test.py::test[produce-reduce_with_presort_diff_order--Debug] [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--Plan] [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--Results] >> test.py::test[aggregate-subquery_aggregation--Debug] [GOOD] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] >> test.py::test[action-define_simple_action-default.txt-Results] [GOOD] >> test.py::test[action-eval_capture--Analyze] >> test.py::test[join-inmem_with_set_key--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Debug] >> test.py::test[pg-join_using3-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using3-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] Test command err: 2024-11-21T08:47:19.516363Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:19.573203Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:19.605063Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting 
blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708536Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708545Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708558Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708558Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708567Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708575Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708604Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708609Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:22.708614Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2024-11-21T08:47:35.299732Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299732Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299747Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299755Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299763Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299769Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299770Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299776Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299777Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299779Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 
2024-11-21T08:47:35.299786Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299787Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299795Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299798Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2024-11-21T08:47:35.299806Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } >> test.py::test[count-count_all_grouped-empty-Debug] [GOOD] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] >> test.py::test[pg_catalog-pg_proc-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Debug] >> test.py::test[count-count_nullable--Debug] [GOOD] >> test.py::test[count-count_nullable--ForceBlocks] >> test.py::test[produce-reduce_multi_in_keytuple--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] >> test.py::test[produce-reduce_with_presort_diff_order--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--Plan] [GOOD] >> test.py::test[produce-reduce_with_presort_diff_order--Results] >> test.py::test[table_range-concat_sorted_max_tables--Plan] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[expr-struct_literal_bind-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_literal_bind-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test.py::test[join-lookupjoin_semi--Analyze] >> test.py::test[tpch-q7-default.txt-Debug] [GOOD] >> test.py::test[tpch-q7-default.txt-Plan] [GOOD] >> test.py::test[tpch-q7-default.txt-Results] >> test.py::test[expr-struct_builtins-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Analyze] >> test.py::test[aggregate-null_type-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-null_type-default.txt-Debug] >> test.py::test[type_v3-ignore_v3_hint-opt-Debug] [GOOD] >> test.py::test[count-count_by_nulls--Debug] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] >> test.py::test[count-count_by_nulls--Plan] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Debug] [GOOD] >> test.py::test[expr-list_concat-default.txt-Results] [GOOD] >> test.py::test[expr-list_extend-default.txt-Analyze] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Debug] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[join-join_comp_common_table-off-Analyze] [GOOD] >> test.py::test[join-join_comp_common_table-off-Debug] >> test.py::test[pg-select_from_columns_qstar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[join-equi_join_three_asterisk-off-Debug] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-ForceBlocks] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk-off-Plan] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Results] [GOOD] >> 
test.py::test[action-eval_unresolved_type_arg-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_pure--Debug] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[join-left_join_with_self_aggr-off-Debug] [SKIPPED] >> test.py::test[join-left_join_with_self_aggr-off-Plan] [SKIPPED] >> test.py::test[join-left_join_with_self_aggr-off-Results] [SKIPPED] >> test.py::test[join-left_semi_with_other--Debug] >> test.py::test[join-inner_all-off-Analyze] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Analyze] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Analyze] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Debug] >> test.py::test[action-eval_result_label--ForceBlocks] [GOOD] >> test.py::test[action-eval_result_label--Plan] [GOOD] >> test.py::test[action-eval_result_label--Results] >> test.py::test[key_filter-empty_range_over_dynamic--Analyze] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Debug] >> test.py::test[join-right_trivial--Analyze] [GOOD] >> test.py::test[join-right_trivial--Debug] >> test.py::test[pg-select_qstarref1-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Debug] >> test.py::test[action-eval_capture--Analyze] [GOOD] >> test.py::test[action-eval_capture--Debug] >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Plan] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[action-eval_like--ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Analyze] >> test.py::test[action-eval_like--Plan] [GOOD] >> test.py::test[action-eval_like--Results] >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[table_range-range_with_view--Analyze] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped-empty-Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Debug] [GOOD] >> test.py::test[count-count--Analyze] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] >> test.py::test[blocks-date_top_sort--ForceBlocks] [GOOD] >> test.py::test[count-count--Debug] >> test.py::test[blocks-date_top_sort--Plan] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[count-count_all_grouped-empty-Results] >> test.py::test[pg-join_using3-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Debug] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Plan] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_scalar-default.txt-Debug] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp >> test.py::test[count-count_nullable--ForceBlocks] [GOOD] >> test.py::test[count-count_nullable--Plan] [GOOD] >> test.py::test[count-count_nullable--Results] >> test.py::test[expr-struct_literal_bind-default.txt-ForceBlocks] 
[GOOD] >> test.py::test[expr-struct_literal_bind-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_literal_bind-default.txt-Results] >> test.py::test[pg-join_using3-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using3-default.txt-Results] >> test.py::test[join-lookupjoin_semi--Analyze] [GOOD] >> test.py::test[join-lookupjoin_semi--Debug] >> test.py::test[datetime-date_cast-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_cast-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_cast-default.txt-Results] |86.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp >> test.py::test[expr-list_extend-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_result_label--Results] [GOOD] >> test.py::test[expr-list_extend-default.txt-Debug] >> test.py::test[action-eval_table_with_view-default.txt-Analyze] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Debug] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Plan] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] >> test.py::test[join-inner_trivial_from_concat--Debug] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Plan] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Results] >> test.py::test[join-inner_all-off-Analyze] [GOOD] >> test.py::test[join-inner_all-off-Debug] >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] |86.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |86.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Plan] [GOOD] >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-nested_subquery--Analyze] >> test.py::test[join-lookupjoin_semi_1o--Analyze] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Debug] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Analyze] >> test.py::test[expr-struct_literal_bind-default.txt-Results] [GOOD] >> test.py::test[expr-xor-default.txt-Analyze] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a >> test.py::test[action-eval_capture--Debug] [GOOD] >> test.py::test[action-eval_capture--ForceBlocks] >> test.py::test[tpch-q7-default.txt-Results] [GOOD] >> test.py::test[type_v3-type_subset--Debug] [SKIPPED] >> test.py::test[type_v3-type_subset--Plan] [SKIPPED] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Debug] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Plan] >> test.py::test[pg-select_qstarref1-default.txt-Debug] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] >> test.py::test[pg_catalog-pg_proc-default.txt-Debug] [GOOD] >> test.py::test[aggregate-null_type-default.txt-Debug] [GOOD] >> test.py::test[aggregate-null_type-default.txt-ForceBlocks] >> test.py::test[action-evaluate_pure--Debug] [GOOD] >> test.py::test[action-evaluate_pure--Plan] [GOOD] >> test.py::test[action-evaluate_pure--Results] 
>> test.py::test[udf-named_args_for_script_with_posargs2--Plan] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [SKIPPED] >> test.py::test[udf-udaf_distinct--Debug] [SKIPPED] >> test.py::test[udf-udaf_distinct--Plan] [SKIPPED] >> test.py::test[udf-udaf_distinct--Results] [SKIPPED] >> test.py::test[udf-wrong_args_fail--Debug] [SKIPPED] >> test.py::test[udf-wrong_args_fail--Plan] [SKIPPED] >> test.py::test[udf-wrong_args_fail--Results] >> test.py::test[pg-select_from_columns_scalar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_from_columns_scalar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_from_columns_scalar-default.txt-Results] >> test.py::test[join-right_trivial--Debug] [GOOD] >> test.py::test[join-right_trivial--ForceBlocks] >> test.py::test[action-dep_world_quote_code-default.txt-Debug] >> test.py::test[pg_catalog-pg_proc-default.txt-ForceBlocks] >> test.py::test[key_filter-empty_range_over_dynamic--Debug] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] >> test.py::test[tpch-q19-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q19-default.txt-Debug] >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Analyze] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-ForceBlocks] >> test.py::test[table_range-range_with_view--Analyze] [GOOD] >> test.py::test[pg-join_using3-default.txt-Results] [GOOD] >> test.py::test[pg-name--Analyze] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Debug] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_where-default.txt-Debug] >> test.py::test[produce-reduce_lambda-default.txt-Results] >> test.py::test[table_range-range_with_view--Debug] >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[csee-expr_in_l1-default.txt-Analyze] >> test.py::test[blocks-combine_all_avg--Debug] [GOOD] >> test.py::test[blocks-combine_all_avg--Plan] [GOOD] >> test.py::test[blocks-combine_all_avg--Results] >> test.py::test[expr-list_extend-default.txt-Debug] [GOOD] >> test.py::test[expr-list_extend-default.txt-ForceBlocks] >> test.py::test[action-eval_table_with_view-default.txt-Analyze] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Analyze] >> test.py::test[join-lookupjoin_semi--Debug] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Debug] [GOOD] >> test.py::test[join-lookupjoin_semi--ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-Plan] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-partial_blocks1--Analyze] >> test.py::test[join-split_to_list_as_key-off-Analyze] >> test.py::test[action-eval_table_with_view-default.txt-Debug] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Analyze] [GOOD] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Debug] >> test.py::test[action-eval_capture--ForceBlocks] [GOOD] >> test.py::test[action-eval_capture--Plan] [GOOD] >> 
test.py::test[action-eval_capture--Results] >> test.py::test[action-nested_subquery--Analyze] [GOOD] >> test.py::test[action-nested_subquery--Debug] >> test.py::test[pg-select_from_columns_scalar-default.txt-Results] [GOOD] >> test.py::test[pg-select_having-default.txt-Debug] >> test.py::test[expr-xor-default.txt-Analyze] [GOOD] >> test.py::test[expr-xor-default.txt-Debug] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-Analyze] >> test.py::test[join-inner_all-off-Debug] [GOOD] >> test.py::test[join-inner_all-off-ForceBlocks] [SKIPPED] >> test.py::test[join-inner_all-off-Plan] [GOOD] >> test.py::test[join-inner_all-off-Results] [GOOD] >> test.py::test[join-join_table_conflict_fail--Analyze] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Debug] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o--Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] >> test.py::test[join-inner_trivial_from_concat--Results] [GOOD] >> test.py::test[join-join_right_cbo--Debug] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp >> test.py::test[join-join_table_conflict_fail--ForceBlocks] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Plan] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Results] >> test.py::test[action-evaluate_pure--Results] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Analyze] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Debug] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Plan] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp >> test.py::test[udf-wrong_args_fail--Results] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Debug] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Debug] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Plan] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Results] >> test.py::test[pg-name--Analyze] [GOOD] >> test.py::test[pg-name--Debug] >> test.py::test[join-right_trivial--ForceBlocks] [GOOD] >> test.py::test[join-right_trivial--Plan] >> test.py::test[action-eval_capture--Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Analyze] >> test.py::test[join-lookupjoin_semi--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi--Plan] [GOOD] >> test.py::test[join-lookupjoin_semi--Results] >> test.py::test[join-join_comp_common_table-off-Debug] [GOOD] >> test.py::test[join-join_comp_common_table-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_comp_common_table-off-Plan] [GOOD] >> test.py::test[join-join_comp_common_table-off-Results] [GOOD] >> test.py::test[join-join_table_conflict_fail-off-Analyze] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-off-Debug] [SKIPPED] >> test.py::test[datetime-date_cast-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Debug] >> 
test.py::test[expr-to_hashed_set_list_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Plan] >> test.py::test[join-right_trivial--Plan] [GOOD] >> test.py::test[join-right_trivial--Results] >> test.py::test[csee-expr_in_l1-default.txt-Analyze] [GOOD] >> test.py::test[csee-expr_in_l1-default.txt-Debug] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp >> test.py::test[aggregate-null_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-null_type-default.txt-Plan] [GOOD] >> test.py::test[aggregate-null_type-default.txt-Results] >> test.py::test[join-left_semi_with_other--Debug] [GOOD] >> test.py::test[join-left_semi_with_other--Plan] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[join-join_table_conflict_fail-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-off-Plan] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-off-Results] >> test.py::test[blocks-partial_blocks1--Analyze] [GOOD] >> test.py::test[blocks-partial_blocks1--Debug] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] >> test.py::test[count-count--Debug] [GOOD] >> test.py::test[count-count--ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_with_native--Analyze] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Debug] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [GOOD] >> test.py::test[produce-reduce_with_python_row-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_python_row-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_with_python_row-default.txt-Results] [SKIPPED] >> test.py::test[sampling-bind_expr_udf--Debug] >> test.py::test[join-split_to_list_as_key-off-Analyze] [GOOD] >> test.py::test[join-split_to_list_as_key-off-Debug] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Debug] [GOOD] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-ForceBlocks] >> test.py::test[expr-list_extend-default.txt-ForceBlocks] [GOOD] |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp >> test.py::test[table_range-range_with_view--Debug] [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] >> test.py::test[action-eval_table_with_view-default.txt-Debug] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] >> test.py::test[expr-list_extend-default.txt-Plan] [GOOD] >> test.py::test[expr-list_extend-default.txt-Results] >> test.py::test[tpch-q19-default.txt-Debug] [GOOD] >> test.py::test[tpch-q19-default.txt-ForceBlocks] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Debug] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Plan] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[join-join_without_correlation_and_struct_access--Analyze] >> test.py::test[expr-xor-default.txt-Debug] [GOOD] >> test.py::test[expr-xor-default.txt-ForceBlocks] >> test.py::test[column_order-select_where-default.txt-Debug] [GOOD] >> test.py::test[column_order-select_where-default.txt-Plan] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] >> test.py::test[action-nested_subquery--Debug] [GOOD] >> test.py::test[action-nested_subquery--ForceBlocks] >> test.py::test[pg-select_having-default.txt-Debug] [GOOD] >> 
test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] [GOOD] >> test.py::test[pg-select_having-default.txt-Plan] [GOOD] >> test.py::test[pg-select_having-default.txt-Results] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Debug] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q14-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q14-default.txt-Debug] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Plan] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Analyze] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Debug] >> test.py::test[pg_catalog-pg_proc-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Results] >> test.py::test[sampling-read--Analyze] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-tzdate--Analyze] [SKIPPED] >> test.py::test[key_filter-tzdate--Debug] [SKIPPED] >> test.py::test[key_filter-tzdate--ForceBlocks] >> test.py::test[solomon-Subquery-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Subquery-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_with_view--Debug] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-ForceBlocks] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] [GOOD] >> test.py::test[file-file_list_simple--Analyze] >> test.py::test[key_filter-tzdate--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-tzdate--Plan] [SKIPPED] >> test.py::test[key_filter-tzdate--Results] [SKIPPED] >> test.py::test[key_filter-yql-14157--Analyze] >> TPDiskRaces::Decommit [GOOD] >> TPDiskRaces::DecommitWithInflight >> test.py::test[weak_field-hor_join_with_mix_weak_access--Debug] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Plan] [GOOD] >> test.py::test[expr-list_extend-default.txt-Results] [GOOD] >> test.py::test[expr-non_persistable_inner_select_fail--Analyze] [SKIPPED] >> test.py::test[expr-non_persistable_inner_select_fail--Debug] [SKIPPED] >> test.py::test[expr-non_persistable_inner_select_fail--ForceBlocks] [SKIPPED] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Debug] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Plan] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] >> test.py::test[join-join_table_conflict_fail-off-Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Analyze] >> test.py::test[expr-non_persistable_inner_select_fail--Plan] [SKIPPED] >> test.py::test[expr-non_persistable_inner_select_fail--Results] >> test.py::test[csee-expr_in_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-expr_in_l1-default.txt-ForceBlocks] >> test.py::test[action-eval_if_guard-default.txt-Analyze] [GOOD] >> 
test.py::test[action-eval_if_guard-default.txt-Debug] >> test.py::test[blocks-partial_blocks1--Debug] [GOOD] >> test.py::test[blocks-partial_blocks1--ForceBlocks] >> test.py::test[aggregate-null_type-default.txt-Results] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-Analyze] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-name--Debug] [GOOD] >> test.py::test[pg-name--ForceBlocks] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> test.py::test[compute_range-startswith-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-startswith-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-startswith-default.txt-Results] [SKIPPED] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Debug] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-Analyze] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Plan] [GOOD] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Results] >> test.py::test[expr-xor-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-xor-default.txt-Plan] [GOOD] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Analyze] >> test.py::test[join-split_to_list_as_key-off-Debug] [GOOD] >> test.py::test[join-split_to_list_as_key-off-ForceBlocks] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off-Plan] [GOOD] >> test.py::test[join-split_to_list_as_key-off-Results] [GOOD] >> test.py::test[expr-xor-default.txt-Results] >> test.py::test[join-join_right_cbo--Debug] [GOOD] >> test.py::test[join-join_right_cbo--Plan] >> test.py::test[pg-select_having-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_full_one-default.txt-Debug] >> test.py::test[key_filter-convert--Analyze] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Plan] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[join-join_right_cbo--Plan] [GOOD] >> test.py::test[join-join_right_cbo--Results] >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Debug] >> test.py::test[action-nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-nested_subquery--Plan] [GOOD] >> test.py::test[action-nested_subquery--Results] >> test.py::test[join-join_without_correlation_and_struct_access--Analyze] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Debug] >> test.py::test[sampling-read--Analyze] [GOOD] >> test.py::test[sampling-read--Debug] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-runtime_type_kind-default.txt-Debug] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [GOOD] >> test.py::test[column_order-join_nosimple--Analyze] [SKIPPED] >> test.py::test[column_order-join_nosimple--Debug] [SKIPPED] >> test.py::test[column_order-join_nosimple--ForceBlocks] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Debug] >> test.py::test[file-file_list_simple--Analyze] [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] [GOOD] >> test.py::test[file-file_list_simple--Debug] >> test.py::test[table_range-range_with_view--Plan] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> 
test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr--Debug] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Plan] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_each_input_table-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Analyze] >> test.py::test[column_order-join_nosimple--ForceBlocks] [SKIPPED] >> test.py::test[column_order-join_nosimple--Plan] [SKIPPED] >> test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[tpch-q14-default.txt-Debug] [GOOD] >> test.py::test[tpch-q14-default.txt-ForceBlocks] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] >> test.py::test[bigdate-bitcast_datetime64-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Results] >> test.py::test[compute_range-decimal-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Results] [SKIPPED] >> test.py::test[count-count_nullable_sub-default.txt-Debug] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Plan] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[sampling-bind_expr_udf--Debug] [GOOD] >> test.py::test[sampling-bind_expr_udf--Plan] [GOOD] >> test.py::test[sampling-bind_expr_udf--Results] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Debug] >> test.py::test[expr-non_persistable_inner_select_fail--Results] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Analyze] >> test.py::test[action-eval_if_guard-default.txt-Debug] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] >> test.py::test[compute_range-multiply_limit_for_single_key-default.txt-Analyze] [SKIPPED] >> test.py::test[compute_range-multiply_limit_for_single_key-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-multiply_limit_for_single_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[compute_range-multiply_limit_for_single_key-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-multiply_limit_for_single_key-default.txt-Results] [SKIPPED] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Analyze] >> test.py::test[type_v3-insert_struct_v3_with_native--Debug] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Results] [GOOD] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Analyze] >> test.py::test[csee-expr_in_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-expr_in_l1-default.txt-Plan] [GOOD] >> 
test.py::test[csee-expr_in_l1-default.txt-Results] >> test.py::test[key_filter-yql-14157--Analyze] [GOOD] >> test.py::test[key_filter-yql-14157--Debug] >> test.py::test[expr-xor-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Analyze] >> test.py::test[tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Plan] >> test.py::test[join-lookupjoin_semi_2o--Analyze] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Debug] >> test.py::test[aggregate-parsetype_constness-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-Debug] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Debug] [GOOD] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Plan] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[tpch-q19-default.txt-Plan] [GOOD] >> test.py::test[action-pass_subquery_as_param-default.txt-Analyze] >> test.py::test[tpch-q19-default.txt-Results] >> test.py::test[pg-select_between-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_1o-off-Analyze] [GOOD] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-Debug] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Results] >> test.py::test[blocks-partial_blocks1--ForceBlocks] [GOOD] >> test.py::test[blocks-partial_blocks1--Plan] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Analyze] >> test.py::test[pg_catalog-pg_proc-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Analyze] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_inner--Debug] >> test.py::test[join-star_join_inners_vk_sorted--Analyze] [GOOD] >> test.py::test[blocks-partial_blocks1--Plan] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] >> test.py::test[produce-reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Debug] >> test.py::test[pg-select_join_full_one-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_full_one-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_full_one-default.txt-Results] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Results] [GOOD] >> test.py::test[bigdate-presort-default.txt-Analyze] >> test.py::test[join-star_join_inners_vk_sorted--Debug] >> test.py::test[count-count--ForceBlocks] [GOOD] >> test.py::test[count-count--Plan] [GOOD] >> test.py::test[count-count--Results] >> test.py::test[key_filter-convert--Analyze] [GOOD] >> test.py::test[key_filter-convert--Debug] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Debug] [GOOD] >> test.py::test[pg-select_sort_project_same_asc-default.txt-ForceBlocks] >> test.py::test[csee-expr_in_l1-default.txt-Results] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Analyze] >> test.py::test[action-runtime_type_kind-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_type_kind-default.txt-Plan] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Analyze] [GOOD] >> test.py::test[pg-name--ForceBlocks] [GOOD] >> test.py::test[pg-name--Plan] [GOOD] >> test.py::test[pg-name--Results] >> test.py::test[action-runtime_type_kind-default.txt-Results] >> test.py::test[table_range-concat_with_view--Debug] [GOOD] >> test.py::test[table_range-concat_with_view--Plan] [GOOD] >> test.py::test[table_range-concat_with_view--Results] >> 
test.py::test[csee-same_free_closure_size2_expr-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Plan] [GOOD] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Debug] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[join-lookupjoin_semi_empty-off-Debug] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Analyze] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Debug] >> test.py::test[sampling-read--Debug] [GOOD] >> test.py::test[sampling-read--ForceBlocks] >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Debug] >> test.py::test[csee-expr_in_l1_and_l0-default.txt-Results] [GOOD] >> test.py::test[csee-same_complete_l2-default.txt-Debug] >> test.py::test[distinct-distinct_window-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Results] >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Debug] >> test.py::test[file-file_list_simple--Debug] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Plan] >> test.py::test[file-file_list_simple--ForceBlocks] >> test.py::test[flatten_by-flatten_few_fields--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Debug] >> test.py::test[action-eval_if_guard-default.txt-Plan] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Analyze] [GOOD] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Debug] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test.py::test[tpch-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q14-default.txt-Plan] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[tpch-q12-default.txt-Analyze] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-Debug] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-ForceBlocks] >> test.py::test[action-pass_subquery_as_param-default.txt-Analyze] [GOOD] >> test.py::test[action-pass_subquery_as_param-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_2o--Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] >> test.py::test[blocks-string_len_and_cmp--Analyze] >> test.py::test[join-join_without_correlation_and_struct_access--Debug] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] >> test.py::test[pg-select_between-default.txt-Debug] [GOOD] >> test.py::test[pg-select_between-default.txt-Plan] [GOOD] >> test.py::test[pg-select_between-default.txt-Results] >> test.py::test[action-runtime_type_kind-default.txt-Results] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Debug] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Analyze] [GOOD] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Debug] >> test.py::test[agg_apply-min-default.txt-Debug] >> 
test.py::test[join-lookupjoin_semi_1o-off-Debug] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[csee-complete_l2-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_1o-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Plan] >> test.py::test[pg-select_sort_project_same_asc-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Plan] >> test.py::test[join-lookupjoin_semi_1o-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Analyze] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Plan] [GOOD] >> test.py::test[pg-select_sort_project_same_asc-default.txt-Results] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_filter--Debug] >> test.py::test[key_filter-yql-14157--Debug] [GOOD] >> test.py::test[key_filter-yql-14157--ForceBlocks] >> test.py::test[bigdate-presort-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-presort-default.txt-Debug] >> test.py::test[csee-l2_dup_l1-default.txt-Analyze] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Debug] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Analyze] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[action-nested_action-default.txt-Analyze] >> test.py::test[produce-reduce_multi_in-empty-Debug] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Plan] >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Analyze] >> test.py::test[key_filter-convert--Debug] [GOOD] >> test.py::test[key_filter-convert--ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Plan] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Debug] [GOOD] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_empty-off-Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Analyze] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_presort_stream-default.txt-Analyze] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] >> test.py::test[csee-same_complete_l2-default.txt-Debug] [GOOD] >> test.py::test[csee-same_complete_l2-default.txt-Plan] [GOOD] >> test.py::test[csee-same_complete_l2-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted--Debug] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] >> test.py::test[pg-select_between-default.txt-Results] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Debug] >> test.py::test[produce-reduce_with_python_presort_stream-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream-default.txt-Plan] [SKIPPED] >> 
test.py::test[produce-reduce_with_python_presort_stream-default.txt-Results] [SKIPPED] >> test.py::test[schema-copy-read_schema-Analyze] >> test.py::test[join-lookupjoin_inner--Debug] [GOOD] >> test.py::test[join-lookupjoin_inner--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased >> test.py::test[pg-select_sort_project_same_asc-default.txt-Results] [GOOD] >> test.py::test[pg-select_table2-default.txt-Analyze] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Debug] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-ForceBlocks] >> test.py::test[sampling-read--ForceBlocks] [GOOD] >> test.py::test[sampling-read--Plan] [GOOD] >> test.py::test[sampling-read--Results] >> test.py::test[action-pass_subquery_as_param-default.txt-Debug] [GOOD] >> test.py::test[action-pass_subquery_as_param-default.txt-ForceBlocks] >> test.py::test[file-file_list_simple--ForceBlocks] [GOOD] >> test.py::test[file-file_list_simple--Plan] [GOOD] >> test.py::test[file-file_list_simple--Results] >> test.py::test[tpch-q12-default.txt-Analyze] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Analyze] [GOOD] >> test.py::test[tpch-q12-default.txt-Debug] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Debug] [GOOD] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-ForceBlocks] >> test.py::test[agg_apply-min-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-min-default.txt-Plan] >> test.py::test[blocks-combine_hashed_avg--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing-all_fail-Debug] [SKIPPED] >> test.py::test[table_range-each_with_non_existing-all_fail-Plan] >> test.py::test[sampling-bind_small_rate-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] >> test.py::test[blocks-string_len_and_cmp--Debug] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-ForceBlocks] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[tpch-q8-default.txt-Analyze] >> test.py::test[agg_apply-min-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-min-default.txt-Results] >> test.py::test[table_range-each_with_non_existing-all_fail-Plan] [SKIPPED] >> test.py::test[csee-complete_l2-default.txt-Debug] [GOOD] >> test.py::test[csee-complete_l2-default.txt-Plan] [GOOD] >> test.py::test[table_range-each_with_non_existing-all_fail-Results] >> test.py::test[csee-complete_l2-default.txt-Results] >> test.py::test[action-eval_taggedtype-default.txt-Debug] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-ForceBlocks] >> test.py::test[aggregate-parsetype_constness-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-Plan] [GOOD] >> test.py::test[aggregate-parsetype_constness-default.txt-Results] >> test.py::test[join-mapjoin_with_empty_struct-off-Analyze] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Debug] >> test.py::test[csee-same_complete_l2-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_scale-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] [GOOD] >> 
test.py::test[join-lookupjoin_semi_2o--Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test.py::test[flatten_by-flatten_few_fields--Debug] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] >> test.py::test[pg-select_join_full_one-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_inner3-default.txt-Debug] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Plan] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test.py::test[action-eval_filter--Debug] [GOOD] >> test.py::test[action-eval_filter--Plan] [GOOD] >> test.py::test[action-eval_filter--Results] >> test.py::test[csee-l2_dup_l1-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Debug] >> test.py::test[type_v3-decimal_yt_llvm--Analyze] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Debug] >> test.py::test[csee-l2_dup_l1-default.txt-ForceBlocks] >> test.py::test[pg-order_by_input_columns-default.txt-Analyze] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Debug] >> test.py::test[action-nested_action-default.txt-Analyze] [GOOD] >> test.py::test[action-nested_action-default.txt-Debug] >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-json--Analyze] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Plan] [GOOD] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_star-off-Analyze] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Debug] >> test.py::test[pg-select_is_null-default.txt-Debug] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Results] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce--Analyze] >> test.py::test[schema-copy-read_schema-Analyze] [GOOD] >> test.py::test[schema-copy-read_schema-Debug] >> test.py::test[file-file_list_simple--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Analyze] >> test.py::test[pg-select_table2-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_table2-default.txt-Debug] >> test.py::test[key_filter-convert--ForceBlocks] [GOOD] >> test.py::test[key_filter-convert--Plan] [GOOD] >> test.py::test[key_filter-convert--Results] >> test.py::test[agg_apply-min-default.txt-Results] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Debug] >> test.py::test[csee-complete_l2-default.txt-Results] [GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Debug] >> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD] >> test.py::test[expr-as_dict_list_key-default.txt-Debug] >> test.py::test[bigdate-presort-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[sampling-map--Debug] >> test.py::test[aggregate-parsetype_constness-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-Analyze] [SKIPPED] >> test.py::test[key_filter-yql-14157--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Debug] [GOOD] >> 
test.py::test[join-join_without_correlation_and_struct_access--Plan] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test.py::test[action-pass_subquery_as_param-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-pass_subquery_as_param-default.txt-Plan] [GOOD] >> test.py::test[action-pass_subquery_as_param-default.txt-Results] >> test.py::test[bigdate-presort-default.txt-ForceBlocks] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Debug] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Plan] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Results] >> test.py::test[bigdate-table_yt_key_filter-default-Debug] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt-Analyze] >> test.py::test[key_filter-yql-14157--Plan] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing-all_fail-Results] [GOOD] >> test.py::test[join-mapjoin_dup_key--Debug] >> test.py::test[table_range-range_over_filter--Debug] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Plan] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Results] >> test.py::test[table_range-range_over_filter--Plan] [SKIPPED] >> test.py::test[table_range-range_over_filter--Results] [SKIPPED] >> test.py::test[tpch-q14-default.txt-Debug] >> test.py::test[action-eval_taggedtype-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Plan] [GOOD] >> test.py::test[action-eval_taggedtype-default.txt-Results] >> test.py::test[csee-same_free_closure_size2_expr-default.txt-Results] [GOOD] >> test.py::test[csee-same_l1-default.txt-Analyze] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Plan] [GOOD] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Results] >> test.py::test[datetime-date_tz_scale-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_tz_scale-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_scale-default.txt-Results] >> test.py::test[blocks-string_len_and_cmp--Debug] [GOOD] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] >> test.py::test[csee-l2_dup_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Results] >> test.py::test[pg-select_is_null-default.txt-Results] [GOOD] >> test.py::test[pg-select_literals-default.txt-Debug] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] >> 
test.py::test[type_v3-decimal_yt_llvm--Debug] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] >> test.py::test[tpch-q12-default.txt-Debug] [GOOD] >> test.py::test[tpch-q12-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Analyze] >> test.py::test[pg-order_by_input_columns-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-ForceBlocks] >> test.py::test[action-nested_action-default.txt-Debug] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Debug] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Plan] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Analyze] >> test.py::test[pg-select_join_inner3-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_inner3-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_inner3-default.txt-Results] >> test.py::test[action-nested_action-default.txt-ForceBlocks] >> test.py::test[type_v3-json--Analyze] [GOOD] >> test.py::test[type_v3-json--Debug] >> test.py::test[action-pass_subquery_as_param-default.txt-Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Analyze] >> test.py::test[window-generic/aggregations_include_current--Plan] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] >> test.py::test[key_filter-convert--Results] [GOOD] >> test.py::test[key_filter-empty_range--Analyze] >> test.py::test[pg_duplicated-order_by_with_duplicates-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Analyze] >> test.py::test[expr-as_dict_list_key-default.txt-Debug] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Analyze] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Debug] >> test.py::test[sampling-reduce--Analyze] [GOOD] >> test.py::test[sampling-reduce--Debug] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Plan] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Debug] >> test.py::test[expr-as_dict_list_key-default.txt-Plan] [GOOD] >> test.py::test[expr-as_dict_list_key-default.txt-Results] >> test.py::test[count-count--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Analyze] >> test.py::test[action-eval_taggedtype-default.txt-Results] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-Analyze] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Results] [GOOD] >> test.py::test[expr-static_zip-default.txt-Analyze] >> test.py::test[csee-const_body_diff_lambda-default.txt-Debug] [GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Plan] [GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Results] >> test.py::test[agg_phases-count_all_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Results] >> test.py::test[schema-copy-read_schema-Debug] [GOOD] >> test.py::test[schema-copy-read_schema-ForceBlocks] >> test.py::test[pg-select_table2-default.txt-Debug] [GOOD] >> 
test.py::test[pg-select_table2-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_star-off-Debug] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] >> test.py::test[action-eval_resourcetype-default.txt-Debug] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Plan] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Results] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Analyze] >> test.py::test[csee-nested_closure_in_l1_and_l2_unordered-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Analyze] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] >> test.py::test[join-left_trivial--Analyze] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-off-Plan] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Analyze] >> test.py::test[csee-same_l1-default.txt-Analyze] [GOOD] >> test.py::test[csee-same_l1-default.txt-Debug] >> test.py::test[action-eval_resourcetype-default.txt-Results] >> test.py::test[pg-select_literals-default.txt-Debug] [GOOD] >> test.py::test[pg-select_literals-default.txt-Plan] >> test.py::test[datetime-date_tz_scale-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Debug] >> test.py::test[pg-select_literals-default.txt-Plan] [GOOD] >> test.py::test[pg-select_literals-default.txt-Results] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Plan] >> test.py::test[bigdate-presort-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-presort-default.txt-Plan] [GOOD] >> test.py::test[bigdate-presort-default.txt-Results] >> test.py::test[binding-table_range_binding-default.txt-Analyze] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Debug] >> test.py::test[type_v3-decimal_yt_llvm--Plan] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test.py::test[expr-as_dict_list_key-default.txt-Results] [GOOD] >> test.py::test[sampling-map--Debug] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Debug] >> test.py::test[sampling-map--Plan] [GOOD] >> test.py::test[sampling-map--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Analyze] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Debug] >> test.py::test[pg-order_by_input_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Results] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Analyze] >> test.py::test[action-nested_action-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-nested_action-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[tpch-q8-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q8-default.txt-Debug] >> test.py::test[action-nested_action-default.txt-Results] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] >> test.py::test[join-left_join_null_column-off-Debug] [SKIPPED] >> test.py::test[join-left_join_null_column-off-Plan] [SKIPPED] >> 
test.py::test[join-left_join_null_column-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group--Analyze] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Debug] >> test.py::test[csee-const_body_diff_lambda-default.txt-Results] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Debug] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Debug] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Analyze] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Debug] >> test.py::test[join-left_join_right_pushdown_nested_right--Debug] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Plan] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Results] >> test.py::test[action-eval_resourcetype-default.txt-Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Debug] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1--Plan] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1--Results] >> test.py::test[type_v3-json--Debug] [GOOD] >> test.py::test[type_v3-json--ForceBlocks] >> test.py::test[file-where_key_in_get_file_content--Debug] [GOOD] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] >> test.py::test[pg-select_join_inner3-default.txt-Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Analyze] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Debug] >> test.py::test[pg-select_literals-default.txt-Results] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Debug] >> test.py::test[tpch-q14-default.txt-Debug] [GOOD] >> test.py::test[tpch-q14-default.txt-Plan] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test.py::test[join-mapjoin_dup_key--Debug] [GOOD] >> test.py::test[join-mapjoin_dup_key--Plan] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] >> test.py::test[pg-select_proj_ref_distinct_on_star-default.txt-Debug] >> test.py::test[action-runtime_format_free_args_code-default.txt-Analyze] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-Debug] >> test.py::test[expr-static_zip-default.txt-Analyze] [GOOD] >> test.py::test[expr-static_zip-default.txt-Debug] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Analyze] >> test.py::test[tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q12-default.txt-Plan] [GOOD] >> test.py::test[tpch-q12-default.txt-Results] >> test.py::test[pg-select_table2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_table2-default.txt-Plan] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test.py::test[join-left_trivial--Analyze] [GOOD] >> test.py::test[join-left_trivial--Debug] >> test.py::test[key_filter-empty_range--Analyze] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Analyze] [GOOD] >> test.py::test[schema-copy-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-copy-read_schema-Plan] [GOOD] >> test.py::test[schema-copy-read_schema-Results] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Analyze] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Debug] >> test.py::test[key_filter-empty_range--Debug] >> 
test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Analyze] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Debug] >> test.py::test[sampling-reduce--Debug] [GOOD] >> test.py::test[sampling-reduce--ForceBlocks] >> test.py::test[pg-pg_types_dict--Analyze] [SKIPPED] >> test.py::test[pg-pg_types_dict--Debug] [SKIPPED] >> test.py::test[pg-pg_types_dict--ForceBlocks] [SKIPPED] >> test.py::test[pg-pg_types_dict--Plan] [SKIPPED] >> test.py::test[pg-pg_types_dict--Results] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt-Analyze] >> test.py::test[distinct-distinct_count_only-default.txt-Debug] >> test.py::test[action-nested_action-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Analyze] >> test.py::test[sampling-map--Results] [GOOD] >> test.py::test[sampling-sample-default.txt-Debug] >> test.py::test[bigdate-presort-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Analyze] >> test.py::test[expr-as_dict_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Plan] >> test.py::test[csee-same_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-same_l1-default.txt-ForceBlocks] |86.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |86.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a >> test.py::test[expr-as_dict_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Results] >> test.py::test[flatten_by-flatten_with_group_by_expr--Analyze] |86.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a >> test.py::test[agg_apply-opt_len_count_all-default.txt-Debug] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Analyze] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count_all-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_simple-default.txt-Debug] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-ForceBlocks] >> test.py::test[join-opt_on_opt_side_with_group--Debug] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Debug] [GOOD] >> test.py::test[pg-select_proj_ref_distinct_on_star-default.txt-Debug] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Plan] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] >> test.py::test[pg-select_proj_ref_distinct_on_star-default.txt-Plan] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Plan] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[pg-select_proj_ref_distinct_on_star-default.txt-Results] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[expr-static_zip-default.txt-Debug] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Results] >> test.py::test[expr-static_zip-default.txt-ForceBlocks] >> test.py::test[action-runtime_quote_code-default.txt-Debug] >> test.py::test[join-mapjoin_dup_key-off-Debug] [SKIPPED] >> test.py::test[pg-select_plusminus_unary-default.txt-Debug] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Plan] [SKIPPED] >> 
test.py::test[blocks-combine_hashed_count--Debug] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Plan] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Plan] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--Debug] >> test.py::test[schema-insert-schema-Analyze] >> test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[action-runtime_format_free_args_code-default.txt-Debug] [GOOD] >> test.py::test[type_v3-json--ForceBlocks] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Plan] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Analyze] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Debug] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Debug] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Analyze] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Analyze] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-ForceBlocks] >> test.py::test[type_v3-json--Plan] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_wo_native--Debug] >> test.py::test[join-yql-14829_leftonly-off-Debug] >> test.py::test[type_v3-json--Results] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[tpch-q2-default.txt-Debug] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Analyze] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Debug] [SKIPPED] >> test.py::test[expr-as_struct_syntax-default.txt-Debug] >> test.py::test[binding-table_range_binding-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Plan] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt-Analyze] [GOOD] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Results] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt-Debug] >> test.py::test[sampling-map-keyfilter-Debug] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-generic/session_aliases--Debug] |86.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp >> test.py::test[action-subquery_merge1-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Debug] >> test.py::test[csee-same_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-left_trivial--Debug] [GOOD] >> test.py::test[csee-same_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-same_l1-default.txt-Results] >> test.py::test[join-left_trivial--ForceBlocks] >> test.py::test[distinct-distinct_count_only-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] >> test.py::test[datetime-all_timezones-default.txt-Results] [GOOD] >> 
test.py::test[datetime-date_diff_compare-default.txt-Debug] >> test.py::test[binding-table_from_binding-default.txt-Analyze] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Debug] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-ForceBlocks] >> test.py::test[pg-select_plusminus_unary-default.txt-Results] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Debug] >> test.py::test[agg_apply-opt_len_count_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Results] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Analyze] >> test.py::test[sampling-reduce--ForceBlocks] [GOOD] >> test.py::test[sampling-reduce--Plan] [GOOD] >> test.py::test[pg-select_proj_ref_distinct_on_star-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery_scalar-default.txt-Debug] >> test.py::test[action-runtime_quote_code-default.txt-Debug] [GOOD] >> test.py::test[sampling-reduce--Results] >> test.py::test[flatten_by-flatten_with_group_by_expr--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Debug] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Debug] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Debug] >> test.py::test[action-runtime_quote_code-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_quote_code-default.txt-Results] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Results] >> test.py::test[lambda-lambda_simple-default.txt-Debug] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] >> test.py::test[tpch-q8-default.txt-Debug] [GOOD] >> test.py::test[tpch-q8-default.txt-ForceBlocks] >> test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Analyze] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Debug] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Plan] [GOOD] >> test.py::test[schema-insert-schema-Analyze] [GOOD] >> test.py::test[schema-insert-schema-Debug] >> test.py::test[sampling-sample-default.txt-Debug] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Analyze] [GOOD] >> test.py::test[sampling-sample-default.txt-Plan] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Debug] >> test.py::test[sampling-sample-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [GOOD] >> test.py::test[join-mergejoin_small_primary--Analyze] >> test.py::test[action-runtime_format_free_args_code-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-Results] >> test.py::test[expr-static_zip-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-static_zip-default.txt-Plan] [GOOD] >> test.py::test[expr-static_zip-default.txt-Results] >> test.py::test[type_v3-json--Results] 
[GOOD] >> test.py::test[type_v3-mixed_with_columns--Analyze] >> test.py::test[expr-as_struct_syntax-default.txt-Debug] [GOOD] >> test.py::test[expr-as_struct_syntax-default.txt-Plan] >> test.py::test[csee-same_l1-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel_indep--Analyze] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Debug] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Plan] >> test.py::test[expr-as_struct_syntax-default.txt-Plan] [GOOD] >> test.py::test[expr-as_struct_syntax-default.txt-Results] >> test.py::test[agg_apply-opt_len_count_all-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_signed-default.txt-Analyze] >> test.py::test[join-opt_on_opt_side_with_group--Plan] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] >> test.py::test[join-left_join_right_pushdown_nested_right--Debug] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--Plan] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] >> test.py::test[agg_phases-count_all_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min-default.txt-Debug] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[blocks-date_add_interval--Debug] >> test.py::test[action-subquery_merge1-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Plan] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[action-runtime_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Debug] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Results] [GOOD] >> test.py::test[produce-process_sorted_desc_multi_out--Analyze] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Debug] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Plan] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt-Debug] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] >> test.py::test[join-yql-14829_leftonly-off-Debug] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Analyze] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Debug] >> test.py::test[produce-process_streaming-default.txt-Analyze] >> test.py::test[pg-select_sort_project_expr-default.txt-Debug] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Plan] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Results] >> test.py::test[binding-table_from_binding-default.txt-Debug] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] >> test.py::test[join-yql-14829_leftonly-off-ForceBlocks] [SKIPPED] >> test.py::test[action-runtime_format_free_args_code-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery_scalar-default.txt-Debug] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Plan] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Analyze] >> test.py::test[pg-select_subquery_scalar-default.txt-Plan] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Results] >> 
test.py::test[pg-select_subquery_scalar-default.txt-Results] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Plan] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Debug] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Plan] [GOOD] >> test.py::test[datetime-date_diff_compare-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_diff_compare-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_diff_compare-default.txt-Results] >> test.py::test[join-left_trivial--ForceBlocks] [GOOD] >> test.py::test[join-left_trivial--Plan] [GOOD] >> test.py::test[join-left_trivial--Results] >> test.py::test[join-yql-14829_leftonly-off-Results] [GOOD] >> test.py::test[join-yql-16011--Analyze] [SKIPPED] >> test.py::test[join-yql-16011--Debug] [SKIPPED] >> test.py::test[expr-static_zip-default.txt-Results] [GOOD] >> test.py::test[expr-variant_list_sort-default.txt-Analyze] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Results] >> test.py::test[sampling-map-keyfilter-Debug] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test.py::test[sampling-map-keyfilter-Plan] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] [GOOD] >> test.py::test[join-yql-16011--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Plan] >> test.py::test[join-yql-16011--Plan] [SKIPPED] >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[join-yql-4275-off-Analyze] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Results] >> test.py::test[sampling-map-keyfilter-Results] >> test.py::test[expr-as_struct_syntax-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Debug] >> test.py::test[expr-as_variant_enum-default.txt-Debug] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Plan] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Debug] >> test.py::test[join-mergejoin_small_primary--Analyze] [GOOD] >> test.py::test[join-mergejoin_small_primary--Debug] >> test.py::test[key_filter-empty_range--Debug] [GOOD] >> test.py::test[key_filter-empty_range--ForceBlocks] >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Plan] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> test.py::test[sampling-reduce--Results] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Analyze] >> test.py::test[type_v3-mixed_with_columns--Analyze] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Debug] >> test.py::test[type_v3-insert_struct_v3_wo_native--Debug] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] >> test.py::test[case-case_size_eq_cast-default.txt-Debug] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Analyze] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[binding-table_range_binding-default.txt-Plan] >> test.py::test[agg_apply-sum_signed-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-sum_signed-default.txt-Debug] >> test.py::test[pg-select_subquery_scalar-default.txt-Results] [GOOD] >> test.py::test[pg-select_union-default.txt-Debug] >> test.py::test[binding-table_range_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[pg-select_sort_project_expr-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] >> test.py::test[dq-precompute_parallel_indep--Analyze] [GOOD] >> test.py::test[dq-precompute_parallel_indep--Debug] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> test.py::test[schema-insert-schema-Debug] [GOOD] >> test.py::test[schema-insert-schema-ForceBlocks] >> test.py::test[datetime-date_diff_compare-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_cast-default.txt-Debug] >> test.py::test[produce-process_streaming-default.txt-Analyze] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Debug] >> test.py::test[flatten_by-flatten_with_group_by_expr--Debug] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] >> test.py::test[type_v3-decimal_yt_nollvm--Debug] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] >> test.py::test[action-subquery_orderby1-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Results] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_impossible_cast--Analyze] [SKIPPED] >> test.py::test[datetime-date_tz_impossible_cast--Debug] [SKIPPED] >> test.py::test[datetime-date_tz_impossible_cast--ForceBlocks] [SKIPPED] >> test.py::test[datetime-date_tz_impossible_cast--Plan] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Plan] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test.py::test[agg_phases-min-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Debug] >> test.py::test[expr-variant_list_sort-default.txt-Analyze] [GOOD] >> test.py::test[expr-variant_list_sort-default.txt-Debug] >> test.py::test[datetime-date_tz_impossible_cast--Results] >> test.py::test[agg_phases-min-default.txt-Results] >> test.py::test[distinct-distinct_count_only-default.txt-Results] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Debug] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Plan] >> test.py::test[distinct-distinct_join-default.txt-Analyze] >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-read--Debug] >> test.py::test[binding-table_from_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] >> test.py::test[join-yql-4275-off-Analyze] [GOOD] >> test.py::test[join-yql-4275-off-Debug] >> test.py::test[expr-as_variant_enum-default.txt-Plan] [GOOD] >> 
test.py::test[expr-as_variant_enum-default.txt-Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Analyze] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Debug] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Analyze] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Debug] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Analyze] >> test.py::test[type_v3-mixed_with_columns--Debug] [GOOD] >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--Debug] >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-Analyze] >> test.py::test[join-mergejoin_small_primary--Debug] [GOOD] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] >> test.py::test[join-left_trivial--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Analyze] >> test.py::test[aggregate-group_by_session_only_distinct--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Plan] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Plan] >> test.py::test[count-count_nullable_sub-default.txt-Analyze] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Debug] >> test.py::test[action-subquery_orderby1-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test.py::test[agg_apply-sum_signed-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_signed-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_one_count-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test.py::test[case-case_size_eq_cast-default.txt-Plan] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] >> test.py::test[pg-select_union-default.txt-Debug] [GOOD] >> test.py::test[pg-select_union-default.txt-Plan] >> test.py::test[blocks-add_uint64_opt2--Analyze] >> test.py::test[tpch-q8-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q8-default.txt-Plan] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Debug] >> test.py::test[sampling-subquery_filter-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-ForceBlocks] >> test.py::test[pg-select_union-default.txt-Plan] [GOOD] >> test.py::test[pg-select_union-default.txt-Results] >> test.py::test[expr-as_variant_enum-default.txt-Results] [GOOD] >> test.py::test[expr-cast_from_utf8-default.txt-Debug] >> test.py::test[datetime-date_tz_impossible_cast--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Analyze] >> test.py::test[tpch-q8-default.txt-Plan] [GOOD] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[tpch-q2-default.txt-Debug] [GOOD] >> test.py::test[tpch-q2-default.txt-Plan] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> 
test.py::test[blocks-date_add_interval--Debug] [GOOD] >> test.py::test[blocks-date_add_interval--Plan] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[sampling-subquery_filter-default.txt-Plan] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Results] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Plan] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB >> test.py::test[window-generic/session_aliases--Debug] [GOOD] >> test.py::test[window-generic/session_aliases--Plan] >> test.py::test[blocks-add_int32--Analyze] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-ForceBlocks] >> test.py::test[window-generic/session_aliases--Plan] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> test.py::test[expr-variant_list_sort-default.txt-Debug] [GOOD] >> test.py::test[expr-variant_list_sort-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming-default.txt-Debug] [GOOD] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_join-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Debug] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_common_type_union-default.txt-Analyze] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Plan] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Analyze] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Debug] >> test.py::test[datetime-date_tz_cast-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Plan] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> test.py::test[schema-insert-schema-ForceBlocks] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] [GOOD] >> test.py::test[sampling-read--Debug] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Analyze] >> test.py::test[datetime-date_tz_cast-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_cast-default.txt-Results] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary--Plan] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Plan] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] >> test.py::test[join-yql-4275-off-Debug] [GOOD] >> test.py::test[join-yql-4275-off-ForceBlocks] [SKIPPED] >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> test.py::test[join-yql-4275-off-Plan] [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction >> test.py::test[sampling-subquery_multiple_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] >> test.py::test[aggr_factory-bitor-default.txt-Analyze] [GOOD] 
>> test.py::test[aggr_factory-bitor-default.txt-Debug] >> test.py::test[schema-insert-schema-Plan] [GOOD] >> test.py::test[schema-insert-schema-Results] >> test.py::test[sampling-read--Plan] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-Analyze] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-Debug] >> test.py::test[sampling-read--Results] >> test.py::test[dq-precompute_parallel_indep--Debug] [GOOD] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] [SKIPPED] >> test.py::test[dq-precompute_parallel_indep--Plan] [GOOD] >> test.py::test[dq-precompute_parallel_indep--Results] >> test.py::test[join-mergejoin_small_primary--Plan] [GOOD] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[join-yql-4275-off-Results] [GOOD] >> test.py::test[json-json_value/on_error_cast_value_exception--Analyze] [SKIPPED] >> test.py::test[json-json_value/on_error_cast_value_exception--Debug] [SKIPPED] >> test.py::test[json-json_value/on_error_cast_value_exception--ForceBlocks] [SKIPPED] >> test.py::test[json-json_value/on_error_cast_value_exception--Plan] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-off-Analyze] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Debug] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Plan] >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> test.py::test[schema-limit_directread--Debug] >> test.py::test[dq-precompute_parallel_indep--Results] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Analyze] >> test.py::test[view-view_with_library--Analyze] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] [GOOD] >> test.py::test[json-json_value/on_error_cast_value_exception--Results] >> test.py::test[join-mergejoin_force_one_sorted-off-Analyze] >> test.py::test[blocks-add_uint64_opt2--Analyze] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Debug] >> test.py::test[type_v3-mixed_with_columns--Plan] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] >> test.py::test[agg_apply-sum_signed-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-sum_signed-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_signed-default.txt-Results] >> test.py::test[distinct-distinct_one_count-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Debug] >> test.py::test[pg-select_union-default.txt-Results] [GOOD] >> test.py::test[expr-cast_from_utf8-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_from_utf8-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_from_utf8-default.txt-Results] >> test.py::test[join-mapjoin_with_empty_struct--Debug] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--Plan] [GOOD] >> test.py::test[blocks-add_int32--Analyze] [GOOD] >> test.py::test[blocks-add_int32--Debug] >> test.py::test[pg-select_win_expr_order-default.txt-Debug] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test.py::test[count-count_nullable_sub-default.txt-Debug] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Plan] [GOOD] >> 
test.py::test[expr-variant_list_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-variant_list_sort-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Debug] [SKIPPED] >> test.py::test[dq-pool_trees_whitelist--Plan] [SKIPPED] >> test.py::test[dq-pool_trees_whitelist--Results] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Results] >> test.py::test[expr-variant_list_sort-default.txt-Results] >> test.py::test[pg-select_common_type_union-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_common_type_union-default.txt-Debug] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Debug] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Plan] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Debug] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Plan] >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr--Analyze] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Debug] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[type_v3-type_subset--Analyze] [SKIPPED] >> test.py::test[type_v3-type_subset--Debug] [SKIPPED] >> test.py::test[type_v3-type_subset--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-type_subset--Plan] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Plan] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-cast_string_implicit-default.txt-Debug] >> test.py::test[case-case_val_then_else-default.txt-Analyze] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Debug] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] [GOOD] >> test.py::test[agg_apply-pg_int4-default.txt-Debug] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[union-union_column_extention-default.txt-Analyze] >> test.py::test[agg_apply-sum_signed-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min_by_null-default.txt-Analyze] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-Debug] >> test.py::test[json-json_value/on_error_cast_value_exception--Results] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-Analyze] >> test.py::test[limit-limit_skip_take-default.txt-Debug] [GOOD] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming-default.txt-Plan] >> test.py::test[distinct-distinct_join-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[schema-insert-schema-Results] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Analyze] >> test.py::test[schema-read_schema_other--Analyze] >> test.py::test[schema-limit_directread--Debug] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Analyze] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Debug] >> test.py::test[view-view_with_library--Analyze] [GOOD] >> 
test.py::test[expr-cast_from_utf8-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Plan] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] >> test.py::test[schema-limit_directread--Plan] [GOOD] >> test.py::test[schema-limit_directread--Results] >> test.py::test[view-view_with_library--Debug] >> test.py::test[key_filter-empty_range--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range--Plan] >> test.py::test[expr-empty_list_ops1-default.txt-Debug] >> test.py::test[join-opt_on_opt_side_with_group-off-Debug] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-ForceBlocks] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group-off-Plan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2024-11-21T08:47:58.048349Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 ownerId# 5 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 5 ownerRound# 101 lsn# 18 PDiskId# 1 2024-11-21T08:47:58.631433Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 ownerId# 3 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 3 ownerRound# 101 lsn# 17 PDiskId# 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] Test command err: 2024-11-21T08:48:04.083993Z :BS_VDISK_GET CRIT: VDISK[0:_:0:0:0]: TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 
99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# [5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 
89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# [5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 
257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# [5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# [5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191:0:0:100000:1] sh# 257 sz# 99743 ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST >> test.py::test[key_filter-empty_range--Plan] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[agg_apply-min-default.txt-Analyze] >> test.py::test[expr-variant_list_sort-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Analyze] >> test.py::test[blocks-add_uint64_opt2--Debug] [GOOD] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery_scalar2-default.txt-Debug] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[blocks-date_less_scalar--Debug] >> test.py::test[join-opt_on_opt_side_with_group-off-Results] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Analyze] >> test.py::test[datetime-date_tz_cast-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Debug] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-Analyze] >> test.py::test[join-mergejoin_force_one_sorted-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Debug] >> test.py::test[join-lookupjoin_inner_1o-off-Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Plan] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-add_int32--Debug] [GOOD] >> test.py::test[blocks-add_int32--ForceBlocks] >> test.py::test[join-lookupjoin_inner_1o-off-Results] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Analyze] >> test.py::test[pg-select_win_expr_order-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_expr_order-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_expr_order-default.txt-Results] >> 
test.py::test[sampling-subquery_multiple_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Plan] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Results] >> test.py::test[distinct-distinct_one_count-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_align2--Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst--Debug] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Plan] >> test.py::test[agg_phases-min-default.txt-Results] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Debug] >> test.py::test[aggr_factory-bitor-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Debug] >> test.py::test[join-mergejoin_force_align3-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted--Debug] >> test.py::test[flatten_by-flatten_expr--Analyze] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Plan] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Plan] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Results] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[type_v3-json--Debug] >> test.py::test[expr-cast_string_implicit-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Results] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-select_all-row_spec-Debug] >> test.py::test[flatten_by-flatten_expr--Debug] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[agg_apply-pg_int4-default.txt-Debug] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-Analyze] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-Debug] >> test.py::test[agg_apply-pg_int4-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[agg_phases-min_by_null-default.txt-Analyze] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-min_by_null-default.txt-Debug] >> test.py::test[schema-read_schema_other--Analyze] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Debug] >> test.py::test[schema-read_schema_other--Debug] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-ForceBlocks] >> test.py::test[agg_apply-pg_int4-default.txt-Results] >> test.py::test[flatten_by-flatten_with_join--Analyze] >> test.py::test[case-case_val_then_else-default.txt-Debug] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] >> 
test.py::test[expr-empty_list_ops1-default.txt-Debug] [GOOD] >> test.py::test[expr-empty_list_ops1-default.txt-Plan] >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Analyze] >> test.py::test[udf-udaf_lambda-default.txt-Analyze] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Debug] >> test.py::test[expr-empty_list_ops1-default.txt-Plan] [GOOD] >> test.py::test[expr-empty_list_ops1-default.txt-Results] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Plan] >> test.py::test[agg_apply-min-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-min-default.txt-Debug] >> test.py::test[flatten_by-flatten_dict--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_dict--Debug] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Plan] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[pg-select_common_type_union-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery_scalar2-default.txt-Debug] [GOOD] >> test.py::test[pg-select_common_type_union-default.txt-ForceBlocks] >> test.py::test[pg-select_subquery_scalar2-default.txt-Plan] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-Plan] [SKIPPED] >> test.py::test[limit-limit_skip_take-default.txt-Results] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [SKIPPED] >> test.py::test[sampling-map-dynamic-Analyze] >> test.py::test[distinct-distinct_join-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-Analyze] [GOOD] >> test.py::test[view-view_with_library--Debug] [GOOD] >> test.py::test[view-view_with_library--ForceBlocks] >> test.py::test[join-mapjoin_sharded-default.txt-Debug] >> test.py::test[pg-select_subquery_scalar2-default.txt-Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Analyze] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Analyze] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Debug] >> test.py::test[join-mergejoin_small_primary-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-Debug] >> test.py::test[expr-cast_string_implicit-default.txt-Results] [GOOD] >> test.py::test[expr-constraints_of--Debug] >> test.py::test[blocks-add_int32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int32--Plan] [GOOD] >> test.py::test[blocks-add_int32--Results] >> test.py::test[pg-select_win_expr_order-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Debug] >> test.py::test[join-mergejoin_force_one_sorted-off-Debug] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-off-Plan] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Analyze] >> 
test.py::test[flatten_by-flatten_expr--Debug] [GOOD] >> test.py::test[flatten_by-flatten_expr--ForceBlocks] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test.py::test[agg_phases-sum-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Results] >> test.py::test[union-union_column_extention-default.txt-Debug] [GOOD] >> test.py::test[union-union_column_extention-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2024-11-21T08:47:57.421279Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421285Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421290Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:871:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421292Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421295Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:234:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421298Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421300Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:69:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421301Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:862:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421303Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421304Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421479Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421487Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:905:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421491Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421493Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421495Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421497Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:613:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421499Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:31:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421501Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421502Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:711:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421504Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421599Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421604Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421606Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421607Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421609Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421610Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421612Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421614Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421616Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421618Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421699Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421701Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421703Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421705Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421706Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421708Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421709Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421711Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421712Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421714Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421791Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:546:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421793Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421795Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421797Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421798Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421800Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421801Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421803Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421804Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421806Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421877Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421879Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421880Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421882Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421883Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421886Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421888Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421889Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421891Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421893Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:619:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421977Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421980Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421982Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421987Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421989Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421991Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421993Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421994Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421997Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.421998Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422070Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422072Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422074Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422075Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422078Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422080Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422082Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422084Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:229:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422086Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422088Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422157Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422159Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422161Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422163Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:6:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422164Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:103:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422166Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422168Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422170Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422171Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422174Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422248Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422250Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422251Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422253Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422255Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:720:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422257Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422258Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422260Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422262Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422263Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:580:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422359Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422364Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422366Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422367Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:958:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422369Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422371Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422372Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422374Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422375Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422376Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422444Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422446Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422447Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422448Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422450Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422452Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422453Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422455Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422457Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422458Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422526Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422527Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422529Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422531Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422532Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422534Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422535Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422536Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422538Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422539Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422586Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422588Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2024-11-21T08:47:57.422589Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 >> test.py::test[json-jsondocument/json_exists-default.txt-Debug] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-ForceBlocks] >> test.py::test[agg_apply-pg_int4-default.txt-Results] [GOOD] >> test.py::test[expr-empty_list_ops1-default.txt-Results] [GOOD] >> test.py::test[expr-list_comp-default.txt-Debug] >> test.py::test[pg-select_subquery_scalar2-default.txt-Results] [GOOD] >> test.py::test[pg-select_substring-default.txt-Debug] >> test.py::test[aggregate-histogram_cdf-default.txt-Debug] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Plan] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] >> 
test.py::test[schema-read_schema_other--Debug] [GOOD] >> test.py::test[schema-read_schema_other--ForceBlocks] >> test.py::test[expr-expr_add_literal_nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Results] >> test.py::test[agg_apply-sum_type-default.txt-Debug] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Analyze] >> test.py::test[flatten_by-flatten_with_join--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Debug] >> test.py::test[type_v3-ignore_v3_pragma--Analyze] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Debug] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Analyze] >> test.py::test[limit-limit_skip_take-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal3-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[type_v3-json--Debug] [GOOD] >> test.py::test[type_v3-json--Plan] >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Debug] >> test.py::test[lineage-select_all-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Debug] [SKIPPED] >> test.py::test[type_v3-json--Plan] [GOOD] >> test.py::test[type_v3-json--Results] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Plan] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Analyze] >> test.py::test[blocks-add_int32--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--Analyze] >> test.py::test[schema-select_all-row_spec_hide_sort-Analyze] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Debug] >> test.py::test[sampling-map-dynamic-Analyze] [GOOD] >> test.py::test[sampling-map-dynamic-Debug] >> test.py::test[lineage-topsort-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-simple_paritioning-default.txt-Analyze] >> test.py::test[udf-udaf_lambda-default.txt-Debug] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] >> test.py::test[agg_apply-min-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-min-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_no_sorted--Debug] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted--Plan] [GOOD] >> 
test.py::test[join-mergejoin_force_no_sorted--Results] >> test.py::test[view-view_with_library--ForceBlocks] [GOOD] >> test.py::test[view-view_with_library--Plan] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[join-mapjoin_sharded-default.txt-Debug] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] >> test.py::test[expr-constraints_of--Debug] [GOOD] >> test.py::test[expr-constraints_of--Plan] [GOOD] >> test.py::test[expr-constraints_of--Results] |86.0%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-mergejoin_semi_to_inner--Analyze] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Debug] >> test.py::test[pg-select_win_sum_null-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Results] >> test.py::test[expr-expr_add_literal_nulls-default.txt-Results] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-Analyze] >> test.py::test[schema-select_all-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Debug] >> test.py::test[join-mergejoin_small_primary-off-Debug] [GOOD] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_small_primary-off-Plan] [GOOD] >> test.py::test[limit-dynamic_limit--Analyze] [SKIPPED] >> test.py::test[join-mergejoin_small_primary-off-Results] [GOOD] >> test.py::test[limit-dynamic_limit--Debug] [SKIPPED] >> test.py::test[join-opt_on_opt_side--Analyze] >> test.py::test[limit-dynamic_limit--ForceBlocks] >> test.py::test[pg-select_common_type_union-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_common_type_union-default.txt-Plan] [GOOD] >> test.py::test[pg-select_common_type_union-default.txt-Results] >> test.py::test[limit-dynamic_limit--ForceBlocks] [SKIPPED] >> test.py::test[limit-dynamic_limit--Plan] [SKIPPED] >> test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Results] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-process-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-process-default.txt-ForceBlocks] >> test.py::test[pg-select_substring-default.txt-Debug] [GOOD] >> test.py::test[pg-select_substring-default.txt-Plan] [GOOD] >> test.py::test[pg-select_substring-default.txt-Results] >> test.py::test[sampling-reduce-with_premap-Debug] [GOOD] >> test.py::test[union-union_column_extention-default.txt-ForceBlocks] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Plan] [GOOD] >> test.py::test[sampling-reduce-with_premap-Plan] [GOOD] >> 
test.py::test[union-union_column_extention-default.txt-Results] >> test.py::test[sampling-reduce-with_premap-Results] >> test.py::test[lineage-process-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-process-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-scalar_context--Analyze] [SKIPPED] >> test.py::test[lineage-scalar_context--Debug] [SKIPPED] >> test.py::test[lineage-scalar_context--ForceBlocks] [SKIPPED] >> test.py::test[lineage-scalar_context--Plan] [SKIPPED] >> test.py::test[lineage-scalar_context--Results] >> test.py::test[flatten_by-flatten_dict--Debug] [GOOD] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] >> test.py::test[expr-list_comp-default.txt-Debug] [GOOD] >> test.py::test[expr-list_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-list_comp-default.txt-Results] >> test.py::test[window-presort_window_partition_by_table-default.txt-Debug] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Plan] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] >> test.py::test[json-jsondocument/json_exists-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-Plan] [GOOD] >> test.py::test[json-jsondocument/json_exists-default.txt-Results] >> test.py::test[lineage-scalar_context--Results] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--Analyze] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Analyze] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Debug] >> test.py::test[agg_apply-sum_type-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Results] >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Analyze] >> test.py::test[flatten_by-flatten_expr--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_expr--Plan] [GOOD] >> test.py::test[flatten_by-flatten_expr--Results] >> test.py::test[case-case_val_then_else-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-perusage-Analyze] [SKIPPED] >> test.py::test[column_group-groups-perusage-Debug] [SKIPPED] >> test.py::test[column_group-groups-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-perusage-Plan] [SKIPPED] >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Analyze] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Debug] >> test.py::test[csee-same_l1_expr-default.txt-Analyze] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Debug] >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-non_strict--Debug] >> 
test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[expr-current_tz-default.txt-Debug] >> test.py::test[schema-read_schema_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_other--Plan] >> test.py::test[column_group-hint_diff_grp_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-publish-single-Analyze] [SKIPPED] >> test.py::test[column_group-publish-single-Debug] [SKIPPED] >> test.py::test[column_group-publish-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-single-Plan] [SKIPPED] >> test.py::test[column_group-publish-single-Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--Analyze] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-Analyze] >> test.py::test[schema-read_schema_other--Plan] [GOOD] >> test.py::test[schema-read_schema_other--Results] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Debug] >> test.py::test[join-premap_merge_with_remap-off-Debug] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-off-Plan] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Analyze] >> test.py::test[blocks-bitcast_scalar--Analyze] [GOOD] >> test.py::test[blocks-bitcast_scalar--Debug] >> test.py::test[type_v3-ignore_v3_pragma--Debug] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] >> test.py::test[match_recognize-simple_paritioning-default.txt-Analyze] [GOOD] >> test.py::test[match_recognize-simple_paritioning-default.txt-Debug] >> test.py::test[view-view_with_library--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Analyze] >> test.py::test[pg-select_substring-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Debug] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--Debug] >> test.py::test[flatten_by-flatten_with_join--Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Debug] [GOOD] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] >> test.py::test[union-union_column_extention-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Analyze] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[agg_apply-sum_type-default.txt-Results] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Debug] >> test.py::test[json-jsondocument/json_exists-default.txt-Results] [GOOD] >> test.py::test[json-jsondocument/select--Analyze] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Plan] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Results] >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Debug] [SKIPPED] >> 
test.py::test[join-mergejoin_force_one_sorted-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--Debug] >> test.py::test[schema-select_all-row_spec_hide_sort-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test.py::test[join-opt_on_opt_side--Analyze] [GOOD] >> test.py::test[join-opt_on_opt_side--Debug] >> test.py::test[agg_apply-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-min-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-min-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Plan] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Results] >> test.py::test[pg-select_common_type_union-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Analyze] >> test.py::test[sampling-map-dynamic-Debug] [GOOD] >> test.py::test[sampling-map-dynamic-ForceBlocks] >> test.py::test[expr-expr_op_in_paren-default.txt-Analyze] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-Debug] >> test.py::test[flatten_by-flatten_expr--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_all--Analyze] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all--Debug] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all--Plan] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all--Results] [SKIPPED] >> test.py::test[hor_join-skip_sampling--Analyze] >> test.py::test[join-mergejoin_semi_to_inner--Debug] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Analyze] >> test.py::test[optimizers-unused_columns_window_no_payloads--Analyze] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Debug] >> test.py::test[agg_phases-min_by_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_by_null-default.txt-ForceBlocks] >> test.py::test[schema-read_schema_other--Results] [GOOD] >> test.py::test[schema-user_schema_override--Analyze] >> test.py::test[pg-select_win_sum_null-default.txt-Results] [GOOD] >> test.py::test[pg-single_input_filter_over_join-default.txt-Debug] >> test.py::test[join-lookupjoin_inner_1o--Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Results] >> test.py::test[distinct-distinct_union_all-default.txt-Analyze] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Debug] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_union_all-default.txt-Debug] >> test.py::test[column_order-insert_reorder_without_columnorder--Analyze] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Debug] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Analyze] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Debug] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Debug] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] >> 
test.py::test[expr-current_tz-default.txt-Debug] [GOOD] >> test.py::test[expr-current_tz-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-simple_paritioning-default.txt-Debug] [GOOD] >> test.py::test[sampling-reduce-with_premap-Results] [GOOD] >> test.py::test[match_recognize-simple_paritioning-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Debug] >> test.py::test[expr-current_tz-default.txt-Results] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Plan] >> test.py::test[weak_field-optimize_weak_fields_map--Analyze] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] [GOOD] >> test.py::test[window-udaf_no_merge-default.txt-Debug] >> test.py::test[schema-select_all-yamred_dsv-Debug] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_dict--Plan] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[dq-precompute_tree-default.txt-Analyze] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-Debug] >> test.py::test[blocks-bitcast_scalar--Debug] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Results] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test.py::test[agg_apply-min-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Analyze] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-ForceBlocks] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Plan] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Analyze] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] >> test.py::test[json-jsondocument/select--Analyze] [GOOD] >> test.py::test[json-jsondocument/select--Debug] >> test.py::test[type_v3-non_strict--Debug] [GOOD] >> test.py::test[type_v3-non_strict--Plan] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Analyze] >> test.py::test[pg-select_join_inner_equi-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Debug] >> test.py::test[expr-expr_op_in_paren-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-ForceBlocks] >> test.py::test[hor_join-skip_sampling--Analyze] [GOOD] >> test.py::test[hor_join-skip_sampling--Debug] >> test.py::test[join-opt_on_opt_side--Debug] [GOOD] >> test.py::test[join-opt_on_opt_side--ForceBlocks] >> test.py::test[agg_phases-count_all-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Results] >> test.py::test[pg-single_input_filter_over_join-default.txt-Debug] [GOOD] >> test.py::test[pg-single_input_filter_over_join-default.txt-Plan] [GOOD] >> test.py::test[pg-single_input_filter_over_join-default.txt-Results] >> 
test.py::test[schema-user_schema_override--Analyze] [GOOD] >> test.py::test[schema-user_schema_override--Debug] >> test.py::test[aggregate-list_nullable--Debug] [GOOD] >> test.py::test[aggregate-list_nullable--Plan] [GOOD] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[aggr_factory-bottom-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Debug] >> test.py::test[expr-current_tz-default.txt-Results] [GOOD] >> test.py::test[expr-distinct_from_containers-default.txt-Debug] >> test.py::test[optimizers-unused_columns_window_no_payloads--Debug] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] >> test.py::test[csee-same_l1_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Plan] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Results] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Plan] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test.py::test[sampling-map-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-map-dynamic-Plan] [GOOD] >> test.py::test[sampling-map-dynamic-Results] >> test.py::test[join-mapjoin_sharded-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_unused_keys--Analyze] [SKIPPED] >> test.py::test[join-mapjoin_unused_keys--Debug] [SKIPPED] >> test.py::test[expr-list_comp-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_unused_keys--Plan] >> test.py::test[expr-list_to_from_tuple-default.txt-Debug] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Plan] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Analyze] >> test.py::test[agg_phases-sum-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Debug] >> test.py::test[join-mapjoin_unused_keys--Plan] [SKIPPED] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Analyze] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Debug] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Plan] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--Analyze] >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[view-secure--Analyze] [SKIPPED] >> test.py::test[view-secure--Debug] [SKIPPED] >> test.py::test[view-secure--ForceBlocks] [SKIPPED] >> test.py::test[view-secure--Plan] [SKIPPED] >> test.py::test[view-secure--Results] >> test.py::test[column_order-insert_reorder_without_columnorder--Debug] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Debug] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o--Debug] >> test.py::test[schema-select_all-yamred_dsv-Debug] [GOOD] >> 
test.py::test[schema-select_all-yamred_dsv-Plan] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] [SKIPPED] >> test.py::test[distinct-distinct_union_all-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Plan] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [GOOD] >> test.py::test[join-star_join_semionly--Analyze] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Plan] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Debug] >> test.py::test[match_recognize-simple_paritioning-default.txt-ForceBlocks] [GOOD] >> test.py::test[match_recognize-simple_paritioning-default.txt-Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] >> test.py::test[window-udaf_no_merge-default.txt-Debug] [GOOD] >> test.py::test[window-udaf_no_merge-default.txt-Plan] [GOOD] >> test.py::test[window-udaf_no_merge-default.txt-Results] >> test.py::test[sampling-subquery_default-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Plan] >> test.py::test[union_all-union_all_fields-default.txt-Analyze] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Debug] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Debug] >> test.py::test[match_recognize-simple_paritioning-default.txt-Results] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_scalar--Plan] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test.py::test[pg-single_input_filter_over_join-default.txt-Results] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Debug] >> test.py::test[sampling-subquery_default-default.txt-Plan] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] >> test.py::test[json-jsondocument/select--Debug] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_op_in_paren-default.txt-Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--Debug] >> test.py::test[json-jsondocument/select--ForceBlocks] >> test.py::test[csee-same_l1_expr-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Analyze] >> test.py::test[pg-select_win_count-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Debug] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Results] >> test.py::test[pg-select_join_inner_equi-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-ForceBlocks] >> 
test.py::test[expr-distinct_from_containers-default.txt-Debug] [GOOD] >> test.py::test[expr-distinct_from_containers-default.txt-Plan] [GOOD] >> test.py::test[expr-distinct_from_containers-default.txt-Results] >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Debug] >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side--Plan] [GOOD] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[select-dict_with_few_keys-default.txt-Analyze] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] >> test.py::test[schema-user_schema_override--Debug] [GOOD] >> test.py::test[schema-user_schema_override--ForceBlocks] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-skip_complex_type2--Analyze] >> test.py::test[join-mergejoin_big_primary--Analyze] [GOOD] >> test.py::test[join-mergejoin_big_primary--Debug] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Plan] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[expr-list_to_from_tuple-default.txt-Debug] [GOOD] >> test.py::test[expr-list_to_from_tuple-default.txt-Plan] [GOOD] >> test.py::test[expr-list_to_from_tuple-default.txt-Results] >> test.py::test[hor_join-skip_sampling--Debug] [GOOD] >> test.py::test[hor_join-skip_sampling--ForceBlocks] >> test.py::test[join-mergejoin_narrows_output_sort--Debug] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Debug] >> test.py::test[join-mergejoin_semi_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Analyze] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Analyze] >> test.py::test[join-mergejoin_narrows_output_sort--Plan] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-Analyze] >> test.py::test[dq-precompute_tree-default.txt-Debug] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-precompute_tree-default.txt-Plan] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-Results] [GOOD] >> test.py::test[expr-as_tuple_syntax-default.txt-Analyze] >> test.py::test[blocks-combine_all_avg_filter--Analyze] >> test.py::test[window-udaf_no_merge-default.txt-Results] [GOOD] >> test.py::test[window-udaf_window--Debug] [SKIPPED] >> test.py::test[window-udaf_window--Plan] [SKIPPED] >> test.py::test[window-udaf_window--Results] [SKIPPED] >> test.py::test[window-win_func_aggr_4func_no_part--Debug] >> TPDiskRaces::DecommitWithInflight [GOOD] >> TPDiskRaces::DecommitWithInflightMock >> test.py::test[expr-expr_op_in_paren-default.txt-Results] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Analyze] >> test.py::test[blocks-date_less_scalar--Debug] [GOOD] >> test.py::test[blocks-date_less_scalar--Plan] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Analyze] >> test.py::test[match_recognize-simple_paritioning-default.txt-Results] [GOOD] >> 
test.py::test[optimizers-combinebykey_fields_subset--Analyze] >> test.py::test[sampling-subquery_expr-default.txt-Debug] >> test.py::test[join-star_join_semionly--Analyze] [GOOD] >> test.py::test[join-star_join_semionly--Debug] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Results] >> test.py::test[union_all-union_all_fields-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] >> test.py::test[type_v3-replace_diff_layout--Debug] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Plan] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Plan] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] >> test.py::test[agg_phases_agg_apply-max-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Results] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Plan] >> test.py::test[ansi_idents-string_escaping-default.txt-Debug] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Debug] >> test.py::test[pg-sublink_projection_exists_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Analyze] >> test.py::test[pg-strings_to_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Results] >> test.py::test[pg-select_win_expr_agg-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Results] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-ForceBlocks] >> test.py::test[expr-distinct_from_containers-default.txt-Results] [GOOD] >> test.py::test[expr-ensure_ok-default.txt-Debug] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[json-jsondocument/select--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/select--Plan] [GOOD] >> test.py::test[json-jsondocument/select--Results] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Analyze] >> test.py::test[aggr_factory-bottom-default.txt-Debug] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] >> test.py::test[pg-select_join_inner_equi-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Results] >> test.py::test[flatten_by-flatten_with_resource--Analyze] [SKIPPED] >> test.py::test[flatten_by-flatten_with_resource--Debug] [SKIPPED] 
>> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] [SKIPPED] >> test.py::test[flatten_by-flatten_with_resource--Plan] [SKIPPED] >> test.py::test[flatten_by-flatten_with_resource--Results] [SKIPPED] >> test.py::test[flexible_types-unused_types-default.txt-Analyze] [SKIPPED] >> test.py::test[flexible_types-unused_types-default.txt-Debug] [SKIPPED] >> test.py::test[flexible_types-unused_types-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[flexible_types-unused_types-default.txt-Plan] [SKIPPED] >> test.py::test[agg_phases-min_by_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[flexible_types-unused_types-default.txt-Results] >> test.py::test[agg_phases-min_by_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min_by_null-default.txt-Results] >> test.py::test[schema-skip_complex_type2--Analyze] [GOOD] >> test.py::test[schema-skip_complex_type2--Debug] >> test.py::test[flexible_types-unused_types-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt-Analyze] >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[udf-python_struct--Debug] [SKIPPED] >> test.py::test[udf-python_struct--Plan] [SKIPPED] >> test.py::test[udf-python_struct--Results] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[udf-same_udf_modules--Debug] >> test.py::test[expr-list_to_from_tuple-default.txt-Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Debug] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Plan] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] >> test.py::test[blocks-combine_all_min_filter--Analyze] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Debug] >> test.py::test[schema-user_schema_override--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_2o--Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_2o--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_2o--Results] >> test.py::test[weak_field-weak_field_in_group_by--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Debug] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Analyze] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-Analyze] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Debug] >> test.py::test[schema-user_schema_override--Plan] >> test.py::test[join-premap_common_multiparents--Analyze] |86.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test.py::test[schema-user_schema_override--Plan] [GOOD] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[join-mergejoin_big_primary--Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] >> test.py::test[blocks-combine_all_avg_filter--Analyze] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Debug] >> test.py::test[expr-as_tuple_syntax-default.txt-Analyze] [GOOD] >> test.py::test[expr-as_tuple_syntax-default.txt-Debug] >> test.py::test[optimizers-combinebykey_fields_subset--Analyze] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--Debug] >> 
test.py::test[in-small_in_YQL-19183-ansi-Debug] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Analyze] >> test.py::test[key_filter-mixed_opt_bounds--Analyze] [SKIPPED] >> test.py::test[key_filter-mixed_opt_bounds--Debug] [SKIPPED] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Analyze] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Debug] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--Analyze] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Analyze] |86.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[key_filter-mixed_opt_bounds--ForceBlocks] [SKIPPED] |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> test.py::test[key_filter-mixed_opt_bounds--Plan] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Results] >> test.py::test[distinct-distinct_union_all-default.txt-Results] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_order_in_corr-default.txt-Debug] >> test.py::test[key_filter-mixed_opt_bounds--Plan] [SKIPPED] >> test.py::test[key_filter-mixed_opt_bounds--Results] [SKIPPED] >> test.py::test[join-star_join_semionly--Debug] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Debug] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Plan] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Results] >> test.py::test[hor_join-skip_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-skip_sampling--Plan] [GOOD] >> test.py::test[hor_join-skip_sampling--Results] >> test.py::test[expr-as_variant_enum-default.txt-Analyze] >> test.py::test[ansi_idents-string_escaping-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Analyze] [GOOD] >> test.py::test[join-star_join_semionly--ForceBlocks] >> test.py::test[key_filter-nile_pred--Analyze] >> test.py::test[agg_phases-count_all-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-ForceBlocks] >> test.py::test[expr-ensure_ok-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Debug] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[action-eval_if-default.txt-Analyze] >> test.py::test[agg_phases-count_null-default.txt-Debug] >> test.py::test[pg-select_join_inner_equi-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Analyze] >> test.py::test[expr-ensure_ok-default.txt-Plan] [GOOD] >> test.py::test[expr-ensure_ok-default.txt-Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Debug] >> test.py::test[pg-select_win_expr_agg-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Debug] >> 
test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Results] >> test.py::test[sampling-subquery_expr-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Plan] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] >> test.py::test[distinct-distinct_star1--Debug] [GOOD] >> test.py::test[distinct-distinct_star1--Plan] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[window-win_func_aggr_4func_no_part--Debug] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test.py::test[udf-same_udf_modules--Debug] [GOOD] >> test.py::test[udf-same_udf_modules--Plan] [GOOD] >> test.py::test[udf-same_udf_modules--Results] >> test.py::test[sampling-subquery_expr-default.txt-Plan] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join--Debug] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Analyze] >> test.py::test[hor_join-out_hor_join-default.txt-Analyze] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Debug] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[expr-sets-default.txt-Debug] >> test.py::test[schema-skip_complex_type2--Debug] [GOOD] >> test.py::test[schema-skip_complex_type2--ForceBlocks] >> test.py::test[expr-ensure_ok-default.txt-Results] [GOOD] >> test.py::test[expr-expr_yql_from_string-default.txt-Debug] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Debug] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-ForceBlocks] >> test.py::test[in-small_in_YQL-19183-ansi-Debug] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-ForceBlocks] >> test.py::test[expr-as_tuple_syntax-default.txt-Debug] [GOOD] >> test.py::test[expr-as_tuple_syntax-default.txt-ForceBlocks] >> test.py::test[view-view_with_lambda--Analyze] [GOOD] >> test.py::test[view-view_with_lambda--Debug] >> test.py::test[join-premap_common_multiparents--Analyze] [GOOD] >> test.py::test[join-premap_common_multiparents--Debug] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Results] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Analyze] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Debug] >> test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Plan] >> test.py::test[weak_field-weak_field_in_group_by--Debug] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Analyze] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Debug] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Debug] >> test.py::test[pg_catalog-pg_tables-default.txt-Analyze] >> test.py::test[blocks-combine_all_avg_filter--Debug] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] >> 
test.py::test[select-dict_with_few_keys-default.txt-Plan] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[weak_field-weak_field_to_yson--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Debug] >> test.py::test[join-mergejoin_semi_to_inner-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_semi_to_inner-off-Plan] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] [GOOD] >> test.py::test[join-nested_semi_join--Analyze] >> test.py::test[blocks-combine_all_min_filter--Debug] [GOOD] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] >> test.py::test[ansi_idents-string_escaping-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Results] >> test.py::test[schema-select_all_inferschema-extra_field-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Debug] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary--Plan] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Debug] >> test.py::test[udf-same_udf_modules--Results] [GOOD] >> test.py::test[udf-trivial_udf--Debug] >> test.py::test[key_filter-nile_pred--Analyze] [GOOD] >> test.py::test[key_filter-nile_pred--Debug] >> test.py::test[aggregate-percentile_and_avg_grouped--Debug] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Plan] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] >> test.py::test[pg-select_win_lead_lag-default.txt-Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary--Plan] [GOOD] >> test.py::test[join-mergejoin_big_primary--Results] >> test.py::test[pg-select_win_lead_lag-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Results] >> test.py::test[pg-sublink_where_in-default.txt-Debug] [GOOD] >> test.py::test[action-eval_if-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_if-default.txt-Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Analyze] >> test.py::test[pg-select_join_left_const-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Debug] >> test.py::test[agg_phases-count_null-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_order_in_corr-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_null-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_order_in_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_order_in_corr-default.txt-Results] >> test.py::test[pg-sublink_where_in-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Analyze] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[optimizers-combinebykey_fields_subset--Debug] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] >> test.py::test[aggr_factory-def_value_with_keys-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Analyze] >> test.py::test[agg_phases-count_null-default.txt-Results] |86.1%| [LD] {RESULT} 
$(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_by_key-default.txt-Analyze] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Debug] >> test.py::test[expr-expr_yql_from_string-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_yql_from_string-default.txt-Plan] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-ForceBlocks] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-Plan] [GOOD] >> test.py::test[in-small_in_YQL-19183-ansi-Results] >> test.py::test[expr-expr_yql_from_string-default.txt-Results] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[schema-limit_simple--Debug] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Plan] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Results] >> test.py::test[ansi_idents-string_escaping-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Analyze] >> test.py::test[expr-as_tuple_syntax-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-as_tuple_syntax-default.txt-Plan] [GOOD] >> test.py::test[expr-as_tuple_syntax-default.txt-Results] >> test.py::test[expr-as_variant_enum-default.txt-Debug] [GOOD] >> test.py::test[join-star_join_semionly--ForceBlocks] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[join-star_join_semionly--Plan] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[select-result_size_limit_with_fill--Analyze] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--Debug] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--Plan] >> test.py::test[pg_catalog-pg_tables-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Debug] >> test.py::test[agg_phases_agg_apply-max-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Debug] >> test.py::test[expr-as_variant_enum-default.txt-ForceBlocks] >> test.py::test[select-result_size_limit_with_fill--Plan] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[select-reuse_named_node-default.txt-Analyze] >> test.py::test[agg_phases-min_by_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-Analyze] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> test.py::test[schema-skip_complex_type2--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type2--Plan] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] >> test.py::test[window-win_func_aggr_4func_sort_desc--Debug] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Debug] [SKIPPED] >> test.py::test[dq-dq_replicate_ok-default.txt-Plan] [SKIPPED] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[dq-mem_limit--Debug] [SKIPPED] >> test.py::test[dq-mem_limit--Plan] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Debug] [SKIPPED] >> 
test.py::test[dq-precompute_parallel_mix--Plan] [SKIPPED] >> test.py::test[weak_field-weak_field_to_yson--Debug] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Debug] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] >> test.py::test[udf-trivial_udf--Debug] [GOOD] >> test.py::test[udf-trivial_udf--Plan] [GOOD] >> test.py::test[udf-trivial_udf--Results] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Plan] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[view-view_with_lambda--Debug] [GOOD] >> test.py::test[view-view_with_lambda--ForceBlocks] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Debug] >> test.py::test[expr-sets-default.txt-Debug] [GOOD] >> test.py::test[expr-sets-default.txt-Plan] [GOOD] >> test.py::test[expr-sets-default.txt-Results] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> test.py::test[join-premap_common_multiparents--Debug] [GOOD] >> test.py::test[join-premap_common_multiparents--ForceBlocks] >> test.py::test[in-small_in_YQL-19183-ansi-Results] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Analyze] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if-default.txt-Debug] [GOOD] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] >> test.py::test[action-eval_if-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_min_filter--Plan] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[expr-expr_yql_from_string-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_max_null-default.txt-Debug] >> test.py::test[expr-implicit_bitcast_fail--Debug] [SKIPPED] >> test.py::test[expr-implicit_bitcast_fail--Plan] [SKIPPED] >> test.py::test[expr-implicit_bitcast_fail--Results] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Plan] [GOOD] >> test.py::test[join-nested_semi_join--Analyze] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Debug] |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> test.py::test[expr-as_tuple_syntax-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Debug] >> test.py::test[key_filter-nile_pred--Debug] [GOOD] >> test.py::test[key_filter-nile_pred--ForceBlocks] >> test.py::test[blocks-combine_all_avg_filter--Results] >> test.py::test[join-nested_semi_join--Debug] >> test.py::test[expr-cast_struct-default.txt-Analyze] >> test.py::test[aggregate-aggregate_key_column-default.txt-Analyze] [GOOD] >> 
test.py::test[aggregate-aggregate_key_column-default.txt-Debug] >> test.py::test[insert-anonymous_tables-default.txt-Analyze] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Results] [GOOD] >> test.py::test[expr-list_builtins_opt-default.txt-Analyze] |86.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> test.py::test[join-nested_semi_join--Debug] [GOOD] >> test.py::test[join-nested_semi_join--Plan] [GOOD] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[pg-sublink_order_in_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Debug] >> test.py::test[schema-select_all_inferschema_limit--Debug] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Plan] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Debug] >> test.py::test[join-mergejoin_force_align1-off-Analyze] >> test.py::test[select-dict_lookup_by_key-default.txt-Debug] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] >> test.py::test[pg-sublink_where_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Results] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Results] >> test.py::test[udf-trivial_udf--Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[udf-udf_empty--Debug] >> test.py::test[select-reuse_named_node-default.txt-Analyze] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Debug] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Debug] >> test.py::test[schema-skip_complex_type2--Results] [GOOD] >> test.py::test[select-anon_clash--Analyze] [SKIPPED] >> test.py::test[select-anon_clash--Debug] [SKIPPED] >> test.py::test[select-anon_clash--ForceBlocks] [SKIPPED] >> test.py::test[select-anon_clash--Plan] [SKIPPED] >> test.py::test[select-anon_clash--Results] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Analyze] >> test.py::test[optimizers-combinebykey_fields_subset--Plan] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Debug] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] >> test.py::test[expr-as_variant_enum-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Plan] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Debug] [GOOD] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Plan] [GOOD] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Results] >> test.py::test[pg_catalog-pg_tables-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o--Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Plan] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Plan] [GOOD] >> test.py::test[expr-as_variant_enum-default.txt-Results] >> 
test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[expr-implicit_bitcast_fail--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Debug] >> test.py::test[view-view_with_lambda--ForceBlocks] [GOOD] >> test.py::test[view-view_with_lambda--Plan] >> test.py::test[schema-limit_simple--Debug] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Analyze] [GOOD] >> test.py::test[schema-limit_simple--Plan] [GOOD] >> test.py::test[schema-limit_simple--Results] >> test.py::test[insert-append_missing_null-default.txt-Debug] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Plan] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] >> test.py::test[expr-list_takeskipwhile-default.txt-Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Plan] >> test.py::test[agg_phases-percentile-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-Debug] >> test.py::test[view-view_with_lambda--Plan] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-date_add_interval--Analyze] >> test.py::test[weak_field-weak_field_to_yson--Plan] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[json-json_exists/passing_exception--Analyze] [SKIPPED] >> test.py::test[action-eval_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if-default.txt-Plan] >> test.py::test[json-json_exists/passing_exception--Debug] [SKIPPED] >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Analyze] >> test.py::test[action-eval_if-default.txt-Plan] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] >> test.py::test[expr-list_builtins_opt-default.txt-Analyze] [GOOD] >> test.py::test[expr-list_builtins_opt-default.txt-Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> test.py::test[expr-cast_struct-default.txt-Analyze] [GOOD] >> test.py::test[json-json_exists/passing_exception--ForceBlocks] [SKIPPED] >> test.py::test[expr-cast_struct-default.txt-Debug] >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Debug] >> test.py::test[blocks-combine_all_avg_filter--Results] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Plan] [GOOD] >> 
test.py::test[pg-sublink_projection_array-default.txt-Results] >> test.py::test[json-json_exists/passing_exception--Plan] [SKIPPED] >> test.py::test[json-json_exists/passing_exception--Results] >> test.py::test[blocks-complex_scalars--Analyze] >> test.py::test[join-nested_semi_join--Debug] [GOOD] >> test.py::test[join-nested_semi_join--ForceBlocks] >> test.py::test[expr-as_variant_enum-default.txt-Results] [GOOD] >> test.py::test[expr-empty_list_ops3-default.txt-Analyze] >> test.py::test[weak_field-weak_field_num_access--Debug] [GOOD] >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Analyze] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Plan] >> test.py::test[select-reuse_named_node-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Analyze] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Debug] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Plan] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[order_by-literal_empty_list_sort--Analyze] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Results] [GOOD] >> test.py::test[expr-backtick_escape-default.txt-Debug] >> test.py::test[select-dict_lookup_by_key-default.txt-Plan] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> test.py::test[pg-sublink_where_in-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_in_corr-default.txt-Analyze] >> test.py::test[key_filter-nile_pred--ForceBlocks] [GOOD] >> test.py::test[key_filter-nile_pred--Plan] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> test.py::test[udf-udf_empty--Debug] [GOOD] >> test.py::test[udf-udf_empty--Plan] [GOOD] >> test.py::test[udf-udf_empty--Results] >> test.py::test[join-premap_common_multiparents--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents--Plan] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] >> test.py::test[key_filter-nile_pred--Results] >> test.py::test[pg-select_win_max_null-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_max_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_max_null-default.txt-Results] >> test.py::test[agg_phases-count_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Debug] >> test.py::test[pg_catalog-pg_tables-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Results] >> test.py::test[expr-list_takeskipwhile-default.txt-Debug] 
[GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Plan] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Results] >> test.py::test[blocks-date_add_interval--Analyze] [GOOD] >> test.py::test[blocks-date_add_interval--Debug] >> test.py::test[expr-sets-default.txt-Results] [GOOD] >> test.py::test[expr-struct_literal_members-default.txt-Debug] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Analyze] [GOOD] >> test.py::test[datetime-date_bitcast-default.txt-Analyze] >> test.py::test[weak_field-weak_field_aggregation--Analyze] >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Debug] [SKIPPED] >> test.py::test[join-premap_common_multiparents-off-Plan] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Debug] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Analyze] >> test.py::test[window-win_func_aggr_4func_sort_desc--Debug] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Plan] >> test.py::test[expr-cast_struct-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_struct-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_unused_keys--Debug] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--Plan] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--Results] >> test.py::test[blocks-complex_scalars--Analyze] [GOOD] >> test.py::test[blocks-complex_scalars--Debug] >> test.py::test[window-win_func_aggr_4func_sort_desc--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> test.py::test[join-premap_common_multiparents-off-Plan] [SKIPPED] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[distinct-distinct_star-default.txt-Debug] >> test.py::test[join-premap_context_dep--Debug] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Analyze] >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Plan] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Debug] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Debug] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Plan] [GOOD] >> test.py::test[udf-udf_empty--Results] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Debug] >> test.py::test[join-mapjoin_early_rewrite-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Debug] >> test.py::test[pg-sublink_projection_array-default.txt-Results] [GOOD] >> test.py::test[expr-empty_list_ops3-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Debug] >> test.py::test[expr-empty_list_ops3-default.txt-Debug] >> 
test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Debug] [SKIPPED] >> test.py::test[expr-backtick_escape-default.txt-Debug] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Plan] >> test.py::test[expr-backtick_escape-default.txt-Plan] [GOOD] >> test.py::test[expr-backtick_escape-default.txt-Results] >> test.py::test[json-json_exists/passing_exception--Results] [GOOD] >> test.py::test[lambda-lambda_udf--Analyze] >> test.py::test[pg-select_join_left_const-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] >> test.py::test[expr-list_builtins_opt-default.txt-Debug] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Results] [GOOD] >> test.py::test[expr-literal_struct_member-default.txt-Debug] >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_where_in_corr-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_where_in_corr-default.txt-Debug] >> test.py::test[select-reuse_named_node-default.txt-Plan] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Analyze] >> test.py::test[expr-list_builtins_opt-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub_interval_scalar--Debug] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Analyze] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Plan] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Debug] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type--Debug] >> test.py::test[select-complex_filter_with_order-default.txt-Analyze] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Debug] >> test.py::test[blocks-date_sub_interval_scalar--Results] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[optimizers-combinebykey_fields_subset--Plan] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Plan] >> test.py::test[insert-append_missing_null-default.txt-Debug] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] >> test.py::test[pg_catalog-pg_tables-default.txt-Results] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Analyze] [SKIPPED] >> test.py::test[weak_field-weak_field_num_access--Plan] [GOOD] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Debug] [SKIPPED] >> test.py::test[weak_field-weak_field_num_access--Results] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Plan] [GOOD] >> 
test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Results] >> test.py::test[aggregate-aggregate_key_column-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] >> test.py::test[key_filter-nile_pred--Results] [GOOD] >> test.py::test[produce-process_rows_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Plan] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Analyze] [SKIPPED] >> test.py::test[join-nested_semi_join--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-Debug] [SKIPPED] >> test.py::test[join-nested_semi_join--Plan] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[produce-reduce_all_list-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt-Analyze] >> test.py::test[key_filter-split_input_with_key_filter2--Analyze] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--Debug] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--Plan] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[key_filter-uuid--Analyze] [SKIPPED] >> test.py::test[key_filter-uuid--Debug] [SKIPPED] >> test.py::test[key_filter-uuid--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-uuid--Plan] [SKIPPED] >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Analyze] >> test.py::test[window-empty/aggregations_leadlag--Analyze] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Debug] >> test.py::test[action-eval_percentile-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Debug] >> test.py::test[expr-cast_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-cast_struct-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_struct-default.txt-Results] >> test.py::test[weak_field-weak_field_aggregation--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Debug] >> test.py::test[expr-struct_literal_members-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_literal_members-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_literal_members-default.txt-Results] >> test.py::test[datetime-date_bitcast-default.txt-Analyze] [GOOD] >> test.py::test[datetime-date_bitcast-default.txt-Debug] >> test.py::test[agg_phases-min_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Plan] >> test.py::test[blocks-complex_scalars--Debug] [GOOD] >> test.py::test[blocks-complex_scalars--ForceBlocks] >> test.py::test[expr-backtick_escape-default.txt-Results] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_diff_sort2-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> 
test.py::test[agg_phases-min_null-default.txt-Results] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Plan] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test.py::test[window-full/aggregations_compact--Analyze] [GOOD] >> test.py::test[window-full/aggregations_compact--Debug] >> test.py::test[pg-select_win_max_null-default.txt-Results] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[expr-literal_struct_member-default.txt-Debug] [GOOD] >> test.py::test[expr-literal_struct_member-default.txt-Plan] [GOOD] >> test.py::test[expr-literal_struct_member-default.txt-Results] >> test.py::test[pg-select_yql_type--Debug] >> test.py::test[lambda-lambda_udf--Analyze] [GOOD] >> test.py::test[lambda-lambda_udf--Debug] >> test.py::test[select-sampleselect-1000-Analyze] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Analyze] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Debug] >> test.py::test[expr-empty_list_ops3-default.txt-Debug] [GOOD] >> test.py::test[expr-empty_list_ops3-default.txt-ForceBlocks] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD] >> test.py::test[bigdate-arithmetic-default.txt-Debug] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_no_infer--Debug] >> test.py::test[blocks-date_add_interval--Debug] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_in-default.txt-Results] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k >> test.py::test[blocks-date_add_interval--ForceBlocks] >> test.py::test[select-complex_filter_with_order-default.txt-Debug] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Plan] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Debug] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Debug] >> test.py::test[join-premap_common_multiparents_no_premap-off-Analyze] >> test.py::test[expr-cast_struct-default.txt-Results] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-Analyze] >> test.py::test[expr-list_builtins_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_builtins_opt-default.txt-Plan] [GOOD] >> test.py::test[expr-list_builtins_opt-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test.py::test[expr-struct_literal_members-default.txt-Results] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Debug] >> test.py::test[weak_field-weak_field_num_access--Results] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] >> 
test.py::test[window-win_func_aggr_4func_no_part--Analyze] >> test.py::test[produce-reduce_by_struct-default.txt-Analyze] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Debug] >> test.py::test[expr-literal_struct_member-default.txt-Results] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Analyze] >> test.py::test[expr-cast_reverse_list-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Results] >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Plan] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Debug] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |86.1%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> test.py::test[action-eval_percentile-default.txt-Debug] [GOOD] >> test.py::test[action-eval_percentile-default.txt-ForceBlocks] >> test.py::test[blocks-decimal_comparison--Debug] >> test.py::test[select-sampleselect-1000-Analyze] [GOOD] >> test.py::test[select-sampleselect-1000-Debug] >> test.py::test[union_all-mix_map_and_read-default.txt-Debug] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Plan] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[weak_field-weak_field_aggregation--Debug] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Analyze] >> test.py::test[schema-user_schema_no_infer--Debug] [GOOD] >> test.py::test[schema-user_schema_no_infer--Plan] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] >> test.py::test[lambda-lambda_udf--Debug] [GOOD] >> test.py::test[lambda-lambda_udf--ForceBlocks] >> test.py::test[join-premap_context_dep--Debug] [GOOD] >> test.py::test[join-premap_context_dep--Plan] [GOOD] >> test.py::test[join-premap_context_dep--Results] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> test.py::test[pg-sublink_where_in_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_in_corr-default.txt-ForceBlocks] >> test.py::test[aggr_factory-bitand-default.txt-Results] [GOOD] >> 
test.py::test[aggr_factory-bottom-default.txt-Debug] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Plan] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Results] >> test.py::test[join-premap_common_multiparents_no_premap-off-Analyze] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Debug] >> test.py::test[blocks-complex_scalars--ForceBlocks] [GOOD] >> test.py::test[blocks-complex_scalars--Plan] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[expr-empty_list_ops3-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-empty_list_ops3-default.txt-Plan] >> test.py::test[pg-sublink_where_in-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Debug] |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[key_filter-uuid--Plan] [SKIPPED] |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Debug] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] >> test.py::test[expr-empty_list_ops3-default.txt-Plan] [GOOD] >> test.py::test[expr-empty_list_ops3-default.txt-Results] |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> test.py::test[expr-cast_type_bind-default.txt-Analyze] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-Debug] >> test.py::test[expr-struct_slice-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Plan] >> test.py::test[join-mergejoin_force_align1-off-Debug] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-off-Plan] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Results] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Results] >> test.py::test[bigdate-arithmetic-default.txt-Debug] [GOOD] >> test.py::test[bigdate-arithmetic-default.txt-Plan] [GOOD] >> test.py::test[bigdate-arithmetic-default.txt-Results] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Plan] >> test.py::test[join-pullup_left--Analyze] >> test.py::test[expr-list_builtins_opt-default.txt-Results] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Analyze] >> test.py::test[expr-cast_reverse_list-default.txt-Results] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Debug] >> test.py::test[window-empty/aggregations_leadlag--Debug] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] >> test.py::test[order_by-literal_empty_list_sort--Plan] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Plan] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Results] >> test.py::test[datetime-date_bitcast-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_bitcast-default.txt-ForceBlocks] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Debug] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Plan] >> 
test.py::test[window-win_func_aggr_4func_no_part--Analyze] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Debug] >> test.py::test[pg-select_yql_type--Debug] [GOOD] >> test.py::test[pg-select_yql_type--Plan] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Analyze] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Debug] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Debug] >> test.py::test[schema-select_all-row_spec_extra_sort-Plan] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Plan] [GOOD] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[pg-select_yql_type--Plan] [GOOD] >> test.py::test[pg-select_yql_type--Results] >> test.py::test[blocks-date_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval--Plan] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Debug] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Debug] >> test.py::test[insert-append_missing_null-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Analyze] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-on-Analyze] [SKIPPED] >> test.py::test[bigdate-table_yt_native-on-Debug] [SKIPPED] >> test.py::test[action-eval_percentile-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Plan] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Results] >> test.py::test[expr-empty_list_ops3-default.txt-Results] [GOOD] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Analyze] >> test.py::test[bigdate-table_yt_native-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-on-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] >> test.py::test[binding-anon_table_binding-default.txt-Analyze] >> test.py::test[agg_phases-percentile-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides-off-Analyze] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Debug] >> test.py::test[join-mapjoin_on_very_complex_type--Debug] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type--Plan] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Debug] [GOOD] >> test.py::test[select-sampleselect-1000-Debug] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_very_complex_type--Results] >> test.py::test[select-sampleselect-1000-ForceBlocks] >> test.py::test[expr-struct_slice-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Debug] >> test.py::test[window-full/aggregations_compact--Debug] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Debug] >> test.py::test[window-full/aggregations_compact--ForceBlocks] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Plan] >> test.py::test[blocks-decimal_comparison--Debug] [GOOD] >> 
test.py::test[pg-tpcds-q03-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Plan] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Plan] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[pg-select_join_left_const-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Plan] [GOOD] >> test.py::test[blocks-decimal_comparison--Plan] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Plan] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Analyze] >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Results] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Analyze] >> test.py::test[distinct-distinct_star-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[pg-select_join_left_const-default.txt-Results] >> test.py::test[join-pullup_left--Analyze] [GOOD] >> test.py::test[join-pullup_left--Debug] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-Analyze] >> test.py::test[insert-anonymous_tables-default.txt-Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-Analyze] >> test.py::test[expr-cast_type_bind-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-ForceBlocks] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-native_desc_sort--Analyze] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort--ForceBlocks] >> test.py::test[bigdate-arithmetic-default.txt-Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Debug] >> test.py::test[lambda-lambda_udf--ForceBlocks] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Analyze] [GOOD] >> test.py::test[lambda-lambda_udf--Plan] [GOOD] >> test.py::test[expr-list_takeskipwhile-default.txt-Debug] >> test.py::test[lambda-lambda_udf--Results] >> test.py::test[order_by-native_desc_sort--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Analyze] >> test.py::test[action-eval_percentile-default.txt-Results] [GOOD] >> test.py::test[action-eval_python_signature--Analyze] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Analyze] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_list_of_strings--Analyze] >> test.py::test[action-eval_python_signature--Debug] [SKIPPED] >> 
test.py::test[action-eval_python_signature--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_python_signature--Plan] [SKIPPED] >> test.py::test[agg_phases-min_null-default.txt-Results] [GOOD] >> test.py::test[action-eval_python_signature--Results] [SKIPPED] >> test.py::test[action-runtime_repr_code-default.txt-Analyze] >> test.py::test[agg_phases-min_opt-default.txt-Debug] >> test.py::test[expr-common_type_for_resource_and_data--Debug] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Plan] >> test.py::test[join-premap_common_multiparents_no_premap-off-Debug] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap-off-Plan] >> test.py::test[join-premap_context_dep--Results] [GOOD] >> test.py::test[join-pullup_context_dep-off-Debug] [SKIPPED] >> test.py::test[join-pullup_context_dep-off-Plan] [SKIPPED] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Debug] >> test.py::test[expr-common_type_for_resource_and_data--Plan] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Plan] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [GOOD] >> test.py::test[join-premap_merge_with_remap--Analyze] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--Debug] >> test.py::test[expr-common_type_for_resource_and_data--Results] >> test.py::test[binding-anon_table_binding-default.txt-Analyze] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-Debug] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Debug] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-where_in-default.txt-Analyze] >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Debug] >> test.py::test[window-win_func_aggr_4func_no_part--Debug] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Debug] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] >> test.py::test[schema-user_schema_patch_columns--Plan] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[select-sampleselect-1000-ForceBlocks] [GOOD] >> test.py::test[select-sampleselect-1000-Plan] [GOOD] >> test.py::test[select-sampleselect-1000-Results] >> test.py::test[pg-sublink_where_in_corr-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] [GOOD] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> test.py::test[pg-sublink_where_in_corr-default.txt-Plan] >> test.py::test[join-premap_common_inner_both_sides-off-Debug] [GOOD] >> test.py::test[datetime-date_bitcast-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_common_inner_both_sides-off-Plan] >> test.py::test[datetime-date_bitcast-default.txt-Plan] [GOOD] >> 
test.py::test[blocks-decimal_op_decimal_scalar--Analyze] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Plan] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Debug] >> test.py::test[in-in_ansi_empty-default.txt-Analyze] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Debug] >> test.py::test[expr-list_takeskipwhile-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-Plan] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Plan] [GOOD] >> test.py::test[datetime-date_bitcast-default.txt-Results] >> test.py::test[pg-sublink_where_in_corr-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_type_bind-default.txt-Results] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Analyze] >> test.py::test[window-empty/aggregations_leadlag--Results] >> test.py::test[blocks-distinct_pure_all--Debug] >> test.py::test[pg-sublink_where_in_corr-default.txt-Results] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [GOOD] >> test.py::test[join-premap_map_cross--Analyze] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Debug] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Analyze] [GOOD] >> test.py::test[select-exists_false-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Plan] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Debug] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test.py::test[select-exists_false-default.txt-Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> test.py::test[insert-drop_sortness-desc-Analyze] [GOOD] >> test.py::test[insert-drop_sortness-desc-Debug] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_repr_code-default.txt-Analyze] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_repr_code-default.txt-Debug] >> test.py::test[produce-reduce_by_struct-default.txt-Results] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[join-pullup_left--Debug] [GOOD] >> test.py::test[blocks-date_group_by--Analyze] >> test.py::test[join-pullup_left--ForceBlocks] >> test.py::test[expr-common_type_for_resource_and_data--Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Debug] [GOOD] >> test.py::test[join-premap_merge_with_remap--Analyze] [GOOD] >> test.py::test[join-premap_merge_with_remap--Debug] >> test.py::test[expr-tagged_runtime-default.txt-Plan] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Analyze] >> test.py::test[expr-dict_common_type--Debug] [SKIPPED] >> test.py::test[expr-dict_common_type--Plan] [SKIPPED] >> test.py::test[expr-dict_common_type--Results] >> test.py::test[expr-tagged_runtime-default.txt-Plan] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Results] >> test.py::test[union_all-union_all_fields-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Plan] [GOOD] >> 
test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[expr-dict_common_type--Results] [SKIPPED] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Debug] >> test.py::test[pg-tpcds-q09-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[agg_phases-min_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_opt-default.txt-Plan] >> test.py::test[agg_phases-min_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min_opt-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted_desc-Analyze] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Debug] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Debug] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-result_rows_limit--Debug] [SKIPPED] >> test.py::test[select-result_rows_limit--Plan] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Plan] >> test.py::test[select-where_in-default.txt-Analyze] [GOOD] >> test.py::test[select-where_in-default.txt-Debug] >> test.py::test[expr-cast_type_bind-default.txt-Results] [GOOD] >> test.py::test[expr-decimal_bytes-default.txt-Analyze] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Plan] [GOOD] >> test.py::test[window-full/aggregations_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Plan] [GOOD] >> test.py::test[window-full/aggregations_compact--Plan] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[select-result_rows_limit--Plan] [SKIPPED] >> test.py::test[select-result_rows_limit--Results] [SKIPPED] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[window-full/aggregations_compact--Plan] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] >> test.py::test[pg-sublink_having_exists-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Results] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Analyze] >> test.py::test[blocks-pg--ForceBlocks] >> test.py::test[in-in_ansi_empty-default.txt-Debug] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-ForceBlocks] >> test.py::test[join-premap_map_cross--Analyze] [GOOD] >> test.py::test[select-substring-default.txt-Debug] >> test.py::test[action-runtime_repr_code-default.txt-Debug] [GOOD] >> test.py::test[join-premap_map_cross--Debug] >> test.py::test[action-runtime_repr_code-default.txt-ForceBlocks] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Analyze] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Debug] >> test.py::test[aggr_factory-bottom-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Plan] >> test.py::test[blocks-decimal_op_decimal_scalar--Debug] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Debug] >> 
test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Results] >> test.py::test[window-win_func_aggr_4func_no_part--Plan] >> test.py::test[aggr_factory-bottom-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Debug] >> test.py::test[insert-drop_sortness-desc-Debug] [GOOD] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] >> test.py::test[join-mapjoin_on_very_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Debug] >> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-Debug] [GOOD] >> test.py::test[select-exists_false-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_4func_no_part--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test.py::test[expr-unicode_literals-default.txt-Debug] >> test.py::test[order_by-order_by_list_of_strings--Debug] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Results] [GOOD] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[expr-partial_columns_in_mem_aggr-default.txt-Results] [GOOD] >> test.py::test[blocks-date_group_by--Analyze] [GOOD] >> test.py::test[blocks-date_group_by--Debug] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Analyze] >> test.py::test[join-pullup_cross--Debug] [GOOD] >> test.py::test[join-pullup_cross--Plan] [GOOD] >> test.py::test[join-pullup_cross--Results] >> test.py::test[datetime-date_bitcast-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--Analyze] >> test.py::test[pg-sublink_having_exists-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Debug] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Debug] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Analyze] [GOOD] >> test.py::test[join-pullup_left--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left--Plan] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Analyze] >> test.py::test[expr-decimal_bytes-default.txt-Analyze] [GOOD] >> test.py::test[expr-decimal_bytes-default.txt-Debug] >> test.py::test[join-pullup_left--Results] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Analyze] >> test.py::test[pg-sublink_where_in_corr-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Analyze] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Analyze] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Debug] >> test.py::test[select-where_in-default.txt-Debug] [GOOD] >> test.py::test[select-where_in-default.txt-ForceBlocks] >> test.py::test[in-in_ansi_empty-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Plan] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Results] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Results] [GOOD] >> 
test.py::test[expr-expr_yql_data-default.txt-Debug] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Debug] [GOOD] |86.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Analyze] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Plan] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Debug] >> test.py::test[action-runtime_repr_code-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_repr_code-default.txt-Plan] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[expr-list_takeskipwhile-default.txt-Debug] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_repr_code-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_repr_code-default.txt-Results] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] >> test.py::test[binding-anon_table_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-Results] |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp >> test.py::test[select-substring-default.txt-Debug] [GOOD] >> test.py::test[select-substring-default.txt-Plan] [GOOD] >> test.py::test[select-substring-default.txt-Results] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> test.py::test[blocks-distinct_pure_all--Debug] [GOOD] >> test.py::test[blocks-distinct_pure_all--Plan] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] |86.1%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> test.py::test[join-premap_map_cross--Debug] [GOOD] >> test.py::test[join-premap_map_cross--ForceBlocks] >> test.py::test[union_all-union_all_trivial-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Plan] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Debug] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] [GOOD] >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting >> test.py::test[blocks-decimal_op_decimal_scalar--Plan] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Analyze] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Debug] >> test.py::test[union_all-union_all_trivial-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[weak_field-weak_field_long_name--Debug] [GOOD] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] >> 
test.py::test[limit-empty_sort_desc_after_limit-default.txt-Debug] [GOOD] >> test.py::test[join-premap_merge_with_remap--Debug] [GOOD] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_star1--Analyze] [GOOD] >> test.py::test[distinct-distinct_star1--Debug] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test.py::test[join-premap_map_cross-off-Analyze] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Debug] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Plan] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Results] >> test.py::test[insert-drop_sortness-desc-Plan] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] >> test.py::test[in-in_ansi_empty-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Analyze] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Analyze] >> test.py::test[select-exists_false-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_false-default.txt-Plan] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> test.py::test[expr-expr_yql_data-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Analyze] [GOOD] >> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Debug] >> test.py::test[action-runtime_repr_code-default.txt-Results] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-Analyze] >> test.py::test[expr-decimal_bytes-default.txt-Debug] [GOOD] >> test.py::test[expr-decimal_bytes-default.txt-ForceBlocks] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-percentile-default.txt-Results] >> test.py::test[expr-expr_yql_data-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Results] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Analyze] >> test.py::test[pg-tpcds-q45-default.txt-Debug] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Debug] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Plan] >> test.py::test[join-pullup_exclusion-off-Debug] [SKIPPED] >> test.py::test[binding-anon_table_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-date_group_by--Debug] [GOOD] >> test.py::test[blocks-date_group_by--ForceBlocks] >> test.py::test[dq-precompute_result-default.txt-Analyze] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Debug] >> test.py::test[pg-tpcds-q22-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[schema-yamred_dsv_select_from_dict--Debug] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Plan] >> test.py::test[join-pullup_exclusion-off-Plan] [SKIPPED] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[join-pullup_left--Debug] >> test.py::test[binding-bind_select-default.txt-Analyze] >> test.py::test[select-where_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-where_in-default.txt-Plan] [GOOD] >> 
test.py::test[select-where_in-default.txt-Results] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] >> test.py::test[schema-yamred_dsv_select_from_dict--Plan] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[blocks-pg--ForceBlocks] [GOOD] >> test.py::test[blocks-pg--Plan] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> test.py::test[pg-tpcds-q02-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Analyze] >> test.py::test[aggregate-compare_by_tuple--Analyze] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Debug] >> test.py::test[blocks-pg--Plan] [GOOD] >> test.py::test[blocks-pg--Results] >> test.py::test[expr-unicode_literals-default.txt-Results] [GOOD] >> test.py::test[file-file_constness--Debug] >> test.py::test[agg_phases-min_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Debug] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Plan] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Debug] [GOOD] >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-ForceBlocks] >> test.py::test[expr-expr_yql_data-default.txt-Results] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Debug] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-anon_clash--Debug] [SKIPPED] >> test.py::test[select-anon_clash--Plan] [SKIPPED] >> test.py::test[select-anon_clash--Results] >> test.py::test[join-premap_map_cross-off-Analyze] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Debug] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Plan] [GOOD] >> test.py::test[select-refselect--Analyze] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[join-premap_map_cross-off-Debug] >> test.py::test[agg_phases-max_opt-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-Debug] >> test.py::test[distinct-distinct_star1--Debug] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Analyze] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Debug] >> test.py::test[distinct-distinct_star1--ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q26-default.txt-Debug] >> test.py::test[blocks-interval_add_date_scalar--Analyze] >> test.py::test[in-in_sorted_by_tuple--Analyze] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Debug] >> 
test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[select-where_with_lambda--Analyze] >> test.py::test[join-premap_map_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_cross--Plan] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Plan] >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Plan] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Debug] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Plan] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Debug] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] >> test.py::test[join-premap_map_cross--Results] >> test.py::test[binding-bind_select-default.txt-Analyze] [GOOD] >> test.py::test[binding-bind_select-default.txt-Debug] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/leadlag--Analyze] >> test.py::test[expr-decimal_bytes-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-string_with--Analyze] >> test.py::test[expr-decimal_bytes-default.txt-Plan] [GOOD] >> test.py::test[expr-decimal_bytes-default.txt-Results] >> test.py::test[pg-tpcds-q45-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] >> test.py::test[dq-precompute_result-default.txt-Debug] [GOOD] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-precompute_result-default.txt-Plan] |86.1%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> test.py::test[window-win_func_over_group_by_list_names--Analyze] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Debug] >> test.py::test[window-win_func_order_by_udf_empty_rank--Debug] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Plan] >> test.py::test[join-equi_join_by_expr-off-Analyze] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Debug] >> test.py::test[window-win_func_order_by_udf_empty_rank--Plan] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Debug] >> test.py::test[select-tablepathprefix-default.txt-Debug] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Plan] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Plan] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Results] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Analyze] >> 
test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Analyze] >> test.py::test[insert-append_sorted-to_sorted_desc-Debug] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Debug] >> test.py::test[pg-tpcds-q19-default.txt-Debug] >> test.py::test[blocks-date_group_by--ForceBlocks] [GOOD] >> test.py::test[blocks-date_group_by--Plan] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Debug] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Plan] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Results] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_with_remap--Plan] [GOOD] >> test.py::test[join-premap_merge_with_remap--Results] >> test.py::test[view-all_from_view--Debug] [GOOD] >> test.py::test[view-all_from_view--Plan] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Results] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--Analyze] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base_fail--Debug] >> test.py::test[select-refselect--Analyze] [GOOD] >> test.py::test[select-refselect--Debug] >> test.py::test[simple_columns-simple_columns_base_fail--Debug] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base_fail--ForceBlocks] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base_fail--Plan] >> test.py::test[select-where_with_lambda--Analyze] [GOOD] >> test.py::test[select-where_with_lambda--Debug] >> test.py::test[aggregate-compare_by_tuple--Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--Plan] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base_fail--Results] >> test.py::test[expr-decimal_bytes-default.txt-Results] [GOOD] >> test.py::test[expr-empty_list_ops2-default.txt-Analyze] >> test.py::test[agg_phases-sum_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Plan] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[order_by-order_by_list_of_strings--Debug] [GOOD] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] >> test.py::test[binding-bind_select-default.txt-Debug] [GOOD] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q26-default.txt-Debug] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q26-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q26-default.txt-Results] >> test.py::test[agg_phases-sum_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Results] >> test.py::test[blocks-string_with--Analyze] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Analyze] [GOOD] >> test.py::test[join-premap_map_cross-off-Debug] [GOOD] >> 
test.py::test[blocks-interval_add_date_scalar--Debug] >> test.py::test[join-premap_map_cross-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_map_cross-off-Plan] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] >> test.py::test[distinct-distinct_star1--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star1--Plan] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[blocks-string_with--Debug] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] [GOOD] >> test.py::test[join-mergejoin_force_per_link--Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link--Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link--Results] >> test.py::test[join-premap_map_cross-off-Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Analyze] >> test.py::test[join-mergejoin_force_per_link--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names--Debug] >> test.py::test[expr-int_literals_negative-default.txt-Results] [GOOD] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_merge_inner--Analyze] >> test.py::test[window-row_number_to_map_multiple-default.txt-Debug] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[join-pullup_left--Debug] [GOOD] >> test.py::test[join-pullup_left--Plan] [GOOD] >> test.py::test[join-pullup_left--Results] >> test.py::test[select-tablepathprefix-default.txt-Results] [GOOD] >> test.py::test[expr-len--Debug] >> test.py::test[seq_mode-shared_named_expr-default.txt-Debug] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-limit-dynamic-Analyze] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] >> test.py::test[expr-as_dict_tuple_key-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] >> test.py::test[expr-as_dict_tuple_key-default.txt-Debug] >> test.py::test[bigdate-int_cast-default.txt-Debug] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Plan] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Results] >> test.py::test[window-full/leadlag--Analyze] [GOOD] >> test.py::test[window-full/leadlag--Debug] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Plan] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_col-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Analyze] >> test.py::test[pg-tpcds-q19-default.txt-Results] >> test.py::test[window-leading/aggregations_leadlag--Analyze] [GOOD] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> test.py::test[join-equi_join_by_expr-off-ForceBlocks] [SKIPPED] >> test.py::test[join-equi_join_by_expr-off-Plan] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Results] 
>> test.py::test[window-leading/aggregations_leadlag--Debug] >> test.py::test[join-equi_join_by_expr-off-Results] [GOOD] >> test.py::test[join-flatten_columns2-off-Analyze] >> test.py::test[in-in_sorted_by_tuple--Debug] [GOOD] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] >> test.py::test[simple_columns-simple_columns_base_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Analyze] >> test.py::test[pg-tpcds-q26-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Debug] >> test.py::test[expr-empty_list_ops2-default.txt-Analyze] [GOOD] >> test.py::test[expr-empty_list_ops2-default.txt-Debug] >> test.py::test[file-file_constness--Debug] [GOOD] >> test.py::test[file-file_constness--Plan] [GOOD] >> test.py::test[file-file_constness--Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--Debug] >> test.py::test[select-refselect--Debug] [GOOD] >> test.py::test[select-refselect--ForceBlocks] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-bind_select-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] [GOOD] >> test.py::test[select-where_with_lambda--Debug] [GOOD] >> test.py::test[select-where_with_lambda--ForceBlocks] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[pg-tpcds-q81-default.txt-Analyze] >> test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--Analyze] >> test.py::test[select-corr_name_in_select-default.txt-Debug] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[select-corr_name_in_select-default.txt-Plan] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-read_cost-default.txt-Analyze] >> test.py::test[blocks-string_with--Debug] [GOOD] >> test.py::test[blocks-string_with--ForceBlocks] >> test.py::test[join-premap_merge_extrasort1--Analyze] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Debug] >> test.py::test[limit-limit-dynamic-Analyze] [GOOD] >> test.py::test[limit-limit-dynamic-Debug] >> test.py::test[seq_mode-shared_named_expr-default.txt-Debug] [GOOD] >> test.py::test[seq_mode-shared_named_expr-default.txt-Plan] [GOOD] >> test.py::test[seq_mode-shared_named_expr-default.txt-Results] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Debug] >> test.py::test[sampling-topsort-default.txt-Analyze] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[join-premap_merge_with_remap--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Analyze] >> test.py::test[join-premap_merge_inner--Analyze] [GOOD] >> test.py::test[join-premap_merge_inner--Debug] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Debug] >> test.py::test[join-flatten_columns2-off-Analyze] [GOOD] >> test.py::test[join-flatten_columns2-off-Debug] >> test.py::test[pg-tpcds-q25-default.txt-Debug] >> test.py::test[expr-as_dict_tuple_key-default.txt-Debug] [GOOD] >> 
test.py::test[weak_field-optimize_weak_fields_map--Debug] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[expr-as_dict_tuple_key-default.txt-ForceBlocks] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] >> TFileStoreWithReboots::CreateDrop >> test.py::test[expr-len--Debug] [GOOD] >> test.py::test[expr-len--Plan] >> test.py::test[agg_phases-percentile-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Debug] >> test.py::test[expr-len--Plan] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Debug] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] >> test.py::test[expr-len--Results] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Plan] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Results] >> test.py::test[expr-empty_list_ops2-default.txt-Debug] [GOOD] >> test.py::test[expr-empty_list_ops2-default.txt-ForceBlocks] >> test.py::test[window-win_func_over_group_by_list_names--Debug] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] >> test.py::test[expr-longint_builtins-default.txt-Plan] >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Plan] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-compact_named_subq_actions--Analyze] [SKIPPED] >> test.py::test[binding-compact_named_subq_actions--Debug] >> test.py::test[window-row_number_to_map_multiple-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Debug] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> test.py::test[pg-tpcds-q30-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> test.py::test[seq_mode-shared_named_expr-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Debug] >> test.py::test[binding-compact_named_subq_actions--Debug] [SKIPPED] >> test.py::test[binding-compact_named_subq_actions--ForceBlocks] [SKIPPED] >> test.py::test[binding-compact_named_subq_actions--Plan] [SKIPPED] >> test.py::test[binding-compact_named_subq_actions--Results] [SKIPPED] >> test.py::test[binding-drop_binding--Analyze] >> test.py::test[file-file_constness--Results] [GOOD] >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[join-pullup_left-off-Debug] [SKIPPED] >> test.py::test[join-pullup_left-off-Plan] [SKIPPED] >> test.py::test[join-pullup_left-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming--Debug] >> test.py::test[file-file_skip_take--Debug] |86.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_ut.cpp >> test.py::test[limit-empty_read_after_limit-default.txt-Debug] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Plan] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] >> test.py::test[select-refselect--ForceBlocks] [GOOD] >> 
test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] >> test.py::test[agg_phases-max_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-ForceBlocks] >> test.py::test[window-full/leadlag--Debug] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] >> test.py::test[select-refselect--Plan] [GOOD] >> test.py::test[select-refselect--Results] >> test.py::test[select-where_with_lambda--ForceBlocks] [GOOD] >> test.py::test[select-where_with_lambda--Plan] [GOOD] >> test.py::test[select-where_with_lambda--Results] |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp >> test.py::test[dq-read_cost-default.txt-Analyze] [GOOD] >> test.py::test[dq-read_cost-default.txt-Debug] >> test.py::test[sampling-topsort-default.txt-Analyze] [GOOD] >> test.py::test[sampling-topsort-default.txt-Debug] >> test.py::test[pg-tpcds-q81-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Debug] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Analyze] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Debug] >> test.py::test[weak_field-weak_field_join_condition--Debug] >> test.py::test[limit-limit-dynamic-Debug] [GOOD] >> test.py::test[limit-limit-dynamic-ForceBlocks] >> test.py::test[blocks-string_with--ForceBlocks] [GOOD] >> test.py::test[blocks-string_with--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-ForceBlocks] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[expr-len--Results] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Debug] >> test.py::test[blocks-string_with--Results] >> test.py::test[window-win_func_rank_by_all--Debug] [GOOD] >> test.py::test[window-win_func_rank_by_all--Plan] [GOOD] >> test.py::test[join-premap_merge_inner--Debug] [GOOD] >> test.py::test[join-premap_merge_inner--ForceBlocks] >> test.py::test[window-win_func_rank_by_all--Results] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Debug] >> test.py::test[blocks-date_sub_scalar--Analyze] [GOOD] >> test.py::test[blocks-date_sub_scalar--Debug] >> test.py::test[pg-tpcds-q25-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test.py::test[pg-tpcds-q31-default.txt-Debug] >> test.py::test[expr-empty_list_ops2-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-empty_list_ops2-default.txt-Plan] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Debug] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] >> test.py::test[select-refselect--Results] [GOOD] >> test.py::test[binding-drop_binding--Analyze] [GOOD] >> test.py::test[binding-drop_binding--Debug] >> test.py::test[expr-empty_list_ops2-default.txt-Results] >> test.py::test[join-flatten_columns2-off-Debug] [GOOD] >> test.py::test[join-flatten_columns2-off-ForceBlocks] [SKIPPED] >> test.py::test[join-flatten_columns2-off-Plan] [GOOD] >> test.py::test[join-flatten_columns2-off-Results] >> 
test.py::test[select-select_all_from_concat_anon-default.txt-Analyze] >> test.py::test[expr-as_dict_tuple_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Plan] >> test.py::test[expr-longint_builtins-default.txt-Plan] [GOOD] >> test.py::test[expr-longint_builtins-default.txt-Results] >> test.py::test[blocks-distinct_pure_keys--Debug] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Plan] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[select-where_with_lambda--Results] [GOOD] >> test.py::test[seq_mode-simple1-default.txt-Analyze] >> test.py::test[expr-as_dict_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-as_dict_tuple_key-default.txt-Results] >> test.py::test[join-flatten_columns2-off-Results] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Analyze] >> test.py::test[agg_phases-sum_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Debug] >> test.py::test[file-file_skip_take--Debug] [GOOD] >> test.py::test[file-file_skip_take--Plan] [GOOD] >> test.py::test[file-file_skip_take--Results] >> test.py::test[dq-read_cost-default.txt-Debug] [GOOD] >> test.py::test[dq-read_cost-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-read_cost-default.txt-Plan] [GOOD] >> test.py::test[dq-read_cost-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Plan] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] [GOOD] >> test.py::test[dq-read_cost-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Debug] >> test.py::test[epochs-reset_sortness_on_append--Analyze] >> test.py::test[limit-empty_read_after_limit-default.txt-Debug] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] >> test.py::test[expr-list_replicate-default.txt-Debug] [GOOD] >> test.py::test[sampling-topsort-default.txt-Debug] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Plan] [GOOD] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-udaf_window--Analyze] [SKIPPED] >> test.py::test[window-udaf_window--Debug] >> test.py::test[expr-list_replicate-default.txt-Results] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-Analyze] >> test.py::test[join-premap_merge_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner--Plan] [GOOD] >> test.py::test[limit-limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[window-udaf_window--Debug] [SKIPPED] >> test.py::test[window-udaf_window--ForceBlocks] [SKIPPED] >> test.py::test[window-udaf_window--Plan] [SKIPPED] >> test.py::test[window-udaf_window--Results] [SKIPPED] >> 
test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Analyze] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] >> TFileStoreWithReboots::Create >> test.py::test[pg-tpcds-q81-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Debug] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Debug] [GOOD] >> test.py::test[expr-longint_builtins-default.txt-Results] [GOOD] >> test.py::test[expr-opt_try_member-default.txt-Analyze] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Plan] >> test.py::test[join-premap_merge_inner--Results] >> test.py::test[expr-as_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Analyze] >> test.py::test[limit-limit-dynamic-Plan] [GOOD] >> test.py::test[limit-limit-dynamic-Results] >> test.py::test[join-mergejoin_with_different_key_names--Debug] [GOOD] >> test.py::test[expr-empty_list_ops2-default.txt-Results] [GOOD] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Analyze] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Plan] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] >> test.py::test[file-file_skip_take--Results] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Debug] >> test.py::test[join-premap_merge_extrasort1--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names--Plan] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names--Results] >> test.py::test[select-select_all_from_concat_anon-default.txt-Analyze] [GOOD] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[blocks-struct_type--Analyze] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Plan] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Debug] >> test.py::test[binding-drop_binding--Debug] [GOOD] >> test.py::test[binding-drop_binding--ForceBlocks] >> test.py::test[aggregate-compare_by_tuple--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Analyze] >> test.py::test[epochs-reset_sortness_on_append--Analyze] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Debug] >> test.py::test[seq_mode-simple1-default.txt-Analyze] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag--Plan] >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Plan] [GOOD] >> test.py::test[window-full/leadlag--Plan] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Plan] >> 
test.py::test[expr-list_replicate-default.txt-Results] [GOOD] >> test.py::test[expr-pg_try_member--Debug] [SKIPPED] >> test.py::test[expr-pg_try_member--Plan] [SKIPPED] >> test.py::test[join-pullup_renaming--Debug] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Results] >> test.py::test[expr-pg_try_member--Results] >> test.py::test[join-pullup_renaming--Plan] >> test.py::test[join-join_and_distinct_key-off-Analyze] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[pg-tpcds-q31-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Results] >> test.py::test[expr-pg_try_member--Results] [SKIPPED] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] >> test.py::test[join-pullup_renaming--Plan] [GOOD] >> test.py::test[join-pullup_renaming--Results] >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_with_cast-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Debug] [GOOD] >> test.py::test[select-use_cluster-default.txt-Analyze] [GOOD] >> test.py::test[select-use_cluster-default.txt-Debug] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] |86.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Plan] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Analyze] >> test.py::test[expr-opt_try_member-default.txt-Analyze] [GOOD] >> test.py::test[expr-opt_try_member-default.txt-Debug] >> test.py::test[expr-common_type_for_resource_and_data--Analyze] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Debug] >> test.py::test[weak_field-weak_field_join_condition--Debug] [GOOD] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Debug] >> test.py::test[weak_field-weak_field_join_condition--Plan] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test.py::test[blocks-struct_type--Analyze] [GOOD] >> test.py::test[blocks-struct_type--Debug] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Debug] >> test.py::test[pg-tpcds-q43-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-digits--Debug] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-topsort-default.txt-Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Results] 
[GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Analyze] >> test.py::test[insert-drop_sortness--Analyze] >> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-premap_no_premap--Analyze] >> test.py::test[binding-drop_binding--ForceBlocks] [GOOD] >> test.py::test[binding-drop_binding--Plan] >> test.py::test[agg_phases-max_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-Plan] >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Plan] >> test.py::test[sampling-topsort-default.txt-Results] >> test.py::test[binding-drop_binding--Plan] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Plan] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--Debug] >> test.py::test[agg_phases-max_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-Results] >> test.py::test[pg-tpcds-q81-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Plan] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Plan] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Plan] >> test.py::test[select-use_cluster-default.txt-Debug] [GOOD] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] >> test.py::test[window-leading/aggregations_leadlag--Plan] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> test.py::test[aggregate-group_by_gs_grouping--Analyze] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Debug] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Analyze] >> test.py::test[blocks-date_sub_scalar--Debug] [GOOD] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Debug] >> test.py::test[pg-tpcds-q31-default.txt-Results] [GOOD] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[select-select_all_from_concat_anon-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Debug] >> test.py::test[in-in_with_cast-default.txt-Analyze] [GOOD] >> test.py::test[in-in_with_cast-default.txt-Debug] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--Analyze] >> test.py::test[expr-opt_try_member-default.txt-Debug] [GOOD] >> 
test.py::test[expr-opt_try_member-default.txt-ForceBlocks] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Debug] [GOOD] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-ForceBlocks] >> test.py::test[expr-common_type_for_resource_and_data--Debug] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--ForceBlocks] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Analyze] >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[seq_mode-simple1-default.txt-Analyze] [GOOD] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Analyze] >> test.py::test[select-digits--Debug] [GOOD] >> test.py::test[select-digits--Plan] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Analyze] >> test.py::test[blocks-struct_type--Debug] [GOOD] >> test.py::test[insert-drop_sortness--Analyze] [GOOD] >> test.py::test[blocks-struct_type--ForceBlocks] >> test.py::test[insert-drop_sortness--Debug] >> test.py::test[join-premap_no_premap--Analyze] [GOOD] >> test.py::test[join-premap_no_premap--Debug] >> test.py::test[flatten_by-flatten_and_where--Debug] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Plan] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] >> test.py::test[epochs-reset_sortness_on_append--Debug] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[select-digits--Plan] [GOOD] >> test.py::test[select-digits--Results] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort1-off-Analyze] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-use_cluster-default.txt-Plan] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-join_and_distinct_key-off-Analyze] [GOOD] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-ForceBlocks] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--Analyze] >> test.py::test[aggr_factory-max-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Debug] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Debug] [GOOD] 
>> test.py::test[pg-tpcds-q34-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[join-opt_on_opt_side--Debug] >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-ForceBlocks] >> test.py::test[expr-opt_try_member-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-opt_try_member-default.txt-Plan] [GOOD] >> test.py::test[expr-opt_try_member-default.txt-Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Analyze] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Debug] >> test.py::test[select-digits--Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Debug] >> test.py::test[blocks-interval_sub_interval--Analyze] [GOOD] >> test.py::test[blocks-interval_sub_interval--Debug] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Analyze] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Debug] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Debug] >> test.py::test[in-in_with_cast-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_cast-default.txt-ForceBlocks] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Plan] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--ForceBlocks] [GOOD] >> test.py::test[blocks-group_by_complex_key--Debug] [GOOD] >> test.py::test[blocks-group_by_complex_key--Plan] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Plan] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Results] >> test.py::test[schema-select_all_inferschema_limit--Analyze] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Debug] >> test.py::test[pg-tpcds-q62-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[aggregate-group_by_gs_grouping--Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] >> test.py::test[blocks-group_by_complex_key--Results] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Analyze] >> test.py::test[insert-drop_sortness--Debug] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> TImportTests::ShouldCheckQuotas >> test.py::test[window-win_func_part_by_expr_new-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Debug] >> test.py::test[table_range-range_slash--Analyze] >> test.py::test[insert-drop_sortness--ForceBlocks] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Results] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Debug] >> test.py::test[blocks-struct_type--ForceBlocks] [GOOD] >> test.py::test[blocks-struct_type--Plan] 
[GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[expr-opt_try_member-default.txt-Results] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Analyze] >> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Analyze] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort1-off-Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] >> test.py::test[window-full/session_compact--Analyze] [GOOD] >> test.py::test[window-full/session_compact--Debug] >> test.py::test[binding-named_node_corr_names-default.txt-Debug] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Results] [GOOD] >> test.py::test[join-premap_merge_inner-off-Analyze] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Plan] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] >> test.py::test[expr-common_type_for_resource_and_data--Results] [GOOD] >> test.py::test[expr-dict_common_type--Analyze] [SKIPPED] >> test.py::test[expr-dict_common_type--Debug] [SKIPPED] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Debug] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Plan] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] >> TImportTests::ShouldCheckQuotas [GOOD] >> TImportTests::NoACLOption >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Results] >> test.py::test[window-win_fuse_window-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-max_opt-default.txt-Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Plan] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test.py::test[agg_phases-sum_null-default.txt-Analyze] >> test.py::test[expr-dict_common_type--ForceBlocks] [SKIPPED] >> test.py::test[expr-dict_common_type--Plan] [SKIPPED] >> test.py::test[expr-dict_common_type--Results] [SKIPPED] >> test.py::test[expr-literal_list_element-default.txt-Analyze] >> test.py::test[join-premap_no_premap--Debug] [GOOD] >> test.py::test[join-premap_no_premap--ForceBlocks] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--Debug] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Debug] >> test.py::test[blocks-interval_sub_interval--ForceBlocks] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Debug] >> test.py::test[select-multi_source_issue-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_cast-default.txt-Plan] [GOOD] >> test.py::test[in-in_with_cast-default.txt-Results] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[window-win_func_cume_dist_ansi-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Results] >> test.py::test[table_range-range_slash--Analyze] [GOOD] >> test.py::test[table_range-range_slash--Debug] >> test.py::test[select-multi_source_issue-default.txt-Plan] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> TImportTests::NoACLOption [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Results] >> test.py::test[schema-select_all_inferschema_limit--Debug] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q1-default.txt-Debug] >> test.py::test[expr-struct_slice-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Debug] >> test.py::test[expr-struct_slice-default.txt-Debug] >> test.py::test[aggr_factory-max-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi_list-default.txt-Debug] >> test.py::test[pg-select_proj_ref_group_by_qcol-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Analyze] >> test.py::test[pg-tpcds-q89-default.txt-Debug] [GOOD] >> test.py::test[join-opt_on_opt_side--Debug] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Debug] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Debug] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Plan] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] >> test.py::test[aggregate-group_by_gs_grouping--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::NoACLOption [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:49:00.529823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:49:00.529843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:00.529847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:49:00.529850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:49:00.529854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T08:49:00.529856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:49:00.529862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:00.529995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:00.544004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:00.544026Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:00.547673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:00.548539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:49:00.548581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:49:00.550086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:49:00.550253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:49:00.550350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:00.550421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:49:00.551322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:00.551617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:00.551628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:00.551673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:49:00.551680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:00.551686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:49:00.551701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.553001Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:49:00.574106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:49:00.574208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.574281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:49:00.574329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:49:00.574337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.575228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:00.575255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:49:00.575319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.575330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:49:00.575334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:49:00.575340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:49:00.575712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.575722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:00.575728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:49:00.576013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.576021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.576026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:00.576033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:49:00.576583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:49:00.576902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:49:00.576964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:49:00.577154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:00.577178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:00.577185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:00.577238Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:49:00.577245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:00.577271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:00.577282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:49:00.577658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:00.577667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:00.577708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:00.577713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:49:00.577795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:00.577802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:49:00.577814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:49:00.577818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:49:00.577827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:49:00.577832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:49:00.577837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:49:00.577841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:49:00.577851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:49:00.577856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:49:00.577861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:49:00.578140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:49:00.578153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:49:00.578158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:49:00.578164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:49:00.578168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:00.578181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... om tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2024-11-21T08:49:01.307171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 281474976710758 at step: 5000003 2024-11-21T08:49:01.307256Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:01.307274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:01.307282Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TPropose, opId: 281474976710758:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:49:01.307300Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 128 -> 129 2024-11-21T08:49:01.307330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:23493 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C20125B2-E39B-4CCD-BA65-3FCE0663A3BE amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:49:01.320759Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:01.320782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:49:01.320867Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:01.320874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 2 2024-11-21T08:49:01.320988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:49:01.321000Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710758:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:01.321162Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:49:01.321173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:49:01.321178Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T08:49:01.321183Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:49:01.321190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:49:01.321207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T08:49:01.328645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:23493 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 592F98BB-9499-4F2B-A208-3BE864411D66 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2024-11-21T08:49:01.383733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T08:49:01.383767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710758, tablet: 72075186233409546, partId: 0 2024-11-21T08:49:01.383800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T08:49:01.383816Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T08:49:01.383832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710758:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:01.383837Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:49:01.383842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710758:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:49:01.383851Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 129 -> 240 
2024-11-21T08:49:01.383911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 281474976710758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:01.384458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:49:01.384551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:49:01.384562Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2024-11-21T08:49:01.384576Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2024-11-21T08:49:01.384580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T08:49:01.384586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2024-11-21T08:49:01.384600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710758 2024-11-21T08:49:01.384610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T08:49:01.384616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2024-11-21T08:49:01.384620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2024-11-21T08:49:01.384644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:49:01.385039Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2024-11-21T08:49:01.385057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2024-11-21T08:49:01.385456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:49:01.385468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:284:2274] TestWaitNotification: OK eventTxId 101 2024-11-21T08:49:01.385575Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:49:01.385629Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 59us result status StatusSuccess 2024-11-21T08:49:01.385756Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } 
Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[window-presort_window_order_by_table-default.txt-Analyze] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[blocks-if--Debug] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] >> test.py::test[join-opt_on_opt_side--Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Plan] [GOOD] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[blocks-struct_type--Results] [GOOD] >> test.py::test[column_group-insert_diff_groups2_fail--Analyze] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Debug] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Plan] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test.py::test[column_group-insert_diff_groups2_fail--Plan] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Results] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Analyze] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Debug] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Plan] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--Analyze] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[join-premap_merge_inner-off-Analyze] [GOOD] >> test.py::test[insert-drop_sortness--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner-off-Debug] >> test.py::test[insert-drop_sortness--Plan] [GOOD] >> test.py::test[insert-drop_sortness--Results] >> test.py::test[in-in_with_cast-default.txt-Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Analyze] >> 
test.py::test[expr-literal_list_element-default.txt-Analyze] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Plan] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> test.py::test[expr-literal_list_element-default.txt-Debug] >> test.py::test[agg_phases-sum_null-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Debug] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 [GOOD] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[expr-flatmap_by_map_lazy_list-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Debug] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Analyze] >> test.py::test[weak_field-weak_field_join_where--Debug] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[select-optional_as_warn-default.txt-Debug] >> test.py::test[pg-tpcds-q35-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Plan] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[window-win_func_cume_dist_ansi-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Analyze] >> test.py::test[blocks-interval_sub_interval--ForceBlocks] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] >> test.py::test[blocks-interval_sub_interval--Plan] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[table_range-range_slash--Debug] [GOOD] >> test.py::test[table_range-range_slash--ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Make20MergeOperationsWithInflyLimit5 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:48:59.795983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:48:59.796012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:59.796017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:48:59.796023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:48:59.796037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:48:59.796041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:48:59.796051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:59.796142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:48:59.807678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:48:59.807705Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:48:59.812004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:48:59.812824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:48:59.812855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:48:59.814400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:48:59.814608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:48:59.814722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:59.814823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:48:59.815779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:59.816090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:59.816104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:59.816152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:48:59.816161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:59.816168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:48:59.816184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.817943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:48:59.836167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:48:59.836303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.836385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2024-11-21T08:48:59.836457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:48:59.836466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.840705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:59.840773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:48:59.840858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.840873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:48:59.840879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:48:59.840885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:48:59.848761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.848794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:48:59.848805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:48:59.849567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.849582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.849602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:59.849610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:48:59.850341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:48:59.850838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:48:59.850901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:48:59.851134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:59.851171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } 
} Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:48:59.851183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:59.851241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:48:59.851249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:59.851291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:48:59.851305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:48:59.851787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:59.851795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:59.851846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:59.851853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:48:59.851955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:59.851962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:48:59.851976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:48:59.851981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:59.851988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:48:59.851994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:59.852000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:48:59.852005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:48:59.852016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:48:59.852022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:48:59.852027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:48:59.852412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:48:59.852429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:48:59.852434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, 
txId: 1 2024-11-21T08:48:59.852440Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:48:59.852445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:48:59.852459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 075186233409583 2024-11-21T08:49:02.407430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#172:0 HandleReply TEvSplitAck, at schemeshard: 72057594046678944, message: OperationCookie: 172 TabletId: 72075186233409583 2024-11-21T08:49:02.407572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 172:0 131 -> 132 2024-11-21T08:49:02.407601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 24 2024-11-21T08:49:02.408249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 172:0, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 172:0, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 172, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:49:02.408417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2312], at schemeshard: 72057594046678944, txId: 172, path id: 2 2024-11-21T08:49:02.408440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 172:0, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 172:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:02.408455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409582 on partitioning changed splitOp# 172 at tablet 72057594046678944 2024-11-21T08:49:02.408464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409583 on partitioning changed splitOp# 172 at tablet 72057594046678944 2024-11-21T08:49:02.408671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 172 2024-11-21T08:49:02.408685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 23 PathOwnerId: 72057594046678944, cookie: 172 2024-11-21T08:49:02.408690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 172 2024-11-21T08:49:02.408696Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 172, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 23 2024-11-21T08:49:02.408702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 25 2024-11-21T08:49:02.408720Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 172, ready parts: 0/1, is published: true 2024-11-21T08:49:02.409471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 172:0 from tablet: 72057594046678944 to tablet: 72075186233409582 cookie: 72057594046678944:37 msg type: 269553158 2024-11-21T08:49:02.409499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 172:0 from tablet: 72057594046678944 to tablet: 72075186233409583 cookie: 72057594046678944:38 msg type: 269553158 2024-11-21T08:49:02.409909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 172 2024-11-21T08:49:02.410345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 172:0, at schemeshard: 72057594046678944, message: OperationCookie: 172 TabletId: 72075186233409582 2024-11-21T08:49:02.410356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 172:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409582, at schemeshard: 72057594046678944 2024-11-21T08:49:02.410412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 172:0, at schemeshard: 72057594046678944, message: OperationCookie: 172 TabletId: 72075186233409583 2024-11-21T08:49:02.410416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 172:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409583, at schemeshard: 72057594046678944 2024-11-21T08:49:02.410428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#172:0 progress is 1/1 2024-11-21T08:49:02.410431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 172 ready parts: 1/1 2024-11-21T08:49:02.410436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 172, ready parts: 1/1, is published: true 2024-11-21T08:49:02.410444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1689:3242] message: TxId: 172 2024-11-21T08:49:02.410448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 172 ready parts: 1/1 2024-11-21T08:49:02.410467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 172:0 2024-11-21T08:49:02.410471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 172:0 2024-11-21T08:49:02.410510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 24 2024-11-21T08:49:02.411049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 172:0, at schemeshard: 72057594046678944 2024-11-21T08:49:02.411466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 172:0, at schemeshard: 72057594046678944 2024-11-21T08:49:02.411490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 172:0 2024-11-21T08:49:02.411530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 172: got EvNotifyTxCompletionResult 2024-11-21T08:49:02.411535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 172: satisfy waiter [1:4990:6139] 2024-11-21T08:49:02.411699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 937 RawX2: 4294969998 } TabletId: 72075186233409582 State: 4 2024-11-21T08:49:02.411711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state 
changing, datashardId: 72075186233409582, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:49:02.411796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 946 RawX2: 4294970005 } TabletId: 72075186233409583 State: 4 2024-11-21T08:49:02.411804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409583, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:49:02.412567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:37 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:49:02.412867Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 37 TxId_Deprecated: 37 TabletID: 72075186233409582 Forgetting tablet 72075186233409582 2024-11-21T08:49:02.412972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 37 ShardOwnerId: 72057594046678944 ShardLocalIdx: 37, at schemeshard: 72057594046678944 2024-11-21T08:49:02.413057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 23 2024-11-21T08:49:02.413153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:38 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:49:02.413518Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 38 TxId_Deprecated: 38 TabletID: 72075186233409583 Forgetting tablet 72075186233409583 2024-11-21T08:49:02.472669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 38 ShardOwnerId: 72057594046678944 ShardLocalIdx: 38, at schemeshard: 72057594046678944 2024-11-21T08:49:02.472800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 22 2024-11-21T08:49:02.474607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2024-11-21T08:49:02.474622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2024-11-21T08:49:02.474841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2024-11-21T08:49:02.474850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 TestWaitNotification: OK eventTxId 172 2024-11-21T08:49:02.475144Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:49:02.475194Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 62us result status StatusSuccess 2024-11-21T08:49:02.475311Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 123 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 21 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 20 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 20 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[schema-select_all_inferschema_limit--Plan] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Debug] >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Plan] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test.py::test[insert-drop_sortness--Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Analyze] >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Analyze] >> DataShardSnapshots::MvccSnapshotTailCleanup >> test.py::test[aggr_factory-multi_list-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-multi_list-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-multi_list-default.txt-Results] >> test.py::test[expr-struct_slice-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_slice-default.txt-ForceBlocks] >> test.py::test[column_order-insert_with_reorder_cols--Analyze] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Debug] >> DataShardSnapshots::LockedWriteReuseAfterCommit >> test.py::test[tpch-q1-default.txt-Debug] [GOOD] >> test.py::test[tpch-q1-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Plan] >> test.py::test[window-full/session_compact--Debug] [GOOD] >> test.py::test[window-full/session_compact--ForceBlocks] >> test.py::test[join-premap_no_premap--ForceBlocks] [GOOD] >> test.py::test[join-premap_no_premap--Plan] [GOOD] >> test.py::test[join-premap_no_premap--Results] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Debug] >> test.py::test[expr-literal_list_element-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Plan] >> test.py::test[pg-tpcds-q89-default.txt-Plan] [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> test.py::test[insert-double_append_to_anonymous--Analyze] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Debug] >> test.py::test[expr-literal_list_element-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Results] >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[join-premap_merge_inner-off-Debug] [GOOD] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_merge_inner-off-Plan] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[join-premap_common_inner--Debug] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> test.py::test[join-premap_merge_inner-off-Plan] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Analyze] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Analyze] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Debug] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] >> test.py::test[expr-cast_string_implicit-default.txt-Debug] >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Analyze] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Plan] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Debug] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Plan] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-Analyze] >> test.py::test[table_range-range_slash--ForceBlocks] [GOOD] >> test.py::test[table_range-range_slash--Plan] [GOOD] >> test.py::test[table_range-range_slash--Results] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[schema-select_field-schema-Analyze] >> 
test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Debug] >> test.py::test[insert_monotonic-several1-default.txt-Analyze] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Debug] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Analyze] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Debug] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Debug] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Plan] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Results] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_trigger-default.txt-Analyze] >> test.py::test[expr-struct_slice-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_slice-default.txt-Results] >> test.py::test[blocks-if--Debug] [GOOD] >> test.py::test[blocks-if--Plan] [GOOD] >> test.py::test[expr-literal_list_element-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-literal_list_element-default.txt-Plan] [GOOD] >> test.py::test[expr-literal_list_element-default.txt-Results] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Results] [GOOD] >> test.py::test[blocks-if--Results] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Debug] >> test.py::test[aggr_factory-every-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Debug] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] >> test.py::test[join-premap_nonseq_flatmap--Analyze] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Debug] >> test.py::test[tpch-q1-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q1-default.txt-Plan] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[aggr_factory-multi_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Debug] >> DataShardSnapshots::LockedWriteReuseAfterCommit [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess >> test.py::test[insert-double_append_to_anonymous--Debug] [GOOD] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Analyze] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Debug] >> test.py::test[expr-cast_string_implicit-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Analyze] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Plan] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> test.py::test[schema-select_field-schema-Analyze] [GOOD] >> test.py::test[schema-select_field-schema-Debug] >> test.py::test[coalesce-coalesce_sugar-default.txt-Analyze] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Debug] >> test.py::test[table_range-range_slash--Results] [GOOD] >> 
test.py::test[table_range-range_tables_with_view--Analyze] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Results] >> TKqpScanData::UnboxedValueSize >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumns >> test.py::test[expr-literal_list_element-default.txt-Results] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Analyze] >> TKqpScanData::EmptyColumns [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> test.py::test[window-win_lead_in_mem-default.txt-Debug] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Plan] >> test.py::test[expr-struct_slice-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Analyze] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] [GOOD] >> test.py::test[lineage-join_as_struct-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[window-full/session_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/session_compact--Plan] >> test.py::test[pg_catalog-pg_trigger-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_trigger-default.txt-Debug] >> test.py::test[window-win_lead_in_mem-default.txt-Plan] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] >> test.py::test[lineage-join_as_struct-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-ForceBlocks] >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight >> test.py::test[window-full/session_compact--Plan] [GOOD] >> test.py::test[window-full/session_compact--Results] >> test.py::test[lineage-select_group_by_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Debug] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] >> test.py::test[select-optional_as_warn-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-Debug] [SKIPPED] >> test.py::test[select-refselect-1000-Plan] [SKIPPED] >> test.py::test[select-refselect-1000-Results] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt-Debug] >> test.py::test[pg-select_proj_ref_order_by_star-default.txt-Results] [GOOD] >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-json_document_type--Debug] >> 
test.py::test[pg-select_sort_project_same_desc-default.txt-Analyze] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Plan] [SKIPPED] >> test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test.py::test[join-premap_common_inner--Debug] [GOOD] >> test.py::test[join-premap_common_inner--Plan] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-or_distributive--Analyze] [SKIPPED] >> test.py::test[optimizers-or_distributive--Debug] [SKIPPED] >> test.py::test[optimizers-or_distributive--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-or_distributive--Plan] [SKIPPED] >> test.py::test[optimizers-or_distributive--Results] >> test.py::test[insert_monotonic-several1-default.txt-Debug] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner--Plan] [GOOD] >> test.py::test[join-premap_common_inner--Results] >> test.py::test[optimizers-or_distributive--Results] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Analyze] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Analyze] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Debug] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Plan] >> test.py::test[schema-select_field-schema-Debug] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Debug] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] >> test.py::test[table_range-range_tables_with_view--Analyze] [GOOD] >> test.py::test[table_range-range_tables_with_view--Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Plan] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> test.py::test[schema-select_field-schema-ForceBlocks] >> test.py::test[coalesce-coalesce_sugar-default.txt-Debug] [GOOD] >> 
test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] >> test.py::test[expr-cast_string_implicit-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_string_implicit-default.txt-Results] >> test.py::test[expr-literal_strings-default.txt-Analyze] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[pg-tpcds-q35-default.txt-Debug] [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Analyze] >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Debug] >> test.py::test[insert-double_append_to_anonymous--Plan] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Plan] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Debug] >> test.py::test[pg_catalog-pg_trigger-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_trigger-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Debug] [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Debug] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Debug] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] >> test.py::test[agg_phases_agg_apply-sum_opt-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Analyze] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Plan] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Debug] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Plan] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Analyze] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_scalar--Plan] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] >> test.py::test[expr-cast_string_implicit-default.txt-Results] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Analyze] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Debug] [GOOD] >> 
test.py::test[aggregate-aggregate_by_one_column-default.txt-Plan] [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Plan] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Debug] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Plan] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test.py::test[join-premap_common_inner--Results] [GOOD] >> test.py::test[join-premap_context_dep-off-Debug] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[schema-select_field-schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-schema-Plan] >> test.py::test[expr-literal_strings-default.txt-Debug] [GOOD] >> test.py::test[expr-literal_strings-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Plan] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Plan] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[table_range-range_tables_with_view--Debug] [GOOD] >> test.py::test[table_range-range_tables_with_view--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Debug] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] [SKIPPED] >> test.py::test[binding-named_node_corr_names-default.txt-Debug] >> test.py::test[coalesce-coalesce_sugar-default.txt-Plan] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test.py::test[join-premap_context_dep-off-Debug] [SKIPPED] >> test.py::test[join-premap_context_dep-off-Plan] [SKIPPED] >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] >> test.py::test[aggregate-aggregation_with_named_node--Debug] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[schema-select_field-schema-Plan] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[column_order-join--Analyze] [SKIPPED] >> test.py::test[column_order-join--Debug] [SKIPPED] >> test.py::test[column_order-join--ForceBlocks] [SKIPPED] >> test.py::test[column_order-join--Plan] [SKIPPED] >> test.py::test[column_order-join--Results] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Analyze] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[window-current/session--Debug] >> test.py::test[join-premap_no_premap--Results] [GOOD] >> 
test.py::test[join-premap_no_premap-off-Analyze] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Analyze] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-generic/session_aliases--Analyze] >> test.py::test[window-win_func_lead_lag_worm--Analyze] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Debug] >> test.py::test[pg_catalog-pg_trigger-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_trigger-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_trigger-default.txt-Results] >> test.py::test[aggr_factory-every-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Analyze] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Debug] [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-ForceBlocks] |86.2%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-replace_inferred--Analyze] >> test.py::test[blocks-json_document_type--Debug] [GOOD] >> test.py::test[blocks-json_document_type--Plan] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> TCertificateCheckerTest::CheckSubjectDns >> test.py::test[aggr_factory-min-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Debug] >> test.py::test[insert_monotonic-several1-default.txt-Results] [GOOD] >> TFileStoreWithReboots::Create [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Debug] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Analyze] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Plan] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> test.py::test[join-alias_where_group-off-Analyze] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> test.py::test[pg_catalog-pg_trigger-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Analyze] >> test.py::test[tpch-q11-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Debug] >> test.py::test[agg_phases-sum_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Debug] [GOOD] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_mix2--Analyze] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Debug] >> test.py::test[agg_phases-sum_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Results] >> test.py::test[solomon-BadDownsamplingDisabled--Debug] [SKIPPED] >> 
test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-Analyze] [SKIPPED] >> test.py::test[solomon-BadDownsamplingDisabled--Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Debug] [SKIPPED] >> test.py::test[solomon-BadDownsamplingDisabled--Results] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Plan] [SKIPPED] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Debug] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Debug] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Plan] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Analyze] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon-single-ForceBlocks] [SKIPPED] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:48:55.430268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:48:55.430297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:55.430302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:48:55.430307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:48:55.430314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:48:55.430317Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:48:55.430326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:55.430413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:48:55.444447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:48:55.444477Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:48:55.447453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:48:55.447586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:48:55.447635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:48:55.451031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:48:55.451143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:48:55.451302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:55.451524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:48:55.452371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:55.452706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:55.452725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:55.452744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:48:55.452754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:55.452761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:48:55.452816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:48:55.454402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:48:55.474115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:48:55.474213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.474297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:48:55.474352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:48:55.474362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.477963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:55.478008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:48:55.478092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.478107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:48:55.478112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:48:55.478119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:48:55.478759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.478776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:48:55.478782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:48:55.479480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.479493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.479500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:55.479507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:48:55.480167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:48:55.486810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:48:55.486888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:48:55.487112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:55.487154Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:48:55.487173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:55.487238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:48:55.487246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:55.487283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:48:55.487295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:48:55.492791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:55.492817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:55.492870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:55.492875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:48:55.492951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:55.492960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:48:55.492975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:48:55.492978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:55.492983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:48:55.492987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:55.492990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:48:55.492993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:48:55.493012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:48:55.493018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:48:55.493020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
sState, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [49:325:2313] sender: [49:326:2058] recipient: [49:316:2306] 2024-11-21T08:49:08.815040Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1001:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T08:49:08.815067Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1001, partId: 0, tablet: 72075186233409546 Leader for TabletID 72075186233409546 is [49:325:2313] sender: [49:333:2058] recipient: [49:15:2062] 2024-11-21T08:49:08.816549Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1001, tablet: 72075186233409546, partId: 0 2024-11-21T08:49:08.816577Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1001:0, at schemeshard: 72057594046678944, message: TxId: 1001 Origin: 72075186233409546 Status: OK 2024-11-21T08:49:08.816583Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#1001:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T08:49:08.816591Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1001:0 3 -> 128 2024-11-21T08:49:08.817282Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817317Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817322Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#1001:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817329Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1001 ready parts: 1/1 2024-11-21T08:49:08.817370Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1001 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:49:08.817676Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1001:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1001 msg type: 269090816 2024-11-21T08:49:08.817702Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1001, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1001 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1001 at step: 5000003 2024-11-21T08:49:08.817759Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817774Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1001 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 210453399658 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817779Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#1001:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:49:08.817792Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1001:0 128 -> 240 2024-11-21T08:49:08.817814Z node 49 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:49:08.817822Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1001 2024-11-21T08:49:08.818119Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:08.818127Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:49:08.818152Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:49:08.818165Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:08.818169Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1001, path id: 2 2024-11-21T08:49:08.818172Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1001, path id: 3 2024-11-21T08:49:08.818218Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T08:49:08.818222Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1001:0 ProgressState 2024-11-21T08:49:08.818232Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1001:0 progress is 1/1 2024-11-21T08:49:08.818237Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2024-11-21T08:49:08.818241Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1001, ready parts: 1/1, is published: false 2024-11-21T08:49:08.818244Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2024-11-21T08:49:08.818249Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1001:0 2024-11-21T08:49:08.818253Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1001:0 2024-11-21T08:49:08.818276Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:49:08.818281Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1001, publications: 2, subscribers: 0 2024-11-21T08:49:08.818285Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:49:08.818289Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:49:08.818391Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T08:49:08.818402Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T08:49:08.818405Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046678944, txId: 1001 2024-11-21T08:49:08.818408Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:49:08.818411Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:49:08.818510Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T08:49:08.818520Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T08:49:08.818524Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1001 2024-11-21T08:49:08.818527Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:49:08.818530Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:49:08.818536Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1001, subscribers: 0 2024-11-21T08:49:08.819079Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 2024-11-21T08:49:08.819139Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 TestModificationResult got TxId: 1001, wait until txId: 1001 TestWaitNotification wait txId: 1001 2024-11-21T08:49:08.819186Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: send EvNotifyTxCompletion 2024-11-21T08:49:08.819193Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1001 2024-11-21T08:49:08.819252Z node 49 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1001, at schemeshard: 72057594046678944 2024-11-21T08:49:08.819267Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: got EvNotifyTxCompletionResult 2024-11-21T08:49:08.819271Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: satisfy waiter [49:369:2350] TestWaitNotification: OK eventTxId 1001 2024-11-21T08:49:08.819333Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:49:08.819358Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_1" took 35us result status StatusSuccess 2024-11-21T08:49:08.819412Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_1" PathDescription { Self { Name: "FS_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_1" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "FS_1" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> test.py::test[expr-literal_strings-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Plan] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Debug] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Debug] >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Plan] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test.py::test[window-generic/session_aliases--Analyze] [GOOD] >> test.py::test[window-generic/session_aliases--Debug] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Analyze] [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze [GOOD] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Debug] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart >> test.py::test[table_range-range_tables_with_view--ForceBlocks] [GOOD] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Analyze] [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Debug] >> test.py::test[insert-replace_inferred--Analyze] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-pullup_random--Analyze] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Debug] [GOOD] >> test.py::test[blocks-pg_tofrom--Debug] >> test.py::test[pg-select_sort_project_same_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Plan] [GOOD] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Results] >> test.py::test[binding-named_node_corr_names-default.txt-Plan] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> test.py::test[join-premap_no_premap-off-Analyze] [GOOD] >> 
test.py::test[join-premap_no_premap-off-Debug] >> DataShardSnapshots::LockedWriteBulkUpsertConflict |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> test.py::test[join-alias_where_group-off-Analyze] [GOOD] >> test.py::test[join-alias_where_group-off-Debug] >> test.py::test[pg_catalog-table_constraints-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Debug] >> test.py::test[expr-literal_strings-default.txt-Results] [GOOD] >> test.py::test[expr-minmax_for_complex_types-default.txt-Analyze] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Debug] >> DataShardSnapshots::VolatileSnapshotSplit >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Analyze] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Debug] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Plan] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test.py::test[schema-user_schema_mix2--Analyze] [GOOD] >> test.py::test[schema-user_schema_mix2--Debug] >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions--Debug] >> test.py::test[window-win_func_lead_lag_worm--Debug] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] >> DataShardSnapshots::VolatileSnapshotMerge >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Plan] >> test.py::test[select-select_all-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Debug] [GOOD] >> test.py::test[select-select_all-default.txt-Plan] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0 >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q02-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Results] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Debug] >> DataShardSnapshots::MvccSnapshotAndSplit >> test.py::test[aggr_factory-min-default.txt-ForceBlocks] >> test.py::test[pg-select_sort_project_same_desc-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Analyze] >> test.py::test[aggregate-compare_by--Analyze] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Plan] [GOOD] >> 
test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-range_over_regexp--Debug] >> test.py::test[join-pullup_random--Analyze] [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Debug] [GOOD] >> test.py::test[join-pullup_random--Debug] >> test.py::test[binding-tie_scalar_context-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Results] >> test.py::test[expr-minmax_for_complex_types-default.txt-Analyze] [GOOD] >> test.py::test[expr-minmax_for_complex_types-default.txt-Debug] >> test.py::test[tpch-q11-default.txt-Debug] [GOOD] >> test.py::test[tpch-q11-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 >> test.py::test[window-win_func_nth_value-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Debug] >> test.py::test[pg_catalog-table_constraints-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-ForceBlocks] >> test.py::test[window-current/session--Debug] [GOOD] >> test.py::test[join-alias_where_group-off-Debug] [GOOD] >> test.py::test[join-alias_where_group-off-ForceBlocks] [SKIPPED] >> test.py::test[join-alias_where_group-off-Plan] [GOOD] >> test.py::test[join-alias_where_group-off-Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Analyze] |86.2%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[column_group-hint_anon-single-ForceBlocks] [SKIPPED] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Debug] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--Debug] [GOOD] >> test.py::test[blocks-pg_tofrom--Plan] [GOOD] >> test.py::test[window-current/session--Plan] [GOOD] >> test.py::test[window-current/session--Results] >> test.py::test[aggregate-group_by_rollup_key_check--Analyze] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Results] >> DataShardSnapshots::LockedWriteBulkUpsertConflict [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[table_range-range_tables_with_view--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix2--Debug] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Analyze] [GOOD] >> test.py::test[schema-user_schema_mix2--ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--Analyze] >> test.py::test[table_range-range_over_regexp--Debug] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Debug] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Debug] >> test.py::test[agg_phases-sum_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Analyze] >> test.py::test[window-generic/session_aliases--Debug] [GOOD] >> test.py::test[window-generic/session_aliases--ForceBlocks] >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 >> test.py::test[aggregate-compare_by--Analyze] [GOOD] >> test.py::test[aggregate-compare_by--Debug] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Analyze] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 >> test.py::test[binding-tie_scalar_context-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Plan] [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Results] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[insert-replace_inferred--Analyze] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> 
test.py::test[aggr_factory-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Plan] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] >> test.py::test[join-premap_no_premap-off-Debug] [GOOD] >> test.py::test[join-premap_no_premap-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_no_premap-off-Plan] >> test.py::test[join-pullup_random--Debug] [GOOD] >> test.py::test[join-pullup_random--ForceBlocks] >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> test.py::test[csee-lambda_in_l0_and_l1-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-Analyze] >> test.py::test[join-premap_no_premap-off-Plan] [GOOD] >> test.py::test[join-premap_no_premap-off-Results] [GOOD] >> test.py::test[join-pullup_random-off-Analyze] >> test.py::test[expr-minmax_for_complex_types-default.txt-Debug] [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart >> test.py::test[pg_catalog-table_constraints-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Plan] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block >> test.py::test[aggregate-group_by_rollup_key_check--Analyze] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--Debug] >> test.py::test[pg_catalog-table_constraints-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Results] >> test.py::test[blocks-decimal_comparison--Analyze] [GOOD] >> test.py::test[blocks-decimal_comparison--Debug] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Results] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Analyze] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[expr-double_join_with_list_from_range--Analyze] [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Debug] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Plan] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Debug] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--Debug] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] >> test.py::test[binding-tie_scalar_context-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Analyze] >> test.py::test[join-anyjoin_merge_nodup-off-Analyze] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Debug] >> test.py::test[select-select_all_ordered-default.txt-Plan] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> 
test.py::test[bitcast_implicit-add_bitcast-default.txt-Debug] >> DataShardSnapshots::LockedWriteDistributedCommitAborted [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict >> test.py::test[schema-user_schema_mix2--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix2--Plan] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] >> test.py::test[window-win_func_nth_value-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Debug] >> test.py::test[pg_catalog-table_constraints-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Analyze] >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q11-default.txt-Plan] [GOOD] >> test.py::test[join-pullup_random-off-Analyze] [GOOD] >> test.py::test[join-pullup_random-off-Debug] >> test.py::test[datetime-date_tz_io-default.txt-Analyze] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-Debug] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Analyze] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Plan] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Debug] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Debug] >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> test.py::test[join-pullup_random--ForceBlocks] [GOOD] >> test.py::test[join-pullup_random--Plan] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1 >> test.py::test[blocks-decimal_comparison--Debug] [GOOD] >> test.py::test[blocks-decimal_comparison--ForceBlocks] >> test.py::test[schema-user_schema_mix2--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Analyze] >> test.py::test[join-pullup_random--Plan] [GOOD] >> test.py::test[join-pullup_random--Results] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[table_range-range_over_regexp--Debug] [GOOD] >> test.py::test[bitcast_implicit-add_bitcast-default.txt-Debug] [GOOD] >> test.py::test[bitcast_implicit-add_bitcast-default.txt-Plan] [GOOD] >> test.py::test[bitcast_implicit-add_bitcast-default.txt-Results] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> 
DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test.py::test[window-win_peephole-default.txt-Analyze] >> DataShardSnapshots::MvccSnapshotLockedWrites [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Analyze] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Debug] >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 >> test.py::test[aggr_factory-median-default.txt-Debug] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Analyze] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Debug] >> test.py::test[aggr_factory-median-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_key_check--Debug] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc >> test.py::test[window-generic/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-generic/session_aliases--Plan] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Analyze] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc >> test.py::test[window-generic/session_aliases--Results] >> test.py::test[datetime-date_tz_io-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Plan] >> test.py::test[aggregate-compare_by--Debug] [GOOD] >> test.py::test[aggregate-compare_by--ForceBlocks] >> test.py::test[blocks-sort_two_mix--Debug] [GOOD] >> test.py::test[blocks-sort_two_mix--Plan] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> test.py::test[window-win_func_special--Analyze] >> test.py::test[window-distinct_over_window_struct-default.txt-Debug] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Plan] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> test.py::test[window-win_over_few_partitions--Debug] [GOOD] >> test.py::test[window-win_over_few_partitions--Plan] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-Plan] [GOOD] >> test.py::test[bitcast_implicit-add_bitcast-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint8--Debug] >> test.py::test[schema-yamred_dsv_select_from_dict--Analyze] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Debug] >> TFileStoreWithReboots::CreateDrop [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit >> test.py::test[blocks-decimal_comparison--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_comparison--Plan] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] |86.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-aggrs_no_grouping--Debug] |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |86.2%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[window-win_peephole-default.txt-Analyze] [GOOD] >> test.py::test[window-win_peephole-default.txt-Debug] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block |86.2%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> test.py::test[join-pullup_random-off-Debug] [GOOD] >> test.py::test[join-pullup_random-off-ForceBlocks] [SKIPPED] >> test.py::test[join-pullup_random-off-Plan] [GOOD] >> test.py::test[join-pullup_random-off-Results] [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-ForceBlocks] [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit >> test.py::test[window-win_func_nth_value-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Results] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[expr-minmax_for_complex_types-default.txt-Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:48:52.558371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:48:52.558399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:52.558406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:48:52.558411Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:48:52.558417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:48:52.558421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:48:52.558430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:48:52.558511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:48:52.572569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:48:52.572595Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:48:52.574562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:48:52.574652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:48:52.574689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:48:52.576454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:48:52.576509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:48:52.576599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:52.576744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:48:52.577189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:52.577431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:52.577440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:52.577449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:48:52.577454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:52.577459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:48:52.577488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:48:52.578498Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:48:52.590736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:48:52.590817Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.590873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:48:52.590909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:48:52.590915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.591582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:52.591603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:48:52.591652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.591663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:48:52.591666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:48:52.591670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:48:52.591950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.591957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:48:52.591960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:48:52.592179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.592186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.592190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:52.592196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:48:52.592655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:48:52.592955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:48:52.592996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2024-11-21T08:48:52.593172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:48:52.593198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:48:52.593215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:52.593263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:48:52.593269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:48:52.593298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:48:52.593306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:48:52.593606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:48:52.593615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:48:52.593653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:48:52.593658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:48:52.593723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:48:52.593728Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:48:52.593737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:48:52.593740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:52.593744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:48:52.593747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:48:52.593750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:48:52.593753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:48:52.593760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:48:52.593765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:48:52.593767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
chemeshard: 72057594046678944 2024-11-21T08:49:16.236838Z node 84 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:16.236845Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T08:49:16.236871Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:49:16.237022Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.237061Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:49:16.237275Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:16.237326Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:49:16.237395Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.237847Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T08:49:16.237873Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T08:49:16.237969Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:16.237988Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 360777255017 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:16.237996Z node 84 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T08:49:16.238012Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:49:16.238035Z node 84 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:49:16.238039Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:49:16.238047Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:49:16.238055Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2024-11-21T08:49:16.238060Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T08:49:16.238066Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:49:16.238071Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:49:16.238074Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:49:16.238093Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:49:16.238098Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 0 2024-11-21T08:49:16.238102Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T08:49:16.238105Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:49:16.238259Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:49:16.238268Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:49:16.238605Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:49:16.238628Z node 84 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:16.238632Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:49:16.238651Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:49:16.238669Z node 84 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:16.238673Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [84:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T08:49:16.238677Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [84:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:49:16.238765Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.238774Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.238778Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:49:16.238782Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:49:16.238786Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:49:16.238821Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:49:16.238826Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:49:16.238833Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:49:16.238867Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.238875Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.238880Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:49:16.238884Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T08:49:16.238887Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:49:16.238895Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T08:49:16.238941Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:49:16.242260Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:16.242744Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.242788Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:49:16.243020Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:49:16.243039Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T08:49:16.243107Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T08:49:16.243114Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T08:49:16.243172Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T08:49:16.243187Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:49:16.243191Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [84:403:2384] TestWaitNotification: OK eventTxId 
1002 2024-11-21T08:49:16.243255Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:49:16.243281Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_3" took 36us result status StatusPathDoesNotExist 2024-11-21T08:49:16.243313Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/FS_3\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/FS_3" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[join-anyjoin_merge_nodup-off-Debug] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-ForceBlocks] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off-Plan] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Analyze] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [GOOD] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Debug] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] >> test.py::test[blocks-combine_all_max_filter_opt--Debug] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Debug] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> test.py::test[join-bush_dis_in_in-off-Analyze] >> test.py::test[window-win_func_special--Analyze] [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts >> test.py::test[datetime-date_tz_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-Results] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[window-win_func_special--Debug] >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Analyze] >> test.py::test[blocks-top_sort_two_asc--Debug] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-star_join_inners--Analyze] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Plan] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Results] >> 
test.py::test[schema-yamred_dsv_select_from_dict--Debug] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] >> test.py::test[blocks-add_uint8--Debug] [GOOD] >> test.py::test[blocks-add_uint8--Plan] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Results] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[pg-select_subquery2_qstar-default.txt-Plan] [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable >> test.py::test[select-tablename_with_table_row-default.txt-Debug] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Plan] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--Plan] [GOOD] >> test.py::test[datetime-date_tz_io-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Analyze] >> test.py::test[window-win_peephole-default.txt-Debug] [GOOD] >> test.py::test[window-win_peephole-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-empty/aggregations--Debug] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 >> DataShardSnapshots::MvccSnapshotReadLockedWrites [GOOD] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> test.py::test[window-win_by_simple-default.txt-Analyze] >> test.py::test[join-bush_dis_in_in-off-Analyze] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Debug] >> test.py::test[aggregate-compare_by--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by--Plan] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Debug] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Analyze] [GOOD] >> test.py::test[blocks-filter_partial_expr--Debug] >> test.py::test[produce-discard_reduce_lambda-default.txt-Analyze] >> test.py::test[window-win_over_few_partitions--Results] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Debug] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Plan] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[window-win_func_nth_value-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Debug] [GOOD] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--Debug] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Results] [GOOD] >> 
test.py::test[expr-unicode_literals-default.txt-Analyze] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Plan] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] >> test.py::test[join-star_join_inners--Analyze] [GOOD] >> test.py::test[join-star_join_inners--Debug] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-pullup_random-off-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Analyze] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Plan] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Debug] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Analyze] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Debug] >> test.py::test[window-win_by_simple-default.txt-Analyze] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Debug] >> test.py::test[window-win_func_special--Debug] [GOOD] >> test.py::test[window-win_func_special--ForceBlocks] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit >> test.py::test[distinct-distinct_groupby-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Debug] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[aggregate-group_by_rollup_key_check--Plan] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Analyze] [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] >> test.py::test[expr-unicode_literals-default.txt-Analyze] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Debug] >> test.py::test[blocks-filter_partial_expr--Debug] [GOOD] >> test.py::test[blocks-filter_partial_expr--ForceBlocks] >> test.py::test[blocks-top_sort_two_asc--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Plan] [GOOD] >> test.py::test[blocks-bitcast_scalar--Debug] [GOOD] >> test.py::test[blocks-bitcast_scalar--Plan] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[blocks-bitcast_scalar--Plan] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test.py::test[window-win_peephole-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[join-bush_dis_in_in-off-Debug] [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> test.py::test[window-win_peephole-default.txt-Plan] [GOOD] >> test.py::test[window-win_peephole-default.txt-Results] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Plan] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_min--Analyze] >> test.py::test[join-star_join_inners--Debug] [GOOD] >> test.py::test[join-star_join_inners--ForceBlocks] >> test.py::test[select-backtick_with_escapes-default.txt-Analyze] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Debug] >> test.py::test[tpch-q20-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q20-default.txt-Debug] >> test.py::test[distinct-distinct_groupby-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> test.py::test[window-win_func_special--ForceBlocks] [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 >> test.py::test[window-win_func_special--Plan] [GOOD] >> test.py::test[window-win_func_special--Results] >> test.py::test[expr-unicode_literals-default.txt-Debug] [GOOD] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> test.py::test[expr-unicode_literals-default.txt-ForceBlocks] >> test.py::test[window-win_by_simple-default.txt-Debug] [GOOD] >> test.py::test[window-win_by_simple-default.txt-ForceBlocks] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> test.py::test[window-win_peephole-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Debug] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-ForceBlocks] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[blocks-boolean_ops--Debug] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Debug] 
>> test.py::test[blocks-filter_partial_expr--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_partial_expr--Plan] [GOOD] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[window-win_func_nth_value-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min--Analyze] [GOOD] >> test.py::test[blocks-combine_all_min--Debug] >> test.py::test[select-backtick_with_escapes-default.txt-Debug] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] Test command err: 2024-11-21T08:49:04.138505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:04.139016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:04.139051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004513/r3tmp/tmpnfV3jl/pdisk_1.dat 2024-11-21T08:49:04.249230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:04.272305Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:04.314993Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:04.315344Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:04.315400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:04.315420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:04.326067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:04.430907Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:04.430935Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:04.430974Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:04.455343Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:04.455602Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:04.455617Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:04.455677Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:04.455710Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:04.455724Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:04.455795Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:04.457466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:04.457793Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:04.457804Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:04.473554Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:04.473778Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:04.473871Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:04.473930Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:04.492032Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:04.492299Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:04.492329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:04.492491Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:04.492500Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:04.492508Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:04.492554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:04.496973Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:04.497052Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:04.497079Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:04.497083Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:04.497087Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:04.497091Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:04.497231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:04.497239Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:04.497397Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:04.497418Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:04.497435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:04.497440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:04.497447Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:04.497454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:04.497461Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:04.497467Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:04.497472Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:04.497476Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:04.497481Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:04.497486Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:04.497505Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:04.497510Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:04.497528Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:04.497590Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:04.497601Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:04.497617Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:04.497623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:04.497628Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:04.497632Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:04.497637Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:04.497683Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:04.497687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:04.497693Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:04.497696Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:04.497706Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:04.497709Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:04.497713Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:04.497715Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:04.497719Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:04.497877Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:04.497885Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:04.497889Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:04.497896Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:04.497905Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:04.498374Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:04.498388Z ... 888, tableId# 2, last full compaction# 1970-01-01T00:00:04.040233Z 2024-11-21T08:49:24.089671Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T08:49:24.089676Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:49:24.089682Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T08:49:24.089689Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T08:49:24.089699Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T08:49:24.089708Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T08:49:24.089710Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:49:24.089713Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T08:49:24.089717Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T08:49:24.089721Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T08:49:24.089729Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 
72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:49:24.089733Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:49:24.089735Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T08:49:24.089738Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:49:24.089762Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:49:24.089786Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:49:24.089791Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:49:24.089795Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T08:49:24.089800Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:49:24.089811Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 300 } } ComputeActorStats { } 2024-11-21T08:49:24.089882Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:24.089902Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 116 } } ComputeActorStats { } 2024-11-21T08:49:24.089959Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:24.090592Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T08:49:24.090614Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T08:49:24.090617Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:24.090621Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 
txId 281474976715663 2024-11-21T08:49:24.090745Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T08:49:24.090759Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T08:49:24.090764Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:24.090767Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2024-11-21T08:49:24.104684Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T08:49:24.104708Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715667 ProcessProposeKqpTransaction 2024-11-21T08:49:24.104912Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yjvkt5h6tnnp47491d4hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTAyNGEzMzUtZDYxYjAyMjctY2U2ZmQ5YTUtZDc0OTg3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T08:49:24.105541Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:49:24.105570Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:49:24.105580Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2024-11-21T08:49:24.105587Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2024-11-21T08:49:24.105597Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T08:49:24.105618Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:24.105623Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:49:24.105629Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:24.105634Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:24.105649Z node 7 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T08:49:24.105654Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:24.105658Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:24.105663Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 
2024-11-21T08:49:24.105667Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:49:24.105681Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T08:49:24.105756Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1068:2865], 0} after executionsCount# 1 2024-11-21T08:49:24.105764Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} sends rowCount# 2, bytes# 72, quota rows left# 999, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:49:24.105783Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} finished in read 2024-11-21T08:49:24.105794Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:24.105797Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:49:24.105801Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:24.105805Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:24.105816Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:24.105820Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:24.105824Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T08:49:24.105829Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:49:24.105846Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:49:24.106060Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:49:24.106068Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Analyze] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Analyze] >> test.py::test[expr-unicode_literals-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Plan] [GOOD] >> test.py::test[expr-unicode_literals-default.txt-Results] >> 
TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> test.py::test[join-star_join_inners--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners--Plan] [GOOD] >> test.py::test[join-star_join_inners--Results] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-bush_dis_in_in-off-Debug] [GOOD] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block >> test.py::test[window-win_by_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Debug] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Plan] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] >> test.py::test[aggregate-group_by_column-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Debug] >> test.py::test[window-win_by_simple-default.txt-Plan] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Debug] >> test.py::test[window-empty/aggregations--Debug] [GOOD] >> test.py::test[window-empty/aggregations--Plan] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[window-win_by_simple-default.txt-Plan] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Results] >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> test.py::test[pg-tpch-q13-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Plan] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> test.py::test[window-win_func_special--Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-Analyze] [SKIPPED] >> test.py::test[pg-tpch-q13-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] >> test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Plan] >> test.py::test[ypath-empty_range-dynamic-Debug] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-ForceBlocks] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Plan] >> test.py::test[tpch-q20-default.txt-Debug] [GOOD] >> test.py::test[tpch-q20-default.txt-ForceBlocks] >> test.py::test[select-backtick_with_escapes-default.txt-Plan] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[window-win_peephole-default.txt-Results] [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] >> 
test.py::test[produce-discard_reduce_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Results] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg_from_dates--Analyze] >> test.py::test[ypath-empty_range-dynamic-Plan] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[expr-unicode_literals-default.txt-Results] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Analyze] >> test.py::test[order_by-order_by_mul_columns-default.txt-Analyze] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Debug] >> test.py::test[blocks-boolean_ops--Debug] [GOOD] >> test.py::test[blocks-boolean_ops--Plan] [GOOD] >> test.py::test[blocks-boolean_ops--Results] >> test.py::test[aggregate-disable_blocks_with_spilling--Analyze] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Debug] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 >> test.py::test[aggregate-group_by_column-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] Test command err: 2024-11-21T08:49:11.356787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:11.357753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:11.357785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ef/r3tmp/tmpLbHWqG/pdisk_1.dat 2024-11-21T08:49:11.492827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.511841Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:11.556284Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:11.556568Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:11.556612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:11.556630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:11.567959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:11.670700Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:11.670719Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:11.670744Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:11.678064Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:11.678267Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:11.678280Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:11.678338Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:11.678369Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:11.678381Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:11.678442Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:11.678777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.678988Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:11.678999Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:11.695012Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:11.695215Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:11.695298Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:11.695353Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:11.704262Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:11.704443Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:11.704473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:11.704642Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:11.704651Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:11.704657Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:11.704700Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:11.709965Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:11.710026Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:11.710048Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:11.710053Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:11.710057Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:11.710062Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:11.710182Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.710188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.710317Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:11.710337Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:11.710353Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.710358Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.710364Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:11.710371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:11.710377Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:11.710383Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:11.710388Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:11.710391Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:11.710396Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:11.710401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:11.710418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:11.710422Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:11.710442Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:11.710480Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:11.710489Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:11.710504Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:11.710511Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:11.710515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:11.710521Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:11.710525Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.710565Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:11.710569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:11.710576Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:11.710580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.710590Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:11.710594Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:11.710598Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:11.710602Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:11.710607Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:11.710747Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:11.710755Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.710759Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.710766Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:11.710775Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:11.711373Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:11.711385Z ... ng: false } RuntimeSettings { TimeoutMs: 300000 ExecType: DATA UseSpilling: false StatsMode: DQ_STATS_MODE_NONE } } TxBody: cleared Tasks TxBody: injected Locks 2024-11-21T08:49:27.366889Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:987:2792], Recipient [7:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 987 RawX2: 30064773864 } TxBody: " \0018\000`\200\200\200\005jI\010\001\0329\n!\tY\001\000\000\000\000\000\000\021\000\000\001\000\000\020\000\001\030\001 \004)\000\001\205\000\000\000\000\0010\002\020\200\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T08:49:27.366899Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:27.366939Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:631:2536], Recipient [7:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:27.366945Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:27.366960Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:27.366993Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:49:27.367004Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:49:27.367022Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:49:27.367037Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T08:49:27.367042Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:49:27.367046Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:27.367051Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 
on unit BuildAndWaitDependencies 2024-11-21T08:49:27.367059Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T08:49:27.367074Z node 7 :TX_DATASHARD TRACE: Activated operation [0:281474976715669] at 72075186224037888 2024-11-21T08:49:27.367079Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T08:49:27.367083Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:27.367086Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:49:27.367090Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:49:27.367098Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T08:49:27.367111Z node 7 :TX_DATASHARD TRACE: Operation [0:281474976715669] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193400 2024-11-21T08:49:27.367123Z node 7 :TX_DATASHARD TRACE: KqpCommitLock LockId: 345 DataShard: 72075186224037888 Generation: 1 Counter: 4 SchemeShard: 72057594046644480 PathId: 2 2024-11-21T08:49:27.367130Z node 7 :TX_DATASHARD TRACE: Committing changes lockId# 345 in localTid# 1001 shard# 72075186224037888 2024-11-21T08:49:27.367178Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:49:27.367189Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:27.367192Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:49:27.367196Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:27.367200Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:27.367208Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T08:49:27.367212Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:27.367215Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:27.367219Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:27.367229Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T08:49:27.367235Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:27.367239Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T08:49:27.367374Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:27.367382Z node 7 
:TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:27.367388Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:49:27.367402Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715669 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 133 } } ComputeActorStats { } 2024-11-21T08:49:27.385703Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T08:49:27.385728Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715670 ProcessProposeKqpTransaction 2024-11-21T08:49:27.385893Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd6yjyt86mf2raqsxw4sm4j3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Yjk2NjU3YTYtZWJhNzE1YjAtODllNDYzNDgtYTQzMDU5ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T08:49:27.386247Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:49:27.386274Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:49:27.386284Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T08:49:27.386290Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3000/18446744073709551615 2024-11-21T08:49:27.386299Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2024-11-21T08:49:27.386313Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T08:49:27.386317Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:49:27.386322Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:27.386326Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:27.386338Z node 7 :TX_DATASHARD TRACE: Activated operation [0:11] at 72075186224037888 2024-11-21T08:49:27.386342Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T08:49:27.386345Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:27.386349Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:49:27.386356Z 
node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:49:27.386368Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T08:49:27.386421Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1009:2817], 0} after executionsCount# 1 2024-11-21T08:49:27.386427Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} sends rowCount# 3, bytes# 72, quota rows left# 998, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:49:27.386442Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} finished in read 2024-11-21T08:49:27.386450Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T08:49:27.386453Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:49:27.386457Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:27.386460Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:27.386468Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T08:49:27.386471Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:27.386475Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:11] at 72075186224037888 has finished 2024-11-21T08:49:27.386479Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:49:27.386495Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:49:27.386661Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:49:27.386670Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 23 } }, { items { uint32_value: 3 } items { uint32_value: 31 } } >> test.py::test[blocks-top_sort_two_mix--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Plan] >> test.py::test[blocks-combine_all_min--Debug] [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] >> test.py::test[blocks-top_sort_two_mix--Plan] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> 
test.py::test[blocks-pg_from_dates--Analyze] [GOOD] >> test.py::test[blocks-pg_from_dates--Debug] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> test.py::test[aggregate-aggrs_no_grouping--Debug] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Plan] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] >> test.py::test[produce-discard_reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream--Analyze] [SKIPPED] >> test.py::test[expr-variant_tuple_comp-default.txt-Analyze] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Debug] >> test.py::test[window-win_by_simple-default.txt-Results] [GOOD] >> test.py::test[window-win_func_in_lib--Analyze] >> test.py::test[aggr_factory-mode-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-Analyze] >> test.py::test[distinct-distinct_groupby-default.txt-Results] [GOOD] >> test.py::test[epochs-use_and_drop_anonymous--Analyze] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> test.py::test[epochs-use_and_drop_anonymous--Analyze] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Debug] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] Test command err: 2024-11-21T08:49:03.964296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:03.964872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:03.964906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004538/r3tmp/tmpvNL1lj/pdisk_1.dat 2024-11-21T08:49:04.083647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:04.102311Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:04.144734Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:04.145069Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:04.145120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:04.145140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:04.155815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:04.259615Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:04.259646Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:04.259692Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:04.268270Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:04.268552Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:04.268571Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:04.268625Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:04.268666Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:04.268681Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:04.268755Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:04.269203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:04.269530Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:04.269544Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:04.284962Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:04.285186Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:04.285279Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:04.285340Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:04.292748Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:04.292971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:04.293002Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:04.293175Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:04.293185Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:04.293192Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:04.293249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:04.297284Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:04.297399Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:04.297441Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:04.297447Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:04.297452Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:04.297459Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:04.297628Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:04.297636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:04.297799Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:04.297824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:04.297839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:04.297845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:04.297854Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:04.297862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:04.297871Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:04.297879Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:04.297884Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:04.297888Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:04.297893Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:04.297899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:04.297919Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:04.297923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:04.297951Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:04.298012Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:04.298024Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:04.298046Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:04.298054Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:04.298058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:04.298064Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:04.298068Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:04.298119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:04.298123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:04.298130Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:04.298133Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:04.298147Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:04.298150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:04.298154Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:04.298157Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:04.298163Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:04.298364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:04.298371Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:04.298375Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:04.298385Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:04.298396Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:04.298895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:04.298909Z ... on unit StoreAndSendOutRS 2024-11-21T08:49:28.868489Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit StoreAndSendOutRS 2024-11-21T08:49:28.868494Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T08:49:28.868497Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit StoreAndSendOutRS 2024-11-21T08:49:28.868503Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit PrepareKqpDataTxInRS 2024-11-21T08:49:28.868506Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit PrepareKqpDataTxInRS 2024-11-21T08:49:28.868510Z node 7 :TX_DATASHARD TRACE: Prepare InReadsets from 72075186224037889 to 72075186224037888 2024-11-21T08:49:28.868513Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T08:49:28.868516Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit PrepareKqpDataTxInRS 2024-11-21T08:49:28.868519Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T08:49:28.868522Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T08:49:28.868527Z node 7 :TX_DATASHARD TRACE: Prepare for loading readset for [900:281474976715668] at 72075186224037888 source=72075186224037889 target=72075186224037888 2024-11-21T08:49:28.868531Z node 7 :TX_DATASHARD TRACE: Expected 1 readsets for [900:281474976715668] at 72075186224037888 2024-11-21T08:49:28.868546Z node 7 :TX_DATASHARD TRACE: Filled readset for [900:281474976715668] from=72075186224037889 to=72075186224037888origin=72075186224037889 2024-11-21T08:49:28.868551Z node 7 :TX_DATASHARD TRACE: Remain 0 read sets for [900:281474976715668] at 72075186224037888 2024-11-21T08:49:28.868556Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T08:49:28.868559Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit LoadAndWaitInRS 
2024-11-21T08:49:28.868562Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:49:28.868565Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:49:28.868572Z node 7 :TX_DATASHARD TRACE: Operation [900:281474976715668] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191852 2024-11-21T08:49:28.868611Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:49:28.868617Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:28.868620Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:49:28.868623Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompleteOperation 2024-11-21T08:49:28.868626Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:49:28.868647Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is DelayComplete 2024-11-21T08:49:28.868651Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompleteOperation 2024-11-21T08:49:28.868653Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:28.868656Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:28.868660Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T08:49:28.868663Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:28.868666Z node 7 :TX_DATASHARD TRACE: Execution plan for [900:281474976715668] at 72075186224037888 has finished 2024-11-21T08:49:28.868671Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:28.868675Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:49:28.868678Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:28.868681Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:28.868751Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:49:28.868756Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:49:28.868773Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [7:960:2751], exec latency: 652 ms, propose latency: 652 ms 2024-11-21T08:49:28.868785Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T08:49:28.868790Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 
72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T08:49:28.868796Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:49:28.868894Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:738:2604], Recipient [7:1074:2852]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T08:49:28.868899Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:28.868904Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2024-11-21T08:49:28.868930Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:28.868934Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:49:28.868939Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [7:960:2751], exec latency: 202 ms, propose latency: 202 ms 2024-11-21T08:49:28.868944Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T08:49:28.868947Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:28.868985Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T08:49:28.868988Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:28.868991Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T08:49:28.869372Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:28.869380Z node 7 :TX_DATASHARD TRACE: Complete execution for [800:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:49:28.869387Z node 7 :TX_DATASHARD DEBUG: Complete [800 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [7:999:2786], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:49:28.869396Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T08:49:28.869400Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:28.869481Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T08:49:28.869484Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T08:49:28.869488Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715666 2024-11-21T08:49:28.869513Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 1 } items { int32_value: 10 } } { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T08:49:28.869760Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:28.869765Z node 7 :TX_DATASHARD TRACE: Complete execution for [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:49:28.869770Z node 7 :TX_DATASHARD DEBUG: Complete [900 : 281474976715668] from 72075186224037888 at tablet 72075186224037888 send result to client [7:1033:2813], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:49:28.869776Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T08:49:28.869780Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:28.869823Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T08:49:28.869827Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:28.869830Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715668 { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T08:49:28.869909Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 7 Status: STATUS_NOT_FOUND 2024-11-21T08:49:28.870040Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715667 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 2 } items { int32_value: 20 } } >> test.py::test[epochs-use_and_drop_anonymous--ForceBlocks] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Plan] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-callable-default.txt-Analyze] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Plan] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> test.py::test[aggregate-group_by_column-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] >> test.py::test[join-star_join_inners--Results] [GOOD] >> 
test.py::test[join-star_join_inners_vk_sorted-off-Analyze] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> test.py::test[agg_phases_agg_apply-count_all-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Analyze] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[case-case_many_val--Debug] [SKIPPED] >> test.py::test[order_by-order_by_mul_columns-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Debug] >> test.py::test[case-case_many_val--Plan] [SKIPPED] >> test.py::test[case-case_many_val--Results] >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] >> test.py::test[case-case_many_val--Results] [SKIPPED] >> test.py::test[coalesce-coalesce_few_opt--Debug] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Debug] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-disable_blocks_with_spilling--Plan] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Analyze] >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> test.py::test[window-win_peephole-default.txt-Debug] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> test.py::test[blocks-pg_from_dates--Debug] [GOOD] >> test.py::test[blocks-pg_from_dates--ForceBlocks] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] >> 
test.py::test[window-win_func_in_lib--Analyze] [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] [GOOD] >> test.py::test[expr-callable-default.txt-Analyze] [GOOD] >> test.py::test[blocks-combine_all_min--Plan] [GOOD] >> test.py::test[expr-callable-default.txt-Debug] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[window-win_func_in_lib--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] Test command err: 2024-11-21T08:49:11.212187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:11.212798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:11.212825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044f5/r3tmp/tmpszoeKx/pdisk_1.dat 2024-11-21T08:49:11.327163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.346138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:11.388422Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:11.388622Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:11.388657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:11.388672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:11.399198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:11.506876Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:11.506907Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:11.506947Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:11.523416Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:11.523655Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:11.523671Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:11.523733Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:11.523769Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:11.523783Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:11.523852Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:11.524282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.524901Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:11.524935Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:11.539276Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:11.539530Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:11.539625Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:11.539685Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:11.547269Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:11.547461Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:11.547488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:11.547653Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:11.547663Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:11.547669Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:11.547719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:11.551201Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:11.551261Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:11.551285Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:11.551290Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:11.551294Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:11.551299Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:11.551423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.551430Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.551576Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:11.551594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:11.551607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.551611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.551617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:11.551625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:11.551631Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:11.551639Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:11.551645Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:11.551648Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:11.551654Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:11.551660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:11.551678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:11.551683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:11.551704Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:11.551756Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:11.551767Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:11.551786Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:11.551794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:11.551798Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:11.551804Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:11.551808Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.551853Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:11.551857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:11.551863Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:11.551866Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.551877Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:11.551881Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:11.551884Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:11.551887Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:11.551892Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:11.552135Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:11.552144Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:49:11.562421Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:11.562452Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.562460Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.562474Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 08:49:30.763325Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:30.763361Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:924:2742], Recipient [8:924:2742]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:30.763366Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:30.763382Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:30.763419Z node 8 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:49:30.763427Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:49:30.763444Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:49:30.763458Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T08:49:30.763463Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:49:30.763468Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:30.763473Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:30.763482Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2024-11-21T08:49:30.763496Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2024-11-21T08:49:30.763501Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T08:49:30.763505Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2024-11-21T08:49:30.763508Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:49:30.763512Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:49:30.763524Z node 8 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715661 not found 2024-11-21T08:49:30.763533Z node 8 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715661 expected 1:0 found 0:0 2024-11-21T08:49:30.763547Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T08:49:30.763555Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T08:49:30.763559Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:49:30.763562Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:30.763566Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:30.763573Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2024-11-21T08:49:30.763577Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:30.763580Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:30.763584Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:30.763593Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T08:49:30.763596Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:30.763600Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2024-11-21T08:49:30.763616Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:30.763620Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:30.763627Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T08:49:30.763640Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:30.763729Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=MjgwYTlmZi1iYWRhNzdhMS1iMDY3NTIzMS1kNzlhYWNmNA==, ActorId: [8:802:2649], ActorState: ExecuteState, TraceId: 01jd6yk2439rx8ecqg4mhefzzt, Create QueryResponse for error on request, msg: 2024-11-21T08:49:30.763862Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yk2439rx8ecqg4mhefzzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MjgwYTlmZi1iYWRhNzdhMS1iMDY3NTIzMS1kNzlhYWNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:49:30.763915Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:952:2649], Recipient [8:924:2742]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 952 RawX2: 34359741017 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T08:49:30.763920Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:30.763938Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:924:2742], Recipient [8:924:2742]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:30.763942Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:49:30.763951Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:30.763971Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:49:30.763981Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:49:30.763987Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:49:30.763991Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:49:30.763994Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:30.763998Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:30.764004Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2024-11-21T08:49:30.764010Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T08:49:30.764014Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:49:30.764017Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:30.764021Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:49:30.764024Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:49:30.764033Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T08:49:30.764042Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T08:49:30.764053Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:49:30.764061Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 
2024-11-21T08:49:30.764065Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:49:30.764068Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:30.764071Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:30.764076Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:49:30.764086Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T08:49:30.764090Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:30.764093Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:30.764096Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:30.764102Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:49:30.764105Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:30.764111Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T08:49:30.764118Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:30.764122Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:30.764127Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:31.152453Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:969:2775], Recipient [8:924:2742]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:31.152480Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:31.152491Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:968:2774], serverId# [8:969:2775], sessionId# [0:0:0] 2024-11-21T08:49:31.152515Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [8:557:2484], Recipient [8:924:2742]: NKikimr::TEvDataShard::TEvGetOpenTxs >> test.py::test[expr-variant_tuple_comp-default.txt-Debug] [GOOD] >> test.py::test[tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q20-default.txt-Plan] [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> test.py::test[select-bin_ops_long_concat-default.txt-Analyze] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-ForceBlocks] >> test.py::test[tpch-q20-default.txt-Results] >> test.py::test[select-bin_ops_long_concat-default.txt-Debug] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Debug] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |86.3%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Plan] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted-off-Analyze] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Debug] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |86.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> test.py::test[aggr_factory-corellation-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Debug] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> test.py::test[aggregate-group_by_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Analyze] >> test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Plan] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[produce-process_with_python_stream--Analyze] [SKIPPED] >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 
[GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Plan] [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> test.py::test[window-win_peephole-default.txt-Debug] [GOOD] >> test.py::test[window-win_peephole-default.txt-Plan] [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> test.py::test[window-win_peephole-default.txt-Results] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2024-11-21T08:49:32.446188Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:32.446897Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:32.446955Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T08:49:32.446968Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:32.446972Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T08:49:32.446977Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:32.446985Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.446991Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T08:49:32.446995Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T08:49:32.447081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.447093Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2024-11-21T08:49:32.447108Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:32.449126Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:32.449764Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:32.449801Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.450055Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:32.450076Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitConfigStep 2024-11-21T08:49:32.450124Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:49:32.450169Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2024-11-21T08:49:32.450740Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Completed. 
2024-11-21T08:49:32.450746Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2024-11-21T08:49:32.450752Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:32.450823Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:49:32.450879Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.450886Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2024-11-21T08:49:32.457763Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:32.458342Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:32.458389Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T08:49:32.458395Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:32.458398Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T08:49:32.458402Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:32.458408Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.458413Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T08:49:32.458417Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T08:49:32.458495Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.458501Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:408:2363], now have 1 active actors on pipe 2024-11-21T08:49:32.458513Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:32.458549Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.459037Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.459057Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.459195Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.459210Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T08:49:32.459253Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T08:49:32.459279Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:417:2370] 2024-11-21T08:49:32.459814Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T08:49:32.459820Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:417:2370] 2024-11-21T08:49:32.459827Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:32.459885Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:32.459935Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.459940Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:420:2372], now have 1 active actors on pipe 2024-11-21T08:49:32.460290Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.460300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:430:2377], now have 1 active actors on pipe 2024-11-21T08:49:32.460344Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:32.460348Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:430:2377] destroyed 2024-11-21T08:49:32.460353Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.460356Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:432:2378], now have 1 active actors on pipe 2024-11-21T08:49:32.460422Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:32.460426Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:432:2378] destroyed 2024-11-21T08:49:32.852155Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:32.852831Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:32.852882Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T08:49:32.852888Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:32.852892Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T08:49:32.852897Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:32.852903Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.852909Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T08:49:32.852913Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T08:49:32.853019Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.853027Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:260:2252], now have 1 active actors on pipe 2024-11-21T08:49:32.853041Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:32.853084Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:32.853636Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWa ... d from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T08:49:32.885443Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T08:49:32.885467Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.885612Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T08:49:32.885629Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. 
Step TInitConfigStep 2024-11-21T08:49:32.885682Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Step TInitInternalFieldsStep 2024-11-21T08:49:32.885717Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:479:2417] 2024-11-21T08:49:32.886155Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Completed. 2024-11-21T08:49:32.886163Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:479:2417] 2024-11-21T08:49:32.886169Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:32.886244Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2024-11-21T08:49:32.886320Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.886326Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:482:2419], now have 1 active actors on pipe 2024-11-21T08:49:32.893094Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:32.893778Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:32.893834Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T08:49:32.893840Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:32.893845Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T08:49:32.893849Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:32.893855Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.893860Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T08:49:32.893864Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T08:49:32.893962Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.893968Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:533:2458], now have 1 active actors on pipe 2024-11-21T08:49:32.893982Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:32.894015Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.894611Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } 
AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.894631Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.894735Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:32.894755Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T08:49:32.894803Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T08:49:32.894835Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:542:2465] 2024-11-21T08:49:32.895274Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T08:49:32.895285Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:542:2465] 2024-11-21T08:49:32.895291Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:32.895356Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:32.895416Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.895421Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:545:2467], now have 1 active actors on pipe 2024-11-21T08:49:32.895836Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.895846Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:555:2472], now have 1 active actors on pipe 2024-11-21T08:49:32.895865Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.895869Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:556:2473], now have 1 active actors on pipe 2024-11-21T08:49:32.895880Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.895883Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:557:2473], now have 1 active actors on pipe 2024-11-21T08:49:32.906164Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:32.906184Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:562:2477], now have 1 active actors on pipe 2024-11-21T08:49:32.910470Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:32.910991Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:32.911048Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T08:49:32.911054Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:32.911080Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:32.911147Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:32.911152Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T08:49:32.911167Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T08:49:32.911206Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T08:49:32.911229Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:621:2524] 2024-11-21T08:49:32.911934Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDiskStatusStep 2024-11-21T08:49:32.912113Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitMetaStep 2024-11-21T08:49:32.912140Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInfoRangeStep 2024-11-21T08:49:32.912176Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataRangeStep 2024-11-21T08:49:32.912199Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataStep 2024-11-21T08:49:32.912220Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T08:49:32.912227Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:621:2524] 2024-11-21T08:49:32.912236Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:32.912277Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T08:49:32.912309Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:32.916291Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:32.916307Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:556:2473] destroyed 2024-11-21T08:49:32.916317Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:32.916320Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:555:2472] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Analyze] 
[GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Debug] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TDatabaseResolverTests::DataStreams_Serverless >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TDatabaseResolverTests::DataStreams_Dedicated >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> test.py::test[window-win_func_in_lib--Debug] [GOOD] >> test.py::test[window-win_func_in_lib--ForceBlocks] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Debug] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Debug] [GOOD] >> test.py::test[blocks-pg_from_dates--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_from_dates--Plan] [GOOD] >> test.py::test[blocks-pg_from_dates--Results] >> test.py::test[coalesce-coalesce_few_opt--Plan] >> test.py::test[select-bin_ops_long_concat-default.txt-Debug] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2024-11-21T08:49:33.644847Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:33.652674Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:33.652783Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T08:49:33.652799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:33.652804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T08:49:33.652809Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:33.652816Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:33.652824Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T08:49:33.652829Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T08:49:33.652969Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.652983Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2024-11-21T08:49:33.653001Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:33.655063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:33.665232Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:33.665296Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:33.665552Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T08:49:33.665581Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitConfigStep 2024-11-21T08:49:33.665661Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:49:33.665720Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2024-11-21T08:49:33.666315Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Completed. 
2024-11-21T08:49:33.666321Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2024-11-21T08:49:33.666329Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:33.666398Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:49:33.666476Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.666485Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2024-11-21T08:49:33.713750Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:33.714389Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:33.714442Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T08:49:33.714448Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:33.714453Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T08:49:33.714458Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:33.714465Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:33.714471Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T08:49:33.714475Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T08:49:33.714581Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.714588Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:408:2363], now have 1 active actors on pipe 2024-11-21T08:49:33.714606Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:49:33.714655Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:33.720708Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:33.720738Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:33.720886Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T08:49:33.720904Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T08:49:33.720957Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T08:49:33.720988Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:417:2370] 2024-11-21T08:49:33.721503Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T08:49:33.721509Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:417:2370] 2024-11-21T08:49:33.721516Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:33.721587Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:33.721665Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.721673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:420:2372], now have 1 active actors on pipe 2024-11-21T08:49:33.722060Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.722067Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:428:2377], now have 1 active actors on pipe 2024-11-21T08:49:33.722097Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2024-11-21T08:49:33.722110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:33.722113Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:430:2378], now have 1 active actors on pipe 2024-11-21T08:49:33.722141Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T08:49:33.722158Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2024-11-21T08:49:33.722178Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T08:49:33.722231Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:33.722235Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:428:2377] destroyed 2024-11-21T08:49:33.722289Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:33.722292Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:430:2378] destroyed 2024-11-21T08:49:34.035452Z 
node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:34.036060Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:34.036112Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T08:49:34.036119Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:34.036123Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T08:49:34.036128Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:34.036135Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:34.036141Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T08:49:34.036145Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T08:49:34.036261Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.036268Z node 3 ... PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:542:2465] 2024-11-21T08:49:34.058127Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:34.058184Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:34.058240Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.058245Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:545:2467], now have 1 active actors on pipe 2024-11-21T08:49:34.058597Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.058605Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:553:2472], now have 1 active actors on pipe 2024-11-21T08:49:34.058626Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.058630Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:554:2473], now have 1 active actors on pipe 2024-11-21T08:49:34.058638Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2024-11-21T08:49:34.058676Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T08:49:34.058690Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvStatus 2024-11-21T08:49:34.058711Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T08:49:34.058718Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle 
TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.058721Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:555:2473], now have 1 active actors on pipe 2024-11-21T08:49:34.058740Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2024-11-21T08:49:34.058756Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T08:49:34.072508Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:34.072534Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:563:2480], now have 1 active actors on pipe 2024-11-21T08:49:34.109737Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:49:34.110394Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:49:34.110442Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T08:49:34.110448Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:49:34.110493Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:49:34.110580Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:49:34.110584Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T08:49:34.110601Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T08:49:34.110641Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T08:49:34.110668Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:622:2527] 2024-11-21T08:49:34.111275Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDiskStatusStep 2024-11-21T08:49:34.111460Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitMetaStep 2024-11-21T08:49:34.111488Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInfoRangeStep 2024-11-21T08:49:34.111521Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataRangeStep 2024-11-21T08:49:34.111543Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataStep 2024-11-21T08:49:34.111547Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 
2024-11-21T08:49:34.111553Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:622:2527] 2024-11-21T08:49:34.111560Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:49:34.111600Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T08:49:34.111642Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T08:49:34.111731Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:34.111737Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:554:2473] destroyed 2024-11-21T08:49:34.111746Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:49:34.111750Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:553:2472] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 82 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 82 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 97 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 97 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 
Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 40 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 40 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> test.py::test[coalesce-coalesce_few_opt--Plan] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] >> test.py::test[expr-callable-default.txt-Debug] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Debug] [GOOD] >> test.py::test[expr-callable-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_max_filter_opt--Plan] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2024-11-21T08:49:34.706420Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
>> test.py::test[aggregate-group_by_hop_static-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Debug] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-ordered_fill--Analyze] >> test.py::test[expr-variant_tuple_comp-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Plan] >> DstCreator::KeyColumnsSizeMismatch >> DstCreator::ExistingDst >> TSchemeShardAuditSettings::AlterSubdomain >> test.py::test[expr-variant_tuple_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2024-11-21T08:49:34.428815Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |86.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] Test command err: 2024-11-21T08:49:10.734212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:10.734770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:10.734803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044fe/r3tmp/tmpaUc36w/pdisk_1.dat 2024-11-21T08:49:10.841206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:10.858785Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:10.901589Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:10.901921Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:10.901973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:10.901994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:10.912628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:11.016372Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:11.016398Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:11.016440Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:11.028793Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:11.029090Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:11.029113Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:11.029180Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:11.029224Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:11.029241Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:11.029328Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:11.029817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.030137Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:11.030151Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:11.047130Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:11.047354Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:11.047447Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:11.047505Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:11.055058Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:11.055237Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:11.055268Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:11.055432Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:11.055441Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:11.055448Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:11.055498Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:11.058950Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:11.059022Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:11.059046Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:11.059052Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:11.059058Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:11.059063Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:11.059191Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.059199Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.059347Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:11.059369Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:11.059383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.059388Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.059396Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:11.059404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:11.059412Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:11.059421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:11.059426Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:11.059429Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:11.059434Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:11.059440Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:11.059460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:11.059464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:11.059487Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:11.059537Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:11.059549Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:11.059568Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:11.059576Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:11.059581Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:11.059587Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:11.059591Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.059637Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:11.059642Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:11.059645Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:11.059649Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.059663Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:11.059666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:11.059670Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:11.059673Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:11.059679Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:11.059844Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:11.059852Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.059856Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.059863Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:11.059874Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:11.060575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:11.060588Z ... 21T08:49:31.902513Z node 8 :KQP_EXECUTER DEBUG: ActorId: [8:1945:3220] TxId: 281474976715669. Ctx: { TraceId: 01jd6yk3729a8mp3585sj2veep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000529s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 2024-11-21T08:49:31.902558Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:49:31.902608Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, txInfo Status: Committed Kind: ReadOnly TotalDuration: 7.754 ServerDuration: 7.723 QueriesCount: 2 2024-11-21T08:49:31.902686Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:49:31.902775Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:49:31.902782Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, EndCleanup, isFinal: 0 2024-11-21T08:49:31.902796Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OWNkOTkzOTQtODVhMzdjYTgtNjhmNGMzNGItMzIzN2Y3MzE=, ActorId: [8:1935:3220], ActorState: ExecuteState, TraceId: 01jd6yk3729a8mp3585sj2veep, Sent query response back to proxy, proxyRequestId: 12, proxyId: [8:162:2148] { items { uint32_value: 1 } items { uint32_value: 10 } } 
2024-11-21T08:49:32.027717Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [8:2002:3241], Recipient [8:2004:3242]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:32.028698Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [8:2002:3241], Recipient [8:2004:3242]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:32.028748Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [8:2002:3241], Recipient [8:2004:3242]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:32.030233Z node 8 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [8:2004:3242] 2024-11-21T08:49:32.030294Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:32.030685Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:32.030818Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:32.030969Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:32.030979Z node 8 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:32.030985Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:32.031028Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:32.031037Z node 8 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2024-11-21T08:49:32.031059Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 0 siblings to be activated: wait to activation from: 2024-11-21T08:49:32.031066Z node 8 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:49:32.031084Z node 8 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [8:2036:3267] 2024-11-21T08:49:32.031089Z node 8 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:32.031093Z node 8 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2024-11-21T08:49:32.031097Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:32.031102Z node 8 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2024-11-21T08:49:32.031185Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvRegisterTablet TabletId# 72075186224037888 ProcessingParams { Version: 1 PlanResolution: 100 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 }} 2024-11-21T08:49:32.031205Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72075186224037888 2024-11-21T08:49:32.031214Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Sender# [8:2004:3242] {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 2024-11-21T08:49:32.031527Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [8:2004:3242], Recipient [8:2004:3242]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:32.031534Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2024-11-21T08:49:32.031556Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2024-11-21T08:49:32.031636Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 276168711, Sender [8:2037:3268], Recipient [8:2004:3242]: NKikimr::NDataShard::TEvChangeExchange::TEvSplitAck 2024-11-21T08:49:32.031719Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [8:25:2072], Recipient [8:2004:3242]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 2024-11-21T08:49:32.031727Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T08:49:32.031732Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2024-11-21T08:49:32.031738Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:32.036723Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:32.036740Z node 8 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2024-11-21T08:49:32.036748Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T08:49:32.036864Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Execute, at tablet# 72075186224037888 2024-11-21T08:49:32.036870Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Complete, at tablet# 72075186224037888 2024-11-21T08:49:32.037926Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 1000 NextAcquireStep: 1000 2024-11-21T08:49:32.037974Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [8:25:2072], Recipient [8:2004:3242]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 1000 NextReadStep# 1000 ReadStep# 1000 } 2024-11-21T08:49:32.037981Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T08:49:32.037987Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1000 next step 1000 2024-11-21T08:49:32.128485Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 1100 2024-11-21T08:49:32.128576Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 1100 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 8 bucket 0 2024-11-21T08:49:32.128702Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1100} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 0 ... blocked step 1100 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 1 ... 
blocked step 1100 at node 9 bucket 1 2024-11-21T08:49:32.980389Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 3000 in 1.000000s at 2.950000s 2024-11-21T08:49:32.981192Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 2000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T08:49:32.981330Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 2000 ... observed step 2000 at node 8 bucket 0 2024-11-21T08:49:32.981374Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 0 ... blocked step 2000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 1 ... blocked step 2000 at node 9 bucket 1 2024-11-21T08:49:33.561623Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 1.000000s at 3.950000s 2024-11-21T08:49:33.568822Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 3000 2024-11-21T08:49:33.568910Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 3000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 8 bucket 0 2024-11-21T08:49:33.569029Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 0 ... blocked step 3000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 1 ... blocked step 3000 at node 9 bucket 1 ... 
upsert finished before unblocking node 2 >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[window-win_peephole-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Debug] >> test.py::test[window-yql-14179-default.txt-Debug] >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> DstCreator::KeyColumnNameMismatch >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] >> TVPatchTests::PatchPartGetError >> test.py::test[join-star_join_inners_vk_sorted-off-Debug] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] >> TVPatchTests::PatchPartGetError [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-off-Plan] >> test.py::test[expr-callable-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-callable-default.txt-Plan] [GOOD] >> test.py::test[expr-callable-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted-off-Plan] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [GOOD] >> test.py::test[join-strict_keys-off-Analyze] >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static-default.txt-Plan] >> test.py::test[pg-uuid_from_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Plan] >> test.py::test[blocks-pg_from_dates--Results] [GOOD] >> test.py::test[join-strict_keys-off-Analyze] [SKIPPED] >> test.py::test[join-strict_keys-off-Debug] [SKIPPED] >> test.py::test[join-strict_keys-off-ForceBlocks] >> DstCreator::EmptyReplicationConfig [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Debug] >> test.py::test[aggregate-group_by_hop_static-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Analyze] >> test.py::test[pg-uuid_from_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Results] >> TCmsTest::RequestRestartServicesRejectSecond >> test.py::test[join-strict_keys-off-ForceBlocks] [SKIPPED] >> test.py::test[column_order-union_all_positional-default.txt-Analyze] >> test.py::test[join-strict_keys-off-Plan] >> DstCreator::KeyColumnNameMismatch [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] >> test.py::test[join-strict_keys-off-Plan] [SKIPPED] >> test.py::test[join-strict_keys-off-Results] >> test.py::test[blocks-combine_hashed_count--Analyze] [GOOD] >> test.py::test[blocks-combine_hashed_count--Debug] >> TCmsTest::WalleRebootDownNode >> test.py::test[window-win_func_in_lib--ForceBlocks] [GOOD] >> test.py::test[window-win_func_in_lib--Plan] >> test.py::test[window-win_func_in_lib--Plan] [GOOD] >> test.py::test[window-win_func_in_lib--Results] >> test.py::test[select-bin_ops_long_concat-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[select-bin_ops_long_concat-default.txt-Plan] >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-Debug] [SKIPPED] >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_sum--Debug] >> test.py::test[order_by-ordered_fill--Analyze] [GOOD] >> test.py::test[order_by-ordered_fill--Debug] >> test.py::test[select-bin_ops_long_concat-default.txt-Plan] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-Results] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[column_group-hint_anon-perusage-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Debug] >> test.py::test[expr-variant_tuple_comp-default.txt-Results] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2024-11-21T08:49:36.757505Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T08:49:36.757762Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T08:49:36.757789Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T08:49:36.757845Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2024-11-21T08:49:36.757855Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T08:49:36.757888Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T08:49:36.757899Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[column_group-hint_anon_groups-disable-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Plan] [SKIPPED] >> 
test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-length-perusage-Debug] [SKIPPED] >> test.py::test[column_group-length-perusage-Plan] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Debug] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Plan] >> test.py::test[column_group-min_group-default.txt-Plan] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:49:35.962902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:49:35.962925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:35.962930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:49:35.962935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:49:35.962946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:49:35.962949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:49:35.962957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:35.963024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:35.979941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:35.979962Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:35.990524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:35.991288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:49:35.991315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:49:35.992285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:49:35.994874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:49:35.994962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:35.995020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:49:35.995680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:35.995898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T08:49:35.995905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:35.995939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:49:35.995945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:35.995950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:49:35.995960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:49:35.997132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:49:36.030611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:49:36.030680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.030730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:49:36.030781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:49:36.030789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.040455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:36.040486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:49:36.040529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.040539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:49:36.040543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:49:36.040548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:49:36.040987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.040994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:49:36.040998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:49:36.041235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.041241Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.041246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:36.041251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:49:36.041775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:49:36.042054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:49:36.042094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:49:36.042232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:36.042249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:36.042254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:36.042295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:49:36.042301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:49:36.042323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:36.042333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:49:36.048429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:36.048441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:36.048479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:36.048484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:49:36.048552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.048559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:49:36.048573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T08:49:36.048577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:49:36.048582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:49:36.048587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:49:36.048592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:49:36.048596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:49:36.048608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:49:36.048614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:49:36.048618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:49:36.048892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:49:36.048902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:49:36.048906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:49:36.048910Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:49:36.048914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:36.048925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ags: 2 } ExecLevel: 0 TxId: 175 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:49:36.925218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.925235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.925240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T08:49:36.925246Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2024-11-21T08:49:36.925252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:49:36.925448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.925486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.925490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T08:49:36.925495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2024-11-21T08:49:36.925500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T08:49:36.925519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T08:49:36.928135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2024-11-21T08:49:36.928183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2024-11-21T08:49:36.932635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:49:36.932686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:49:36.932715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2024-11-21T08:49:36.932728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 
72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:36.932732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T08:49:36.932780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2024-11-21T08:49:36.932824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:36.932838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T08:49:36.933011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T08:49:36.936797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2024-11-21T08:49:36.939907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:49:36.939926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:49:36.939968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T08:49:36.940001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:49:36.940006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T08:49:36.940012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T08:49:36.940100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T08:49:36.940110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T08:49:36.940123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T08:49:36.940130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T08:49:36.940137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2024-11-21T08:49:36.940143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T08:49:36.940148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T08:49:36.940152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T08:49:36.940176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T08:49:36.940182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2024-11-21T08:49:36.940186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 
2024-11-21T08:49:36.940189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2024-11-21T08:49:36.940376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.940389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.940394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2024-11-21T08:49:36.940399Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T08:49:36.940403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:49:36.940546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.940556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T08:49:36.940560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2024-11-21T08:49:36.940563Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T08:49:36.940567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T08:49:36.940579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2024-11-21T08:49:36.940625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:49:36.940630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T08:49:36.940649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T08:49:36.940675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:49:36.940681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T08:49:36.940692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:49:36.951721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 
2024-11-21T08:49:36.951857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T08:49:36.951874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:49:36.956862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T08:49:36.957138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T08:49:36.957147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T08:49:36.957344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T08:49:36.957400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T08:49:36.957407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2459:4451] TestWaitNotification: OK eventTxId 175 >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-Analyze] >> TCmsTest::ActionIssuePartialPermissions >> test.py::test[window-yql-14179-default.txt-Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2024-11-21T08:49:35.492300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652051295082508:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:35.587810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002374/r3tmp/tmpyajIgG/pdisk_1.dat 2024-11-21T08:49:35.660367Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:35.688544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:35.688566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:35.689680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22434 TServer::EnableGrpc on GrpcPort 8137, node 1 2024-11-21T08:49:35.768423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:35.768436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:35.768439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:35.768470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:35.892600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:49:35.900968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:35.902001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:35.941759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178975940 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976010 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178975940 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976010 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T08:49:35.969322Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:35.969346Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:35.969348Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T08:49:35.972244Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T08:49:36.125804Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178975968, tx_id: 281474976715658 } } } 2024-11-21T08:49:36.125877Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T08:49:36.126193Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T08:49:36.126604Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976010 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainD ... 40405Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:36.540408Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:36.540443Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6584 2024-11-21T08:49:36.580552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:36.580573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:36.584480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:36.628604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:36.629984Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:36.636922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:36.657986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976717 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976717 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T08:49:36.675882Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.675904Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.675907Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T08:49:36.676086Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T08:49:36.929538Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178976703, tx_id: 281474976715658 } } } 2024-11-21T08:49:36.929600Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T08:49:36.929898Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T08:49:36.930130Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976717 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T08:49:36.930152Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# 
StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value >> test.py::test[window-yql-14179-default.txt-Plan] [GOOD] >> test.py::test[window-yql-14179-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2024-11-21T08:49:35.568267Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652052331554799:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022c8/r3tmp/tmpcSDXAu/pdisk_1.dat 2024-11-21T08:49:35.682626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:49:35.743174Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8877 TServer::EnableGrpc on GrpcPort 20489, node 1 2024-11-21T08:49:35.832392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:35.832406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:35.832408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:35.832445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8877 2024-11-21T08:49:35.880263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:35.880292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:35.880904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:35.982340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:35.984851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:35.985581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:49:36.020032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976080 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976080 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T08:49:36.039646Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.039668Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.039670Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T08:49:36.039844Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T08:49:36.182111Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178976059, tx_id: 281474976715658 } } } 2024-11-21T08:49:36.182173Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T08:49:36.182469Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T08:49:36.182860Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976080 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72 ... 
n GrpcPort 16007, node 2 2024-11-21T08:49:36.516863Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:36.516872Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:36.516874Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:36.516905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7448 2024-11-21T08:49:36.552434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:36.552475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:36.554346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:36.597172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:36.598477Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:36.599053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:36.616743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976647 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976675 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732178976647 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976675 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T08:49:36.636871Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.636890Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:49:36.636892Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T08:49:36.644255Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T08:49:36.860334Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, 
owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178976661, tx_id: 281474976715658 } } } 2024-11-21T08:49:36.860412Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T08:49:36.860732Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T08:49:36.860957Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732178976675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T08:49:36.860982Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> test.py::test[column_order-union_all_positional-default.txt-Analyze] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] [GOOD] >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] >> test.py::test[expr-callable-default.txt-Results] [GOOD] >> test.py::test[expr-empty_struct_tuple_types-default.txt-Analyze] >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> test.py::test[select-bin_ops_long_concat-default.txt-Results] [GOOD] >> test.py::test[select-discard-default.txt-Analyze] >> test.py::test[aggregate-group_by_mul_gs_ru--Analyze] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Debug] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> test.py::test[pg-uuid_from_pg-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Debug] >> test.py::test[aggregate-aggrs_no_grouping--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Debug] >> test.py::test[join-strict_keys-off-Results] [GOOD] >> test.py::test[join-trivial_view--Analyze] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Analyze] >> test.py::test[expr-yql-10180-default.txt-Analyze] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Debug] >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> 
TCmsTest::ActionWithZeroDuration >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test.py::test[window-yql-14179-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardOutOfOrder::TestSnapshotReadPriority >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite >> DataShardTxOrder::ReadWriteReorder >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState >> test.py::test[order_by-ordered_fill--Debug] [GOOD] >> test.py::test[order_by-ordered_fill--ForceBlocks] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> test.py::test[expr-empty_struct_tuple_types-default.txt-Analyze] [GOOD] >> test.py::test[expr-empty_struct_tuple_types-default.txt-Debug] >> DataShardTxOrder::ReadWriteReorder [GOOD] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |86.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite >> test.py::test[pg_catalog-pg_locks-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Results] |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[column_order-union_all_positional-default.txt-Analyze] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Debug] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Plan] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2024-11-21T08:49:41.247307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:41.247327Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:41.247341Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:41.249797Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:41.249896Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 
2024-11-21T08:49:41.249955Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:41.250825Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:41.259126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:41.259241Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:41.259378Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:49:41.259391Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:49:41.259398Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:49:41.259433Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:41.262854Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:49:41.262897Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:41.262934Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:49:41.262940Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:49:41.262945Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:49:41.262949Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:41.263019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.263025Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.263044Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:49:41.263059Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:49:41.263106Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:41.263112Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:41.263119Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:49:41.263124Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:41.263128Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:41.263133Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:49:41.263138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:41.270832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.270852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.270861Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:49:41.271278Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: 
"\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:49:41.271287Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:41.271306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:49:41.271333Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:49:41.271342Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:49:41.271349Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:49:41.271356Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:41.271360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:49:41.271365Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:49:41.271370Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:41.271427Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:41.271431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:49:41.271434Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:49:41.271438Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:41.271446Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:49:41.271450Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:49:41.271454Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:49:41.271457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:41.271461Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:41.296821Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:49:41.296846Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:41.296853Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:41.296863Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:49:41.296876Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:41.296980Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2212], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.296986Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.296994Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:212:2212], sessionId# [0:0:0] 2024-11-21T08:49:41.297014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, 
Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:49:41.297019Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:41.297056Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:41.297063Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.297068Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:49:41.297072Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:49:41.297769Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:49:41.297778Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:41.297815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.297820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.297826Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:41.297833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:41.297837Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:41.297844Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:49:41.297848Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:49:41.297854Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.297859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:41.297863Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:41.297867Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:41.297904Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:49:41.297908Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.297912Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:41.297916Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:49:41.297921Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:49:41.297929Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:41.297933Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:49:41.297936Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:41.297940Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 
2024-11-21T08:49:41.297950Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:49:41.297954Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:49:41.297957Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:49:41.297962Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.297965Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:41.297969Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeSna ... WaitInRS 2024-11-21T08:49:41.957710Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T08:49:41.957713Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T08:49:41.957716Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2024-11-21T08:49:41.957720Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2024-11-21T08:49:41.957776Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2024-11-21T08:49:41.957784Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:49:41.957793Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T08:49:41.957796Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2024-11-21T08:49:41.957799Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2024-11-21T08:49:41.957802Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2024-11-21T08:49:41.957854Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2024-11-21T08:49:41.957858Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2024-11-21T08:49:41.957861Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2024-11-21T08:49:41.957865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2024-11-21T08:49:41.957870Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T08:49:41.957873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2024-11-21T08:49:41.957876Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2024-11-21T08:49:41.957880Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:41.957883Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:49:41.957886Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T08:49:41.957889Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T08:49:41.957914Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:226:2221], Recipient [1:226:2221]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.957917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.957922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:41.957925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:41.957928Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:41.957932Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2024-11-21T08:49:41.957935Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2024-11-21T08:49:41.957939Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.957942Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:41.957945Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:41.957950Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:41.958038Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2024-11-21T08:49:41.958043Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:41.958050Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T08:49:41.958053Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T08:49:41.958057Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T08:49:41.958063Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:41.958066Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:49:41.958074Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2024-11-21T08:49:41.958077Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2024-11-21T08:49:41.958080Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2024-11-21T08:49:41.958084Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958087Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:41.958091Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T08:49:41.958094Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2024-11-21T08:49:41.958100Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958103Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 
9437184 executing on unit BuildDataTxOutRS 2024-11-21T08:49:41.958106Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T08:49:41.958110Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2024-11-21T08:49:41.958114Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958117Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T08:49:41.958120Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T08:49:41.958123Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2024-11-21T08:49:41.958127Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958131Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T08:49:41.958134Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T08:49:41.958137Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2024-11-21T08:49:41.958140Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958143Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T08:49:41.958146Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:49:41.958151Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2024-11-21T08:49:41.958198Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2024-11-21T08:49:41.958204Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:49:41.958210Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958213Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:49:41.958216Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2024-11-21T08:49:41.958220Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2024-11-21T08:49:41.958247Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2024-11-21T08:49:41.958250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2024-11-21T08:49:41.958254Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2024-11-21T08:49:41.958257Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2024-11-21T08:49:41.958261Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T08:49:41.958264Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2024-11-21T08:49:41.958267Z node 1 
:TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2024-11-21T08:49:41.958271Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:41.958274Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:41.958277Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:41.958280Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:41.972640Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2024-11-21T08:49:41.972663Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2024-11-21T08:49:41.972675Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:49:41.972683Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2024-11-21T08:49:41.972701Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:41.972713Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2024-11-21T08:49:41.972903Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2024-11-21T08:49:41.972909Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2024-11-21T08:49:41.972917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:41.972921Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2024-11-21T08:49:41.972929Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:49:41.972934Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> test.py::test[window-win_func_lead_lag_worm_with_part--Analyze] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Debug] >> test.py::test[expr-yql-10180-default.txt-Debug] [GOOD] >> test.py::test[expr-yql-10180-default.txt-ForceBlocks] >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest >> test.py::test[tpch-q9-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q9-default.txt-Debug] >> test.py::test[blocks-combine_all_sum--Debug] [GOOD] >> test.py::test[blocks-combine_all_sum--Plan] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_count--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> test.py::test[join-trivial_view--Analyze] [GOOD] >> test.py::test[join-trivial_view--Debug] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Analyze] >> TargetDiscoverer::IndexedTable >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Debug] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] >> 
DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[solomon-Basic-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Results] [SKIPPED] >> test.py::test[solomon-BrokenJsonResponse--Debug] [SKIPPED] >> test.py::test[expr-empty_struct_tuple_types-default.txt-Debug] [GOOD] >> test.py::test[expr-empty_struct_tuple_types-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] Test command err: 2024-11-21T08:49:37.818520Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:37.826618Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T08:49:37.827974Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:37.828020Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T08:49:37.828766Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:37.828798Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T08:49:37.834573Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T08:49:37.834626Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T08:49:37.834692Z node 1 :CMS DEBUG: Using default config. 2024-11-21T08:49:37.834783Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T08:49:37.835773Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T08:49:37.835876Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T08:49:37.835925Z node 1 :CMS DEBUG: Using default config 2024-11-21T08:49:37.835957Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T08:49:37.864829Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T08:49:37.877088Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T08:49:37.877198Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:37.878659Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:37.878801Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T08:49:37.878809Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T08:49:37.878820Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T08:49:37.878824Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T08:49:37.878832Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T08:49:37.878862Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T08:49:37.880790Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: 
ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" 
VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T08:49:37.892491Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:37.929136Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:37.929206Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T08:49:37.929411Z node 1 :CMS INFO: Processing Wall-E request: TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "1" DryRun: false 2024-11-21T08:49:37.965944Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:37.965995Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:37.966078Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:37.966437Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } 
Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5 ... 1, validity# 1970-01-01T00:03:00.028512Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T08:49:42.510359Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:42.558508Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T08:49:42.636514Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:42.636631Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180028512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T08:49:42.636642Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.028512Z 2024-11-21T08:49:42.636791Z node 17 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T08:49:42.636801Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T08:49:42.636814Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:42.636844Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T08:49:42.648996Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:42.649075Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T08:49:42.649218Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "18" DryRun: false 2024-11-21T08:49:42.660834Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:42.660878Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:42.660897Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:42.672849Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:42.672895Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:42.672912Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:42.673064Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 2024-11-21T08:49:42.673074Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2024-11-21T08:49:42.673083Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T08:49:42.673091Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '18': node state: 'Locked') 2024-11-21T08:49:42.673111Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:42.673164Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 20, body# User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': 
node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2024-11-21T08:49:42.685145Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:42.685234Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } RequestId: "Wall-E-r-2" Deadline: 420232024 } 2024-11-21T08:49:42.685294Z node 17 :CMS DEBUG: TTxStoreWalleTask Execute 2024-11-21T08:49:42.685326Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store wall-e task: id# task-1, requestId# Wall-E-r-2 2024-11-21T08:49:42.696681Z node 17 :CMS DEBUG: TTxStoreWalleTask Complete 2024-11-21T08:49:42.696725Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvStoreWalleTask { Task: { TaskId: task-1 RequestId: Wall-E-r-2 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-1 } 2024-11-21T08:49:42.696808Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "18" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } TaskId: "task-1" Hosts: "18" } 2024-11-21T08:49:42.709654Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:42.709698Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:42.709715Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:42.709852Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:42.709862Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T08:49:42.709872Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T08:49:42.709906Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:42.709926Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T08:49:42.709932Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T08:49:42.709942Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:42.709983Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.335048Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T08:49:42.709991Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T08:49:42.725269Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:42.725391Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180335048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T08:49:42.725549Z node 17 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T08:49:42.725561Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T08:49:42.725576Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:42.725604Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T08:49:42.744641Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:42.744722Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T08:49:42.744873Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2024-11-21T08:49:42.757677Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:42.757717Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:42.757733Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:42.757877Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2024-11-21T08:49:42.757888Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } 2024-11-21T08:49:42.757897Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T08:49:42.757932Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:42.757952Z node 17 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-2, owner# Wall-E 2024-11-21T08:49:42.757959Z node 
17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2024-11-21T08:49:42.757971Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:42.758012Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2024-11-21T08:49:42.758022Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2024-11-21T08:49:42.776553Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:42.776671Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T08:49:42.776730Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "18" } } |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3dc [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Debug] [GOOD] >> test.py::test[solomon-BrokenJsonResponse--Plan] >> test.py::test[aggregate-avg_and_sum-default.txt-Plan] [GOOD] >> test.py::test[solomon-BrokenJsonResponse--Plan] [SKIPPED] >> test.py::test[solomon-BrokenJsonResponse--Results] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Debug] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Results] [SKIPPED] >> test.py::test[solomon-Subquery-default.txt-Debug] [SKIPPED] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] >> test.py::test[order_by-ordered_fill--ForceBlocks] [GOOD] >> test.py::test[order_by-ordered_fill--Plan] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test.py::test[pg_catalog-pg_locks-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] >> TargetDiscoverer::IndexedTable [GOOD] >> TCmsTest::SamePriorityRequest [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite >> test.py::test[expr-yql-10180-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Plan] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] >> test.py::test[column_order-insert--Debug] [GOOD] >> test.py::test[select-discard-default.txt-Analyze] [GOOD] >> test.py::test[select-discard-default.txt-Debug] >> test.py::test[column_order-insert--Plan] [GOOD] >> test.py::test[column_order-insert--Results] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility |86.4%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[window-yql-14179-default.txt-Results] [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> test.py::test[expr-empty_struct_tuple_types-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-empty_struct_tuple_types-default.txt-Plan] [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD] >> TPDiskRaces::OwnerRecreationRaces >> test.py::test[expr-empty_struct_tuple_types-default.txt-Results] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2024-11-21T08:49:44.240160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652090166175723:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:44.240220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002531/r3tmp/tmp9JFMYa/pdisk_1.dat 2024-11-21T08:49:44.377760Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18591 2024-11-21T08:49:44.420432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:44.420455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:44.424486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11245, node 1 2024-11-21T08:49:44.492188Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:44.492222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:44.492224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:44.492259Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:44.634257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:49:44.641287Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:49:44.703687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:44.841547Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178984683, tx_id: 1 } } } 2024-11-21T08:49:44.841557Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T08:49:44.842447Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178984816, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T08:49:44.842453Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T08:49:44.846732Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178984816, tx_id: 281474976710658 } } } 2024-11-21T08:49:44.846739Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T08:49:44.846743Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2024-11-21T08:49:44.846750Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> test.py::test[aggregate-group_by_hop_compact--Analyze] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] Test command err: 2024-11-21T08:49:37.854436Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T08:49:37.869083Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:37.887820Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T08:49:37.887935Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T08:49:37.887989Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T08:49:37.888086Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T08:49:37.888380Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:37.888473Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T08:49:37.888839Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:37.888881Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T08:49:37.894635Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T08:49:37.894663Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T08:49:37.894705Z node 1 :CMS DEBUG: Using default config 2024-11-21T08:49:37.894730Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T08:49:37.915646Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T08:49:37.976891Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T08:49:37.977008Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:37.978269Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:37.978368Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T08:49:37.978374Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T08:49:37.978381Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T08:49:37.978385Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T08:49:37.978398Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:37.978440Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T08:49:37.978465Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T08:49:37.980100Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T08:49:38.020880Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:38.020941Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T08:49:38.052780Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:38.052816Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:38.052891Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:38.053186Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T08:49:43.725957Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T08:49:43.725966Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:43.726000Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.027000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 2024-11-21T08:49:43.726023Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:43.784367Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T08:49:43.830856Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:43.830951Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Deadline: 180027000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T08:49:43.830962Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.027000Z 2024-11-21T08:49:43.855127Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T08:49:43.855213Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:43.855230Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:43.855241Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:43.855380Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:43.855389Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T08:49:43.855399Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T08:49:43.855423Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T08:49:43.855439Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:43.855488Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:43.866484Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:43.866562Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420129512 } 2024-11-21T08:49:43.866686Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T08:49:43.866696Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T08:49:43.866708Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:43.866731Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T08:49:43.882349Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:43.882435Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T08:49:43.893918Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:43.893954Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:43.893968Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:43.894107Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:43.894119Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T08:49:43.894129Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T08:49:43.894162Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:43.894182Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T08:49:43.894190Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T08:49:43.894198Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:43.894231Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.232536Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T08:49:43.894240Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T08:49:43.905024Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:43.905103Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } Deadline: 180232536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T08:49:43.905228Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T08:49:43.905238Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T08:49:43.905249Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:43.905274Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T08:49:43.916988Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:43.917054Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T08:49:43.928402Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:43.928439Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:43.928453Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:43.928587Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T08:49:43.928598Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T08:49:43.928608Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T08:49:43.928642Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:43.928663Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-2, owner# user 2024-11-21T08:49:43.928669Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T08:49:43.928678Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:43.928710Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T08:49:43.928718Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2024-11-21T08:49:43.942714Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:43.942798Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } >> test.py::test[join-trivial_view--Debug] [GOOD] >> test.py::test[join-trivial_view--ForceBlocks] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Debug] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Analyze] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2024-11-21T08:49:38.850969Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:38.861676Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T08:49:38.876697Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:38.876748Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T08:49:38.877390Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:38.877419Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T08:49:38.885191Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T08:49:38.885231Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T08:49:38.885286Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T08:49:38.885369Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T08:49:38.892834Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T08:49:38.896272Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T08:49:38.896333Z node 1 :CMS DEBUG: Using default config 2024-11-21T08:49:38.896359Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T08:49:38.923727Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T08:49:38.936741Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T08:49:38.936807Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:38.937959Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:38.938038Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T08:49:38.938043Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T08:49:38.938050Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T08:49:38.938053Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T08:49:38.938060Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T08:49:38.938083Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T08:49:38.939577Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 
FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 
VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T08:49:38.952382Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T08:49:38.989182Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T08:49:38.989235Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T08:49:39.020471Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:39.020512Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:39.020580Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T08:49:39.020900Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" 
State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2024-11-21T08:49:44.568478Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2024-11-21T08:49:44.568483Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2024-11-21T08:49:44.568487Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2024-11-21T08:49:44.568492Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2024-11-21T08:49:44.568571Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568702Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568717Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568739Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568750Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568763Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568773Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568784Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 
ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T08:49:44.568793Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T08:49:44.568829Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T08:49:44.568837Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T08:49:44.568844Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2024-11-21T08:49:44.568867Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T08:49:44.568929Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T08:49:44.568952Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2024-11-21T08:49:44.568957Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2024-11-21T08:49:44.568960Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2024-11-21T08:49:44.580801Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T08:49:44.580828Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T08:49:44.593485Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:44.593521Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:44.593537Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T08:49:44.593666Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T08:49:44.593676Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2024-11-21T08:49:44.593686Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T08:49:44.593695Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T08:49:44.593698Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T08:49:44.593701Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T08:49:44.593705Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:44.593727Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T08:49:44.593735Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T08:49:44.593743Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:44.593774Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.130000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2024-11-21T08:49:44.593788Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 
2024-11-21T08:49:44.604618Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:44.604697Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T08:49:44.604707Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.130000Z 2024-11-21T08:49:44.617352Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T08:49:44.617434Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T08:49:44.617450Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T08:49:44.617464Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T08:49:44.617589Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T08:49:44.617599Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2024-11-21T08:49:44.617608Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T08:49:44.617617Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T08:49:44.617638Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T08:49:44.617644Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2024-11-21T08:49:44.617651Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T08:49:44.617684Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.231512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2024-11-21T08:49:44.617700Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T08:49:44.628361Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T08:49:44.628423Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780231512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T08:49:44.628537Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T08:49:44.628544Z node 25 
:CMS DEBUG: Resulting status: OK 2024-11-21T08:49:44.628553Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:44.628567Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2024-11-21T08:49:44.628584Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2024-11-21T08:49:44.628589Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T08:49:44.649141Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:44.649209Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T08:49:44.649330Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T08:49:44.649339Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T08:49:44.649350Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T08:49:44.649392Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2024-11-21T08:49:44.649410Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2024-11-21T08:49:44.649416Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T08:49:44.664522Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T08:49:44.664587Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Plan] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] >> TargetDiscoverer::Negative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: 2024-11-21T08:49:42.173728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:42.174697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:42.174734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004367/r3tmp/tmpDD77Mr/pdisk_1.dat 2024-11-21T08:49:42.362467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:42.390881Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:42.444736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:42.444770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:42.456723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:42.596487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:42.923398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T08:49:43.213641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:43.213676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:43.213686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:43.214880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:49:43.431041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:49:43.535101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yke9d65erqy3ykc2f44ex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg1ZmM3NjAtZWY2OWY0YzAtMTZlYmI1OGQtZGJmMDdiYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:49:43.569783Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ykekx2phgwwp44vw1ev21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ4MjJjMzEtYTgzYzU5Y2UtNzQzMjZhMzctZmQ1MjRjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T08:49:43.673159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ykemz33by76f0208xqpvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg5YTk1NTAtOTc4OWIyMTItZmVlMzI5MGItYTAyNjMxYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T08:49:43.698481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yker2f7gk40ag5mvzfka6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg5YTk1NTAtOTc4OWIyMTItZmVlMzI5MGItYTAyNjMxYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T08:49:44.100101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykf3mf9kjdk4d91mm4dh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU4NWFjOWItZTdlZjQ5OGEtNWNlNGZhZWYtOGRmNGQzNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T08:49:44.705090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:44.705132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:49:44.705161Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004367/r3tmp/tmphxc6ml/pdisk_1.dat 2024-11-21T08:49:44.823820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:44.839412Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:44.887203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:44.887233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:44.898029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:45.014149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:45.262684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:45.555554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:45.555575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:45.555583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:49:45.564414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:49:45.770020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:49:45.834667Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ykgjk3z91fcyahecq76ma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2JhM2IzNmUtNWEyOWMyNWItZTNlYzA2YjctNTMzZGNlY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:49:45.905748Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ykgw8aja6h4qdfndkryay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGIyNDhmOTItOTZkNzU4OWUtYjUwNTI5ZjEtZWYxY2U4MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2024-11-21T08:49:46.705312Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykhn460xjesq8czk8ax04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDMxNDk0NDYtZWMyNzUyYTktZTkxZWViMWUtYWZjZWE0YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:49:46.716887Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ykhpj57et6ng9emmm6zwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmM1ZDRiMGItYzZlMzQ3MmYtMjVkYWQ3YTEtNjRiOGZmNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> TargetDiscoverer::SystemObjects |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasure4Plus2Block [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Analyze] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Analyze] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Plan] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> test.py::test[tpch-q9-default.txt-Debug] [GOOD] >> test.py::test[tpch-q9-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_ru--Plan] [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |86.4%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> test.py::test[window-win_func_lead_lag_worm_with_part--Debug] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] >> DataShardTxOrder::ImmediateBetweenOnline_Init >> TargetDiscoverer::Dirs |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[solomon-Subquery-default.txt-Debug] [SKIPPED] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--Debug] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials >> TargetDiscoverer::SystemObjects [GOOD] >> TargetDiscoverer::Basic >> test.py::test[aggregate-avg_and_sum_float--Debug] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Plan] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T08:49:43.461596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:43.462147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:43.462168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00435e/r3tmp/tmpnQwSkd/pdisk_1.dat 2024-11-21T08:49:43.589560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:43.630329Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:43.678716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:43.678746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:43.689349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:43.795932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:43.810460Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:43.810685Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:43.810771Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:43.810816Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:43.818819Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:43.819006Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:43.819033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:43.819184Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:43.819200Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:43.819207Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:43.819255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:43.822689Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:43.822762Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:43.822787Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:43.822792Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:43.822797Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:49:43.822802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:43.822943Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:43.822950Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:43.823097Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:43.823117Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:43.823129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:43.823133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:43.823139Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:43.823146Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:43.823153Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:43.823161Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:43.823166Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:43.823170Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:43.823175Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:43.823181Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:43.823202Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:43.823207Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:43.823230Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:43.823283Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:43.823294Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:43.823311Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:43.823319Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:43.823323Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:43.823328Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:43.823332Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:43.823376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:43.823380Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:43.823383Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:43.823386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:43.823397Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:43.823400Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:43.823404Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:43.823406Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:43.823411Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:43.823652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:43.823660Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:49:43.836606Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:43.836636Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:43.836642Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:43.836654Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:43.836670Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:44.039090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.039119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.039129Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:49:44.039151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:49:44.039157Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:44.039219Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:44.039229Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:49:44.039234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:49:44.039239Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:49:44.040836Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:49:44.040859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:44.040991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.040998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.041004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:44.041011Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:44.041016Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:49:44.041024Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... . Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 55 DurationUs: 2000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 10 FinishTimeMs: 1732178988718 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 6 WaitInputTimeUs: 1041 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988716 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.718764Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2024-11-21T08:49:48.718773Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T08:49:48.718780Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T08:49:48.718895Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 131 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 90 FinishTimeMs: 1732178988718 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 79 BuildCpuTimeUs: 11 WaitInputTimeUs: 985 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988717 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.718901Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2024-11-21T08:49:48.718907Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T08:49:48.718913Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T08:49:48.718992Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1010:2809], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 137 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 109 FinishTimeMs: 1732178988718 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 103 BuildCpuTimeUs: 6 WaitInputTimeUs: 926 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988717 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.718997Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1010:2809] 2024-11-21T08:49:48.719003Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T08:49:48.719009Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T08:49:48.719099Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 122 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 86 FinishTimeMs: 1732178988719 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 13 WaitInputTimeUs: 1198 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988717 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.719105Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2024-11-21T08:49:48.719111Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T08:49:48.719116Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T08:49:48.719149Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 65 DurationUs: 2000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 37 FinishTimeMs: 1732178988719 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 21 BuildCpuTimeUs: 16 WaitInputTimeUs: 1394 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988717 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.719154Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2024-11-21T08:49:48.719160Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1013:2812], 2024-11-21T08:49:48.719165Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1013:2812], 2024-11-21T08:49:48.719184Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 108 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 56 FinishTimeMs: 1732178988719 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 47 BuildCpuTimeUs: 9 WaitInputTimeUs: 1462 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732178988717 } MaxMemoryUsage: 1048576 } 2024-11-21T08:49:48.719189Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2024-11-21T08:49:48.719227Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:49:48.719237Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd6ykkjac8mas5payz66azv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjliNjEzNWEtNmJkNDM5OTAtZjdhYTZjODYtY2FkYTY5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000885s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> test.py::test[aggregate-avg_and_sum-default.txt-Results] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Debug] [SKIPPED] >> test.py::test[aggregate-disable_blocks_with_spilling--Plan] [SKIPPED] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Debug] >> test.py::test[expr-empty_struct_tuple_types-default.txt-Results] [GOOD] >> test.py::test[expr-expr_cast-default.txt-Analyze] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> test.py::test[expr-yql-10180-default.txt-Results] [GOOD] >> test.py::test[flexible_types-evaluate_expr-default.txt-Analyze] [SKIPPED] >> test.py::test[flexible_types-evaluate_expr-default.txt-Debug] >> test.py::test[flexible_types-evaluate_expr-default.txt-Debug] [SKIPPED] >> test.py::test[flexible_types-evaluate_expr-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[flexible_types-evaluate_expr-default.txt-Plan] [SKIPPED] >> test.py::test[flexible_types-evaluate_expr-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Analyze] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] [GOOD] >> test.py::test[pragma-file-default.txt-Debug] >> TargetDiscoverer::Negative [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2024-11-21T08:49:44.212873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:44.212900Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:44.212926Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:44.221842Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:44.221995Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:49:44.222060Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:44.223093Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:44.249604Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:44.249741Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:44.249878Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:49:44.249893Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:49:44.249900Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:49:44.249938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:44.257127Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:49:44.257195Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
2024-11-21T08:49:44.257233Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:49:44.257239Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:49:44.257243Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:49:44.257249Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:44.257331Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.257338Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.257370Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:49:44.257387Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:49:44.257433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:44.257439Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:44.257446Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:49:44.257451Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:44.257454Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:44.257459Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:49:44.257464Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:44.272363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.272386Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.272394Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:49:44.272782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:49:44.272792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:44.272815Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:49:44.272846Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:49:44.272855Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:49:44.272863Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:49:44.272871Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:44.272876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:49:44.272881Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:49:44.272885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:44.272942Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:44.272947Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:49:44.272950Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:49:44.272953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:44.272962Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:49:44.272965Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:49:44.272969Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:49:44.272972Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:44.272976Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:44.296612Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:49:44.296641Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:44.296649Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:44.296661Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:49:44.296678Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:44.296800Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.296808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:44.296816Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:49:44.296837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:49:44.296841Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:44.296880Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:44.296888Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:44.296893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:49:44.296898Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:49:44.297646Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:49:44.297658Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:44.297704Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.297710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:44.297719Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:44.297727Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:44.297731Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:44.297739Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:49:44.297744Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:49:44.297751Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:44.297755Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:44.297760Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:44.297764Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:44.297805Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:49:44.297809Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:44.297813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:44.297817Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:49:44.297821Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:49:44.297831Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:44.297835Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:49:44.297839Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:44.297843Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:49:44.297855Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:49:44.297859Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:49:44.297862Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:49:44.297868Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:44.297871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:44.297875Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:48.569557Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:49:48.569611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T08:49:48.569618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569623Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T08:49:48.569646Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T08:49:48.569649Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569652Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T08:49:48.569682Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T08:49:48.569686Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569689Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T08:49:48.569701Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T08:49:48.569705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569709Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T08:49:48.569720Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T08:49:48.569723Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569726Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T08:49:48.569737Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T08:49:48.569740Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569742Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T08:49:48.569750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T08:49:48.569752Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569755Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T08:49:48.569768Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:49:48.569772Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569774Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T08:49:48.569782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:49:48.569784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569787Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T08:49:48.569798Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T08:49:48.569801Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569804Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T08:49:48.569814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T08:49:48.569817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569820Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T08:49:48.569826Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T08:49:48.569829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569832Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T08:49:48.569844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T08:49:48.569847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.569850Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T08:49:48.569861Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:48.569865Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T08:49:48.569872Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:49:48.569877Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:49:48.569881Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:48.569902Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:48.569906Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T08:49:48.569912Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:49:48.569918Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:49:48.569921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:48.569939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:48.569944Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T08:49:48.569950Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:49:48.569955Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:49:48.569959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:48.569973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:48.569977Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T08:49:48.569983Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:49:48.569987Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:49:48.569990Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:48.570013Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:49:48.570016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.570019Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T08:49:48.570032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:49:48.570034Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.570037Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T08:49:48.570047Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:49:48.570050Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.570053Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T08:49:48.570063Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:49:48.570066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:48.570069Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[column_order-values-default.txt-Debug] >> TargetDiscoverer::Basic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2024-11-21T08:49:48.669815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652106626385974:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:48.669910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00237e/r3tmp/tmp4sKnmx/pdisk_1.dat 2024-11-21T08:49:48.872596Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:48.984461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:48.984493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:48.986878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24059 TServer::EnableGrpc on GrpcPort 13307, node 1 2024-11-21T08:49:49.080437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:49.080451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:49.080453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:49.080493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24059 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:49.203210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:49.214493Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:49.222623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:49:49.340539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:49:49.345123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:49:49.448354Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178989254, tx_id: 1 } } } 2024-11-21T08:49:49.448371Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T08:49:49.456791Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178989345, tx_id: 281474976715658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178989380, tx_id: 281474976715659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T08:49:49.456804Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T08:49:50.100560Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178989345, tx_id: 281474976715658 } } } 2024-11-21T08:49:50.100572Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T08:49:50.100577Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TargetDiscoverer::InvalidCredentials [GOOD] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/ut/ydb-core-control-ut |86.4%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut >> test.py::test[aggregate-group_by_hop_compact--Debug] [GOOD] >> TestYmqHttpProxy::TestSendMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2024-11-21T08:49:48.714723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652105669091268:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:48.714777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00239d/r3tmp/tmpJP28VO/pdisk_1.dat 2024-11-21T08:49:49.117941Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:49.119308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:49.119322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:49.132706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29917 TServer::EnableGrpc on GrpcPort 18744, node 1 2024-11-21T08:49:50.268417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:50.268430Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:50.268432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:50.268482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:50.742987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:50.995907Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2024-11-21T08:49:50.995931Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] >> test.py::test[join-trivial_view--ForceBlocks] [GOOD] >> test.py::test[join-trivial_view--Plan] >> test.py::test[order_by-warn_offset_wo_sort--Analyze] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_compact--Plan] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Analyze] >> test.py::test[join-trivial_view--Plan] [GOOD] >> test.py::test[join-trivial_view--Results] >> test.py::test[order_by-warn_offset_wo_sort--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2024-11-21T08:49:41.800192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:41.800233Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:41.800256Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:41.809034Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:41.809492Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:49:41.809854Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:41.812988Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:41.846862Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:41.847015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:41.847303Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:49:41.847318Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:49:41.847325Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:49:41.847364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:41.852632Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:49:41.852697Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:41.852735Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:49:41.852741Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:49:41.852746Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:49:41.852752Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:41.852840Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.852847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.852873Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:49:41.852896Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 
2024-11-21T08:49:41.852940Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:41.852947Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:41.852955Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:49:41.852961Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:41.852964Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:41.852970Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:49:41.852975Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:41.877074Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.877097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.877106Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:49:41.877662Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:49:41.877673Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:41.877693Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:49:41.877724Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:49:41.877734Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:49:41.877745Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:49:41.877752Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:41.877758Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:49:41.877764Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:49:41.877767Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:41.877900Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:41.877906Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:49:41.877910Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:49:41.877914Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:41.877925Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:49:41.877928Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:49:41.877932Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit 
WaitForPlan 2024-11-21T08:49:41.877935Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:41.877940Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:41.904566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:49:41.904599Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:41.904607Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:41.904619Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:49:41.904636Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:41.904775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.904782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:41.904791Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:49:41.904815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:49:41.904820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:41.904857Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:41.904865Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.904870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:49:41.904875Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:49:41.905944Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:49:41.905952Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:41.905992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.905997Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:41.906005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:41.906011Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:41.906015Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:41.906022Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:49:41.906027Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:49:41.906033Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.906037Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:41.906041Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:41.906045Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:41.906086Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:49:41.906089Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.906093Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:41.906096Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:49:41.906100Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:49:41.906109Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:41.906112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:49:41.906115Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:41.906119Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:49:41.906130Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:49:41.906133Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:49:41.906137Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:49:41.906142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:41.906145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:41.906149Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
eady operations at 9437184 2024-11-21T08:49:51.349575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349604Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349628Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:49:51.349644Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T08:49:51.349652Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349662Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349733Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349740Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349746Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T08:49:51.349752Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349756Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349760Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349789Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349795Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349800Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T08:49:51.349804Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349807Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349810Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349813Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349831Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349835Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349841Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349846Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T08:49:51.349849Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2024-11-21T08:49:51.349853Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:51.349873Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349877Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349882Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349887Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T08:49:51.349890Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349906Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349909Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349915Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349920Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T08:49:51.349923Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349937Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349941Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.349946Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.349951Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T08:49:51.349955Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.349958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.349962Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2024-11-21T08:49:51.349970Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T08:49:51.349989Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.350029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.350033Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.350038Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:49:51.350043Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T08:49:51.350046Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.350062Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:51.350065Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2024-11-21T08:49:51.350072Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:51.350076Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:51.350148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T08:49:51.350155Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350161Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2024-11-21T08:49:51.350180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T08:49:51.350183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350186Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2024-11-21T08:49:51.350201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T08:49:51.350204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350207Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2024-11-21T08:49:51.350222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T08:49:51.350225Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350229Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2024-11-21T08:49:51.350246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T08:49:51.350249Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350252Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2024-11-21T08:49:51.350264Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T08:49:51.350269Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T08:49:51.350272Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2024-11-21T08:49:51.350289Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T08:49:51.350293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350296Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2024-11-21T08:49:51.350309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:329:2302]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T08:49:51.350313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:51.350316Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2024-11-21T08:49:50.725357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652116198188036:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:50.725478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022f8/r3tmp/tmp8NoWvv/pdisk_1.dat 2024-11-21T08:49:50.803929Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:50.824808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:50.824836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:50.826286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21888 TServer::EnableGrpc on GrpcPort 18968, node 1 2024-11-21T08:49:50.872625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:50.872638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:50.872640Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:50.872676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21888 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:51.026819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.051080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.234531Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178991088, tx_id: 1 } } } 2024-11-21T08:49:51.234549Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T08:49:51.239397Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178991172, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T08:49:51.239408Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T08:49:51.562622Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178991172, tx_id: 281474976710658 } } } 2024-11-21T08:49:51.562636Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T08:49:51.562641Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TargetDiscoverer::Dirs [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TestKinesisHttpProxy::GoodRequestPutRecords >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |86.4%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2024-11-21T08:49:50.749441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652113546467762:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002348/r3tmp/tmp4Cllsu/pdisk_1.dat 2024-11-21T08:49:50.958992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:49:51.123878Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:51.133422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:51.133444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:51.153130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8129 TServer::EnableGrpc on GrpcPort 3197, node 1 2024-11-21T08:49:51.528580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:51.528591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:51.528593Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:51.528624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:51.836743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.839123Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:49:51.839791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:52.024921Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } } } 2024-11-21T08:49:52.024939Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } >> test.py::test[blocks-combine_hashed_count_filter--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Plan] >> test.py::test[blocks-combine_hashed_count_filter--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> test.py::test[aggr_factory-histogram-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Debug] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[aggregate-group_by_mul_gs_ru--Plan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2024-11-21T08:49:50.712170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652114866873389:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:50.716282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002324/r3tmp/tmpavoEUI/pdisk_1.dat 2024-11-21T08:49:50.841671Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6298 TServer::EnableGrpc on GrpcPort 25326, node 1 2024-11-21T08:49:51.007073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:51.007105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:51.009494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:51.064503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:51.064516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:51.064517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:51.064548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:49:51.436634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.449010Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.461763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:51.514471Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178991487, tx_id: 1 } } } 2024-11-21T08:49:51.514487Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T08:49:51.528646Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732178991501, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T08:49:51.528660Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T08:49:51.541789Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178991522, tx_id: 281474976715659 } }] } } 2024-11-21T08:49:51.541804Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2024-11-21T08:49:52.781455Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732178991522, tx_id: 281474976715659 } } } 2024-11-21T08:49:52.781473Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2024-11-21T08:49:52.781477Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> test.py::test[blocks-date_less_or_equal_scalar--Analyze] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Debug] >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> TestKinesisHttpProxy::MissingAction >> test.py::test[expr-expr_cast-default.txt-Analyze] [GOOD] >> test.py::test[expr-expr_cast-default.txt-Debug] |86.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |86.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> test.py::test[select-discard-default.txt-Debug] [GOOD] >> test.py::test[select-discard-default.txt-ForceBlocks] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Debug] |86.4%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |86.4%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2024-11-21T08:49:42.153579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:42.154162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:42.154190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042ff/r3tmp/tmpv9c37S/pdisk_1.dat 2024-11-21T08:49:42.403178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:42.407299Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:49:42.407321Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T08:49:42.427289Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:42.496731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:42.496766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:42.508651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:42.628395Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T08:49:42.628433Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:49:42.628493Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T08:49:42.628586Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T08:49:42.628595Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T08:49:42.629000Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T08:49:42.629014Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T08:49:42.629019Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T08:49:42.629023Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T08:49:42.629992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:42.647105Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:42.647289Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:42.647354Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:631:2537] 2024-11-21T08:49:42.647400Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:42.683451Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:42.683634Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:42.683660Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:42.683800Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:42.683817Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:42.683823Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:42.683865Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:42.695613Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:42.695696Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:42.695730Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:647:2546] 2024-11-21T08:49:42.695735Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:42.695740Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:42.695745Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:42.695882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:631:2537], Recipient [1:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:42.695890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:42.696023Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:42.696042Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:42.696057Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2539], Recipient [1:631:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:42.696062Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:42.696068Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:627:2534], serverId# [1:636:2539], sessionId# [0:0:0] 2024-11-21T08:49:42.696076Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:42.696082Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:42.696088Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:42.696093Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:42.696097Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:42.696101Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:42.696107Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:42.696126Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:636:2539] 2024-11-21T08:49:42.696131Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:42.696150Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:42.696190Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:42.696199Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:42.699861Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:42.699897Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:42.699904Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:42.699912Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:42.699918Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:42.699979Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:42.699985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:42.699989Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:42.699992Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:42.700008Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:42.700012Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:42.700016Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:42.700020Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:42.700025Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:42.700275Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:42.700285Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:42.700290Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:42.700299Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:49:42.700309Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:42.700776Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:49:42.700790Z node 1 
:TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 501 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T08:49:42.700848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:648:2547], Recipient [1:631:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:42.700856Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:49:42.907763Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2024-11-21T08:49:42.907795Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:49:42.907802Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72075186224037888 2024-11-21T08:49:42.907859Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1500 in 0.500000s at 1.450000s 2024-11-21T08:49:42.907963Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1000, txid# 281474976715 ... 1T08:49:52.749211Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1634:2418], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:49:52.749222Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1634:2418], 0} finished in read 2024-11-21T08:49:52.749229Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:49:52.749231Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:49:52.749234Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:52.749237Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:52.749246Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:49:52.749249Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:52.749252Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2024-11-21T08:49:52.749258Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:49:52.749273Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:49:52.749487Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1634:2418], Recipient [3:1213:2356]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:49:52.749495Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T08:49:52.779716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ykqke2865a3bf428mzxcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2NmZjEyYTUtN2FjOTk3N2EtODY2ZTJmOTctMjZjMmE1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:49:52.780233Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1652:2419], Recipient [3:1213:2356]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:49:52.780264Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:49:52.780277Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T08:49:52.780291Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:52.780295Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:49:52.780299Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:52.780302Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:52.780311Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T08:49:52.780315Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:52.780318Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:52.780322Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:49:52.780325Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:49:52.780342Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T08:49:52.780383Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:49:52.780389Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T08:49:52.780394Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1652:2419], 0} after executionsCount# 1 2024-11-21T08:49:52.780402Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1652:2419], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:49:52.780415Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1652:2419], 0} finished in read 2024-11-21T08:49:52.780429Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:52.780433Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:49:52.780436Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:52.780439Z node 3 :TX_DATASHARD 
TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:52.780446Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:49:52.780449Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:52.780452Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T08:49:52.780456Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:49:52.780470Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:49:52.780707Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1652:2419], Recipient [3:1213:2356]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:49:52.780718Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T08:49:52.824737Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ykqmd6fqa66x537jzh1th, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjRkNTMyNi0xZmJkZGE0OS01ZWIyNTg5Yi0zYTdlYzli, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:49:52.825296Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1676:2420], Recipient [3:1213:2356]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:49:52.825328Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:49:52.825339Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2024-11-21T08:49:52.825352Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:49:52.825355Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:49:52.825359Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:52.825372Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:49:52.825380Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2024-11-21T08:49:52.825384Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:49:52.825387Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:52.825392Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:49:52.825395Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:49:52.825412Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 
MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T08:49:52.825455Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:49:52.825461Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T08:49:52.825466Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1676:2420], 0} after executionsCount# 1 2024-11-21T08:49:52.825473Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1676:2420], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:49:52.825486Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1676:2420], 0} finished in read 2024-11-21T08:49:52.825496Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:49:52.825500Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:49:52.825503Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:49:52.825507Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:49:52.825512Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:49:52.825515Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:49:52.825519Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T08:49:52.825523Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:49:52.825535Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:49:52.825731Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1676:2420], Recipient [3:1213:2356]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:49:52.825740Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T08:49:52.825963Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [3:195:2110], Recipient [3:1213:2356]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 2 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> test.py::test[pragma-file-default.txt-Debug] [GOOD] >> test.py::test[pragma-file-default.txt-Plan] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> test.py::test[aggregate-group_by_hop_only--Analyze] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Debug] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Plan] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal >> test.py::test[column_order-values-default.txt-Debug] [GOOD] >> test.py::test[column_order-values-default.txt-Plan] [GOOD] >> test.py::test[column_order-values-default.txt-Results] >> DataShardTxOrder::ZigZag_oo8_dirty >> 
test.py::test[order_by-warn_offset_wo_sort--Debug] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestCreateQueue >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> test.py::test[hor_join-empty_out_hor_join-default.txt-Analyze] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Debug] >> test.py::test[tpch-q9-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q9-default.txt-Plan] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2024-11-21T08:49:49.887375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:49.887396Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:49.887412Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:49.889932Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:49.890040Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:49:49.890097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:49.890970Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:49.899662Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:49.899790Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:49.899931Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:49:49.899944Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:49:49.899950Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:49:49.899990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:49.903583Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:49:49.903622Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:49.903657Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:49:49.903663Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:49:49.903668Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:49:49.903674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:49.903735Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:49.903742Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:49.903761Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:49:49.903775Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:49:49.903815Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 
2024-11-21T08:49:49.903823Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:49.903830Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:49:49.903836Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:49.903840Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:49.903845Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:49:49.903850Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:49.914151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:49.914173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:49.914184Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:49:49.914780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:49:49.914788Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:49.914806Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:49:49.914836Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:49:49.914847Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:49:49.914857Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:49:49.914865Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:49.914872Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:49:49.914878Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:49:49.914882Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:49.914951Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:49.914955Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:49:49.914959Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:49:49.914962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:49.914972Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:49:49.914975Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:49:49.914979Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:49:49.914982Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
WaitForPlan 2024-11-21T08:49:49.914987Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:49.945213Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:49:49.945242Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:49.945249Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:49.945260Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:49:49.945276Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:49.945413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:49.945420Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:49.945428Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:49:49.945449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:49:49.945453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:49.945698Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:49.945706Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:49.945710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:49:49.945716Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:49:49.947255Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:49:49.947265Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:49.947305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:49.947310Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:49.947319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:49.947327Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:49.947331Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:49.947338Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:49:49.947343Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:49:49.947349Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:49.947353Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:49.947357Z node 
1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:49.947361Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:49.947406Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:49:49.947410Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:49.947413Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:49.947417Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:49:49.947420Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:49:49.947429Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:49.947432Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:49:49.947435Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:49.947439Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:49:49.947451Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:49:49.947455Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:49:49.947458Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:49:49.947463Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:49.947466Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:49.947470Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
d event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:49:54.061742Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.061745Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T08:49:54.061763Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:54.061769Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 98 Flags# 0} 2024-11-21T08:49:54.061775Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:54.061780Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 99 Flags# 0} 2024-11-21T08:49:54.061784Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T08:49:54.061788Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 100 Flags# 0} 2024-11-21T08:49:54.062166Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:54.062174Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T08:49:54.062185Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T08:49:54.062193Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:49:54.062198Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:54.062227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:54.062230Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T08:49:54.062235Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:54.062240Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:54.062259Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:54.062264Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2024-11-21T08:49:54.062272Z node 1 :TX_DATASHARD DEBUG: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2024-11-21T08:49:54.062281Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:54.062284Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T08:49:54.062290Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:54.062294Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:54.062311Z 
node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:54.062315Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T08:49:54.062321Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:49:54.062325Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:54.062388Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:49:54.062392Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.062397Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T08:49:54.062410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:49:54.062414Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.062417Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T08:49:54.062424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:433:2383]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T08:49:54.062427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:49:54.062431Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T08:49:54.062444Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T08:49:54.062453Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T08:49:54.062468Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T08:49:54.062481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:49:54.062484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.062487Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T08:49:54.062503Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:433:2383], Recipient [1:433:2383]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:54.062507Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:54.062514Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T08:49:54.062521Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 
active planned 1 immediate 0 planned 1 2024-11-21T08:49:54.062528Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T08:49:54.062533Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T08:49:54.062540Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T08:49:54.062545Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T08:49:54.062550Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T08:49:54.062554Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T08:49:54.062718Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T08:49:54.062728Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:49:54.062739Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T08:49:54.062742Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T08:49:54.062745Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T08:49:54.062749Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T08:49:54.062811Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T08:49:54.062814Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2024-11-21T08:49:54.062818Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T08:49:54.062821Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T08:49:54.062827Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T08:49:54.062830Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T08:49:54.062833Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T08:49:54.062837Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:54.062841Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T08:49:54.062845Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T08:49:54.062848Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T08:49:54.062904Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:49:54.062908Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.062912Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T08:49:54.088618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:49:54.088651Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T08:49:54.088693Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:49:54.088713Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:49:54.088721Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:49:54.088809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:49:54.088815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:49:54.088821Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> TestYmqHttpProxy::TestSendMessage [GOOD] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] >> test.py::test[join-trivial_view--Results] [GOOD] >> test.py::test[join-trivial_view-off-Analyze] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> test.py::test[column_order-values-default.txt-Results] [GOOD] >> test.py::test[compute_range-huge_in-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Results] [SKIPPED] >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--Debug] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |86.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.5%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Debug] [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> test.py::test[expr-expr_cast-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_cast-default.txt-ForceBlocks] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 12488210273328199628 2024-11-21T08:49:57.136197Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136257Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136271Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136283Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136295Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136306Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136319Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136330Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.136561Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.136586Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.136600Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.136615Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.136629Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 
2024-11-21T08:49:57.136642Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.136657Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140182Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140230Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140242Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140249Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140257Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140267Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140275Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140283Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140290Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.140759Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140780Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140793Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140806Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140819Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140830Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 
2024-11-21T08:49:57.140844Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.140853Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> test.py::test[aggr_factory-histogram-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> test.py::test[window-full/session_compact--Debug] [GOOD] >> test.py::test[window-full/session_compact--Plan] [GOOD] >> test.py::test[window-full/session_compact--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 17517073182228340357 2024-11-21T08:49:57.832361Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832404Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832417Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832428Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832440Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832451Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832462Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832649Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832666Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832678Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832691Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832702Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832712Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832723Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.832734Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832741Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832745Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832757Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832761Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832766Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.832773Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T08:49:57.833083Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833093Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833099Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833109Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833115Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833122Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T08:49:57.833128Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> test.py::test[aggregate-count_distinct_with_filter--Debug] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Plan] [GOOD] >> 
test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[join-trivial_view-off-Analyze] [GOOD] >> test.py::test[join-trivial_view-off-Debug] >> TestKinesisHttpProxy::DifferentContentTypes >> test.py::test[hor_join-empty_out_hor_join-default.txt-Debug] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] >> TestKinesisHttpProxy::MissingAction [GOOD] >> BSCRestartPDisk::RestartOneByOne >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Plan] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Results] >> BSCRestartPDisk::RestartNotAllowed >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> test.py::test[aggregate-group_by_hop_only--Debug] [GOOD] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Plan] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Analyze] |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> test.py::test[aggregate-group_by_hop_only_distinct--Analyze] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--ForceBlocks] [SKIPPED] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestYmqHttpProxy::TestGetQueueUrl >> TYardTest::TestLogWriteCutUnequal [GOOD] >> TYardTest::TestLogMultipleWriteRead >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |86.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> test.py::test[expr-expr_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_cast-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_cast-default.txt-Results] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |86.5%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogWriteLsnConsistency >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |86.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |86.5%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> TestYmqHttpProxy::TestReceiveMessage >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Analyze] >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> TYardTest::TestLogLatency >> IcbAsActorTests::TestHttpGetResponse >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--Analyze] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Debug] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Plan] [SKIPPED] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 [GOOD] >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [SKIPPED] >> test.py::test[udf-python_script--Analyze] >> test.py::test[udf-python_script--Analyze] [SKIPPED] >> test.py::test[order_by-warn_offset_wo_sort--Results] [GOOD] >> test.py::test[params-no_optional_param-default.txt-Analyze] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue >> test.py::test[udf-python_script--Debug] [SKIPPED] >> test.py::test[udf-python_script--ForceBlocks] [SKIPPED] >> test.py::test[blocks-date_less_or_equal_scalar--Debug] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] >> TestKinesisHttpProxy::TestPing >> test.py::test[join-trivial_view-off-Debug] [GOOD] >> test.py::test[join-trivial_view-off-ForceBlocks] >> test.py::test[udf-python_script--Plan] [SKIPPED] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[compute_range-huge_in-default.txt-Results] [SKIPPED] >> test.py::test[udf-python_script--Results] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns--Analyze] >> test.py::test[join-trivial_view-off-ForceBlocks] [SKIPPED] >> test.py::test[join-trivial_view-off-Plan] [GOOD] >> test.py::test[join-trivial_view-off-Results] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 [GOOD] Test command err: iteration# 2 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 8 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 14 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 20 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 26 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 32 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 38 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 44 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 50 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 56 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 62 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 68 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 74 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 80 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 86 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 92 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 98 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 104 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 110 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 116 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 122 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 128 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 134 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 140 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 146 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 152 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 158 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 164 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 170 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 176 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 182 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 188 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 194 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 200 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 206 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 212 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 218 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 224 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 230 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 236 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 242 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 248 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 254 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 260 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 266 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 272 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 278 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 284 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 290 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 296 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 302 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 308 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 314 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 320 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 326 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 332 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 338 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 344 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 350 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 356 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 362 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 368 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 374 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 380 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 386 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 392 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 398 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 404 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 410 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 416 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 422 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 428 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 434 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 440 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 446 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 452 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 458 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 464 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 470 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 476 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 482 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 488 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2024-11-21T08:49:56.403555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:49:56.403575Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:56.403590Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:56.406174Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:56.406271Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:49:56.406330Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:56.407145Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:56.415500Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:56.415604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:56.415731Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:49:56.415743Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:49:56.415749Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:49:56.415786Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:56.419032Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:49:56.419074Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:56.419108Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:49:56.419113Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:49:56.419117Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:49:56.419122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:56.419184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:56.419190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:56.419209Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:49:56.419223Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:49:56.419264Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:56.419271Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:56.419277Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:49:56.419282Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:49:56.419286Z node 1 
:TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:49:56.419290Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:49:56.419295Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:49:56.426420Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:56.426441Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:56.426449Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:49:56.426844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:49:56.426851Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:56.426868Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:49:56.426896Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:49:56.426905Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:49:56.426913Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:49:56.426920Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:56.426924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:49:56.426929Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:49:56.426933Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:49:56.426986Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:56.426990Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:49:56.426993Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:49:56.426996Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:56.427004Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:49:56.427007Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:49:56.427011Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:49:56.427014Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:56.427018Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:56.457497Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:49:56.457524Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 
2024-11-21T08:49:56.457541Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:49:56.457552Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:49:56.457565Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:49:56.457672Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:56.457678Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:56.457684Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:49:56.457704Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:49:56.457708Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:49:56.457742Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:49:56.457748Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:56.457752Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:49:56.457757Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:49:56.458398Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:49:56.458407Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:49:56.458442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:56.458447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:56.458453Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:49:56.458459Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:49:56.458463Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:49:56.458470Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:49:56.458474Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:49:56.458479Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:56.458483Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:49:56.458486Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:49:56.458490Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:49:56.458527Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:49:56.458531Z 
node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:56.458534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:49:56.458537Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:49:56.458541Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:49:56.458549Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:49:56.458552Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:49:56.458555Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:49:56.458558Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:49:56.458567Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:49:56.458571Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:49:56.458574Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:49:56.458579Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:49:56.458582Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:49:56.458585Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... aitInRS 2024-11-21T08:50:00.054037Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:50:00.054040Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T08:50:00.054044Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:50:00.054047Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T08:50:00.054143Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T08:50:00.054154Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:50:00.054164Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:50:00.054166Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:50:00.054170Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T08:50:00.054173Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:50:00.054220Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T08:50:00.054224Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T08:50:00.054227Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit 
CompletedOperations 2024-11-21T08:50:00.054230Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T08:50:00.054234Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:50:00.054237Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T08:50:00.054240Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T08:50:00.054244Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:00.054246Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:50:00.054249Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:50:00.054252Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:50:00.054296Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:334:2307], Recipient [2:334:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:00.054301Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:00.054308Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2024-11-21T08:50:00.054312Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:50:00.054315Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:50:00.054320Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2024-11-21T08:50:00.054323Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2024-11-21T08:50:00.054327Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054330Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2024-11-21T08:50:00.054333Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2024-11-21T08:50:00.054338Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2024-11-21T08:50:00.054434Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T08:50:00.054440Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054444Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2024-11-21T08:50:00.054447Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2024-11-21T08:50:00.054451Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2024-11-21T08:50:00.054455Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054458Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2024-11-21T08:50:00.054462Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2024-11-21T08:50:00.054466Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2024-11-21T08:50:00.054472Z node 2 :TX_DATASHARD TRACE: Operation 
[1000016:45] is the new logically complete end at 9437185 2024-11-21T08:50:00.054476Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2024-11-21T08:50:00.054479Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2024-11-21T08:50:00.054483Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054486Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2024-11-21T08:50:00.054489Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2024-11-21T08:50:00.054492Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2024-11-21T08:50:00.054499Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054502Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2024-11-21T08:50:00.054505Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2024-11-21T08:50:00.054507Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2024-11-21T08:50:00.054511Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054513Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2024-11-21T08:50:00.054516Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2024-11-21T08:50:00.054519Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2024-11-21T08:50:00.054523Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054526Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2024-11-21T08:50:00.054528Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2024-11-21T08:50:00.054531Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2024-11-21T08:50:00.054534Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054537Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T08:50:00.054540Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T08:50:00.054545Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T08:50:00.054591Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T08:50:00.054597Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:50:00.054602Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054605Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T08:50:00.054608Z node 2 :TX_DATASHARD TRACE: 
Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T08:50:00.054611Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:50:00.054645Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T08:50:00.054648Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T08:50:00.054651Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T08:50:00.054653Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T08:50:00.054657Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:50:00.054659Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T08:50:00.054662Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T08:50:00.054665Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:00.054668Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:50:00.054671Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T08:50:00.054673Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T08:50:00.069120Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T08:50:00.069144Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T08:50:00.069157Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:50:00.069166Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:50:00.069185Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:50:00.069195Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:50:00.069261Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T08:50:00.069265Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T08:50:00.069271Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:50:00.069275Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:50:00.069282Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:50:00.069286Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Debug] >> TYardTest::TestLogLatency [GOOD] >> TYardTest::TestMultiYardLogLatency >> TestKinesisHttpProxy::TestWrongStream >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> ControlImplementationTests::TestRegisterLocalControl [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse 
[GOOD]
>> test.py::test[expr-expr_cast-default.txt-Results] [GOOD]
>> test.py::test[expr-expr_yql_data-default.txt-Analyze]
>> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] [GOOD]
>> test.py::test[aggr_factory-histogram-default.txt-Plan]
>> BSCRestartPDisk::RestartNotAllowed [GOOD]
>> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] [GOOD]
>> test.py::test[produce-discard_reduce_lambda-default.txt-Plan] [GOOD]
>> test.py::test[produce-discard_reduce_lambda-default.txt-Results]
>> test.py::test[aggr_factory-histogram-default.txt-Plan] [GOOD]
>> test.py::test[aggr_factory-histogram-default.txt-Results]
>> TestKinesisHttpProxy::DifferentContentTypes [GOOD]
>> TestKinesisHttpProxy::ListShards
>> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] [GOOD]
>> test.py::test[hor_join-empty_out_hor_join-default.txt-Plan] [GOOD]
|86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestRegisterLocalControl [GOOD]
>> TestKinesisHttpProxy::DoubleCreateStream
>> test.py::test[hor_join-empty_out_hor_join-default.txt-Results]
|86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Debug] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD]
Test command err: RandomSeed# 8511531844785125734
>> test.py::test[select-discard-default.txt-ForceBlocks] [GOOD]
>> test.py::test[select-discard-default.txt-Plan] [GOOD]
>> test.py::test[select-discard-default.txt-Results]
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD]
>> test.py::test[window-win_func_on_cloned_source-default.txt-Analyze] [GOOD]
>> test.py::test[window-win_func_on_cloned_source-default.txt-Debug]
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey
>> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD]
>> TestYmqHttpProxy::TestGetQueueUrl [GOOD]
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams
|86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest
>> TYardTest::TestMultiYardLogLatency [GOOD]
>> TYardTest::TestMultiYardFirstRecordToKeep
>> test.py::test[weak_field-few_source_different_columns--Analyze] [GOOD]
>> test.py::test[weak_field-few_source_different_columns--Debug]
>> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue
>> TestKinesisHttpProxy::CreateDeleteStream
>> TBlobStorageWardenTest::TestFilterBadSerials [GOOD]
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError
>> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring
>> test.py::test[params-no_optional_param-default.txt-Analyze] [GOOD]
>> test.py::test[params-no_optional_param-default.txt-Debug]
>> TYardTest::TestMultiYardFirstRecordToKeep [GOOD]
>> TYardTest::TestMultiYardLogMultipleWriteRead
|86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[aggregate-group_by_hop_only_distinct--ForceBlocks] [SKIPPED]
>> test.py::test[json-json_query/common_syntax-default.txt-Analyze] [GOOD]
>> test.py::test[json-json_query/common_syntax-default.txt-Debug]
>>
test.py::test[expr-expr_yql_data-default.txt-Analyze] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Debug] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--Debug] >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> TestYmqHttpProxy::TestReceiveMessage [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> test.py::test[produce-discard_reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Debug] >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TYardTest::TestLogOverwriteRestarts >> TestKinesisHttpProxy::TestRequestBadJson >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2024-11-21T08:50:04.020322Z node 2 :GRPC_CLIENT DEBUG: [730bf040710]{reqId} Connect to grpc://localhost:4293 2024-11-21T08:50:04.021098Z node 2 :GRPC_CLIENT DEBUG: [730bf040710]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2024-11-21T08:50:04.044234Z node 2 :GRPC_CLIENT DEBUG: [730bf040710]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } >> YdbYqlClient::CopyTables >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> TGRpcYdbTest::DropTableBadRequest |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TestKinesisHttpProxy::ListShards [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2024-11-21T08:50:03.728677Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:50:03.729347Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 
Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.729416Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:50:03.729674Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:50:03.729725Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.729846Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T08:50:03.729851Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.729916Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T08:50:03.729920Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.729976Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T08:50:03.729982Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.730038Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T08:50:03.730043Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T08:50:03.730154Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:44:2074] ControllerId# 72057594037932033 2024-11-21T08:50:03.730157Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:50:03.730169Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:50:03.730207Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:50:03.733970Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:50:03.734183Z node 2 :BS_NODE 
DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:50:03.734652Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.734686Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T08:50:03.734856Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [2:91:2068] ControllerId# 72057594037932033 2024-11-21T08:50:03.734861Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:50:03.734877Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:50:03.734910Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:50:03.735733Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:50:03.735840Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:50:03.735959Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:50:03.777334Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.777353Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:03.777481Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.777490Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:50:03.778181Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:50:03.778281Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.778288Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:03.779063Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:50:03.779222Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} 
StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.779291Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.779300Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:50:03.779317Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:50:03.779430Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.779465Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.779478Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:50:03.779809Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:50:03.789069Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { 
VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.789181Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:50:03.789416Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T08:50:0 ... up# false 2024-11-21T08:50:04.532869Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T08:50:04.532875Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T08:50:04.532941Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T08:50:04.532952Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:50:04.532956Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T08:50:04.532972Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T08:50:04.533115Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:29:2059] Cookie# 0 Recipient# [1:439:2377] RecipientRewrite# [1:396:2345] Request# {NodeID: 2 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T08:50:04.533137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 2181038082 } 2024-11-21T08:50:04.533239Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 6960020127241468608 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC//key.txt" EncryptedGroupKey: "H\264\313t\037\036DP\272\270\240\027\364\346\002\335b\362:#\2207\266\311\265\202\033\261\355;\211\301_r\227;" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T08:50:04.533256Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 6960020127241468608 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC//key.txt" EncryptedGroupKey: "H\264\313t\037\036DP\272\270\240\027\364\346\002\335b\362:#\2207\266\311\265\202\033\261\355;\211\301_r\227;" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T08:50:04.533279Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/jptk/004988/r3tmp/tmpO9r9fC//key.txt" MainKey.Version# 1 GroupKeyNonce# 
2181038082 2024-11-21T08:50:04.533475Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T08:50:04.533480Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:50:04.533734Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:599:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533752Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:600:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533766Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:601:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533782Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:602:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533795Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:603:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533810Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:604:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533824Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:605:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T08:50:04.533828Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:50:04.533888Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T08:50:04.534083Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534110Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534138Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534162Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534172Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534180Z node 2 :BS_PROXY DEBUG: 
Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534228Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T08:50:04.534232Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T08:50:04.534236Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T08:50:04.534255Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [2:608:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T08:50:04.534261Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T08:50:04.534295Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 4004788148011919241 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T08:50:04.536753Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T08:50:04.536773Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T08:50:04.536846Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T08:50:04.536870Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T08:50:04.536937Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:609:2508] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:50:04.536979Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.536986Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:50:04.536999Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T08:50:04.537004Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T08:50:04.537036Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:50:04.537084Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T08:50:04.537134Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T08:50:04.537145Z node 1 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T08:50:04.537152Z node 1 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T08:50:04.537218Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Debug] [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Analyze] >> test.py::test[expr-expr_yql_data-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-ForceBlocks] >> test.py::test[json-json_query/common_syntax-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-ForceBlocks] >> test.py::test[aggr_factory-histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Analyze] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] >> 
KqpIndexes::MultipleSecondaryIndex
>> test.py::test[hor_join-fuse_multi_outs1-outlimit-Analyze] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs1-outlimit-Debug] [SKIPPED]
>> KqpIndexes::ForbidViewModification
>> test.py::test[weak_field-few_source_different_columns--Debug] [GOOD]
>> test.py::test[weak_field-few_source_different_columns--ForceBlocks]
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD]
>> TBlobStorageWardenTest::TestHttpMonPage [GOOD]
>> test.py::test[params-no_optional_param-default.txt-Debug] [GOOD]
>> test.py::test[hor_join-fuse_multi_outs1-outlimit-ForceBlocks] [SKIPPED]
>> test.py::test[params-no_optional_param-default.txt-ForceBlocks]
>> test.py::test[hor_join-fuse_multi_outs1-outlimit-Plan] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs2--Analyze]
>> KqpSystemView::NodesRange2
>> test.py::test[hor_join-fuse_multi_outs2--Analyze] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs2--Debug] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs2--ForceBlocks] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs2--Plan]
>> test.py::test[hor_join-fuse_multi_outs2--Plan] [SKIPPED]
>> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED]
>> test.py::test[hor_join-merge_multiouts_part--Analyze] [SKIPPED]
>> test.py::test[hor_join-merge_multiouts_part--Debug]
>> test.py::test[aggregate-group_by_gs_duo--Debug] [GOOD]
>> test.py::test[hor_join-merge_multiouts_part--Debug] [SKIPPED]
>> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] [SKIPPED]
>> test.py::test[hor_join-merge_multiouts_part--Plan] [SKIPPED]
>> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED]
>> test.py::test[hor_join-sorted_out_mix--Analyze] [SKIPPED]
>> test.py::test[aggregate-group_by_gs_duo--Plan] [GOOD]
>> test.py::test[aggregate-group_by_gs_duo--Results]
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD]
>> TestKinesisHttpProxy::CreateDeleteStream [GOOD]
>> TPDiskRaces::OwnerRecreationRaces [GOOD]
>> TPDiskTest::CommitDeleteChunks
>> test.py::test[hor_join-sorted_out_mix--Debug] [SKIPPED]
>> TestKinesisHttpProxy::TestConsumersEmptyNames
>> test.py::test[hor_join-sorted_out_mix--ForceBlocks] [SKIPPED]
>> test.py::test[hor_join-sorted_out_mix--Plan] [SKIPPED]
>> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED]
>> test.py::test[hor_join-yield_on-default.txt-Analyze] [SKIPPED]
>> test.py::test[hor_join-yield_on-default.txt-Debug] [SKIPPED]
>> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] [SKIPPED]
>> test.py::test[hor_join-yield_on-default.txt-Plan] [SKIPPED]
>> test.py::test[hor_join-yield_on-default.txt-Results] [SKIPPED]
>> test.py::test[in-in_ansi-default.txt-Analyze]
>> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD]
>> TestYmqHttpProxy::TestGetQueueAttributes
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer
>> TestYmqHttpProxy::TestCreateQueueWithBadQueueName
>> TPDiskTest::CommitDeleteChunks [GOOD]
>> TPDiskTest::DeviceHaltTooLong
>> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD]
>> TestKinesisHttpProxy::TestRequestBadJson [GOOD]
>> TestYmqHttpProxy::TestSetQueueAttributes
>> TestKinesisHttpProxy::TestWrongStream2 [GOOD]
>> TestKinesisHttpProxy::TestRequestNoAuthorization
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD]
>> TestYmqHttpProxy::TestReceiveMessageWithAttemptId
>> TestKinesisHttpProxy::TestWrongRequest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >>
TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2024-11-21T08:50:03.893723Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:50:03.894351Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpInC8jA/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.894410Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpInC8jA/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:50:03.894674Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:50:03.894728Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.894887Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T08:50:03.894897Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.894987Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T08:50:03.894994Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.895087Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T08:50:03.895093Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:03.895171Z node 
1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T08:50:03.895177Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T08:50:03.895322Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:44:2074] ControllerId# 72057594037932033 2024-11-21T08:50:03.895327Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:50:03.895345Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:50:03.895387Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:50:03.899349Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:50:03.899557Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:50:03.900675Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.900713Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T08:50:03.900846Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [2:91:2068] ControllerId# 72057594037932033 2024-11-21T08:50:03.900851Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:50:03.900865Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:50:03.900905Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:50:03.901736Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:50:03.901857Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:50:03.901975Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:50:03.989671Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.989690Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:03.990787Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} 
StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.990798Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:50:03.991424Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:50:03.991493Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.991500Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:03.991806Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:50:03.991878Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.991930Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.991934Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:50:03.991948Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:50:03.991984Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:50:03.996458Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:03.996528Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:03.996658Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:50:04.015496Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: 
"SectorMap:/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpInC8jA/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:04.015575Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:50:04.015756Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T08:50:0 ... {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:50:04.757321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1001 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:50:04.768572Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } } 2024-11-21T08:50:04.768608Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1001 } } 2024-11-21T08:50:04.777211Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.777278Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.777358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 9780583433809665046 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:50:04.777663Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 9780583433809665046 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:50:04.777786Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.777801Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} 
Handle(TEvStatusUpdate) 2024-11-21T08:50:04.777826Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 9780583433809665046 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:50:04.777856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 9780583433809665046 Status: READY OnlyPhantomsRemain: false } } Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2024-11-21T08:50:04.873550Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1001 Path: "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat" PDiskGuid: 14202034451249287959 PDiskCategory: 0 EntityStatus: CREATE } } } 2024-11-21T08:50:04.873571Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1001 Path: "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat" PDiskGuid: 14202034451249287959 PDiskCategory: 0 EntityStatus: CREATE } } 2024-11-21T08:50:04.873593Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1001 Path# "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:50:04.874727Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat": no such file. PDiskId# 1001 2024-11-21T08:50:04.874781Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat": no such file. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0049a7/r3tmp/tmpF4Sezk//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14202034451249287959 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1001 2024-11-21T08:50:04.888434Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:534:2456] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:370:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T08:50:04.888482Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888487Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888491Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888494Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888497Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888503Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:370:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:50:04.888509Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:370:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:50:04.888521Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:370:1] Marker# BPG33 2024-11-21T08:50:04.888526Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:370:1] Marker# BPG32 2024-11-21T08:50:04.888530Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:370:2] Marker# BPG33 
2024-11-21T08:50:04.888534Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:370:2] Marker# BPG32 2024-11-21T08:50:04.888538Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:370:3] Marker# BPG33 2024-11-21T08:50:04.888542Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:370:3] Marker# BPG32 2024-11-21T08:50:04.888572Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:3] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:50:04.888580Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:2] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:50:04.888586Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:370:1] FDS# 370 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:50:04.889123Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T08:50:04.889158Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T08:50:04.889173Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:370:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 82913 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T08:50:04.889187Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:370:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T08:50:04.889195Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:370:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T08:50:05.411811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 0 AvailableSize: 68557996032 TotalSize: 68719476736 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17112760320 State: Normal } } 2024-11-21T08:50:05.508788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 
68557996032 TotalSize: 68719476736 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34225520640 State: Normal } } 2024-11-21T08:50:05.548595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T08:50:05.548615Z node 1 :BS_CONTROLLER NOTICE: {BSCTXUDM03@disk_metrics.cpp:110} PDisk not found PDiskId# 1:1001 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0 [GOOD] >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryBadRequest >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> KqpIndexes::SecondaryIndexOrderBy >> BSCRestartPDisk::RestartOneByOne [GOOD] >> TestYmqHttpProxy::TestDeleteMessage >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2024-11-21T08:50:04.354415Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:50:04.355210Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmp98hk1r/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:04.355278Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmp98hk1r/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:50:04.355549Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:50:04.355608Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false 
PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:04.355759Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T08:50:04.355767Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:04.355867Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T08:50:04.355873Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:04.355946Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T08:50:04.355951Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:04.356021Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T08:50:04.356028Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T08:50:04.356172Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T08:50:04.356176Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:50:04.356190Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:50:04.367891Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:50:04.371307Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:50:04.371611Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:50:04.371679Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:04.371685Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:04.448955Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:04.448976Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:50:04.449507Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:50:04.449571Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:50:04.449621Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:50:04.460849Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf 
ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmp98hk1r/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T08:50:04.460955Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:50:04.461108Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T08:50:04.461113Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:50:04.461132Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "om0\035\027h\014\'\265\213Bi\267\013\271\366-\374#\034" } 2024-11-21T08:50:04.461171Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T08:50:04.461180Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:75:2118] SessionId# [0:0:0] Cookie# 0 2024-11-21T08:50:04.461189Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.012184s 2024-11-21T08:50:04.461239Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T08:50:04.461243Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T08:50:04.464717Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.465639Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.466132Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.466398Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.466871Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.467018Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.467236Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] 
oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.467247Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.467263Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.467315Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:04.467458Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.467716Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.467781Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.467791Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T08:50:04.481093Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:50:04.501742Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:50:04.502100Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:50:04.502207Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:50:04.509584Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:50:04.509602Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:50:04.509637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:50:04.517384Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:50:04.517425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:50:04.517456Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:50:04.517486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsCont ... 
us Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 2 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.550722Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 0 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.550730Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.550742Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 3 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.550754Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.550774Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } Success: true } 2024-11-21T08:50:05.550782Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } 2024-11-21T08:50:05.550826Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } } 2024-11-21T08:50:05.550868Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551056Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551071Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } Success: true } 2024-11-21T08:50:05.551078Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } 2024-11-21T08:50:05.551102Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } } 2024-11-21T08:50:05.551106Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551149Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551348Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } Success: true } 2024-11-21T08:50:05.551357Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } 2024-11-21T08:50:05.551381Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551396Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } } 2024-11-21T08:50:05.551400Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551443Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551629Z node 2 :BS_NODE DEBUG: 
{NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551648Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } Success: true } 2024-11-21T08:50:05.551696Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.551704Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } } 2024-11-21T08:50:05.573195Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:50:05.573316Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmpbTIKWB/pdisk_1.dat" PDiskGuid: 5840276675566543416 PDiskCategory: 0 EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 5840276675566543416 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 5840276675566543416 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 5840276675566543416 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 5840276675566543416 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } InstanceId: "3f1dc5d3-ffdd9d44-574c1547-a86b3e96" AvailDomain: 1 } 2024-11-21T08:50:05.573352Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmpbTIKWB/pdisk_1.dat" PDiskGuid: 5840276675566543416 PDiskCategory: 0 EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 5840276675566543416 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 5840276675566543416 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 5840276675566543416 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" 
GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 5840276675566543416 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } 2024-11-21T08:50:05.573415Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 2 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/jptk/004976/r3tmp/tmpbTIKWB/pdisk_1.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:50:05.573600Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 5840276675566543416 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:05.573743Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 5840276675566543416 2024-11-21T08:50:05.573795Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 5840276675566543416 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:50:05.573904Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 5840276675566543416 2024-11-21T08:50:05.963230Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 5840276675566543416 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 5840276675566543416 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.963396Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:50:05.963578Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:50:05.968191Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } } 2024-11-21T08:50:05.968283Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } } 2024-11-21T08:50:05.969203Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.969272Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.969303Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 
2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 5840276675566543416 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.969589Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 5840276675566543416 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.969647Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.969659Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T08:50:05.969679Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 5840276675566543416 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:50:05.969706Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 5840276675566543416 Status: READY OnlyPhantomsRemain: false } } >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx0 [GOOD] Test command err: iteration# 0 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 6 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 12 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 18 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 24 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 30 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 36 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 42 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 48 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 54 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 60 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 66 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 72 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 78 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 84 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 90 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 96 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 102 
BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 108 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 114 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 120 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 126 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 132 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 138 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 144 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 150 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 156 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 162 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 168 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 174 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 180 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 186 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 192 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 198 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 204 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 210 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 216 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 222 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 228 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 234 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 240 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 246 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 252 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 258 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 264 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 270 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 276 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 282 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 288 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 294 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 300 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 306 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 312 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 318 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 324 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 330 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 336 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 342 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 348 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 354 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 360 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 366 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 372 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 378 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 384 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 390 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 396 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 402 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 408 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 414 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 420 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 426 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 432 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 438 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 444 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 450 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 456 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 462 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 468 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 474 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 480 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 486 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestYmqHttpProxy::TestListQueues >> test.py::test[blocks-date_equals--Debug] [GOOD] >> test.py::test[blocks-date_equals--Plan] [GOOD] >> 
test.py::test[blocks-date_equals--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 1373531960921207663 >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Plan] >> KqpIndexes::MultipleSecondaryIndex [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Plan] >> test.py::test[window-win_func_on_cloned_source-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup >> test.py::test[weak_field-few_source_different_columns--Plan] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[select-discard-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-Analyze] >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestYmqHttpProxy::TestDeleteQueue >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> TestKinesisHttpProxy::TestCounters >> test.py::test[json-json_query/common_syntax-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Results] >> test.py::test[expr-expr_yql_data-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Results] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestMultiYardHarakiri >> KqpIndexes::SelectConcurentTX2 >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> test.py::test[aggr_factory-hll-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Debug] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> test.py::test[aggregate-group_by_hop_list_key--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Debug] >> test.py::test[params-no_optional_param-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_tz_date--Debug] >> test.py::test[params-no_optional_param-default.txt-Plan] [GOOD] >> 
test.py::test[params-no_optional_param-default.txt-Results] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpIndexes::IndexOr >> TestYmqHttpProxy::TestListQueues [GOOD] >> test.py::test[in-in_ansi-default.txt-Analyze] [GOOD] >> test.py::test[in-in_ansi-default.txt-Debug] >> test.py::test[window-win_by_all_aggregate--Debug] [GOOD] >> test.py::test[window-win_by_all_aggregate--Plan] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2024-11-21T08:49:53.064717Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652126343182024:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cf0/r3tmp/tmpbFzPDJ/pdisk_1.dat 2024-11-21T08:49:53.224329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:49:53.303872Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:53.313350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:53.313386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:53.320930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6751, node 1 2024-11-21T08:49:53.395577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:53.395589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:53.395591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:53.395620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:49:53.492674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.493549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:53.493556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.493698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:53.493735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:53.493738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:53.493815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:53.493816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:53.493842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:53.494436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178993538, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:53.494444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:53.494526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:53.494607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:53.494635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:53.494641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:53.494649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:53.494655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:53.494661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:53.495051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:53.495056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:53.495059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:53.495066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T08:49:53.496639Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17766 2024-11-21T08:49:53.553563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.553620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:53.553624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.553737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:53.553757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.553938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178993601, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:53.553940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178993601, at schemeshard: 72057594046644480 2024-11-21T08:49:53.553973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:53.553985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:53.554065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:53.554114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:53.554141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:53.554356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:53.554413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:53.554416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:53.554423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 
2024-11-21T08:49:53.568734Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:53.569238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.569313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:53.569319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.569334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:53.569373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:53.569378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:53.569511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:53.569537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:53.569585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:53.569848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:53.569856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:53.569859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:53.569873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:53.571261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.571304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:53.571757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIREC ... 
ECTORY, path: /Root/SQS/cloud4/000000000000000101v0 2024-11-21T08:50:15.912152Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912183Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912192Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912514Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T08:50:15.912521Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T08:50:15.912526Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T08:50:15.912566Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T08:50:15.912568Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T08:50:15.912570Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 3 2024-11-21T08:50:15.912805Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179015959, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912810Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179015959, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912831Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715700:0 128 -> 240 2024-11-21T08:50:15.912916Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912938Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:15.912944Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715700:0 ProgressState 2024-11-21T08:50:15.912954Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715700:0 progress is 1/1 2024-11-21T08:50:15.912962Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715700:0 2024-11-21T08:50:15.912970Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715700, publications: 2, subscribers: 0 2024-11-21T08:50:15.913161Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T08:50:15.913165Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T08:50:15.913167Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 6 2024-11-21T08:50:15.913189Z 
node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T08:50:15.913191Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T08:50:15.913192Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 4 2024-11-21T08:50:15.913198Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715700, subscribers: 0 2024-11-21T08:50:15.913643Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715700, at schemeshard: 72057594046644480 2024-11-21T08:50:15.914122Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000101v0/v2, operationId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.914169Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715701:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:15.914296Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715701, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000101v0/v2 2024-11-21T08:50:15.914312Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:15.914333Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:15.914338Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715701:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:15.914501Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T08:50:15.914504Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T08:50:15.914507Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 5 2024-11-21T08:50:15.914529Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T08:50:15.914532Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T08:50:15.914533Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 3 2024-11-21T08:50:15.914837Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715701, at schemeshard: 72057594046644480 2024-11-21T08:50:15.919298Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179015966, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:15.919310Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715701:0 HandleReply 
TEvPrivate::TEvOperationPlan, step: 1732179015966, at schemeshard: 72057594046644480 2024-11-21T08:50:15.919333Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715701:0 128 -> 240 2024-11-21T08:50:15.919445Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:15.919474Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:15.919495Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715701:0 ProgressState 2024-11-21T08:50:15.919504Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715701:0 progress is 1/1 2024-11-21T08:50:15.919513Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715701:0 2024-11-21T08:50:15.919521Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715701, publications: 2, subscribers: 1 2024-11-21T08:50:15.919756Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T08:50:15.919763Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T08:50:15.919768Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 6 2024-11-21T08:50:15.919794Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T08:50:15.919797Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T08:50:15.919798Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 4 2024-11-21T08:50:15.919805Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715701, subscribers: 1 2024-11-21T08:50:15.949382Z node 6 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [fa72512-a4bf2fdb-3de1596c-87c7b906] Got succesfult GRPC response. 
2024-11-21T08:50:15.949409Z node 6 :HTTP_PROXY INFO: http request [CreateQueue] requestId [fa72512-a4bf2fdb-3de1596c-87c7b906] reply ok 2024-11-21T08:50:15.949452Z node 6 :HTTP DEBUG: (#44,[::1]:34748) <- (200 ) 2024-11-21T08:50:15.950830Z node 6 :HTTP DEBUG: (#44,[::1]:34748) connection closed Http output full {"QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName.fifo"} 2024-11-21T08:50:15.951671Z node 6 :HTTP DEBUG: (#44,[::1]:34762) incoming connection opened 2024-11-21T08:50:15.951697Z node 6 :HTTP DEBUG: (#44,[::1]:34762) -> (POST /Root) 2024-11-21T08:50:15.951742Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [988b:6c3e:db56:0:808b:6c3e:db56:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 8eed85cc-51f9982b-b3bf13bc-24931359 2024-11-21T08:50:15.951899Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8eed85cc-51f9982b-b3bf13bc-24931359] got new request from [988b:6c3e:db56:0:808b:6c3e:db56:0] 2024-11-21T08:50:15.953189Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [8eed85cc-51f9982b-b3bf13bc-24931359] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:15.953197Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8eed85cc-51f9982b-b3bf13bc-24931359] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:15.994335Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [8eed85cc-51f9982b-b3bf13bc-24931359] Got succesfult GRPC response. 2024-11-21T08:50:15.994402Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8eed85cc-51f9982b-b3bf13bc-24931359] reply ok 2024-11-21T08:50:15.994469Z node 6 :HTTP DEBUG: (#44,[::1]:34762) <- (200 ) 2024-11-21T08:50:15.994540Z node 6 :HTTP DEBUG: (#44,[::1]:34762) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"a9c39826-f13fddf2-5659866e-e8e1925f"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"b51a76e2-f59a08ca-ad36196f-1bd215c9"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> TestYmqHttpProxy::TestPurgeQueue >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TYardTest::TestMultiYardHarakiri [GOOD] >> TYardTest::TestLogOwerwrite >> test.py::test[select-refselect-1000-Analyze] [GOOD] >> test.py::test[select-refselect-1000-Debug] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> TYardTest::TestLogOwerwrite [GOOD] >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexInsert1 >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] Test command err: Trying to start YDB, gRPC: 29293, MsgBus: 9846 2024-11-21T08:50:08.724373Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652194140676549:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:09.250264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001df7/r3tmp/tmpoMBUVH/pdisk_1.dat 2024-11-21T08:50:09.772181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:09.772221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:09.772805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29293, node 1 2024-11-21T08:50:09.811210Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:50:09.811219Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:50:09.854375Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:09.972074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:09.972084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:09.972087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:09.972117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9846 TClient is connected to server localhost:9846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:10.928246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:10.948312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:10.972644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:11.290839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:11.497387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:11.597331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:13.624405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652215615514586:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:13.662983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:13.682654Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652194140676549:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:13.682672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:13.684340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.727684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.752743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.767496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.798882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.845518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.922455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652215615515084:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:13.922474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:13.922563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652215615515089:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:13.923871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:13.940636Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:50:13.940918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652215615515091:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:14.461289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:14.622703Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652219910482902:2489], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2024-11-21T08:50:14.622875Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVhOTE3Mi0zMGQ2ODJjZC02NmE3NWYwNi1hNDZlMmViMA==, ActorId: [1:7439652219910482678:2459], ActorState: ExecuteState, TraceId: 01jd6ymcyx8hps1wr5wgkyfedr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:50:14.625068Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652219910482906:2491], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2024-11-21T08:50:14.625202Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVhOTE3Mi0zMGQ2ODJjZC02NmE3NWYwNi1hNDZlMmViMA==, ActorId: [1:7439652219910482678:2459], ActorState: ExecuteState, TraceId: 01jd6ymcyz2zst8r6zd3vv9xhs, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:50:14.626881Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652219910482910:2493], status: GENERIC_ERROR, issues:
:3:41: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2024-11-21T08:50:14.626987Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVhOTE3Mi0zMGQ2ODJjZC02NmE3NWYwNi1hNDZlMmViMA==, ActorId: [1:7439652219910482678:2459], ActorState: ExecuteState, TraceId: 01jd6ymcz20zdpbj8pwsry5j1j, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:50:14.628665Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652219910482914:2495], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2024-11-21T08:50:14.628788Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVhOTE3Mi0zMGQ2ODJjZC02NmE3NWYwNi1hNDZlMmViMA==, ActorId: [1:7439652219910482678:2459], ActorState: ExecuteState, TraceId: 01jd6ymcz4cvgc7dhyx45jwrj3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 12985, MsgBus: 21042 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001df7/r3tmp/tmpGMw2nd/pdisk_1.dat 2024-11-21T08:50:15.136482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:15.152784Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12985, node 2 2024-11-21T08:50:15.176484Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:15.176493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:15.176495Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:15.176526Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:15.208419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:15.208443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:15.212561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21042 TClient is connected to server localhost:21042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:15.332993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.334307Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:15.344669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:15.364787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.400377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.421890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:16.305700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652225613397711:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.305755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.313402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.340614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.363387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.383274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.403237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.420731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.457497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652225613398212:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.457547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.457819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652225613398217:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.459104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:16.464671Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:16.464796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652225613398219:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:16.992471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup [GOOD] >> KqpMultishardIndex::DuplicateUpsert+StreamLookup >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Analyze] >> test.py::test[json-json_query/common_syntax-default.txt-Results] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Analyze] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Results] [GOOD] >> test.py::test[expr-len--Analyze] >> TestKinesisHttpProxy::ListShardsToken >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Analyze] |86.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> TestKinesisHttpProxy::GoodRequestCreateStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] Test command err: 2024-11-21T08:49:10.852126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:49:10.852732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:49:10.852757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044f9/r3tmp/tmpOsBTEg/pdisk_1.dat 2024-11-21T08:49:10.960984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:49:10.978236Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:11.020443Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:49:11.020791Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:49:11.020842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:11.020862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:11.031374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:11.144523Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:49:11.144551Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:49:11.144585Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:49:11.156614Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:49:11.156846Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:49:11.156861Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:49:11.156920Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:49:11.156952Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:49:11.156965Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:49:11.157035Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:49:11.157392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:11.157642Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:49:11.157654Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:49:11.171395Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:49:11.171597Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:49:11.171673Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:49:11.171722Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:49:11.180432Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:49:11.180605Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:49:11.180630Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:49:11.180788Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:49:11.180797Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:49:11.180803Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:49:11.180845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:49:11.184655Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:49:11.184718Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:49:11.184743Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:49:11.184748Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:49:11.184753Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:49:11.184758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:49:11.184886Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.184894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:49:11.185017Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:49:11.185036Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:49:11.185049Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.185054Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:49:11.185061Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:49:11.185069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:49:11.185075Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:49:11.185081Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:49:11.185087Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:49:11.185090Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:49:11.185096Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:49:11.185101Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:49:11.185119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:49:11.185123Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:49:11.185152Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:49:11.185197Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:49:11.185207Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:49:11.185221Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:49:11.185229Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:49:11.185234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:49:11.185239Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:49:11.185243Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.185284Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:49:11.185289Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:49:11.185293Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:49:11.185296Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.185306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:49:11.185310Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:49:11.185314Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:49:11.185317Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:49:11.185323Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:49:11.185571Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:49:11.185578Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:49:11.196450Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:49:11.196480Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:49:11.196487Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:49:11.196499Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:50:16.571627Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:639:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:50:16.571635Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T08:50:16.571647Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:50:16.571663Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 8 2024-11-21T08:50:16.571672Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:642:2544]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:50:16.571676Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037889 2024-11-21T08:50:16.571682Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037889, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:50:16.571690Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 8 2024-11-21T08:50:16.849798Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:50:16.849823Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:50:16.849843Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037890 outdated step 62000 last cleanup 0 2024-11-21T08:50:16.849854Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:16.849861Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T08:50:16.849865Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:50:16.849868Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:50:16.849891Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient 
[6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:50:16.849900Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037891 2024-11-21T08:50:16.849912Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037891, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:50:16.849929Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037891, FollowerId 0, tableId 3 2024-11-21T08:50:16.850126Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:50:16.850131Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037890 2024-11-21T08:50:16.850138Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037890, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:50:16.850146Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037890, FollowerId 0, tableId 3 2024-11-21T08:50:16.872137Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:50:16.872165Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:50:16.872193Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037891 outdated step 62000 last cleanup 0 2024-11-21T08:50:16.872223Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:16.872230Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2024-11-21T08:50:16.872234Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2024-11-21T08:50:16.872237Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2024-11-21T08:50:18.000614Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Handle TEvProposeTransaction 2024-11-21T08:50:18.000643Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] TxId# 281474976715669 ProcessProposeTransaction 2024-11-21T08:50:18.000659Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [6:1499:3277] DataReq marker# P0 2024-11-21T08:50:18.000687Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] Cookie# 0 txid# 281474976715669 HANDLE TDataReq marker# P1 2024-11-21T08:50:18.000753Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2024-11-21T08:50:18.000804Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T08:50:18.000827Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T08:50:18.000840Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037889 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T08:50:18.000918Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:639:2542]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 
25769807053 } TxBody: " \0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T08:50:18.000926Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:50:18.000962Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:18.001004Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T08:50:18.001019Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:50:18.001039Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T08:50:18.001043Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:50:18.001048Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:50:18.001052Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:18.001059Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T08:50:18.001062Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:50:18.001066Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T08:50:18.001080Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:18.001085Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:18.001091Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T08:50:18.001098Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037888 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037888 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T08:50:18.001119Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:18.001157Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# BAD_REQUEST shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2024-11-21T08:50:18.001171Z node 6 :TX_PROXY ERROR: Actor# [6:1499:3277] txid# 281474976715669 RESPONSE Status# WrongRequest marker# P13c 2024-11-21T08:50:18.001200Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:642:2544]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 25769807053 } TxBody: " 
\0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T08:50:18.001204Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:50:18.001219Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:50:18.001231Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T08:50:18.001237Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit CheckDataTx 2024-11-21T08:50:18.001244Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is Executed 2024-11-21T08:50:18.001248Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit CheckDataTx 2024-11-21T08:50:18.001251Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037889 to execution unit FinishPropose 2024-11-21T08:50:18.001255Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T08:50:18.001259Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is DelayComplete 2024-11-21T08:50:18.001262Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit FinishPropose 2024-11-21T08:50:18.001265Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037889 has finished 2024-11-21T08:50:18.001272Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:50:18.001275Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T08:50:18.001279Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037889 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T08:50:18.001283Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037889 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037889 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T08:50:18.001289Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 6642, MsgBus: 62011 2024-11-21T08:50:11.642486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652207004919420:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.642728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c9d/r3tmp/tmphTReEq/pdisk_1.dat 2024-11-21T08:50:12.126737Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:12.137688Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.137710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.144917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6642, node 1 2024-11-21T08:50:12.344757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:12.344777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:12.344781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:12.344847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62011 TClient is connected to server localhost:62011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:12.833442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:12.911248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:13.194738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:13.455194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:13.544037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.465101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652224184790157:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.466870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.479367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.515616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.601904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.671279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.697129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.778293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.864513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652224184790665:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.864538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.864737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652224184790670:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.865949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:15.870074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652224184790673:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:16.189027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:16.636975Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652207004919420:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:16.637028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:17.940462Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ymfy616pewgfgxz30ff8r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ2MjJjYzgtYjA4NTg0N2QtYmE4ZjA5YTMtZjZkNTZjYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T08:50:17.942841Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ2MjJjYzgtYjA4NTg0N2QtYmE4ZjA5YTMtZjZkNTZjYTE=, ActorId: [1:7439652228479758951:2518], ActorState: ExecuteState, TraceId: 01jd6ymfy616pewgfgxz30ff8r, Create QueryResponse for error on request, msg: >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2024-11-21T08:49:56.177770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652139294292333:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cbb/r3tmp/tmp1vlkmk/pdisk_1.dat 2024-11-21T08:49:56.278094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:49:56.346547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10501, node 1 2024-11-21T08:49:56.376191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:56.376201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:56.376223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:56.376255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10330 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:56.452619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.453832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.453844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.453980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:56.454030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:56.454033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:49:56.454133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:56.454135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:49:56.454164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.454499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178996499, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.454502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:49:56.454557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:49:56.454651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.454673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.454680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:49:56.454686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:49:56.454691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:49:56.454697Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T08:49:56.455104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:49:56.455110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:49:56.455112Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:56.455122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2024-11-21T08:49:56.460653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:49:56.477224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:56.477244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:56.484556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10330 2024-11-21T08:49:56.520538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.520597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.520604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.520716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:56.520735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.520966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178996569, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.520970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732178996569, at schemeshard: 72057594046644480 2024-11-21T08:49:56.521005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:49:56.521021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:49:56.521028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:49:56.521088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.521117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.521347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, 
msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:49:56.521353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:49:56.521356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:56.521380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 2024-11-21T08:49:56.522079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T08:49:56.522658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.522707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.522711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.522721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:49:56.522734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:49:56.522736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T08:49:56.522810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:56.522824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.523005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.523648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:49:56.523654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:49:56.523657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:56.523667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T08:49:56.524186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.524247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.524347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRE ... 240 2024-11-21T08:50:16.286813Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T08:50:16.286822Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T08:50:16.286830Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T08:50:16.287722Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.287825Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T08:50:16.287977Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:16.287981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.288103Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T08:50:16.288122Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:16.288150Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:16.288158Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:50:16.288446Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:16.288452Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:16.288455Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T08:50:16.288486Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:16.288488Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:16.288490Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 waiting... 
2024-11-21T08:50:16.289338Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T08:50:16.293556Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:50:16.293581Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T08:50:16.293771Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:50:16.304671Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:50:16.304680Z node 6 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:50:16.304703Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T08:50:16.304801Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:50:16.305028Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179016351, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:16.305033Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179016351 2024-11-21T08:50:16.305058Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T08:50:16.305164Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:16.305233Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:16.305240Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:50:16.305476Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:16.305482Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:16.305487Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T08:50:16.305514Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:16.305516Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:16.305518Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T08:50:16.306565Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732179016351 OrderId: 281474976715686 ExecLatency: 0 
ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 248 } } 2024-11-21T08:50:16.306810Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:50:16.306817Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.306821Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T08:50:16.306873Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2024-11-21T08:50:16.306881Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T08:50:16.306891Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T08:50:17.322309Z node 6 :HTTP INFO: Listening on http://[::]:4835 2024-11-21T08:50:17.324629Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652231994224865:2406], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:17.325889Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652231994224916:2412], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:17.325902Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652231994224917:2413], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:17.325911Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652231994224915:2411], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.325935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.374331Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439652231994224913:2410]: Pool not found 2024-11-21T08:50:17.413812Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439652231994224911:2409]: Pool not found 2024-11-21T08:50:17.414708Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652231994225039:2432], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.414724Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652231994225040:2433], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:17.414731Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.447024Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439652231994225037:2431]: Pool not found Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2024-11-21T08:50:18.324527Z node 6 :HTTP DEBUG: (#44,[::1]:51694) incoming connection opened 2024-11-21T08:50:18.324570Z node 6 :HTTP DEBUG: (#44,[::1]:51694) -> (POST /Root) 2024-11-21T08:50:18.324629Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [58d2:523b:f516:0:40d2:523b:f516:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 76726bff-8be7b1a5-dcb809a4-51961a61 2024-11-21T08:50:18.324706Z node 6 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [76726bff-8be7b1a5-dcb809a4-51961a61] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2024-11-21T08:50:18.324741Z node 6 :HTTP DEBUG: (#44,[::1]:51694) <- (400 InvalidAction) 2024-11-21T08:50:18.324751Z node 6 :HTTP DEBUG: (#44,[::1]:51694) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2024-11-21T08:50:18.324756Z node 6 :HTTP DEBUG: (#44,[::1]:51694) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 76726bff-8be7b1a5-dcb809a4-51961a61 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2024-11-21T08:50:18.324808Z node 6 :HTTP DEBUG: (#44,[::1]:51694) connection closed >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter >> test.py::test[params-no_optional_param-default.txt-Results] [GOOD] >> test.py::test[pg-join_using4-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD] Test command err: 2024-11-21T08:47:00.251034Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.264363Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7998603848714728843 MagicNextLogChunkReference: 2214235938712975669 MagicLogChunk: 12322506553130331990 MagicDataChunk: 14293559684137424685 MagicSysLogChunk: 2159934127482365292 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820148401 (2024-11-21T08:47:00.148401Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:00.275704Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:00.287512Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 
0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:00.287706Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:00.287885Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.292270Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.292387Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:00.344405Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1384324 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:00.344892Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T08:47:00.345747Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:00.435201Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.464367Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:47:00.472387Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:47:00.607129Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.607158Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:47:00.607432Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 1320088837025162254 MagicLogChunk: 16059328658808281128 MagicDataChunk: 17466933347399928135 MagicSysLogChunk: 13956483061124379805 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820518331 (2024-11-21T08:47:00.518331Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:00.613833Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:00.614950Z 
:BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:00.614972Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:00.620359Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.620590Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:00.620656Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:00.620830Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1343971 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:00.621252Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T08:47:00.622187Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:00.718347Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:00.738717Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T08:47:00.744378Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T08:47:01.044572Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:01.044590Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T08:47:01.072299Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 17630733234250962882 MagicLogChunk: 6711302570736989493 MagicDataChunk: 16347955885758591724 MagicSysLogChunk: 1161205394544482269 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178820795198 (2024-11-21T08:47:00.795198Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:01.096439Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 
NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:01.109904Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:47:01.109936Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:47:01.112362Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.112529Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:47:01.112568Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:47:01.147292Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1771800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:47:01.205231Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T08:47:01.240319Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T08:47:01.409177Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:47:01.436315Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 6431680611676697224 MagicNextLogChunkReference: 13774539549093343638 MagicLogChunk: 13110220196056076646 MagicDataChunk: 9991255715055332182 MagicSysLogChunk: 5495933620522010253 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178821328632 (2024-11-21T08:47:01.328632Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:47:01.444369Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:47:01.453852Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 
2024-11-21T08:47:01.453 ... ng ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:50:17.965605Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 96 SectorIdx# 475 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 96 OffsetInChunk# 1945600} PDiskId# 1 2024-11-21T08:50:17.965642Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 96 OffsetInChunk# 1945600} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:50:17.995745Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:50:18.016334Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 3317760000 bytes (3 GB) Guid: 0 MagicNextLogChunkReference: 18024681293106484682 MagicLogChunk: 13517836633858962650 MagicDataChunk: 8493285776412036945 MagicSysLogChunk: 14057849879133406658 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 96 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732179016357645 (2024-11-21T08:50:16.357645Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:50:18.037674Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 8681065 NonceLog# 8264169 NonceData# 8650688} LogHeadChunkIdx# 96 LogHeadChunkPreviousNonce# 3986189} PDiskId# 1 2024-11-21T08:50:18.052654Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 96 SectorIdx# 476 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 96 OffsetInChunk# 1949696} PDiskId# 1 2024-11-21T08:50:18.052689Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 96 OffsetInChunk# 1949696} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:50:18.054043Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 96 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:50:18.054175Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 96 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:50:18.054242Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:50:18.092660Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 9817770 CutLogId# [0:0:0] 
ownerRound# 2 PDiskId# 1 2024-11-21T08:50:18.251793Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:50:18.260502Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 1890020294594145804 MagicNextLogChunkReference: 12509291043929730349 MagicLogChunk: 13855372300938975117 MagicDataChunk: 1583537662427585821 MagicSysLogChunk: 87562193665012443 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732179018166955 (2024-11-21T08:50:18.166955Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:50:18.268830Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:50:18.276125Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:50:18.276158Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:50:18.281779Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:50:18.281931Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:50:18.281975Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:50:18.348438Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1479031 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:50:18.366347Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},{chunkIdx# 2 users# 0 endOfSplice# 0},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 2},},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-2 firstLsnToKeep# 2},},] PDiskId# 1 2024-11-21T08:50:18.366365Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 2},},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-2 firstLsnToKeep# 2},},] PDiskId# 1 2024-11-21T08:50:18.369589Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 3 users# 0 endOfSplice# 0},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 
lsn# 2-3 firstLsnToKeep# 3},},{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:50:18.369601Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T08:50:18.373001Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 4 users# 0 endOfSplice# 0},{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 4},},{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-4 firstLsnToKeep# 4},},] PDiskId# 1 2024-11-21T08:50:18.373014Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 4},},{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-4 firstLsnToKeep# 4},},] PDiskId# 1 2024-11-21T08:50:18.378918Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 5 users# 0 endOfSplice# 0},{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T08:50:18.378930Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T08:50:18.413818Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:50:18.424601Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 1890020294594145804 MagicNextLogChunkReference: 12509291043929730349 MagicLogChunk: 13855372300938975117 MagicDataChunk: 1583537662427585821 MagicSysLogChunk: 87562193665012443 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732179018166955 (2024-11-21T08:50:18.166955Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:50:18.432411Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1639696 NonceLog# 1482636 NonceData# 2077993} LogHeadChunkIdx# 6 LogHeadChunkPreviousNonce# 1481579 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:50:18.468633Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 8 SectorIdx# 36 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 8 OffsetInChunk# 147456} PDiskId# 1 2024-11-21T08:50:18.468673Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 8 OffsetInChunk# 147456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:50:18.476505Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T08:50:18.477415Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T08:50:18.477472Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:50:18.512585Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns [GOOD] Test command err: Trying to start YDB, gRPC: 25478, MsgBus: 25848 2024-11-21T08:50:08.153064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652192264466587:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:08.153078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e05/r3tmp/tmplkW92c/pdisk_1.dat 2024-11-21T08:50:09.208426Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:09.209328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:09.209341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:09.212547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:09.213725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25478, node 1 2024-11-21T08:50:09.368609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:09.368622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:09.368623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:09.368663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25848 TClient is connected to server localhost:25848 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:10.029297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:10.101005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:10.118534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:10.255583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:10.482624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:10.603497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:12.216165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652205149370187:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:12.217713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:12.313276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.343361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.423976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.457638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.473570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.502384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:12.556926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652209444337977:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:12.556975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:12.557127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652209444337984:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:12.558791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:12.564087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:50:12.564179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652209444337986:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:13.153790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652192264466587:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:13.153812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:13.274389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2927, MsgBus: 19594 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e05/r3tmp/tmpoImU5Z/pdisk_1.dat 2024-11-21T08:50:14.773845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:14.780618Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2927, node 2 2024-11-21T08:50:14.806456Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:14.806466Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:14.806468Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:14.806497Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19594 2024-11-21T08:50:14.848811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:14.848836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:14.856456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:14.984998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:14.986437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:14.996899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.037510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.072195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.090232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.514842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652222459605498:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.514859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.523113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.543277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.567202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.583137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.598649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.616681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.640833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652222459606003:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.640856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.640938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652222459606008:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.641608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:15.648588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652222459606010:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:16.051814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.866946Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:17.431910Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:17.670110Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:17.936001Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:17.946513Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:18.105876Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:18.130098Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:18.142387Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:18.221494Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:50:18.221798Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976715751 at tablet 72075186224037920 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976715751] at 72075186224037920 while waiting for scan finish) | 2024-11-21T08:50:18.232170Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715751 at tablet 72075186224037920 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976715751] at 72075186224037920 while waiting for scan finish) | >> KqpIndexes::SecondaryIndexInsert1 [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> KqpIndexes::CheckUpsertNonEquatableType+NotNull >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 9995, MsgBus: 20849 2024-11-21T08:50:12.811834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652207384036253:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:12.820709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d3d/r3tmp/tmpIouyDC/pdisk_1.dat 2024-11-21T08:50:13.887177Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:13.893594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:13.893619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:13.896731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:13.898179Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9995, node 1 2024-11-21T08:50:14.048223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:14.048236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:14.048237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:14.048274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20849 TClient is connected to server localhost:20849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:14.531864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:14.566603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:14.698018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:14.764134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:14.841429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.356121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652220268939591:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.411764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.432852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.465184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.482097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.503701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.543388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.582810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:15.612709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652220268940128:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.612734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.612894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652220268940133:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:15.614735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:15.618376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:50:15.618452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652220268940135:2430], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:16.313971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:17.796482Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652207384036253:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:17.796512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16026, MsgBus: 22522 2024-11-21T08:50:18.699651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652234265891215:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:18.701469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d3d/r3tmp/tmptcK99s/pdisk_1.dat 2024-11-21T08:50:18.733833Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16026, node 2 2024-11-21T08:50:18.773233Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:18.773248Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:18.773250Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:18.773292Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22522 2024-11-21T08:50:18.805355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:18.805397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:18.810466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:18.908897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:18.917600Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:18.919192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.988888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.021513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.049421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.261497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652238560859931:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.261518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.264777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.284079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.297795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.309527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.323759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.337509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.358559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652238560860434:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.358575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.358636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652238560860439:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.359322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:19.361711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652238560860441:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:19.744173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] Test command err: 2024-11-21T08:49:53.732874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652129458144456:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:53.732908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cef/r3tmp/tmpX6Otrz/pdisk_1.dat 2024-11-21T08:49:53.962921Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24595, node 1 2024-11-21T08:49:53.984395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:53.984405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:53.984406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:53.984436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:49:54.066491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.067467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.067475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.067631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:54.067668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:54.067672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:49:54.067763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:54.067765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:49:54.067801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.068116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178994112, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.068121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:49:54.068167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:49:54.068279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.068309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.068315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:49:54.068323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:49:54.068328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:49:54.068336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T08:49:54.068777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:49:54.068789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:49:54.068794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:54.068804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 
2024-11-21T08:49:54.072669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:49:54.092563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:54.092590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:54.096595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30599 2024-11-21T08:49:54.128844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.128920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.128926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:54.129101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178994175, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732178994175, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:49:54.129455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:49:54.129462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:49:54.129542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.129851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:49:54.129858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:49:54.129861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:54.129871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2024-11-21T08:49:54.136689Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T08:49:54.140672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.140740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.140746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.140764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:49:54.140786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:49:54.140789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T08:49:54.140917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:54.140941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.141007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.141226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:49:54.141232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:49:54.141235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:54.141247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T08:49:54.145598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.145648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.145815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRE ... 
976715696:0 3 -> 128 2024-11-21T08:50:19.621207Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.632657Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179019676, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:19.632680Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715696:0 HandleReply TEvOperationPlan, step: 1732179019676, at tablet: 72057594046644480 2024-11-21T08:50:19.632756Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715696:0 128 -> 240 2024-11-21T08:50:19.632929Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:19.632994Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:19.633003Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715696:0 ProgressState 2024-11-21T08:50:19.633011Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715696:0 progress is 1/1 2024-11-21T08:50:19.633018Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715696:0 2024-11-21T08:50:19.633071Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715696, publications: 1, subscribers: 1 2024-11-21T08:50:19.633412Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715696 2024-11-21T08:50:19.633419Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715696 2024-11-21T08:50:19.633423Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715696, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T08:50:19.633440Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715696, subscribers: 1 Http output full {"Consumer":{"ConsumerCreationTimestamp":1732179.019,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 200 {"Consumer":{"ConsumerCreationTimestamp":1732179.019,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} E0000 00:00:1732179019.637212 281284 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:50:19.636602Z node 7 :HTTP_PROXY INFO: http request [RegisterStreamConsumer] requestId [48f8c79-5012e414-9570abe-cf876ac] reply ok 2024-11-21T08:50:19.636685Z node 7 :HTTP DEBUG: (#44,[::1]:42314) <- (200 ) 2024-11-21T08:50:19.636724Z node 7 :HTTP DEBUG: (#44,[::1]:42314) connection closed 2024-11-21T08:50:19.637097Z node 7 :HTTP DEBUG: (#47,[::1]:42326) incoming connection opened 2024-11-21T08:50:19.637110Z node 7 :HTTP DEBUG: (#47,[::1]:42326) -> (POST /Root) 2024-11-21T08:50:19.637141Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8ef:7abd:1e45:0:c0ef:7abd:1e45:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 6c73dc3e-404c3ba8-8ced59f7-e32d3049 2024-11-21T08:50:19.637238Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [6c73dc3e-404c3ba8-8ced59f7-e32d3049] got 
new request from [d8ef:7abd:1e45:0:c0ef:7abd:1e45:0] database '/Root' stream 'teststream' E0000 00:00:1732179019.637412 281283 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179019.637422 281283 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179019.637439 281283 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179019.637445 281283 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:50:19.637405Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [6c73dc3e-404c3ba8-8ced59f7-e32d3049] [auth] Authorized successfully 2024-11-21T08:50:19.637431Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [6c73dc3e-404c3ba8-8ced59f7-e32d3049] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:19.637690Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [6c73dc3e-404c3ba8-8ced59f7-e32d3049] reply ok 2024-11-21T08:50:19.637730Z node 7 :HTTP DEBUG: (#47,[::1]:42326) <- (200 ) 2024-11-21T08:50:19.637765Z node 7 :HTTP DEBUG: (#47,[::1]:42326) connection closed Http output full {"NextToken":"CPWOqe+0MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732179020,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"CPWOqe+0MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732179020,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 2024-11-21T08:50:19.638060Z node 7 :HTTP DEBUG: (#44,[::1]:42340) incoming connection opened 2024-11-21T08:50:19.638072Z node 7 :HTTP DEBUG: (#44,[::1]:42340) -> (POST /Root) 2024-11-21T08:50:19.638089Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d820:32bb:1e45:0:c020:32bb:1e45:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: d49cac8f-64c7da90-81fd7876-98cfe1c7 2024-11-21T08:50:19.638475Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d49cac8f-64c7da90-81fd7876-98cfe1c7] got new request from [d820:32bb:1e45:0:c020:32bb:1e45:0] database '/Root' stream '' 2024-11-21T08:50:19.638710Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [d49cac8f-64c7da90-81fd7876-98cfe1c7] [auth] Authorized successfully 2024-11-21T08:50:19.638759Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d49cac8f-64c7da90-81fd7876-98cfe1c7] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:19.638905Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d49cac8f-64c7da90-81fd7876-98cfe1c7] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T08:50:19.638937Z node 7 :HTTP DEBUG: (#44,[::1]:42340) <- (400 InvalidArgumentException) 2024-11-21T08:50:19.638944Z node 7 :HTTP DEBUG: (#44,[::1]:42340) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 61 { "NextToken":"CPWOqe+0MhABGAEiCnRlc3RzdHJlYW0=garbage", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T08:50:19.638948Z node 7 :HTTP DEBUG: (#44,[::1]:42340) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: d49cac8f-64c7da90-81fd7876-98cfe1c7 x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T08:50:19.638969Z node 7 :HTTP DEBUG: (#44,[::1]:42340) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T08:50:19.639233Z node 7 :HTTP DEBUG: (#44,[::1]:42342) incoming connection opened 2024-11-21T08:50:19.639244Z node 7 :HTTP DEBUG: (#44,[::1]:42342) -> (POST /Root) 2024-11-21T08:50:19.639284Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d820:32bb:1e45:0:c020:32bb:1e45:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 63cedf8c-b2ec9543-4120fb96-8ab1849c 2024-11-21T08:50:19.639348Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [63cedf8c-b2ec9543-4120fb96-8ab1849c] got new request from [d820:32bb:1e45:0:c020:32bb:1e45:0] database '/Root' stream '' 2024-11-21T08:50:19.639457Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [63cedf8c-b2ec9543-4120fb96-8ab1849c] [auth] Authorized successfully 2024-11-21T08:50:19.639502Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [63cedf8c-b2ec9543-4120fb96-8ab1849c] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:19.639639Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [63cedf8c-b2ec9543-4120fb96-8ab1849c] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T08:50:19.639666Z node 7 :HTTP DEBUG: (#44,[::1]:42342) <- (400 InvalidArgumentException) 2024-11-21T08:50:19.639672Z node 7 :HTTP DEBUG: (#44,[::1]:42342) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "NextToken":"Y18AjrkckeND9kjnbEhjhkX$^[]?!", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T08:50:19.639675Z node 7 :HTTP DEBUG: (#44,[::1]:42342) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 63cedf8c-b2ec9543-4120fb96-8ab1849c x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T08:50:19.639693Z node 7 :HTTP DEBUG: (#44,[::1]:42342) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T08:50:19.639882Z node 7 :HTTP DEBUG: (#47,[::1]:42346) incoming connection opened 2024-11-21T08:50:19.639892Z node 7 :HTTP DEBUG: (#47,[::1]:42346) -> (POST /Root) 2024-11-21T08:50:19.639919Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d848:69bd:1e45:0:c048:69bd:1e45:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: d40717ad-9ff96e79-d6865bd2-71b70a96 2024-11-21T08:50:19.639956Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d40717ad-9ff96e79-d6865bd2-71b70a96] got new request from [d848:69bd:1e45:0:c048:69bd:1e45:0] database '/Root' stream 'teststream' 2024-11-21T08:50:19.640067Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [d40717ad-9ff96e79-d6865bd2-71b70a96] [auth] Authorized successfully 2024-11-21T08:50:19.640077Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d40717ad-9ff96e79-d6865bd2-71b70a96] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:19.640286Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [d40717ad-9ff96e79-d6865bd2-71b70a96] reply ok 2024-11-21T08:50:19.640318Z node 7 :HTTP DEBUG: (#47,[::1]:42346) <- (200 ) 2024-11-21T08:50:19.640338Z node 7 :HTTP DEBUG: (#47,[::1]:42346) connection closed Http output full {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732179020,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732179020,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} >> test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Plan] >> test.py::test[in-in_ansi-default.txt-Debug] [GOOD] >> test.py::test[in-in_ansi-default.txt-ForceBlocks] >> test.py::test[blocks-date_less_or_equal_scalar--Plan] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> test.py::test[select-refselect-1000-Debug] [GOOD] >> test.py::test[select-refselect-1000-ForceBlocks] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] |86.6%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Analyze] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Debug] >> test.py::test[expr-len--Analyze] [GOOD] >> test.py::test[expr-len--Debug] >> KqpSystemView::NodesRange2 [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> test.py::test[json-json_query/passing-default.txt-Analyze] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 29583, MsgBus: 29675 2024-11-21T08:50:15.149584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652222940667240:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:15.150805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c0c/r3tmp/tmpGKoG6z/pdisk_1.dat 2024-11-21T08:50:15.478817Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:15.480223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:15.480237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:15.485278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29583, node 1 2024-11-21T08:50:15.744518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:15.744532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:15.744534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:15.744571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29675 TClient is connected to server localhost:29675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:16.397628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:16.408931Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:16.429567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:50:16.482146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.538586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:16.592395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:16.954862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652227235635984:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.954912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:16.961268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.979200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:16.992289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:17.009205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:17.026028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:17.046028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:17.116731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652231530603798:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.116756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.116904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652231530603803:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:17.121209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:17.124575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652231530603805:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:17.634354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 26573, MsgBus: 23515 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c0c/r3tmp/tmppKU7B3/pdisk_1.dat 2024-11-21T08:50:18.946480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:18.947055Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26573, node 2 2024-11-21T08:50:18.964647Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:18.964660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:18.964662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:18.964711Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23515 2024-11-21T08:50:19.032834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:19.032867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:19.033758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:19.057161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.060151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:19.078289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.107497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:19.152736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.166450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.417424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652237517974788:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.417450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.420736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.437252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.448985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.463226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.476037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.490376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.517760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652237517975291:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.517787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.517919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652237517975296:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.518724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:19.520943Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:19.521011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652237517975298:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:19.842071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] Test command err: Trying to start YDB, gRPC: 6090, MsgBus: 16259 2024-11-21T08:50:16.455476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652226605607211:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:16.455565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bdc/r3tmp/tmppslXtC/pdisk_1.dat 2024-11-21T08:50:16.645177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:16.645203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:16.652984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:16.674643Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6090, node 1 2024-11-21T08:50:16.716421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:16.716437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:16.716439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:16.716484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16259 TClient is connected to server localhost:16259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:17.044891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:17.047760Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:17.055079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:17.140569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:17.187027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:17.209952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.022506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652230900575998:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.031351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.036380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.064712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.135115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.207113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.249715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.339421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.428312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652235195543855:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.428355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.428486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652235195543862:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.429784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:18.444556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:50:18.444625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652235195543864:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:18.703907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.083010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmU0MGQzMzItZjkzZDg5ZWEtNmQxNjFiMGItNGNlOTIxYzA=, ActorId: [1:7439652235195544141:2456], ActorState: ExecuteState, TraceId: 01jd6ymh6p0ykevwz3jrkrjnh2, Create QueryResponse for error on request, msg: 2024-11-21T08:50:19.091925Z node 1 :TX_DATASHARD ERROR: Complete [1732179019123 : 281474976710681] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | Trying to start YDB, gRPC: 1372, MsgBus: 3390 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bdc/r3tmp/tmpdcwbHv/pdisk_1.dat 2024-11-21T08:50:19.448704Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1372, node 2 2024-11-21T08:50:19.481121Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:19.481133Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:19.481137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:19.481176Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:19.508717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:19.508743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:19.509214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3390 TClient is connected to server localhost:3390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:19.615633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.628340Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:19.645491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.679964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.718716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.777207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.862593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652237675206006:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.862641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.869426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.880926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.900851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.970907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.987025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:20.005858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:20.024458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652241970173822:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:20.024481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:20.024556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652241970173827:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:20.025194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:20.028712Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:20.028783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652241970173829:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:20.470154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:20.553111Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439652241970174363:2455] TxId: 281474976715672. Ctx: { TraceId: 01jd6ymjr6fwb8vhz80hqhz6k5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T08:50:20.553237Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, ActorId: [2:7439652241970174157:2455], ActorState: ExecuteState, TraceId: 01jd6ymjr6fwb8vhz80hqhz6k5, Create QueryResponse for error on request, msg: 2024-11-21T08:50:20.568686Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439652241970174377:2455] TxId: 281474976715674. Ctx: { TraceId: 01jd6ymjrgfxp50njgxnm8pmf5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T08:50:20.568798Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, ActorId: [2:7439652241970174157:2455], ActorState: ExecuteState, TraceId: 01jd6ymjrgfxp50njgxnm8pmf5, Create QueryResponse for error on request, msg: 2024-11-21T08:50:20.572836Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439652241970174386:2455] TxId: 281474976715676. Ctx: { TraceId: 01jd6ymjrvbr9r6e1epsenn58k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T08:50:20.572935Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, ActorId: [2:7439652241970174157:2455], ActorState: ExecuteState, TraceId: 01jd6ymjrvbr9r6e1epsenn58k, Create QueryResponse for error on request, msg: 2024-11-21T08:50:20.580783Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439652241970174395:2455] TxId: 281474976715678. Ctx: { TraceId: 01jd6ymjry8h14c8p0m0a38qrf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T08:50:20.580918Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ZjMDgxOGYtODBiYjY0ZTAtNWE4NTQxMDItMTUxMWRlZjM=, ActorId: [2:7439652241970174157:2455], ActorState: ExecuteState, TraceId: 01jd6ymjry8h14c8p0m0a38qrf, Create QueryResponse for error on request, msg: >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> test.py::test[pg-join_using4-default.txt-Analyze] [GOOD] >> test.py::test[pg-join_using4-default.txt-Debug] >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Debug] >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> TMonitoringTests::ValidActorId >> KqpUniqueIndex::InsertFkPartialColumnSet >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> test.py::test[aggr_factory-hll-default.txt-Debug] [GOOD] >> TMonitoringTests::ValidActorId [GOOD] >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 6685, MsgBus: 15275 2024-11-21T08:50:10.931942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652201637701025:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.065408Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439652204405058762:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.068535Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439652206124641028:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.105026Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439652203934773191:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.475603Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652202476904989:2248];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:11.485091Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:11.491879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:11.504023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:11.514241Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:11.528474Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002373/r3tmp/tmpSWUVES/pdisk_1.dat 2024-11-21T08:50:12.153146Z node 1 
:IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:12.198277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.198307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.198812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.198975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.198991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.198995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.199190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.199360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.204452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:12.204466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:12.204669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:50:12.204820Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T08:50:12.204822Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:50:12.204836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:12.206408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:12.206437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:12.206449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:12.221188Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T08:50:12.229320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6685, node 1 2024-11-21T08:50:12.704583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:12.704788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:12.704790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:12.704825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15275 TClient is connected to server localhost:15275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:13.918827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:14.018388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:15.921336Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652201637701025:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:15.921375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:15.997401Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652202476904989:2248];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:15.997433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:16.014309Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439652204405058762:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:16.014333Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:16.032369Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439652206124641028:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:16.032403Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:16.092257Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439652203934773191:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:16.092293Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:18.238921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.372587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.465125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.736643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652235997441087:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.745933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.777082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.852399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.887082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.968753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.055644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.114434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.216852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652240292409043:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.216881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.216955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652240292409048:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.217798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:19.254081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652240292409050:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:19.893992Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179019861, txId: 281474976710671] shutting down 2024-11-21T08:50:20.144730Z node 2 :BS_PROXY_PUT ERROR: [0c5c0b0e6aa332f0] Result# TEvPutResult {Id# [72075186224037913:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T08:50:20.220698Z node 5 :BS_PROXY_PUT ERROR: [2fb79ab9c9a4b832] Result# TEvPutResult {Id# [72075186224037911:1:16:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T08:50:20.224742Z node 3 :BS_PROXY_PUT ERROR: [611ed6ed70f51fbc] Result# TEvPutResult {Id# [72075186224037914:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T08:50:20.274431Z node 4 :BS_PROXY_PUT ERROR: [f64198d88b7a48c0] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpIndexes::IndexFilterPushDown [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2024-11-21T08:50:06.016611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652184276806912:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:06.017154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004184/r3tmp/tmpAToTxx/pdisk_1.dat 2024-11-21T08:50:06.484061Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:06.517674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:06.517854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:06.528614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23565, node 1 2024-11-21T08:50:07.624575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:07.624585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:07.624586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:07.624616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30745 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:09.193441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:09.196596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:09.196616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:09.204459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:09.204518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:09.204522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:50:09.205595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:09.205604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:50:09.206095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:09.207625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179009253, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:09.207634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:50:09.207693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 waiting... 
2024-11-21T08:50:09.208280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:09.208321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:09.208330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:50:09.208340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:50:09.208347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:50:09.208358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T08:50:09.209683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:50:09.209693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:50:09.209696Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:50:09.210210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T08:50:09.211518Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:10.217742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:10.218423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.218429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:10.220455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2024-11-21T08:50:10.503541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179010548, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:10.544753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:50:10.551725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:10.552150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.557910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2024-11-21T08:50:10.576033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179010625, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:10.590532Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710659, done: 0, blocked: 1 2024-11-21T08:50:10.591102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:50:10.592944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:50:10.593043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.593046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976710660:1, at schemeshard: 72057594046644480 2024-11-21T08:50:10.593502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.595278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4 2024-11-21T08:50:10.654680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179010702, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:10.670337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 2 2024-11-21T08:50:10.670924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T08:50:10.670948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 2024-11-21T08:50:10.673161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:50:10.673849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.673852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976710661:1, at schemeshard: 72057594046644480 2024-11-21T08:50:10.674227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.674228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-7, opId: 281474976710661:2, at schemeshard: 72057594046644480 2024-11-21T08:50:10.674250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.674252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-8, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:50:10.674273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:10.676269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-5, dst path: /Root/Table-6, dst path: /Root/Table-7, dst path: /Root/Table-8 2024-11-21T08:50:10.784879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179010828, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T08:50:10.806213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710661, done: 0, blocked: 4 2024-11-21T08:50:10.807244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2024-11-21T08:50:10.807267Z nod ... hemeshard: 72057594046644480 2024-11-21T08:50:21.088671Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:50:21.088844Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:21.088848Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.096521Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T08:50:21.096584Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.096638Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.096665Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:50:21.097148Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.097156Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.097160Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:50:21.097197Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.097199Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.097200Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:50:21.097893Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:50:21.112810Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:50:21.112857Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T08:50:21.115034Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:50:21.141187Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:50:21.141197Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 
72057594046644480 2024-11-21T08:50:21.141222Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T08:50:21.141792Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:50:21.142434Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179021188, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:21.142443Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179021188 2024-11-21T08:50:21.142466Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T08:50:21.149297Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.149412Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.149426Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:50:21.149945Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.149954Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.149959Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:50:21.150001Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.150004Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.150005Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:50:21.153244Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732179021188 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 314 } } 2024-11-21T08:50:21.159407Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:50:21.159424Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.159431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T08:50:21.159991Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:50:21.160009Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:50:21.160019Z node 10 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:50:21.166524Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Test, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.166632Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:21.166636Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.167078Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: Root/Test 2024-11-21T08:50:21.167108Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterTable, at tablet72057594046644480 2024-11-21T08:50:21.167113Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T08:50:21.167450Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.167940Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:50:21.184642Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#281474976715659:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 message# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: PREPARED TxId: 281474976715659 MinStep: 1732179021189 MaxStep: 18446744073709551615 PrepareArriveTime: 1732179021167808 ExecLatency: 0 ProposeLatency: 16 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 41 } } 2024-11-21T08:50:21.184652Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:50:21.184680Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2024-11-21T08:50:21.185263Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.186011Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179021230, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:21.186022Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#281474976715659:0 HandleReply TEvOperationPlan, operationId: 281474976715659:0, stepId: 1732179021230, at schemeshard: 72057594046644480 2024-11-21T08:50:21.186092Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 129 2024-11-21T08:50:21.187221Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.187290Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.187301Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:50:21.187682Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 
4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:50:21.187691Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:50:21.187695Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2024-11-21T08:50:21.188661Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715659 Step: 1732179021230 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 250 } } 2024-11-21T08:50:21.189164Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715659:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:50:21.189173Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.189178Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 129 -> 240 2024-11-21T08:50:21.189524Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T08:50:21.189537Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:50:21.189547Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2024-11-21T08:49:54.108339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652132488783970:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cea/r3tmp/tmpXRqh5c/pdisk_1.dat 2024-11-21T08:49:54.201621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:49:54.276453Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:54.300721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:54.300743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:54.303348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8958, node 1 2024-11-21T08:49:54.340113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:54.340126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:54.340128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:54.340162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6890 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:54.468836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.469819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.469827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.469935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:54.469964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:54.469967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:54.470041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:54.470042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:54.470070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.470318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178994518, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.470325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:54.470364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:54.470477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.470501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.470507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:54.470514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:54.470520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:54.470525Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:54.470918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:54.470929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:54.470932Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:54.470941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T08:49:54.476543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:6890 2024-11-21T08:49:54.668786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.668848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.668852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.668948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:54.668964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.669171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178994714, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.669174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178994714, at schemeshard: 72057594046644480 2024-11-21T08:49:54.669208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:54.669221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:54.669226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:54.669277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.669305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.669531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:54.669535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:54.669539Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, 
txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:54.669547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 2024-11-21T08:49:54.680600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:54.688452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.688508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.688512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.688524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:54.688542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:54.688545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:54.688641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:54.688657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.688694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.688873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:54.688877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:54.688880Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:54.688889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:54.696173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.696231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.696382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTO ... 5e421-cef6b6cf-5a1343a2-75364185] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 
2024-11-21T08:50:21.455883Z node 7 :HTTP DEBUG: (#44,[::1]:40434) <- (400 AWS.SimpleQueueService.EmptyBatchRequest) 2024-11-21T08:50:21.455889Z node 7 :HTTP DEBUG: (#44,[::1]:40434) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ ] } 0 2024-11-21T08:50:21.455893Z node 7 :HTTP DEBUG: (#44,[::1]:40434) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: a565e421-cef6b6cf-5a1343a2-75364185 x-amz-crc32: 2331440613 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T08:50:21.455919Z node 7 :HTTP DEBUG: (#44,[::1]:40434) connection closed Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T08:50:21.456347Z node 7 :HTTP DEBUG: (#47,[::1]:40436) incoming connection opened 2024-11-21T08:50:21.456363Z node 7 :HTTP DEBUG: (#47,[::1]:40436) -> (POST /Root) 2024-11-21T08:50:21.456382Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d838:d8ba:9f16:0:c038:d8ba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 771d904c-5e4d3def-1b55dae5-a840e22b 2024-11-21T08:50:21.456453Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [771d904c-5e4d3def-1b55dae5-a840e22b] got new request from [d838:d8ba:9f16:0:c038:d8ba:9f16:0] 2024-11-21T08:50:21.456511Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [771d904c-5e4d3def-1b55dae5-a840e22b] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.456514Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [771d904c-5e4d3def-1b55dae5-a840e22b] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.456791Z node 7 :SQS WARN: Request [771d904c-5e4d3def-1b55dae5-a840e22b] Message with offset 2 was not found in infly 2024-11-21T08:50:21.465091Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [771d904c-5e4d3def-1b55dae5-a840e22b] Got succesfult GRPC response. 
2024-11-21T08:50:21.465134Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [771d904c-5e4d3def-1b55dae5-a840e22b] reply ok 2024-11-21T08:50:21.465187Z node 7 :HTTP DEBUG: (#47,[::1]:40436) <- (200 ) 2024-11-21T08:50:21.465234Z node 7 :HTTP DEBUG: (#47,[::1]:40436) connection closed Http output full {"Failed":[{"Message":"No such message.","Id":"Id-0","Code":"InvalidParameterValue","SenderFault":true}]} 2024-11-21T08:50:21.465731Z node 7 :HTTP DEBUG: (#44,[::1]:40438) incoming connection opened 2024-11-21T08:50:21.465751Z node 7 :HTTP DEBUG: (#44,[::1]:40438) -> (POST /Root) 2024-11-21T08:50:21.465829Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1882:d9ba:9f16:0:82:d9ba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 101f628c-6c876765-6b065074-caba41c4 2024-11-21T08:50:21.466017Z node 7 :HTTP_PROXY WARN: http request [ChangeMessageVisibilityBatch] requestId [101f628c-6c876765-6b065074-caba41c4] got new request with incorrect json from [1882:d9ba:9f16:0:82:d9ba:9f16:0] 2024-11-21T08:50:21.466023Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [101f628c-6c876765-6b065074-caba41c4] reply with status: BAD_REQUEST message: [json.exception.type_error.302] type must be string, but is number 2024-11-21T08:50:21.466049Z node 7 :HTTP DEBUG: (#44,[::1]:40438) <- (400 InvalidArgumentException) 2024-11-21T08:50:21.466057Z node 7 :HTTP DEBUG: (#44,[::1]:40438) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked c3 { "QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ { "Id":"Id-0", "ReceiptHandle":0 } ] } 0 2024-11-21T08:50:21.466062Z node 7 :HTTP DEBUG: (#44,[::1]:40438) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 101f628c-6c876765-6b065074-caba41c4 x-amz-crc32: 3176743181 Content-Type: application/x-amz-json-1.1 Content-Length: 116 {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T08:50:21.466085Z node 7 :HTTP DEBUG: (#44,[::1]:40438) connection closed Http output full {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T08:50:21.466373Z node 7 :HTTP DEBUG: (#44,[::1]:40440) incoming connection opened 2024-11-21T08:50:21.466384Z node 7 :HTTP DEBUG: (#44,[::1]:40440) -> (POST /Root) 2024-11-21T08:50:21.466422Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1882:d9ba:9f16:0:82:d9ba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: e6cbdc82-7df60a92-ec8465c-7f6d360f 2024-11-21T08:50:21.466478Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e6cbdc82-7df60a92-ec8465c-7f6d360f] got new request from [1882:d9ba:9f16:0:82:d9ba:9f16:0] 2024-11-21T08:50:21.466537Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [e6cbdc82-7df60a92-ec8465c-7f6d360f] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.466540Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e6cbdc82-7df60a92-ec8465c-7f6d360f] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.466929Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [e6cbdc82-7df60a92-ec8465c-7f6d360f] Got succesfult GRPC response. 2024-11-21T08:50:21.466953Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e6cbdc82-7df60a92-ec8465c-7f6d360f] reply ok 2024-11-21T08:50:21.466974Z node 7 :HTTP DEBUG: (#44,[::1]:40440) <- (200 ) 2024-11-21T08:50:21.467001Z node 7 :HTTP DEBUG: (#44,[::1]:40440) connection closed Http output full {"Failed":[{"Message":"VisibilityTimeout was not provided.","Id":"Id-0","Code":"MissingParameter","SenderFault":true}]} 2024-11-21T08:50:21.467264Z node 7 :HTTP DEBUG: (#47,[::1]:40452) incoming connection opened 2024-11-21T08:50:21.467278Z node 7 :HTTP DEBUG: (#47,[::1]:40452) -> (POST /Root) 2024-11-21T08:50:21.467296Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d89a:d8ba:9f16:0:c09a:d8ba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 9be40030-6042069-b5eed5df-c4414004 2024-11-21T08:50:21.467348Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [9be40030-6042069-b5eed5df-c4414004] got new request from [d89a:d8ba:9f16:0:c09a:d8ba:9f16:0] 2024-11-21T08:50:21.467383Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [9be40030-6042069-b5eed5df-c4414004] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.467385Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [9be40030-6042069-b5eed5df-c4414004] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.467669Z node 7 :SQS WARN: Request [9be40030-6042069-b5eed5df-c4414004] Failed to process receipt handle : (yexception) ydb/core/ymq/base/helpers.cpp:147: Condition violated: `!decoded.empty()' 2024-11-21T08:50:21.467774Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [9be40030-6042069-b5eed5df-c4414004] Got succesfult GRPC response. 
2024-11-21T08:50:21.467789Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [9be40030-6042069-b5eed5df-c4414004] reply ok 2024-11-21T08:50:21.467810Z node 7 :HTTP DEBUG: (#47,[::1]:40452) <- (200 ) 2024-11-21T08:50:21.467835Z node 7 :HTTP DEBUG: (#47,[::1]:40452) connection closed Http output full {"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"Id-0","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2024-11-21T08:50:21.468076Z node 7 :HTTP DEBUG: (#44,[::1]:40456) incoming connection opened 2024-11-21T08:50:21.468089Z node 7 :HTTP DEBUG: (#44,[::1]:40456) -> (POST /Root) 2024-11-21T08:50:21.468111Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [988e:daba:9f16:0:808e:daba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 57548707-924e9cd4-f06546a1-fa78940c 2024-11-21T08:50:21.468169Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [57548707-924e9cd4-f06546a1-fa78940c] got new request from [988e:daba:9f16:0:808e:daba:9f16:0] 2024-11-21T08:50:21.468237Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [57548707-924e9cd4-f06546a1-fa78940c] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.468239Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [57548707-924e9cd4-f06546a1-fa78940c] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.475646Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [57548707-924e9cd4-f06546a1-fa78940c] Got succesfult GRPC response. 2024-11-21T08:50:21.475686Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [57548707-924e9cd4-f06546a1-fa78940c] reply ok 2024-11-21T08:50:21.475740Z node 7 :HTTP DEBUG: (#44,[::1]:40456) <- (200 ) 2024-11-21T08:50:21.475803Z node 7 :HTTP DEBUG: (#44,[::1]:40456) connection closed Http output full {"Successful":[{"Id":"Id-0"}]} 2024-11-21T08:50:21.476374Z node 7 :HTTP DEBUG: (#47,[::1]:40460) incoming connection opened 2024-11-21T08:50:21.476397Z node 7 :HTTP DEBUG: (#47,[::1]:40460) -> (POST /Root) 2024-11-21T08:50:21.476435Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d815:d8ba:9f16:0:c015:d8ba:9f16:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: d438c444-840cf13e-e29712b4-df5cc7fa 2024-11-21T08:50:21.476557Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d438c444-840cf13e-e29712b4-df5cc7fa] got new request from [d815:d8ba:9f16:0:c015:d8ba:9f16:0] 2024-11-21T08:50:21.476621Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [d438c444-840cf13e-e29712b4-df5cc7fa] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.476624Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d438c444-840cf13e-e29712b4-df5cc7fa] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.485185Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [d438c444-840cf13e-e29712b4-df5cc7fa] Got succesfult GRPC response. 
2024-11-21T08:50:21.485230Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d438c444-840cf13e-e29712b4-df5cc7fa] reply ok 2024-11-21T08:50:21.485287Z node 7 :HTTP DEBUG: (#47,[::1]:40460) <- (200 ) 2024-11-21T08:50:21.485333Z node 7 :HTTP DEBUG: (#47,[::1]:40460) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} |86.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Analyze] [GOOD] Test command err: 2024-11-21T08:49:55.868705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652135577361512:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:55.868735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ccc/r3tmp/tmpmrJfsj/pdisk_1.dat 2024-11-21T08:49:55.981168Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:55.985652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:55.985677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:55.992558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6368, node 1 2024-11-21T08:49:56.064442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:56.064455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:56.064458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:56.064497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23278 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:56.207022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.208336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.208346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.209207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:56.209419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:56.209422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:56.209537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:56.209543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:56.209570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.209864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178996254, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.209868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:56.209908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:56.210108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.210142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.210150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:56.210160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:56.210167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:56.210176Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:56.210765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:56.210772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:56.210776Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:56.210789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T08:49:56.216404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:23278 2024-11-21T08:49:56.312769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.312846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.312853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.312983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:56.313018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.313279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178996359, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.313283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178996359, at schemeshard: 72057594046644480 2024-11-21T08:49:56.313321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:56.313340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:56.313348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:56.313425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.313459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.313727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:56.313734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:56.313738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, 
txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:56.313749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 2024-11-21T08:49:56.316646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:56.324655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.324724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.324730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.324745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:56.324771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:56.324775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:56.324883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:56.324910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.324958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.325218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:56.325225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:56.325229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:56.325241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:56.332613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.332664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.332863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIREC ... eQueue] requestId [592855cf-d049b5db-e5ef12c9-1d78ac02] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:21.536166Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [592855cf-d049b5db-e5ef12c9-1d78ac02] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:21.536883Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4, operationId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.536913Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715706:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72057594046644480 2024-11-21T08:50:21.537143Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715706, database: /Root, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4 2024-11-21T08:50:21.548699Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0, operationId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.548757Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715711:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:21.548961Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715711, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0 2024-11-21T08:50:21.548995Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.549035Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.549045Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.549298Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T08:50:21.549305Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T08:50:21.549309Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 7 2024-11-21T08:50:21.549345Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T08:50:21.549347Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T08:50:21.549348Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 3 2024-11-21T08:50:21.549864Z node 7 
:FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715711, at schemeshard: 72057594046644480 2024-11-21T08:50:21.552580Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179021601, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:21.552592Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179021601, at schemeshard: 72057594046644480 2024-11-21T08:50:21.552637Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715711:0 128 -> 240 2024-11-21T08:50:21.552753Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.552786Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.552793Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715711:0 ProgressState 2024-11-21T08:50:21.552802Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715711:0 progress is 1/1 2024-11-21T08:50:21.552810Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715711:0 2024-11-21T08:50:21.552821Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715711, publications: 2, subscribers: 1 2024-11-21T08:50:21.553117Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T08:50:21.553123Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T08:50:21.553126Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T08:50:21.553158Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T08:50:21.553161Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T08:50:21.553162Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T08:50:21.553168Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715711, subscribers: 1 2024-11-21T08:50:21.553870Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.553928Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715712:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:21.554067Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715712, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T08:50:21.554086Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.554107Z node 7 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.554113Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715712:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.554259Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T08:50:21.554264Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T08:50:21.554267Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T08:50:21.554296Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T08:50:21.554299Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T08:50:21.554300Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T08:50:21.554668Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715712, at schemeshard: 72057594046644480 2024-11-21T08:50:21.558933Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179021608, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:21.558943Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715712:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179021608, at schemeshard: 72057594046644480 2024-11-21T08:50:21.558969Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715712:0 128 -> 240 2024-11-21T08:50:21.559095Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.559134Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.559141Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715712:0 ProgressState 2024-11-21T08:50:21.559151Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715712:0 progress is 1/1 2024-11-21T08:50:21.559159Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715712:0 2024-11-21T08:50:21.559168Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715712, publications: 2, subscribers: 1 2024-11-21T08:50:21.559336Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T08:50:21.559343Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T08:50:21.559345Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T08:50:21.559374Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, 
at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T08:50:21.559377Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T08:50:21.559379Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T08:50:21.559385Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715712, subscribers: 1 2024-11-21T08:50:21.590373Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [592855cf-d049b5db-e5ef12c9-1d78ac02] Got succesfult GRPC response. 2024-11-21T08:50:21.590405Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [592855cf-d049b5db-e5ef12c9-1d78ac02] reply ok 2024-11-21T08:50:21.590458Z node 7 :HTTP DEBUG: (#47,[::1]:60550) <- (200 ) 2024-11-21T08:50:21.590501Z node 7 :HTTP DEBUG: (#47,[::1]:60550) connection closed Http output full {"QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo"} |86.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |86.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> test.py::test[window-win_fuse_window-default.txt-Debug] >> TestKinesisHttpProxy::TestCounters [FAIL] >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] >> KqpMultishardIndex::DuplicateUpsert-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 3788, MsgBus: 20965 2024-11-21T08:50:17.229877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652230734799717:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:17.316167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b54/r3tmp/tmpyjucCd/pdisk_1.dat 2024-11-21T08:50:17.476440Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:17.492522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:17.492551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:17.497233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3788, node 1 2024-11-21T08:50:17.634680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:17.634690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:17.634692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:17.634724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20965 TClient is connected to server localhost:20965 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:18.209129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.220441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:18.231511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.377839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.596597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.629287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.984486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652235029768436:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.984594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.003801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.067678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.094224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.113066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.150798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.182993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.222120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652239324736276:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.222149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.222299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652239324736281:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.223127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:19.234210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652239324736283:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:19.630925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.674201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.697084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9714, MsgBus: 61779 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b54/r3tmp/tmpOYjXHW/pdisk_1.dat 2024-11-21T08:50:20.881215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:20.885099Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9714, node 2 2024-11-21T08:50:20.904711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:20.904721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:20.904724Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:20.904760Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61779 2024-11-21T08:50:20.948682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:20.948730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:20.949995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:21.060827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:21.062252Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:21.069229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.126369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.174997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.202143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.387176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249209878902:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.387267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.389950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.401639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.425889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.436813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.451743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.464927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.494613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249209879406:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.494636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.494805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249209879411:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.495568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:21.498945Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:21.499028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652249209879413:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:21.733764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.750552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.762713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 15240, MsgBus: 27028 2024-11-21T08:50:18.054453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652234926772859:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:18.054462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b9c/r3tmp/tmpQfZ5TX/pdisk_1.dat 2024-11-21T08:50:18.361927Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15240, node 1 2024-11-21T08:50:18.416180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:18.416200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:18.416202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:18.416264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:18.500702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:18.500727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:18.501502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27028 TClient is connected to server localhost:27028 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:18.668981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.674940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:18.687018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.807090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.917624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.945062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:19.256217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652239221741733:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.256299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.292788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.305191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.332979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.351958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.378533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.455825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.481551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652239221742265:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.481573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.481691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652239221742270:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:19.482453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:19.489865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652239221742272:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:19.949697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:20.530219Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ymjj9e710aryjnryjshaf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM4MjdmMDEtNWNiYTU2N2YtZTQzYTQzNDctZmI5MzA4ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T08:50:20.532995Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM4MjdmMDEtNWNiYTU2N2YtZTQzYTQzNDctZmI5MzA4ZDE=, ActorId: [1:7439652243516710628:2514], ActorState: ExecuteState, TraceId: 01jd6ymjj9e710aryjnryjshaf, Create QueryResponse for error on request, msg: 2024-11-21T08:50:20.661537Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ymjqs87v6c4b20n3w3wnw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM4MjdmMDEtNWNiYTU2N2YtZTQzYTQzNDctZmI5MzA4ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T08:50:20.661632Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM4MjdmMDEtNWNiYTU2N2YtZTQzYTQzNDctZmI5MzA4ZDE=, ActorId: [1:7439652243516710628:2514], ActorState: ExecuteState, TraceId: 01jd6ymjqs87v6c4b20n3w3wnw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5874, MsgBus: 3805 2024-11-21T08:50:20.960314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652245594223313:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:20.962536Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b9c/r3tmp/tmpNZVoRQ/pdisk_1.dat 2024-11-21T08:50:21.005665Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5874, node 2 2024-11-21T08:50:21.023522Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:21.023540Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:21.023542Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:21.023585Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3805 2024-11-21T08:50:21.074036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:21.074063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:21.079820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:21.185398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.188582Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:21.209295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.233485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.257094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.272889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.582634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249889192021:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.582654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.587806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.600603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.610863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.625048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.644957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.658410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.678317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249889192535:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.678346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.678420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652249889192540:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.679194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:21.682038Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:50:21.682113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652249889192542:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:50:21.897638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.256855Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ymmac3kwrzwate8p5y54e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmVlMDExMzYtZWJiMmUxMzgtZmQ5MDQwOTctNjA4ZDMzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T08:50:22.256918Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmVlMDExMzYtZWJiMmUxMzgtZmQ5MDQwOTctNjA4ZDMzYTU=, ActorId: [2:7439652249889193593:2513], ActorState: ExecuteState, TraceId: 01jd6ymmac3kwrzwate8p5y54e, Create QueryResponse for error on request, msg: 2024-11-21T08:50:22.441774Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ymmec61h7t587se6s7rck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmVlMDExMzYtZWJiMmUxMzgtZmQ5MDQwOTctNjA4ZDMzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T08:50:22.441863Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmVlMDExMzYtZWJiMmUxMzgtZmQ5MDQwOTctNjA4ZDMzYTU=, ActorId: [2:7439652249889193593:2513], ActorState: ExecuteState, TraceId: 01jd6ymmec61h7t587se6s7rck, Create QueryResponse for error on request, msg: >> test.py::test[in-in_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/passing-default.txt-ForceBlocks] >> TestKinesisHttpProxy::TestEmptyHttpBody >> test.py::test[in-in_ansi-default.txt-Plan] >> test.py::test[aggregate-group_by_tz_date--Debug] [GOOD] >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> test.py::test[aggregate-group_by_tz_date--Plan] [GOOD] >> test.py::test[in-in_ansi-default.txt-Plan] [GOOD] >> test.py::test[in-in_ansi-default.txt-Results] >> test.py::test[select-refselect-1000-ForceBlocks] [GOOD] >> test.py::test[select-refselect-1000-Plan] [GOOD] >> test.py::test[select-refselect-1000-Results] >> test.py::test[aggregate-group_by_tz_date--Results] >> test.py::test[produce-native_desc_reduce_with_presort--Debug] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Plan] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Results] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2024-11-21T08:49:53.536352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652126516203912:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:53.536370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cdf/r3tmp/tmp0mDy8f/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27376, node 1 2024-11-21T08:49:53.888254Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:49:53.888273Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:53.892652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:53.892664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:53.892666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:53.892708Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:53.943179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:53.944112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:53.944319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:53.944322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:53.944403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:53.944430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178993993, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:53.944776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:53.944857Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:53.944890Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:53.944898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:53.944905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:53.944911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:53.945327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:53.945333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:53.945337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:53.945348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T08:49:53.946062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:53.981267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:53.981296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:53.982520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1976 2024-11-21T08:49:54.194463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.194544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.194550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.194732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:54.194758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.195044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178994238, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.195047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178994238, at schemeshard: 72057594046644480 2024-11-21T08:49:54.195111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:54.195136Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:54.195144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:54.195255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.195291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.195579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:54.195586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:54.195590Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:54.195601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 2024-11-21T08:49:54.212682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:54.220637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.220728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.220733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.220752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:54.220779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:54.220783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:54.220930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:54.220958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.221005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.221268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:54.221273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:54.221277Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:54.221287Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:54.225202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.225251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 waiting... 2024-11-21T08:49:54.225471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281 ... d 4 2024-11-21T08:50:22.110931Z node 7 :PERSQUEUE DEBUG: Initializing topic 'teststream' partition 1. Completed. 2024-11-21T08:50:22.110938Z node 7 :PERSQUEUE INFO: [PQ: 72075186224037907, Partition: 1, State: StateInit] init complete for topic 'teststream' partition 1 generation 1 [7:7439652251787033608:2461] 2024-11-21T08:50:22.110942Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907, Partition: 1, State: StateInit] SYNC INIT topic teststream partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:50:22.110990Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Http output full {} 200 {} E0000 00:00:1732179022.114164 283007 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:50:22.112629Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [8a2876b9-934c61fd-1928d28f-c3bff587] reply ok 2024-11-21T08:50:22.112766Z node 7 :HTTP DEBUG: (#44,[::1]:56082) <- (200 ) 2024-11-21T08:50:22.112827Z node 7 :HTTP DEBUG: (#44,[::1]:56082) connection closed 2024-11-21T08:50:22.113417Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2024-11-21T08:50:22.113427Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2024-11-21T08:50:22.113437Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2024-11-21T08:50:22.113469Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2024-11-21T08:50:22.113474Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2024-11-21T08:50:22.113640Z node 7 :HTTP DEBUG: (#44,[::1]:56088) incoming connection opened 2024-11-21T08:50:22.113658Z node 7 :HTTP DEBUG: (#44,[::1]:56088) -> (POST /Root) 2024-11-21T08:50:22.113747Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [98d5:317b:e405:0:80d5:317b:e405:0] request [ListShards] url [/Root] database [/Root] requestId: e87c5d09-68503411-ec01dac0-a476fcf8 2024-11-21T08:50:22.113933Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [e87c5d09-68503411-ec01dac0-a476fcf8] got new request from [98d5:317b:e405:0:80d5:317b:e405:0] database '/Root' stream 'teststream' 2024-11-21T08:50:22.114126Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [e87c5d09-68503411-ec01dac0-a476fcf8] [auth] Authorized successfully 2024-11-21T08:50:22.114142Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [e87c5d09-68503411-ec01dac0-a476fcf8] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:22.117313Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:22.117325Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439652251787033651:2474], now have 1 
active actors on pipe 2024-11-21T08:50:22.117329Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:22.117332Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439652251787033652:2475], now have 1 active actors on pipe 2024-11-21T08:50:22.117701Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [e87c5d09-68503411-ec01dac0-a476fcf8] reply ok 2024-11-21T08:50:22.117754Z node 7 :HTTP DEBUG: (#44,[::1]:56088) <- (200 ) 2024-11-21T08:50:22.117815Z node 7 :HTTP DEBUG: (#44,[::1]:56088) connection closed Http output full {"NextToken":"CKWiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CKWiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T08:50:22.118250Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.118259Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439652251787033652:2475] destroyed E0000 00:00:1732179022.119608 283006 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:50:22.119158Z node 7 :HTTP DEBUG: (#47,[::1]:56092) incoming connection opened 2024-11-21T08:50:22.119184Z node 7 :HTTP DEBUG: (#47,[::1]:56092) -> (POST /Root) 2024-11-21T08:50:22.119216Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [582b:6977:e405:0:402b:6977:e405:0] request [ListShards] url [/Root] database [/Root] requestId: 781e5a95-5ceaeb89-5a1c8efd-af82d354 2024-11-21T08:50:22.119334Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [781e5a95-5ceaeb89-5a1c8efd-af82d354] got new request from [582b:6977:e405:0:402b:6977:e405:0] database '/Root' stream 'teststream' 2024-11-21T08:50:22.119452Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.119456Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439652251787033651:2474] destroyed 2024-11-21T08:50:22.119568Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [781e5a95-5ceaeb89-5a1c8efd-af82d354] [auth] Authorized successfully 2024-11-21T08:50:22.119590Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [781e5a95-5ceaeb89-5a1c8efd-af82d354] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:22.120023Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:22.120028Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439652251787033664:2480], now have 1 active actors on pipe 2024-11-21T08:50:22.120032Z node 7 :PERSQUEUE DEBUG: 
[PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:22.120035Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439652251787033663:2479], now have 1 active actors on pipe 2024-11-21T08:50:22.120272Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [781e5a95-5ceaeb89-5a1c8efd-af82d354] reply ok 2024-11-21T08:50:22.120331Z node 7 :HTTP DEBUG: (#47,[::1]:56092) <- (200 ) Http output full {"NextToken":"CKiiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T08:50:22.120382Z node 7 :HTTP DEBUG: (#47,[::1]:56092) connection closed 200 {"NextToken":"CKiiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T08:50:22.120845Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.120854Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439652251787033663:2479] destroyed 2024-11-21T08:50:22.120859Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.120861Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439652251787033664:2480] destroyed 2024-11-21T08:50:22.120919Z node 7 :HTTP DEBUG: (#44,[::1]:56096) incoming connection opened 2024-11-21T08:50:22.120933Z node 7 :HTTP DEBUG: (#44,[::1]:56096) -> (POST /Root) 2024-11-21T08:50:22.120982Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [982c:6977:e405:0:802c:6977:e405:0] request [ListShards] url [/Root] database [/Root] requestId: 975c9caf-6051218d-6290627b-196a9042 2024-11-21T08:50:22.121070Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [975c9caf-6051218d-6290627b-196a9042] got new request from [982c:6977:e405:0:802c:6977:e405:0] database '/Root' stream 'teststream' E0000 00:00:1732179022.121311 283006 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:50:22.121279Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [975c9caf-6051218d-6290627b-196a9042] [auth] Authorized successfully 2024-11-21T08:50:22.121296Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [975c9caf-6051218d-6290627b-196a9042] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:22.121727Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:22.121738Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439652251787033675:2484], now have 1 active actors on pipe 2024-11-21T08:50:22.121742Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 
2024-11-21T08:50:22.121744Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439652251787033676:2485], now have 1 active actors on pipe 2024-11-21T08:50:22.122004Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [975c9caf-6051218d-6290627b-196a9042] reply ok 2024-11-21T08:50:22.122071Z node 7 :HTTP DEBUG: (#44,[::1]:56096) <- (200 ) 2024-11-21T08:50:22.122120Z node 7 :HTTP DEBUG: (#44,[::1]:56096) connection closed Http output full {"NextToken":"CKmiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CKmiqe+0MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T08:50:22.122545Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.122554Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439652251787033675:2484] destroyed 2024-11-21T08:50:22.122561Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:22.122564Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439652251787033676:2485] destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2024-11-21T08:50:05.997426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652179447873359:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:05.997456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004183/r3tmp/tmp3FW1CN/pdisk_1.dat 2024-11-21T08:50:06.101778Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:06.122015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:06.122037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:06.136941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2061, node 1 2024-11-21T08:50:06.788644Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:06.788655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:06.788657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:06.788689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:21412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:07.333404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:07.341397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:07.341415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:07.348479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:07.348536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:07.348539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:50:07.350211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:07.350710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:07.350718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:50:07.360387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:07.369013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179007412, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:07.369028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:50:07.369113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:50:07.370479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:07.370524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:07.370534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:50:07.370544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:50:07.370552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:50:07.370563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:50:07.371307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:50:07.371319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:50:07.371323Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:50:07.371336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:50:12.490710Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439652211464054333:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:12.490771Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004183/r3tmp/tmpL0Sn3v/pdisk_1.dat 2024-11-21T08:50:13.014338Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:13.057094Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:13.057123Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:13.064922Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19159, node 4 2024-11-21T08:50:13.436480Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:13.436494Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:13.436496Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:13.436548Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:13.613026Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.613263Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:13.613273Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.624616Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:13.624684Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:13.624689Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:50:13.628854Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:13.635337Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:13.635345Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:50:13.636635Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:13.645505Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179013691, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:13.645523Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:50:13.645596Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:50:13.646190Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:13.646253Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:13.646264Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:50:13.646278Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:50:13.646286Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:50:13.646300Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:50:13.647043Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
:50:21.973047Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.973070Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T08:50:21.973154Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.973170Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.973182Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:21.974159Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.974169Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.974176Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:50:21.974238Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.974242Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.974244Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:50:21.974258Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.974262Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.974263Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:50:21.974276Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.974279Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.974281Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:50:21.974293Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.974297Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.974299Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T08:50:21.982096Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:50:21.982252Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179022028, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:21.982262Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179022028, at schemeshard: 72057594046644480 2024-11-21T08:50:21.982300Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:50:21.982326Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179022028, at schemeshard: 72057594046644480 2024-11-21T08:50:21.982336Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T08:50:21.982347Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179022028, at schemeshard: 72057594046644480 2024-11-21T08:50:21.982400Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T08:50:21.982415Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732179022028 2024-11-21T08:50:21.982423Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T08:50:21.992579Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:21.992754Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:21.992778Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T08:50:21.992802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T08:50:21.992877Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T08:50:21.992893Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T08:50:21.992905Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:50:21.992917Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T08:50:21.992928Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T08:50:21.992940Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T08:50:21.992947Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:50:21.992965Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T08:50:21.992968Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T08:50:21.992970Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 
2024-11-21T08:50:21.992976Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T08:50:21.993887Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.993899Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.993903Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:50:21.993957Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.993959Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.993961Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:50:21.993974Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.993976Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.993978Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:50:21.993990Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.993993Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.993994Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:50:21.994006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:50:21.994010Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:50:21.994012Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T08:50:21.994019Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T08:50:22.004856Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439652246274230842:2299], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:50:22.085077Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:50:22.085127Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:50:22.092297Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> test.py::test[expr-len--Debug] [GOOD] >> test.py::test[expr-len--ForceBlocks] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] >> KqpMultishardIndex::DuplicateUpsert-StreamLookup [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2024-11-21T08:49:55.013002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652135796291630:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:55.013029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cd4/r3tmp/tmpSoUbkS/pdisk_1.dat 2024-11-21T08:49:55.331969Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:55.360275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:55.360299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:55.368722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62713, node 1 2024-11-21T08:49:55.378339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:55.378349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:55.378350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:55.378380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:3465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:55.445631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.446968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.446977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:55.447134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:55.447137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:55.447220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:55.447251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995491, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:55.447551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:55.447631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.447663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:55.447670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:55.447675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976715657:0 2024-11-21T08:49:55.447681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:55.448126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:55.448140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:55.448143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:55.448155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T08:49:55.448921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:3465 2024-11-21T08:49:55.471937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.472249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:55.472403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995519, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178995519, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:55.472685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:55.472690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:55.472745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.472775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.473031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:55.473035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:55.473038Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:55.473046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 2024-11-21T08:49:55.480833Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:55.481257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.481302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.481306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.481317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:55.481331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:55.481333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:55.481417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:55.481432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.481474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.481645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:55.481649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:55.481651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:55.481660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:55.482197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.482261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.482362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECT ... 
"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732179023,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732179023,"StorageLimitMb":0,"StreamName":"testtopic"}} 2024-11-21T08:50:23.082586Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a2906da3-ef5f45c7-34c262ae-eabd2db9] reply ok 2024-11-21T08:50:23.082684Z node 7 :HTTP DEBUG: (#44,[::1]:57120) <- (200 ) 2024-11-21T08:50:23.082732Z node 7 :HTTP DEBUG: (#44,[::1]:57120) connection closed 2024-11-21T08:50:23.083145Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.083154Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7439652256480036233:2477] destroyed 2024-11-21T08:50:23.083158Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 
2024-11-21T08:50:23.083160Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439652256480036234:2478] destroyed 2024-11-21T08:50:23.083162Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.083164Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439652256480036235:2479] destroyed 2024-11-21T08:50:23.083168Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.083169Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439652256480036231:2475] destroyed 2024-11-21T08:50:23.083172Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.083173Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7439652256480036232:2476] destroyed 2024-11-21T08:50:23.084460Z node 7 :HTTP DEBUG: (#44,[::1]:57136) incoming connection opened 2024-11-21T08:50:23.084480Z node 7 :HTTP DEBUG: (#44,[::1]:57136) -> (POST /Root) 2024-11-21T08:50:23.084518Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8fd:103b:ac06:0:c0fd:103b:ac06:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 848206d4-18c22899-de5d88ad-92db02aa 2024-11-21T08:50:23.084583Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [848206d4-18c22899-de5d88ad-92db02aa] got new request from [d8fd:103b:ac06:0:c0fd:103b:ac06:0] database '/Root' stream 'testtopic' 2024-11-21T08:50:23.084766Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [848206d4-18c22899-de5d88ad-92db02aa] [auth] Authorized successfully 2024-11-21T08:50:23.084796Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [848206d4-18c22899-de5d88ad-92db02aa] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732179.023,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732179.023,"StreamName":"testtopic"}} 2024-11-21T08:50:23.085113Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [848206d4-18c22899-de5d88ad-92db02aa] reply ok 2024-11-21T08:50:23.085165Z node 7 :HTTP DEBUG: (#44,[::1]:57136) <- (200 ) 2024-11-21T08:50:23.085202Z node 7 :HTTP DEBUG: (#44,[::1]:57136) connection closed 2024-11-21T08:50:23.085557Z node 7 :HTTP DEBUG: (#44,[::1]:57138) incoming connection opened 2024-11-21T08:50:23.085570Z node 7 :HTTP DEBUG: (#44,[::1]:57138) -> (POST /Root) 2024-11-21T08:50:23.085588Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8fd:103b:ac06:0:c0fd:103b:ac06:0] request [DescribeStream] url [/Root] database [/Root] requestId: 8a31315b-86832b46-9d6bced4-aea1fece 2024-11-21T08:50:23.085627Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [8a31315b-86832b46-9d6bced4-aea1fece] got new request from [d8fd:103b:ac06:0:c0fd:103b:ac06:0] database '/Root' stream 'testtopic' 2024-11-21T08:50:23.085736Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [8a31315b-86832b46-9d6bced4-aea1fece] [auth] Authorized successfully 
2024-11-21T08:50:23.085748Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [8a31315b-86832b46-9d6bced4-aea1fece] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:23.086063Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:23.086070Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [7:7439652256480036260:2488], now have 1 active actors on pipe 2024-11-21T08:50:23.086074Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:23.086076Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [7:7439652256480036261:2489], now have 1 active actors on pipe 2024-11-21T08:50:23.086079Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:23.086081Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [7:7439652256480036262:2490], now have 1 active actors on pipe 2024-11-21T08:50:23.086084Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:23.086087Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439652256480036263:2491], now have 1 active actors on pipe 2024-11-21T08:50:23.086090Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:23.086092Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439652256480036259:2487], now have 1 active actors on pipe 2024-11-21T08:50:23.086414Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [8a31315b-86832b46-9d6bced4-aea1fece] reply ok 2024-11-21T08:50:23.086465Z node 7 :HTTP DEBUG: (#44,[::1]:57138) <- (200 ) 2024-11-21T08:50:23.086495Z node 7 :HTTP DEBUG: (#44,[::1]:57138) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732179023,"StorageLimitMb":0,"StreamName":"testtopic"}} 2024-11-21T08:50:23.086786Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.086793Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439652256480036262:2490] destroyed 2024-11-21T08:50:23.086797Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.086798Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439652256480036263:2491] destroyed 2024-11-21T08:50:23.086920Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.086925Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439652256480036259:2487] destroyed 2024-11-21T08:50:23.086928Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.086929Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7439652256480036260:2488] destroyed 2024-11-21T08:50:23.086931Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:50:23.086933Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7439652256480036261:2489] destroyed |86.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> test.py::test[pg-join_using4-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using4-default.txt-ForceBlocks] >> test.py::test[in-in_ansi-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--Analyze] |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |86.6%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |86.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] Test command err: Trying to start YDB, gRPC: 29132, MsgBus: 5733 2024-11-21T08:50:21.148355Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652246662094668:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a64/r3tmp/tmpnTa611/pdisk_1.dat 2024-11-21T08:50:21.182907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:21.218273Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29132, node 1 2024-11-21T08:50:21.252394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:21.252407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:21.252409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:21.252442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:21.279833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:21.279856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:21.283644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5733 TClient is connected to server localhost:5733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:50:21.389920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.392504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.425771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.461033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.493306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:50:21.514755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.917347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652246662096089:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.917441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.931361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.944611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.959745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.973902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.987589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:22.000118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:22.077053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652250957063903:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:22.077083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:22.077227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652250957063908:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:22.078287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:22.081231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:22.081330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652250957063910:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:22.326787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15299, MsgBus: 9793 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a64/r3tmp/tmpvNIzgu/pdisk_1.dat 2024-11-21T08:50:22.894532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:22.904460Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15299, node 2 2024-11-21T08:50:22.920726Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:22.920738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:22.920740Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:22.920775Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9793 2024-11-21T08:50:22.964670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:22.964696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:22.967700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:23.024854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.038675Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:23.048849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.073438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:23.123352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:50:23.145710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.308516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652257792132339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.308577Z node 2 :K ... default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.420276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652257792132850:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.421039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:23.423937Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:23.423994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652257792132852:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:23.643312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ( (declare $am_1 (DataType 'Decimal '22 '9)) (declare $b_1 (DataType 'Utf8)) (declare $b_2 (DataType 'Utf8)) (declare $constant_param_1 (DataType 'Timestamp)) (declare $constant_param_2 (DataType 'Utf8)) (declare $pa_id_1 (DataType 'Utf8)) (declare $status_1 (DataType 'Utf8)) (declare $type_1 (ListType (DataType 'Utf8))) (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Timestamp))) (OptionalType (OptionalType (DataType 'Utf8))) (DataType 'Int32)) (TupleType (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Utf8))) (OptionalType (OptionalType (DataType 'Timestamp))) (OptionalType (OptionalType (DataType 'Utf8))) (DataType 'Int32)))))) (let $1 (DataType 'Utf8)) (let $2 (DataType 'Timestamp)) (let $3 (OptionalType $2)) (let $4 '"$constant_param_1") (let $5 (OptionalType $1)) (let $6 (OptionalType $5)) (let $7 '"$constant_param_2") (let $8 '('('"_logical_id" '4702) '('"_id" '"6e0ccac3-75d67f31-7c90e23e-a5942e2") '('"_partition_mode" '"single"))) (let $9 (DqPhyStage '() (lambda '() (block '( (let $50 (Uint64 '10000)) (let $51 (Int32 '1)) (let $52 '((Just (Just $b_2)) $51)) (let $53 '((Just (Just $pa_id_1)) $51)) (let $54 (Int32 '0)) (let $55 (Just (Just $constant_param_1))) (let $56 '((Nothing $6) $54)) (let $57 (RangeMultiply $50 (RangeCreate (AsList '('((Just (Nothing $3)) $54) '($55 $54)))) (RangeCreate (AsList '($56 $56))))) (let $58 '($55 $51)) (let $59 (RangeCreate (AsList '($58 $58)))) (let $60 (Just (Just $constant_param_2))) (let $61 (RangeMultiply $50 $59 (RangeCreate (AsList '('((Just (Nothing $5)) $54) '($60 $54)))))) (let $62 '($60 $51)) (let $63 (RangeMultiply $50 $59 (RangeCreate (AsList '($62 $62))))) (let $64 (RangeUnion $57 $61 $63)) (let $65 (RangeMultiply $50 (RangeCreate (AsList '($53 $53))) $64)) (return (ToStream (Just '((RangeFinalize (RangeMultiply $50 (RangeUnion (RangeMultiply $50 (RangeCreate (AsList '($52 $52))) $65)))))))) ))) $8)) (let $10 (DqCnValue (TDqOutput $9 '0))) (let $11 '('('"$b_2") '($4) '($7) '('"$pa_id_1"))) (let $12 (KqpPhysicalTx '($9) '($10) $11 '('('type '"compute")))) (let $13 (KqpTable '"/Root/tg/tg_index/indexImplTable" '"72057594046644480:18" '"" '1)) (let $14 '('"am" '"b" '"id" '"pa_id" '"status" '"system_date" 'type)) (let $15 '"%kqp%tx_result_binding_0_0") (let $16 (TupleType $6 $6 (OptionalType $3) $6 (DataType 'Int32))) (let $17 (TupleType (ListType (TupleType $16 $16)))) (let $18 '('"b" '"pa_id" '"system_date" '"id")) (let $19 '('('"UsedKeyColumns" $18) '('"ExpectedMaxRanges" '3) '('"PointPrefixLen" '2))) (let $20 (KqpRowsSourceSettings $13 $14 '('('"Reverse") '('"Sorted")) %kqp%tx_result_binding_0_0 $19)) (let $21 (Bool 'false)) (let $22 (DataType 'Decimal '22 '9)) (let $23 (Uint64 '11)) (let $24 '('"b" $5)) (let $25 '('"id" $5)) (let $26 '('"pa_id" $5)) (let $27 '('"system_date" $3)) (let $28 (StructType $24 $25 $26 $27)) (let $29 '('('"_logical_id" '4861) '('"_id" '"454309ef-ddcbed90-9942a605-a1c3e6db") '('"_wide_channels" $28))) (let $30 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $20)) (lambda '($66) (block '( (let $67 (lambda '($70) 
(block '( (let $71 (Member $70 'type)) (let $72 (IfPresent $71 (lambda '($74) (block '( (let $75 '('Auto 'One 'Compact)) (let $76 (ToDict $type_1 (lambda '($77) $77) (lambda '($78) (Void)) $75)) (return (Contains $76 $74)) ))) $21)) (let $73 (And (Not (If (Exists $71) $72 (Bool 'true))) (Coalesce (!= (Member $70 '"status") $status_1) $21) (Coalesce (!= (Member $70 '"am") $am_1) $21))) (return (Member $70 '"b") (Member $70 '"id") (Member $70 '"pa_id") (Member $70 '"system_date") $73) )))) (let $68 (WideFilter (ExpandMap (ToFlow $66) $67) (lambda '($79 $80 $81 $82 $83) $83) $23)) (let $69 (lambda '($84 $85 $86 $87 $88) $84 $85 $86 $87)) (return (FromFlow (WideMap $68 $69))) ))) $29)) (let $31 (DqCnUnionAll (TDqOutput $30 '0))) (let $32 '('('"_logical_id" '4975) '('"_id" '"2755fef2-b1aa1a51-7a7da41a-fc263516") '('"_wide_channels" (StructType $25)))) (let $33 (DqPhyStage '($31) (lambda '($89) (block '( (let $90 '('('0 $21) '('2 $21) '('3 $21) '('1 $21))) (let $91 (WideTop (ToFlow $89) $23 $90)) (return (FromFlow (WideMap $91 (lambda '($92 $93 $94 $95) $93)))) ))) $32)) (let $34 (DqCnHashShuffle (TDqOutput $33 '0) '('0))) (let $35 (DqPhyStage '($34) (lambda '($96) (FromFlow (PartitionByKey (NarrowMap (ToFlow $96) (lambda '($97) (AsStruct '('"id" $97)))) (lambda '($98) (Member $98 '"id")) (Void) (Void) (lambda '($99) (Map (Filter (FlatMap $99 (lambda '($100) (Take (Nth $100 '1) (Uint64 '1)))) (lambda '($101) (Exists (Member $101 '"id")))) (lambda '($102) (AsStruct '('"b" $b_1) '('"id" (Member $102 '"id"))))))))) '('('"_logical_id" '5068) '('"_id" '"d756b0f4-949f00b7-53739d83-27971700")))) (let $36 (KqpTable '"/Root/tg" '"72057594046644480:16" '"" '1)) (let $37 '('"am" '"b" '"cur" '"id" '"pa_id" '"product" '"status" '"system_date" 'type)) (let $38 (KqpCnStreamLookup (TDqOutput $35 '0) $36 $37 (ListType (StructType '('"b" $1) $25)) '"LookupRows")) (let $39 (Uint64 '"1001")) (let $40 (StructType '('"am" (OptionalType $22)) $24 '('"cur" $5) $25 $26 '('"product" $5) '('"status" $5) $27 '('type $5))) (let $41 '('('"_logical_id" '5170) '('"_id" '"420e5dd6-a13a99d5-a7d56741-b61e7c2b") '('"_wide_channels" $40))) (let $42 (DqPhyStage '($38) (lambda '($103) (block '( (let $104 (lambda '($107) (Member $107 '"am") (Member $107 '"b") (Member $107 '"cur") (Member $107 '"id") (Member $107 '"pa_id") (Member $107 '"product") (Member $107 '"status") (Member $107 '"system_date") (Member $107 'type))) (return (FromFlow (ExpandMap (ToFlow (TopSort (OrderedFilter $103 (lambda '($105) (Exists (Member $105 '"id")))) $39 '($21 $21) (lambda '($106) '((Member $106 '"system_date") (Member $106 '"id"))))) $104))) ))) $41)) (let $43 (DqCnMerge (TDqOutput $42 '0) '('('"7" '"Desc") '('3 '"Desc")))) (let $44 (DqPhyStage '($43) (lambda '($108) (FromFlow (NarrowMap (Take (ToFlow $108) $39) (lambda '($109 $110 $111 $112 $113 $114 $115 $116 $117) (AsStruct '('"am" $109) '('"b" $110) '('"cur" $111) '('"id" $112) '('"pa_id" $113) '('"product" $114) '('"status" $115) '('"system_date" $116) '('type $117)))))) '('('"_logical_id" '5183) '('"_id" '"d0cf6442-45e972a9-a154ea26-a461ee74")))) (let $45 '($30 $33 $35 $42 $44)) (let $46 (DqCnResult (TDqOutput $44 '0) '())) (let $47 (KqpTxResultBinding $17 '0 '0)) (let $48 '('('"$am_1") '('"$b_1") '('"$status_1") '('"$type_1") '($15 $47))) (let $49 (KqpPhysicalTx $45 '($46) $48 '('('type '"data")))) (return (KqpPhysicalQuery '($12 $49) '((KqpTxResultBinding (ListType $40) '1 '0)) '('('type '"data_query")))) ) 
{"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Operators":[{"Inputs":[{"ExternalPlanNodeId":12}],"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Limit":"1001","Name":"TopSort","TopSortBy":""},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":10}],"Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["b","id"],"Node Type":"TableLookup","PlanNodeId":10,"Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":9,"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Input":"NarrowMap","Name":"PartitionByKey"}],"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":""}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"Not If And item.status != $status_1 And item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Node Type":"Aggregate"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":""}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No 
estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65450, MsgBus: 10237 2024-11-21T08:50:20.844829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652242000973378:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:20.844920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a99/r3tmp/tmpcwOFTv/pdisk_1.dat 2024-11-21T08:50:20.988553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:20.988577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:21.005239Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:21.005672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65450, node 1 2024-11-21T08:50:21.043413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:21.043426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:21.043428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:21.043462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10237 TClient is connected to server localhost:10237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:50:21.205969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.212718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:21.219534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.263647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.307789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.329293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:21.704441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652246295942108:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.705920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.713471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.736294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.743458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.757743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.771513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.786047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:21.809797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652246295942625:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.809818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.809888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652246295942630:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:21.810653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:21.813419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:21.813488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652246295942632:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:22.029778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 6384, MsgBus: 8450 2024-11-21T08:50:23.280979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652255651179550:2137];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:23.293354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a99/r3tmp/tmp9J92ZC/pdisk_1.dat 2024-11-21T08:50:23.325317Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6384, node 2 2024-11-21T08:50:23.348646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:23.348657Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:23.348659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:23.348692Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8450 2024-11-21T08:50:23.397513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:23.397541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:23.400573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:23.454291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.464597Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:23.485070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:23.505858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:50:23.541280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.558949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.864132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652255651181021:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.864184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.866989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.876293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.885342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.899793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.913951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.928346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.944103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652255651181533:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.944127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.944296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652255651181538:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.945011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:23.952099Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:23.952217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652255651181540:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:24.221085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[select-refselect-1000-Results] [GOOD] >> test.py::test[select-swap_columns-default.txt-Analyze] >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Debug] >> KqpIndexes::IndexTopSortPushDown [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 [GOOD] Test command err: iteration# 3 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 9 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 15 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 21 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 27 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 33 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 39 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 45 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 51 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 57 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 63 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 69 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 75 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 81 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 87 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 93 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 99 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 105 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 111 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 117 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 123 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 129 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 135 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 141 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 147 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 153 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 159 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 165 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 171 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 177 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 183 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 189 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 195 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 201 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 207 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 213 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 219 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 225 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 231 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 237 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 243 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 249 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 255 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 261 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 267 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 273 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 279 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 285 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 291 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 297 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 303 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 309 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 315 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 321 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 327 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 333 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 339 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 345 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 351 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 357 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 363 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 369 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 375 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 381 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 387 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 393 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 399 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 405 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 411 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 417 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 423 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 429 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 435 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 441 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 447 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 453 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 459 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 465 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 471 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 477 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 483 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 489 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> test.py::test[json-json_query/passing-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Results] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[expr-len--ForceBlocks] [GOOD] >> test.py::test[expr-len--Plan] [GOOD] >> test.py::test[expr-len--Results] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Plan] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 6655, MsgBus: 22380 2024-11-21T08:50:22.422961Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652250820889971:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:22.498436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a51/r3tmp/tmpdJdgDJ/pdisk_1.dat 2024-11-21T08:50:22.544839Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6655, node 1 2024-11-21T08:50:22.568414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:22.568426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:22.568427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:22.568464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:22.592656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:22.592682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:22.593616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22380 TClient is connected to server localhost:22380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:22.736728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.740573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:22.742304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.773093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:22.810822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.842242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.003125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652250820891381:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.003209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.010120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.022376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.037363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.050333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.059741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.075963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.096756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652255115859182:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.096801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.096999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652255115859189:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.097622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:23.101152Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:23.101237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652255115859191:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:23.341128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 27768, MsgBus: 63792 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a51/r3tmp/tmp1NpGef/pdisk_1.dat 2024-11-21T08:50:24.060346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:24.078260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27768, node 2 2024-11-21T08:50:24.084720Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:24.084733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:24.084735Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:24.084771Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63792 2024-11-21T08:50:24.132782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:24.132808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:24.139792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:24.268655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:24.276669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:24.293117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:24.322296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:24.366161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:24.385418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:24.895527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652259128718996:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:24.895552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:24.899477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:24.911636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:24.932930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:24.992785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.006077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.025335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.050319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652263423686809:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.050339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.050484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652263423686814:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.051252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:25.054218Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:25.054311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652263423686816:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:25.313154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> TKeyValueTest::TestObtainLockNewApi |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 20717, MsgBus: 64343 2024-11-21T08:50:22.338155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652253549283856:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:22.338232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a55/r3tmp/tmpO5LWaS/pdisk_1.dat 2024-11-21T08:50:22.424833Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:22.436470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:22.436492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:22.440712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20717, node 1 2024-11-21T08:50:22.464446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:22.464458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:22.464460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:22.464493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64343 TClient is connected to server localhost:64343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:22.611614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:22.614668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:22.625071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.663864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.707608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:22.727845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:23.028665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652253549285287:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.030707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.039869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.050314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.063428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.072694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.092296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.104387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:23.125327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652257844253097:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.125351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.125445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652257844253102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:23.126326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:23.129225Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:23.129308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652257844253104:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:23.435762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25692, MsgBus: 4676 2024-11-21T08:50:25.026958Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652266789395161:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:25.027011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a55/r3tmp/tmphh2ppp/pdisk_1.dat 2024-11-21T08:50:25.051564Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25692, node 2 2024-11-21T08:50:25.088823Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:25.088836Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:25.088838Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:25.088877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4676 2024-11-21T08:50:25.128918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:25.128943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:25.134920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:25.235563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:25.248610Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:25.263593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:25.336474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:25.391131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:25.464995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:25.542069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652266789396470:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.542105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.548517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.565272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.583141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.609222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.632516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.641987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.666176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652266789396984:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.666209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.666609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652266789396989:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:25.667372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:25.670000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:25.670107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652266789396991:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:26.010068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:26.035024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:50:26.057123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] Test command err: 2024-11-21T08:49:56.374650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652142571895479:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:56.374728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cba/r3tmp/tmpcQPb6q/pdisk_1.dat 2024-11-21T08:49:56.518293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:56.518315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:56.524760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:49:56.536316Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11486, node 1 2024-11-21T08:49:56.660404Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:56.660414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:56.660416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:56.660446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:49:56.884612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.885558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.885568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.885684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:56.885714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:56.885717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:56.885802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:56.885804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:56.885835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.886107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178996933, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.886111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:56.886153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:56.886242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.886270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.886277Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:56.886283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:56.886288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:56.886293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:56.886686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:56.886703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:56.886706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:56.886765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
2024-11-21T08:49:56.892492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:4780 2024-11-21T08:49:56.956695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.956751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.956757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.956851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:56.956870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.957046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178997003, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:56.957049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178997003, at schemeshard: 72057594046644480 2024-11-21T08:49:56.957078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:56.957091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:56.957096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:56.957158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.957182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.957393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:56.957397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:56.957400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:56.957407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 
2024-11-21T08:49:56.987074Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:56.987568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.987633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.987638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.987649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:56.987664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:56.987667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:56.987764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:56.987786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:56.987820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:56.987981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:56.987986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:56.987989Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:56.988000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:56.988637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:56.988680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:56.988784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECT ... 
4-11-21T08:50:25.339193Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715708, publications: 2, subscribers: 0 2024-11-21T08:50:25.339471Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715708 2024-11-21T08:50:25.339478Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715708 2024-11-21T08:50:25.339481Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T08:50:25.339522Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715708 2024-11-21T08:50:25.339525Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715708 2024-11-21T08:50:25.339527Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T08:50:25.339534Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715708, subscribers: 0 2024-11-21T08:50:25.340443Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715708, at schemeshard: 72057594046644480 2024-11-21T08:50:25.341080Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.341128Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715709:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:25.341296Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715709, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T08:50:25.341322Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:25.341349Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:25.341356Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:25.341605Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T08:50:25.341612Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T08:50:25.341615Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T08:50:25.341657Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T08:50:25.341661Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T08:50:25.341662Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T08:50:25.344511Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715709, at schemeshard: 72057594046644480 2024-11-21T08:50:25.364677Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179025409, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:25.364697Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179025409, at schemeshard: 72057594046644480 2024-11-21T08:50:25.364744Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715709:0 128 -> 240 2024-11-21T08:50:25.364928Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:25.364976Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:25.364984Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715709:0 ProgressState 2024-11-21T08:50:25.364994Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715709:0 progress is 1/1 2024-11-21T08:50:25.365002Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715709:0 2024-11-21T08:50:25.365013Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715709, publications: 2, subscribers: 1 2024-11-21T08:50:25.365323Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T08:50:25.365329Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T08:50:25.365333Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T08:50:25.365359Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T08:50:25.365362Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T08:50:25.365364Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T08:50:25.365383Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715709, subscribers: 1 2024-11-21T08:50:25.386568Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [49e8438a-9bb9292c-fd49c800-89ef5d6a] Got succesfult GRPC response. 
2024-11-21T08:50:25.386603Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [49e8438a-9bb9292c-fd49c800-89ef5d6a] reply ok 2024-11-21T08:50:25.386666Z node 7 :HTTP DEBUG: (#44,[::1]:38406) <- (200 ) 2024-11-21T08:50:25.386754Z node 7 :HTTP DEBUG: (#44,[::1]:38406) connection closed Http output full {"QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000301v0/DlqName"} 2024-11-21T08:50:25.388510Z node 7 :HTTP DEBUG: (#44,[::1]:38412) incoming connection opened 2024-11-21T08:50:25.388541Z node 7 :HTTP DEBUG: (#44,[::1]:38412) -> (POST /Root) 2024-11-21T08:50:25.388592Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d877:1039:1a17:0:c077:1039:1a17:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 1c229cd1-6e4b4533-cca1546e-403937a7 2024-11-21T08:50:25.388701Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [1c229cd1-6e4b4533-cca1546e-403937a7] got new request from [d877:1039:1a17:0:c077:1039:1a17:0] 2024-11-21T08:50:25.396809Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [1c229cd1-6e4b4533-cca1546e-403937a7] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:25.396823Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [1c229cd1-6e4b4533-cca1546e-403937a7] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:25.405015Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [1c229cd1-6e4b4533-cca1546e-403937a7] Got succesfult GRPC response. 2024-11-21T08:50:25.405060Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [1c229cd1-6e4b4533-cca1546e-403937a7] reply ok 2024-11-21T08:50:25.405173Z node 7 :HTTP DEBUG: (#44,[::1]:38412) <- (200 ) 2024-11-21T08:50:25.405227Z node 7 :HTTP DEBUG: (#44,[::1]:38412) connection closed Http output full {"Attributes":{"QueueArn":"yrn:yc:ymq:ru-central1:folder4:DlqName"}} 2024-11-21T08:50:25.409615Z node 7 :HTTP DEBUG: (#44,[::1]:38414) incoming connection opened 2024-11-21T08:50:25.409655Z node 7 :HTTP DEBUG: (#44,[::1]:38414) -> (POST /Root) 2024-11-21T08:50:25.409690Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d85e:7e3c:1a17:0:c05e:7e3c:1a17:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: 9f80a746-6a6f4180-91d5fd41-fb0d20ac 2024-11-21T08:50:25.409836Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [9f80a746-6a6f4180-91d5fd41-fb0d20ac] got new request from [d85e:7e3c:1a17:0:c05e:7e3c:1a17:0] 2024-11-21T08:50:25.409910Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [9f80a746-6a6f4180-91d5fd41-fb0d20ac] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:25.409913Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [9f80a746-6a6f4180-91d5fd41-fb0d20ac] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:25.434773Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [9f80a746-6a6f4180-91d5fd41-fb0d20ac] Got succesfult GRPC response. 
2024-11-21T08:50:25.434792Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [9f80a746-6a6f4180-91d5fd41-fb0d20ac] reply ok 2024-11-21T08:50:25.434924Z node 7 :HTTP DEBUG: (#44,[::1]:38414) <- (200 ) 2024-11-21T08:50:25.435022Z node 7 :HTTP DEBUG: (#44,[::1]:38414) connection closed Http output full {} 2024-11-21T08:50:25.438377Z node 7 :HTTP DEBUG: (#44,[::1]:38422) incoming connection opened 2024-11-21T08:50:25.438408Z node 7 :HTTP DEBUG: (#44,[::1]:38422) -> (POST /Root) 2024-11-21T08:50:25.438461Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d85e:7e3c:1a17:0:c05e:7e3c:1a17:0] request [ListDeadLetterSourceQueues] url [/Root] database [/Root] requestId: ad3adf52-bbf260ac-571bc2d6-69b19f44 2024-11-21T08:50:25.438575Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [ad3adf52-bbf260ac-571bc2d6-69b19f44] got new request from [d85e:7e3c:1a17:0:c05e:7e3c:1a17:0] 2024-11-21T08:50:25.438645Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [ad3adf52-bbf260ac-571bc2d6-69b19f44] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:25.438648Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [ad3adf52-bbf260ac-571bc2d6-69b19f44] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:25.441640Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [ad3adf52-bbf260ac-571bc2d6-69b19f44] Got succesfult GRPC response. 2024-11-21T08:50:25.441679Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [ad3adf52-bbf260ac-571bc2d6-69b19f44] reply ok 2024-11-21T08:50:25.441720Z node 7 :HTTP DEBUG: (#44,[::1]:38422) <- (200 ) 2024-11-21T08:50:25.441781Z node 7 :HTTP DEBUG: (#44,[::1]:38422) connection closed Http output full {"NextToken":"","QueueUrls":["http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName"]} |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TestYmqHttpProxy::TestDeleteQueue [GOOD] |86.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> TKeyValueTest::TestWriteReadPatchRead >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] >> test.py::test[window-win_fuse_window-default.txt-Debug] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TKeyValueTest::TestIncorrectRequestThenResponseError >> 
TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> test.py::test[pg-join_using4-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using4-default.txt-Plan] [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> test.py::test[json-json_query/passing-default.txt-Results] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Analyze] >> test.py::test[pg-join_using4-default.txt-Results] >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> test.py::test[expr-len--Results] [GOOD] >> test.py::test[expr-list_builtins--Analyze] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2024-11-21T08:50:28.801608Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T08:50:28.801900Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T08:50:28.804114Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2024-11-21T08:50:28.804132Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T08:50:28.813667Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2024-11-21T08:50:28.813686Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2024-11-21T08:50:28.813695Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> test.py::test[expr-list_builtins--Debug] [SKIPPED] >> test.py::test[expr-list_builtins--ForceBlocks] [SKIPPED] >> test.py::test[expr-list_builtins--Plan] [SKIPPED] >> test.py::test[expr-list_builtins--Results] [SKIPPED] >> test.py::test[expr-list_from_range_opt-default.txt-Analyze] >> test.py::test[select-swap_columns-default.txt-Analyze] [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> test.py::test[select-swap_columns-default.txt-Debug] >> TKeyValueTest::TestRenameWorks >> TExportToS3Tests::UidAsIdempotencyKey >> DataShardVolatile::DistributedWrite >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Analyze] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> test.py::test[pg-join_using4-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Analyze] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2024-11-21T08:50:29.687915Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# 
ReadRequestCookie# 0 2024-11-21T08:50:29.688270Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Analyze] >> TExportToS3Tests::CheckItemProgress >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Analyze] [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Debug] >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TExportToS3Tests::ShouldPreserveIncrBackupFlag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestIncrementalKeySet [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> TExportToS3Tests::RebootDuringCompletion >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> test.py::test[expr-list_from_range_opt-default.txt-Analyze] [GOOD] >> test.py::test[expr-list_from_range_opt-default.txt-Debug] >> test.py::test[aggregate-group_by_with_where-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Plan] [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> 
TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> test.py::test[select-swap_columns-default.txt-Debug] [GOOD] >> test.py::test[select-swap_columns-default.txt-ForceBlocks] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2024-11-21T08:49:54.628813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652132803875480:2236];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:49:54.628829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ccf/r3tmp/tmppUKF0K/pdisk_1.dat 2024-11-21T08:49:54.831339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28835, node 1 2024-11-21T08:49:54.888097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:54.888117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:54.888119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:54.888152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:49:54.997130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:54.998073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:54.998202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:54.998205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:54.998281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:54.998312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995043, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:54.998593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:54.998672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:54.998696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:54.998703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:54.998709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:54.998715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:49:54.999098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:54.999108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:54.999111Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:54.999119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
2024-11-21T08:49:55.002174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:55.012436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:55.012457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:55.013510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9220 2024-11-21T08:49:55.092754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.092964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.092969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:55.093086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995141, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178995141, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:55.093418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:55.093424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:55.093492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.093746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:55.093752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:55.093755Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:55.093765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 waiting... 
2024-11-21T08:49:55.096579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:49:55.101789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.101840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.101844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.101855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:49:55.101871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:49:55.101874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:49:55.101957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T08:49:55.101974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.102005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.102185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:49:55.102189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:49:55.102192Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:49:55.102202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T08:49:55.102795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.102834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.102948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECT ... 
NFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T08:50:25.004447Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T08:50:25.004456Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T08:50:25.012623Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.012760Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T08:50:25.012958Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:25.012963Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:25.013134Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T08:50:25.013165Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:25.013211Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:25.013221Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:50:25.013518Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:25.013523Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:25.013526Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T08:50:25.013553Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:25.013555Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:25.013557Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2024-11-21T08:50:25.013994Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:50:25.014005Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T08:50:25.014176Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 waiting... 
2024-11-21T08:50:25.022649Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:50:25.022659Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:50:25.022689Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T08:50:25.022795Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:50:25.023046Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179025066, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:25.023053Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179025066 2024-11-21T08:50:25.023079Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T08:50:25.023173Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:25.023252Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:25.023260Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:50:25.023506Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:25.023511Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:25.023514Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T08:50:25.023541Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T08:50:25.023543Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T08:50:25.023545Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T08:50:25.024426Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T08:50:25.024695Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732179025066 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 243 } } 2024-11-21T08:50:25.024936Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:50:25.024944Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, 
at schemeshard: 72057594046644480 2024-11-21T08:50:25.024948Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T08:50:25.025001Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2024-11-21T08:50:25.025010Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T08:50:25.025018Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T08:50:26.411025Z node 7 :HTTP INFO: Listening on http://[::]:13799 2024-11-21T08:50:26.412608Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439652270576323497:2406], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:26.415441Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439652270576323546:2413], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:26.415460Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439652270576323549:2415], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:26.415474Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439652270576323545:2412], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:26.417571Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:26.509698Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439652270576323541:2409]: Pool not found 2024-11-21T08:50:26.660618Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439652270576323547:2414]: Pool not found 2024-11-21T08:50:26.661949Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439652270576323674:2433], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T08:50:26.661965Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439652270576323673:2432], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:26.661985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:26.721809Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439652270576323671:2431]: Pool not found 2024-11-21T08:50:27.413289Z node 7 :HTTP DEBUG: (#44,[::1]:36462) incoming connection opened 2024-11-21T08:50:27.413322Z node 7 :HTTP DEBUG: (#44,[::1]:36462) -> (POST /Root) 2024-11-21T08:50:27.413390Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1830:95bb:c344:0:30:95bb:c344:0] request [CreateStream] url [/Root] database [/Root] requestId: 9a588d70-dba9e22d-7419bc4e-8b258cfa 2024-11-21T08:50:27.414008Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [9a588d70-dba9e22d-7419bc4e-8b258cfa] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map 2024-11-21T08:50:27.414066Z node 7 :HTTP DEBUG: (#44,[::1]:36462) <- (400 MissingParameter) 2024-11-21T08:50:27.414075Z node 7 :HTTP DEBUG: (#44,[::1]:36462) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2024-11-21T08:50:27.414079Z node 7 :HTTP DEBUG: (#44,[::1]:36462) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 9a588d70-dba9e22d-7419bc4e-8b258cfa x-amz-crc32: 3671469627 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} 2024-11-21T08:50:27.414135Z node 7 :HTTP DEBUG: (#44,[::1]:36462) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TExportToS3Tests::TablePermissions [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Plan] >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> test.py::test[window-win_fuse_window-default.txt-Plan] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test.py::test[weak_field-weak_field_esc_string--Analyze] [GOOD] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Analyze] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Debug] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Debug] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> test.py::test[blocks-date_greater_or_equal_scalar--Debug] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Plan] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Debug] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> 
TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> test.py::test[expr-list_from_range_opt-default.txt-Debug] [GOOD] >> test.py::test[expr-list_from_range_opt-default.txt-ForceBlocks] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> test.py::test[blocks-date_not_equals--Analyze] [GOOD] >> test.py::test[blocks-date_not_equals--Debug] >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2024-11-21T08:49:55.576315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652137971509584:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cd3/r3tmp/tmpWdeIKC/pdisk_1.dat 2024-11-21T08:49:55.628804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 11999, node 1 2024-11-21T08:49:55.655864Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:49:55.655878Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:49:55.667124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:49:55.667137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:55.667139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:49:55.667142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:49:55.667152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:49:55.667153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:49:55.667160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:49:55.676363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2024-11-21T08:49:55.676384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:49:55.676386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:49:55.676387Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:49:55.676388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:49:55.676389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:49:55.676417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:49:55.676430Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2024-11-21T08:49:55.676498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:49:55.676551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.676581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:49:55.676666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.676767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.676770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.676777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:49:55.676784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:49:55.724381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:49:55.724429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:49:55.724741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:49:55.726216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.726233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.726340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:49:55.726430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:49:55.726433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:49:55.726540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:49:55.726542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:49:55.726569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.726794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:49:55.726907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995771, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.726910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:49:55.727108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:49:55.727247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.727267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.727273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:49:55.727279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:49:55.727314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:49:55.727320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:49:55.727800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:49:55.727814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:49:55.727817Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:49:55.727825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:49:55.728323Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10844 2024-11-21T08:49:55.745960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:49:55.746019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:49:55.746127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732178995792, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732178995792, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:49:55.746498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:49:55.746503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:49:55.746557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:49:55.746764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:49:55.746769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:49:55.746772Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:49:55.746779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, ... 
HTTP_PROXY WARN: http request [DeleteMessageBatch] requestId [1b40c0ac-b846e3e9-677d8cbc-b48c2f31] got new request with incorrect json from [9838:757a:b004:0:8038:757a:b004:0] 2024-11-21T08:50:31.932095Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [1b40c0ac-b846e3e9-677d8cbc-b48c2f31] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T08:50:31.932112Z node 7 :HTTP DEBUG: (#47,[::1]:54940) <- (400 InvalidArgumentException) 2024-11-21T08:50:31.932116Z node 7 :HTTP DEBUG: (#47,[::1]:54940) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": { } } 0 2024-11-21T08:50:31.932119Z node 7 :HTTP DEBUG: (#47,[::1]:54940) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 1b40c0ac-b846e3e9-677d8cbc-b48c2f31 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T08:50:31.932142Z node 7 :HTTP DEBUG: (#47,[::1]:54940) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} F0000 00:00:1732179031.932515 286829 generated_message_reflection.cc:181] F0000 00:00:1732179031.932515 286829 generated_message_reflection.cc:181] 2024-11-21T08:50:31.932451Z node 7 :HTTP DEBUG: (#44,[::1]:54948) incoming connection opened 2024-11-21T08:50:31.932464Z node 7 :HTTP DEBUG: (#44,[::1]:54948) -> (POST /Root) 2024-11-21T08:50:31.932481Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18f3:767a:b004:0:f3:767a:b004:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 599f7bcd-b03c01fe-b73b5b68-85eea230 2024-11-21T08:50:31.932579Z node 7 :HTTP_PROXY WARN: http request [DeleteMessageBatch] requestId [599f7bcd-b03c01fe-b73b5b68-85eea230] got new request with incorrect json from [18f3:767a:b004:0:f3:767a:b004:0] 2024-11-21T08:50:31.932582Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [599f7bcd-b03c01fe-b73b5b68-85eea230] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T08:50:31.932602Z node 7 :HTTP DEBUG: (#44,[::1]:54948) <- (400 InvalidArgumentException) 2024-11-21T08:50:31.932606Z node 7 :HTTP DEBUG: (#44,[::1]:54948) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 7a { "QueueUrl":"http://ghrun-qcxhsi27zq.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries":"" } 0 2024-11-21T08:50:31.932609Z node 7 :HTTP DEBUG: (#44,[::1]:54948) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 599f7bcd-b03c01fe-b73b5b68-85eea230 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T08:50:31.932632Z node 7 :HTTP DEBUG: (#44,[::1]:54948) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T08:50:31.932930Z node 7 :HTTP DEBUG: (#47,[::1]:54960) incoming connection opened 2024-11-21T08:50:31.932944Z node 7 :HTTP DEBUG: 
(#47,[::1]:54960) -> (POST /Root) 2024-11-21T08:50:31.932981Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9838:757a:b004:0:8038:757a:b004:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: b10f02cc-47988431-f9beffa-1c441b7e 2024-11-21T08:50:31.933054Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [b10f02cc-47988431-f9beffa-1c441b7e] got new request from [9838:757a:b004:0:8038:757a:b004:0] 2024-11-21T08:50:31.933102Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [b10f02cc-47988431-f9beffa-1c441b7e] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:31.933104Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [b10f02cc-47988431-f9beffa-1c441b7e] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"2c4b2583-88daf742-9e9c0cfd-63b1c7b1"},{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"988c36cc-2c3105f8-7f56b096-96091ba7"}]} 2024-11-21T08:50:31.977666Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [b10f02cc-47988431-f9beffa-1c441b7e] Got succesfult GRPC response. 2024-11-21T08:50:31.977716Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [b10f02cc-47988431-f9beffa-1c441b7e] reply ok 2024-11-21T08:50:31.977771Z node 7 :HTTP DEBUG: (#47,[::1]:54960) <- (200 ) 2024-11-21T08:50:31.977844Z node 7 :HTTP DEBUG: (#47,[::1]:54960) connection closed 2024-11-21T08:50:31.979440Z node 7 :HTTP DEBUG: (#44,[::1]:54976) incoming connection opened 2024-11-21T08:50:31.979463Z node 7 :HTTP DEBUG: (#44,[::1]:54976) -> (POST /Root) 2024-11-21T08:50:31.979493Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18bc:777a:b004:0:bc:777a:b004:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8 2024-11-21T08:50:31.979575Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8] got new request from [18bc:777a:b004:0:bc:777a:b004:0] 2024-11-21T08:50:31.979635Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:31.979638Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:32.006519Z node 7 :SQS WARN: Mark infly [cloud4/000000000000000101v0/1] for reloading. Reason: MessageDeleted 2024-11-21T08:50:32.007065Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8] Got succesfult GRPC response. 
2024-11-21T08:50:32.007132Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [b62fe353-6c8c97f9-5e9b3c9e-fd93dcc8] reply ok 2024-11-21T08:50:32.007193Z node 7 :HTTP DEBUG: (#44,[::1]:54976) <- (200 ) 2024-11-21T08:50:32.007244Z node 7 :HTTP DEBUG: (#44,[::1]:54976) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732179031941","ApproximateFirstReceiveTimestamp":"1732179031995","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAEgu--p77QyKAE","Body":"MessageBody-0","MessageId":"2c4b2583-88daf742-9e9c0cfd-63b1c7b1"}]} 2024-11-21T08:50:32.007761Z node 7 :HTTP DEBUG: (#44,[::1]:54982) incoming connection opened 2024-11-21T08:50:32.007780Z node 7 :HTTP DEBUG: (#44,[::1]:54982) -> (POST /Root) 2024-11-21T08:50:32.007849Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18bc:777a:b004:0:bc:777a:b004:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 5e8b9827-76f5654e-36d7510d-6a1d372c 2024-11-21T08:50:32.007951Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [5e8b9827-76f5654e-36d7510d-6a1d372c] got new request from [18bc:777a:b004:0:bc:777a:b004:0] 2024-11-21T08:50:32.008027Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [5e8b9827-76f5654e-36d7510d-6a1d372c] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:32.008030Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [5e8b9827-76f5654e-36d7510d-6a1d372c] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:32.019893Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [5e8b9827-76f5654e-36d7510d-6a1d372c] Got succesfult GRPC response. 2024-11-21T08:50:32.019964Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [5e8b9827-76f5654e-36d7510d-6a1d372c] reply ok 2024-11-21T08:50:32.020037Z node 7 :HTTP DEBUG: (#44,[::1]:54982) <- (200 ) 2024-11-21T08:50:32.020087Z node 7 :HTTP DEBUG: (#44,[::1]:54982) connection closed Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732179031941","ApproximateFirstReceiveTimestamp":"1732179032008","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAIgyO-p77QyKAE","Body":"MessageBody-1","MessageId":"988c36cc-2c3105f8-7f56b096-96091ba7"}]} 2024-11-21T08:50:32.020642Z node 7 :HTTP DEBUG: (#47,[::1]:54986) incoming connection opened 2024-11-21T08:50:32.020662Z node 7 :HTTP DEBUG: (#47,[::1]:54986) -> (POST /Root) 2024-11-21T08:50:32.020706Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1804:767a:b004:0:4:767a:b004:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 7d775288-9b0e4979-6e1b2194-2546bbaf 2024-11-21T08:50:32.020832Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [7d775288-9b0e4979-6e1b2194-2546bbaf] got new request from [1804:767a:b004:0:4:767a:b004:0] 2024-11-21T08:50:32.020901Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [7d775288-9b0e4979-6e1b2194-2546bbaf] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:32.020905Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [7d775288-9b0e4979-6e1b2194-2546bbaf] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:32.040393Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [7d775288-9b0e4979-6e1b2194-2546bbaf] Got succesfult GRPC response. 2024-11-21T08:50:32.040442Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [7d775288-9b0e4979-6e1b2194-2546bbaf] reply ok 2024-11-21T08:50:32.040600Z node 7 :HTTP DEBUG: (#47,[::1]:54986) <- (200 ) 2024-11-21T08:50:32.040670Z node 7 :HTTP DEBUG: (#47,[::1]:54986) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2024-11-21T08:50:32.041321Z node 7 :HTTP DEBUG: (#44,[::1]:54998) incoming connection opened 2024-11-21T08:50:32.041352Z node 7 :HTTP DEBUG: (#44,[::1]:54998) -> (POST /Root) 2024-11-21T08:50:32.041462Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [1804:767a:b004:0:4:767a:b004:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: d625db14-e7ac0038-74e07029-f8f3a02f 2024-11-21T08:50:32.041557Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d625db14-e7ac0038-74e07029-f8f3a02f] got new request from [1804:767a:b004:0:4:767a:b004:0] 2024-11-21T08:50:32.041634Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [d625db14-e7ac0038-74e07029-f8f3a02f] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T08:50:32.041638Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d625db14-e7ac0038-74e07029-f8f3a02f] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T08:50:32.042135Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [d625db14-e7ac0038-74e07029-f8f3a02f] Got succesfult GRPC response. 
2024-11-21T08:50:32.042152Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d625db14-e7ac0038-74e07029-f8f3a02f] reply ok 2024-11-21T08:50:32.042177Z node 7 :HTTP DEBUG: (#44,[::1]:54998) <- (200 ) 2024-11-21T08:50:32.042222Z node 7 :HTTP DEBUG: (#44,[::1]:54998) connection closed Http output full {} >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:30.206647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:30.206677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.206682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:30.206687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:30.206699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:30.206703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:30.206724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.206806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:30.242021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:30.242052Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:30.264605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:30.265576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:30.265636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:30.283911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:30.284117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:30.284251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.284337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:30.285270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.285596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.285608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2024-11-21T08:50:30.285648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:30.285655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.285662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:30.285677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.286960Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:30.331528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:30.331633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.331715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:30.331760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:30.331768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.340938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.340985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:30.341053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.341066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:30.341071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:30.341077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:30.348612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.348651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:30.348659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:30.349404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.349420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.349427Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.349434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.350091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:30.350561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:30.350620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:30.350818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.350844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:30.350852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.350911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:30.350918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.350952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.350964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:30.351352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.351360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.351409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.351414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:30.351500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.351506Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:30.351518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:30.351525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T08:50:30.351530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:30.351536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.351540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:30.351544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:30.351555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:30.351562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:30.351566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:30.351853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.351868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.351872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:30.351877Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:30.351882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.351896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.161307Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T08:50:32.161791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.161837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.161845Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:32.161858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:32.161892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:32.163904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T08:50:32.163948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T08:50:32.164308Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.164336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:32.164345Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:50:32.164371Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T08:50:32.164404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:17051 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DFCDA1EB-5FCD-41F9-9009-5E859D4337FB amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:17051 Accept: */* 
Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 55888ADE-8FD5-43AB-990D-BEAB3ED1ABEF amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T08:50:32.182948Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:32.182964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:50:32.183058Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:32.183065Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2024-11-21T08:50:32.183091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.183100Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T08:50:32.183413Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:32.183426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:32.183431Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T08:50:32.183437Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:50:32.183443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:50:32.183462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:17051 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F48C8331-CEE1-49E0-AD72-AD5773351FE6 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:17051 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 55454B63-332B-4744-A42D-331C31ADB27E amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T08:50:32.187985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:32.202808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:32.202826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T08:50:32.202849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:32.202863Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:32.202875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.202879Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.202883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:50:32.202889Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T08:50:32.202931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:32.203465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.203542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.203549Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T08:50:32.203563Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T08:50:32.203568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:32.203574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T08:50:32.203587Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T08:50:32.203593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:32.203599Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T08:50:32.203605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T08:50:32.203629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:32.204860Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T08:50:32.204878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T08:50:32.205493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:50:32.205505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:469:2433] TestWaitNotification: OK eventTxId 103
>> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD]
>> TExportToS3Tests::ShouldRetryAtFinalStage
>> TExportToS3Tests::RebootDuringAbortion [GOOD]
>> TExportToS3Tests::ExportStartTime
>> TExportToS3Tests::CorruptedDyNumber [GOOD]
>> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD]
>> DataShardVolatile::DistributedWriteThenSplit
>> test.py::test[select-swap_columns-default.txt-ForceBlocks] [GOOD]
>> test.py::test[select-swap_columns-default.txt-Plan] [GOOD]
>> test.py::test[select-swap_columns-default.txt-Results]
>> TKeyValueTest::TestGetStatusWorks
>> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD]
>> test.py::test[window-win_func_percent_rank-default.txt-Debug]
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
>> TExportToS3Tests::ShouldSucceedOnConcurrentExport
>> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD]
>> test.py::test[aggregate-percentile_interval-default.txt-Debug]
>> TExportToS3Tests::ExportStartTime [GOOD]
>> test.py::test[in-in_ansi_join--Analyze] [GOOD]
>> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed
>> test.py::test[in-in_ansi_join--Debug]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:31.180512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:31.180535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.180540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2024-11-21T08:50:31.180545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:31.180556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:31.180560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:31.180579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.180649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.191223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:31.191239Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.193807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.194547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.194584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.195505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.195615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.195691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.195738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.196407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.196655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.196663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.196697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.196704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.196709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.196719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.197664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.213095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.213170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.213221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.213253Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.213260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.213834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.213855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:31.213883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.213891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.213894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.213897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.214212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.214222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.214224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.214678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.214687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.214691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.214695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.215063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.215340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.215383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.215523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.215539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.215545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.215578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.215582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.215599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.215607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.215900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.215905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.215931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.215934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.215987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.215991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.215999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.216001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.216005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.216008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.216011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.216013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.216020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:31.216024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.216027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.216240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.216253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.216258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.216262Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.216266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.216279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.013251Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T08:50:33.013664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.013696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.013703Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:33.013716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.013747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:33.014016Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T08:50:33.014049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2024-11-21T08:50:33.014185Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.014204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:33.014211Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:50:33.014229Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T08:50:33.014252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T08:50:33.029342Z node 3 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:33.029358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T08:50:33.029463Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:33.029471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2024-11-21T08:50:33.029612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.029622Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T08:50:33.029795Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:33.029807Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:33.029811Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T08:50:33.029817Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:50:33.029824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:50:33.029840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2024-11-21T08:50:33.030415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:27562 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 51D38F95-8976-41F0-9664-309A62602AAE amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:27562 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7E256C7F-60F1-4E9D-9D78-EEC858EB4A09 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 43 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:27562 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 83A5F50B-7D86-4020-8B1F-4E3EE97600DF amz-sdk-request: attempt=1 content-length: 602 content-md5: 
GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:27562 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2889808C-C10E-447D-9254-DB193FAAB095 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T08:50:33.043883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.043906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2024-11-21T08:50:33.043929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.043944Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.043958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.043962Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.043966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:50:33.043974Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T08:50:33.044020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:33.047231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.047358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.047370Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 
2024-11-21T08:50:33.047392Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T08:50:33.047397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.047405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T08:50:33.047429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T08:50:33.047439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.047445Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T08:50:33.047450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T08:50:33.047477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:50:33.047984Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T08:50:33.048001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T08:50:33.048410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:50:33.048422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:526:2480] TestWaitNotification: OK eventTxId 102
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:30.776459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:30.776493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.776499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:30.776504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:30.776517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:30.776522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:30.776532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.776616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:30.797695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:30.797720Z node 1 :IMPORT WARN: Table
profiles were not loaded 2024-11-21T08:50:30.802749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:30.803580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:30.803643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:30.805088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:30.805431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:30.805520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.805606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:30.806469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.806751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.806762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.806803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:30.806810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.806816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:30.806829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.808120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:30.841076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:30.841163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.841241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:30.841283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:30.841291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.842257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.842286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:30.842339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.842350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:30.842356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:30.842360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:30.842755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.842768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:30.842773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:30.843132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.843142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.843148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.843154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.843683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:30.844117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:30.844170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:30.844368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.844391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:30.844398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.844448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:30.844454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.844495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.844508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:30.844891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.844898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.844943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.844948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:30.845034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.845041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:30.845052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:30.845059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.845065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:30.845070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.845074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:30.845078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:30.845088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:30.845094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:30.845098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:30.845388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.845403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.845408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:30.845413Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:30.845417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.845429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
3 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T08:50:32.752942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.752950Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.752959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:32.752985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:32.753068Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.753081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.753085Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:32.753090Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T08:50:32.753097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:50:32.753165Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.753175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.753179Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:32.753182Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T08:50:32.753186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:50:32.753194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T08:50:32.760719Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T08:50:32.760817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T08:50:32.760827Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T08:50:32.760833Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction 
is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T08:50:32.761049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2024-11-21T08:50:32.761170Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761207Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761255Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T08:50:32.761259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:32.761274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:50:32.761286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:32.761293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T08:50:32.761302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:32.761309Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T08:50:32.761313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T08:50:32.761323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:50:32.761329Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T08:50:32.761333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:50:32.761337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:50:32.761500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.761523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 281474976710763 2024-11-21T08:50:32.761847Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:32.761894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:50:32.761916Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:32.761920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T08:50:32.761925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T08:50:32.762085Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762096Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762101Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:32.762105Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:50:32.762110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:50:32.762215Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762228Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:32.762235Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:50:32.762238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:32.762248Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T08:50:32.762253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:123:2149] 2024-11-21T08:50:32.762791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:32.762830Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T08:50:32.762841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T08:50:32.762848Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T08:50:32.762852Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T08:50:32.762857Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T08:50:32.763141Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T08:50:32.763158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:50:32.763164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:823:2760] TestWaitNotification: OK eventTxId 103
>> test.py::test[pg-join_using_case_insensetive2-default.txt-Debug] [GOOD]
>> test.py::test[pg-join_using_case_insensetive2-default.txt-ForceBlocks]
>> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD]
>> TExportToS3Tests::ShouldSucceedOnConcurrentImport
>> test.py::test[weak_field-weak_field_esc_string--Debug] [GOOD]
>> test.py::test[weak_field-weak_field_esc_string--ForceBlocks]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:31.682654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:31.682680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.682686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:31.682691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:31.682703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:31.682708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:31.682727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.682854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.695131Z node 1 :FLAT_TX_SCHEMESHARD WARN:
Cannot subscribe to console configs 2024-11-21T08:50:31.695155Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.697946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.698811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.698856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.700009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.700165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.700263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.700320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.701113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.701392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.701403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.701445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.701452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.701460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.701472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.702696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.720837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.720919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.720981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.721023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.721032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.721636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.721660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2024-11-21T08:50:31.721690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.721700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.721705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.721710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.722018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.722027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.722032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.722293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.722314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.722320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.722327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.722985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.723345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.723394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.723584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.723605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.723611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.723660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.723666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.723693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.723705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.724082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.724090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.724135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.724140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.724242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.724250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.724265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.724270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.724276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.724281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.724286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.724290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.724301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:31.724308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.724312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.724601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.724615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.724620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.724626Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.724631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.724643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tabletId# 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: PREPARED TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 97000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 56 } } 2024-11-21T08:50:33.416873Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:50:33.416901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.416907Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T08:50:33.419021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419089Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.419154Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:33.419576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T08:50:33.419614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T08:50:33.419770Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419792Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419801Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:50:33.419824Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T08:50:33.419854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: 
minStep5000005 State->FrontStep: 5000005 2024-11-21T08:50:33.603974Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:33.603997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:50:33.604096Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:33.604104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2024-11-21T08:50:33.604283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.604295Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T08:50:33.604757Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:33.604775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:33.604780Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T08:50:33.604787Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:50:33.604794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:50:33.604814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2024-11-21T08:50:33.604962Z node 3 :DATASHARD_BACKUP ERROR: [Export] [scanner] Error read data from table: Invalid DyNumber binary representation REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:27580 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2D5D00F3-7B2B-484A-A066-9A0F6E5BD9FB amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2024-11-21T08:50:33.616787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2024-11-21T08:50:33.628971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.628996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 
72075186233409547, partId: 0 2024-11-21T08:50:33.629023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.629038Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:33.629058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.629061Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.629066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:50:33.629075Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T08:50:33.629119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:33.629542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.629617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.629626Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T08:50:33.629639Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T08:50:33.629643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.629650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T08:50:33.629664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T08:50:33.629670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:50:33.629676Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T08:50:33.629680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T08:50:33.629704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:33.630073Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T08:50:33.630086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T08:50:33.630127Z node 3 :EXPORT NOTICE: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid DyNumber binary representation' } 2024-11-21T08:50:33.630498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:50:33.630514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:460:2424] TestWaitNotification: OK eventTxId 102 >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> test.py::test[expr-list_from_range_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_from_range_opt-default.txt-Plan] [GOOD] >> test.py::test[expr-list_from_range_opt-default.txt-Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Debug] [GOOD] |86.7%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportStartTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:31.555576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:31.555601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.555606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:31.555611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:31.555623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:31.555627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:31.555649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.555715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2024-11-21T08:50:31.579211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:31.579229Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.588900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.589720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.589767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.593302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.593473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.593565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.593620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.600476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.600745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.600755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.600790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.600798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.600803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.600814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.601865Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.635222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.635314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.635387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.635427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.635435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.638936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.638971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:31.639009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.639020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.639025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.639029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.639442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.639453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.639460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.639816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.639826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.639832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.639838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.640473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.640853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.640907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.641066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.641089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.641095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.641146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.641153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.641181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2024-11-21T08:50:31.641192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.641564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.641571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.641611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.641617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.641704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.641711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.641722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.641727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.641735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.641740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.641745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.641749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.641761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:31.641767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.641771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.642053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.642066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.642071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.642076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.642080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.642091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
08:50:34.327352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710757 2024-11-21T08:50:34.327728Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2024-11-21T08:50:34.328410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpCreateConsistentCopyTables CreateConsistentCopyTables { CopyTableDescriptions { SrcPath: "/MyRoot/Table" DstPath: "/MyRoot/export-102/0" OmitIndexes: true OmitFollowers: true IsBackup: true } } Internal: true } TxId: 281474976710758 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:34.328483Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/export-102/0, opId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:50:34.328602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: export-102, child name: 0, child id: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:50:34.328621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2024-11-21T08:50:34.328626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:50:34.328636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:50:34.328656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:50:34.328706Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:34.328832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:50:34.328843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:34.329319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2024-11-21T08:50:34.329347Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710758, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /MyRoot/export-102/0 2024-11-21T08:50:34.329399Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2024-11-21T08:50:34.329409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4 2024-11-21T08:50:34.329424Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:34.329428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:50:34.329457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:50:34.329475Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:34.329481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 3 2024-11-21T08:50:34.329486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2024-11-21T08:50:34.329623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:50:34.329639Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxCopyTable, at tablet72057594046678944 2024-11-21T08:50:34.329702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T08:50:34.329807Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.329822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.329827Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T08:50:34.329832Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:50:34.329838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:50:34.331078Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.331097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.331102Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T08:50:34.331107Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2024-11-21T08:50:34.331111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:50:34.331126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T08:50:34.331837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T08:50:34.331852Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T08:50:34.331859Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T08:50:34.335572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2024-11-21T08:50:34.335630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72057594037968897 2024-11-21T08:50:34.335638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T08:50:34.335750Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T08:50:34.335813Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2024-11-21T08:50:34.335851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:50:34.335857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T08:50:34.335874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:50:34.335882Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T08:50:34.335889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:50:34.335919Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 2 -> 3 2024-11-21T08:50:34.336246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.336278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T08:50:34.336894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:50:34.336938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 
2024-11-21T08:50:34.336947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2024-11-21T08:50:34.336956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:2 at tablet# 72057594046678944 2024-11-21T08:50:34.337766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:50:34.337802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2024-11-21T08:50:34.337818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409547 2024-11-21T08:50:34.337822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> test.py::test[window-win_func_percent_rank-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_percent_rank-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_percent_rank-default.txt-Results] >> test.py::test[select-to_dict-default.txt-Analyze] |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Debug] >> TKeyValueTest::TestBasicWriteRead >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> test.py::test[produce-process_streaming-default.txt-Debug] [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan >> test.py::test[produce-process_streaming-default.txt-Plan] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] |86.7%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> test.py::test[expr-list_from_range_opt-default.txt-Results] [GOOD] >> TKeyValueTest::TestWriteLongKey >> test.py::test[expr-longint_builtins-default.txt-Analyze] >> test.py::test[pg-join_using_case_insensetive2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Results] |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> test.py::test[select-to_dict-default.txt-Analyze] [GOOD] >> test.py::test[select-to_dict-default.txt-Debug] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Plan] >> test.py::test[weak_field-weak_field_esc_string--Plan] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> KqpScan::ScanDuringSplit10 >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Analyze] >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> test.py::test[window-win_func_percent_rank-default.txt-Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Debug] |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |86.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:33.584235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:33.584267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:33.584272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 
0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:33.584277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:33.584289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:33.584293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:33.584314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:33.584392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:33.609598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:33.609623Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:33.623218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:33.624337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:33.624388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:33.625695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:33.625844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:33.625933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.625999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:33.626786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:33.627051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:33.627061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:33.627102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:33.627109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:33.627115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:33.627128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.634913Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:33.659313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:33.659442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.659529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:33.659575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:33.659584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.672650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.672704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:33.672866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.672885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:33.672891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:33.672896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:33.674130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.674151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:33.674158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:33.674572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.674583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.674589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:33.674596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:33.675315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:33.675665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:33.675721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:33.675907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:33.675930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 
RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:33.675937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:33.675994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:33.676001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:33.676036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:33.676047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:33.676436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:33.676444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:33.676494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:33.676499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:33.676587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:33.676594Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:33.676606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:33.676611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:33.676617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:33.676623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:33.676627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:33.676632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:33.676643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:33.676649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:33.676653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:33.676975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:33.676987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:33.676992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 
72057594046678944, txId: 1 2024-11-21T08:50:33.676998Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:33.677002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:33.677013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lt, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.000187Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2024-11-21T08:50:36.000570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.000615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.000621Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:36.000635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2024-11-21T08:50:36.000667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:36.000927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2024-11-21T08:50:36.000959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2024-11-21T08:50:36.001043Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:36.001062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:36.001069Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2024-11-21T08:50:36.001091Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2024-11-21T08:50:36.001117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:26137 Accept: */* Connection: Upgrade, HTTP2-Settings 
Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FBD6D3AF-033B-44FF-B534-88D0A10C1BC7 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 REQUEST: PUT /Backup2/permissions.pb HTTP/1.1 HEADERS: Host: localhost:26137 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6016BCCC-8496-4904-99B0-53C11EB3C17D amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/permissions.pb / / 43 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2024-11-21T08:50:36.016622Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:36.016645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T08:50:36.016752Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:36.016760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2024-11-21T08:50:36.017018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.017033Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:36.017335Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T08:50:36.017353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T08:50:36.017358Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2024-11-21T08:50:36.017364Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T08:50:36.017385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:50:36.017408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:26137 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 028CB04D-ADE7-4F83-B531-DA7F68B7351D amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 2024-11-21T08:50:36.022164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:26137 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 38773208-1B67-4F1A-940D-626CB69D52DE amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2024-11-21T08:50:36.035655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 800 RawX2: 12884904629 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:36.035687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2024-11-21T08:50:36.035715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 800 RawX2: 12884904629 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:36.035729Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 800 RawX2: 12884904629 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T08:50:36.035745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:36.035749Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.035754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:50:36.035762Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2024-11-21T08:50:36.035819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:36.036350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.036431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T08:50:36.036442Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 
281474976710765:0 ProgressState 2024-11-21T08:50:36.036456Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2024-11-21T08:50:36.036460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T08:50:36.036467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2024-11-21T08:50:36.036480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710765 2024-11-21T08:50:36.036486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T08:50:36.036492Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2024-11-21T08:50:36.036497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2024-11-21T08:50:36.036520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T08:50:36.037009Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2024-11-21T08:50:36.037022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2024-11-21T08:50:36.037367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:50:36.037391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:829:2763] TestWaitNotification: OK eventTxId 104 |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |86.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> KqpScan::RemoteShardScan >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-Debug] >> test.py::test[expr-longint_builtins-default.txt-Analyze] [GOOD] >> test.py::test[expr-longint_builtins-default.txt-Debug] >> test.py::test[pg-join_using_case_insensetive2-default.txt-Results] [GOOD] >> test.py::test[pg-nullif-default.txt-Analyze] >> test.py::test[produce-reduce_all_list-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_lambda-default.txt-Debug] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> test.py::test[select-to_dict-default.txt-Debug] [GOOD] >> test.py::test[select-to_dict-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Analyze] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation >> TExportToS3Tests::AuditCancelledExport [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Debug] [GOOD] >> 
test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 10 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 16 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 22 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 28 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 34 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 40 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 46 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 52 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 58 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 64 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 70 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 76 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 82 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 88 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 94 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 100 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 106 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 112 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 118 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 124 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 130 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 136 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 142 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 148 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 154 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 160 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 166 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 172 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 178 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 184 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 190 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 196 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 202 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 208 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 214 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 220 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 226 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 232 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 238 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 244 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 250 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 256 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 262 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 268 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 274 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 280 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 286 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 292 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 298 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 304 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 310 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 316 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 322 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 328 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 334 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 340 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 346 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 352 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 358 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 364 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 370 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 376 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 382 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 388 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 394 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 400 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 406 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 412 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 418 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 424 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 430 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 436 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 442 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 448 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 454 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 460 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 466 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 472 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 478 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 484 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> test.py::test[blocks-date_not_equals--Debug] [GOOD] >> test.py::test[blocks-date_not_equals--ForceBlocks] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Plan] >> test.py::test[expr-longint_builtins-default.txt-Debug] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-interval_add_date--Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Plan] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> test.py::test[expr-longint_builtins-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Analyze] [GOOD] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Debug] >> KqpScan::ScanRetryRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T08:50:32.762531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:32.762553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:32.762559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:32.762564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:32.762575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:32.762579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T08:50:32.762588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:32.762668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:32.774320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:32.774342Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:32.776761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:32.776786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:32.776825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:32.779207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:32.779252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:32.779345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.779388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:32.779935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:32.780240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:32.780252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:32.780296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:32.780304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:32.780311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:32.780324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.781496Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:32.798521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:32.798586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.798644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:32.798679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:32.798686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799193Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:32.799261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:32.799273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:32.799278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:32.799585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:32.799849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.799862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:32.799868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:32.800482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:32.800851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:32.800896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:32.801057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:32.801079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:32.801088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:32.801134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:32.801141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:32.801163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:32.801174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:32.801563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:32.801571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:32.801604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:32.801609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:32.801671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:32.801677Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:32.801688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:32.801693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:32.801698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:32.801703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:32.801707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:32.801710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:32.801720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:32.801726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:32.801729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:32.802012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:32.802027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:32.802033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:32.802038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:32.802042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:32.802057Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... opose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:2720" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 
0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:39.378403Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.378439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:50:39.378529Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:39.378537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.379134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:50:39.379146Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:50:39.379254Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:39.379296Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T08:50:39.379354Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T08:50:39.379361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2024-11-21T08:50:39.379417Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.379427Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T08:50:39.379433Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T08:50:39.379439Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T08:50:39.379934Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 
2024-11-21T08:50:39.379945Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T08:50:39.380037Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380043Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380064Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T08:50:39.380579Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380621Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380626Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380641Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T08:50:39.380730Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T08:50:39.380747Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T08:50:39.380843Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:50:39.380874Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T08:50:39.381331Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:50:39.381395Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:50:39.381402Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:549:2511] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2024-11-21T08:50:38.770826Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T08:50:38.778882Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T08:50:39.113445Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2024-11-21T08:50:39.122462Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 
2024-11-21T08:50:39.129428Z: component=schemeshard, tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T08:50:39.379288Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T08:50:39.380791Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T08:50:38.797674Z, end_time=2024-11-21T08:51:08.845674Z AUDIT LOG checked line: 2024-11-21T08:50:39.380791Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T08:50:38.797674Z, end_time=2024-11-21T08:51:08.845674Z |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> test.py::test[pg-nullif-default.txt-Analyze] [GOOD] >> test.py::test[pg-nullif-default.txt-Debug] >> test.py::test[window-win_over_few_partitions_other--Analyze] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Debug] |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> test.py::test[select-to_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-to_dict-default.txt-Plan] [GOOD] >> test.py::test[select-to_dict-default.txt-Results] >> test.py::test[aggregate-percentile_interval-default.txt-Debug] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Plan] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> test.py::test[window-win_func_with_struct_access-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Debug] [GOOD] 
>> test.py::test[weak_field-weak_member_string_copy-default.txt-ForceBlocks] >> test.py::test[expr-longint_builtins-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true >> test.py::test[pg-nullif-default.txt-Debug] [GOOD] >> test.py::test[pg-nullif-default.txt-ForceBlocks] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[produce-reduce_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Plan] [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> test.py::test[window-win_over_few_partitions_other--Debug] [GOOD] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] >> test.py::test[blocks-interval_add_date--Debug] [GOOD] >> test.py::test[pg-nullif-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nullif-default.txt-Plan] [GOOD] >> test.py::test[pg-nullif-default.txt-Results] >> test.py::test[blocks-interval_add_date--Plan] [GOOD] >> test.py::test[blocks-interval_add_date--Results] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> test.py::test[weak_field-weak_member_string_copy-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Plan] [GOOD] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-Analyze] >> test.py::test[select-to_dict-default.txt-Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-Analyze] >> test.py::test[in-in_ansi_join--Debug] [GOOD] >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> test.py::test[in-in_ansi_join--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T08:50:30.867796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:30.867823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.867828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2024-11-21T08:50:30.867833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:30.867844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:30.867848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:30.867856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.867928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:30.886619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:30.886644Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:30.894653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:30.894683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:30.894720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:30.897139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:30.897181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:30.897260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.897300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:30.904697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.904985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.904997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.905033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:30.905042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.905047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:30.905062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.906302Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:30.935914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:30.935993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.936052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:30.936089Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:30.936097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.940932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.940975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:30.941031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.941043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:30.941047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:30.941053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:30.941466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.941478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:30.941483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:30.941788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.941799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.941805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.941812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.942408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:30.942800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:30.942850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:30.943028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:30.943051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:30.943058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.943107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:30.943114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:30.943140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.943151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:30.943513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:30.943522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:30.943562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:30.943567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:30.943632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:30.943638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:30.943649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:30.943653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.943658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:30.943663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:30.943667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:30.943671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:30.943681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:30.943687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:30.943691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:30.943974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.943987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:30.943992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:30.943997Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:30.944001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:30.944013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d manually' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T08:50:42.917067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.917074Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.917080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:42.917099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:42.917203Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.917213Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.917217Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:42.917221Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T08:50:42.917225Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:50:42.917317Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.917326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.917330Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:42.917333Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T08:50:42.917337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:42.917344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T08:50:42.917714Z node 3 :EXPORT DEBUG: 
TExport::TTxProgress: DoComplete 2024-11-21T08:50:42.917736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T08:50:42.917743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T08:50:42.917749Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T08:50:42.918057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T08:50:42.918083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:50:42.918216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2024-11-21T08:50:42.918288Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:42.918305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 12884904042 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:42.918311Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T08:50:42.918343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.918351Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T08:50:42.918355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:42.918363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:50:42.918370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:50:42.918375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T08:50:42.918381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T08:50:42.918385Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T08:50:42.918388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T08:50:42.918394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:50:42.918399Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T08:50:42.918402Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:50:42.918405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:50:42.918512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919112Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:42.919125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:42.919150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:50:42.919169Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:42.919173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T08:50:42.919177Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T08:50:42.919292Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919306Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:42.919310Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:50:42.919313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:50:42.919379Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919390Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T08:50:42.919393Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:50:42.919396Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:50:42.919403Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T08:50:42.919407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:124:2150] 2024-11-21T08:50:42.919911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T08:50:42.919996Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T08:50:42.920006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T08:50:42.920014Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T08:50:42.920018Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T08:50:42.920022Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T08:50:42.920335Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T08:50:42.920356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:50:42.920361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:753:2693] TestWaitNotification: OK eventTxId 103 |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[pg-nullif-default.txt-Results] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Analyze] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_library--Debug] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Analyze] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Analyze] >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges |86.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> test.py::test[select-trivial_between-default.txt-Analyze] [GOOD] >> test.py::test[select-trivial_between-default.txt-Debug] |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit >> test.py::test[key_filter-complex-default.txt-Analyze] [GOOD] >> test.py::test[key_filter-complex-default.txt-Debug] >> 
test.py::test[window-win_over_few_partitions_other--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Plan] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Analyze] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Analyze] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Debug] >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[aggregate-agg_phases_table3-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Debug] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test.py::test[blocks-nested_optionals--Debug] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[bigdate-round-default.txt-Debug] >> test.py::test[blocks-date_not_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_not_equals--Plan] >> SystemView::ConcurrentScans >> test.py::test[blocks-date_not_equals--Plan] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 11 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 17 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 23 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 29 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 35 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 41 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 47 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 53 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 59 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 65 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 71 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 77 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 83 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 89 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 95 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 101 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 107 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 113 
BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 119 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 125 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 131 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 137 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 143 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 149 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 155 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 161 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 167 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 173 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 179 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 185 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 191 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 197 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 203 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 209 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 215 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 221 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 227 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 233 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 239 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 245 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 251 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 257 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 263 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 269 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 275 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 281 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 287 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 293 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 299 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 305 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 311 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 317 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 323 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 329 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 335 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 341 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 347 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 353 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 359 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 365 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 371 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 377 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 383 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 389 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 395 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 401 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 407 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 413 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 419 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 425 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 431 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 437 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 443 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 449 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 455 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 461 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 467 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 473 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 479 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 485 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> test.py::test[select-trivial_between-default.txt-Debug] [GOOD] >> test.py::test[select-trivial_between-default.txt-ForceBlocks] >> test.py::test[pg-numeric_to_pg-default.txt-Debug] [GOOD] >> 
TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-ForceBlocks] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1 [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> test.py::test[window-win_multiaggr_library--Debug] [GOOD] >> test.py::test[window-win_multiaggr_library--Plan] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> test.py::test[bigdate-round-default.txt-Debug] [GOOD] >> test.py::test[key_filter-complex-default.txt-Debug] [GOOD] >> test.py::test[bigdate-round-default.txt-Plan] [GOOD] >> test.py::test[bigdate-round-default.txt-Results] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] >> test.py::test[aggregate-agg_phases_table3-default.txt-Debug] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] >> test.py::test[blocks-nested_optionals--Debug] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] >> LabeledDbCounters::TwoTablets >> TKeyValueTest::TestWriteLongKey [GOOD] >> KqpScan::ScanRetryReadRanges [GOOD] >> test.py::test[blocks-nested_optionals--Plan] [GOOD] >> test.py::test[blocks-nested_optionals--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx1 [GOOD] Test command err: iteration# 1 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 7 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 13 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 19 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 25 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 31 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 37 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 43 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 49 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 55 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 61 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 67 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 73 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 79 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 85 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 91 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 97 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 103 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 109 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 115 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 121 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 127 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 133 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 139 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 145 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 151 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 157 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 163 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 169 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 175 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 181 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 187 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 193 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 199 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 205 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 211 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 217 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 223 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 229 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 235 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 241 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 247 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 253 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 259 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 265 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 271 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 277 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 283 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 289 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 295 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 301 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 307 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 313 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 319 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 325 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 331 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 337 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 343 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 349 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 355 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 361 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 367 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 373 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 379 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 385 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 391 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 397 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 403 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 409 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 415 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 421 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 427 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 433 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 439 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 445 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 451 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 457 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 463 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 469 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 475 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 481 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 487 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 493 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 499 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 505 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 511 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 517 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 523 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 529 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 535 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 541 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 547 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 553 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 559 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 565 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 571 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 577 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 583 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 589 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 595 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 601 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 607 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 613 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 619 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 625 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 631 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 637 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 643 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 649 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 655 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 661 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 667 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 673 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 679 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 685 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1363 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1369 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1375 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1381 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1387 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1393 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1399 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1405 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1411 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1417 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1423 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1429 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1435 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1441 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1447 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1453 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1459 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1465 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1471 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1477 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1483 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1489 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1495 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1501 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1507 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1513 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1519 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1525 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1531 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1537 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1543 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1549 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1555 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1561 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1567 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1573 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1579 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1585 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1591 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1597 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1603 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1609 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1615 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1621 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1627 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1633 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1639 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1645 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1651 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1657 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1663 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1669 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1675 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1681 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1687 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1693 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1699 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1705 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1711 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1717 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1723 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1729 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1735 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1741 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1747 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1753 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1759 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1765 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1771 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1777 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1783 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1789 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1795 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1801 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1807 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1813 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1819 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1825 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1831 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1837 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1843 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1849 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1855 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1861 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1867 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1873 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1879 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1885 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1891 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1897 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1903 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1909 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1915 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1921 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1927 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1933 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1939 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1945 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1951 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1957 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1963 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1969 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1975 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1981 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1987 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1993 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1999 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2005 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2011 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2017 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2023 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2029 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2035 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] Test command err: iteration# 1 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 7 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 13 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 19 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 25 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 31 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 37 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 43 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 49 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 55 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 61 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 67 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 73 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 79 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 85 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 91 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 97 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 103 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 109 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 115 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 121 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 127 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 133 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 139 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 145 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 151 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 157 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 163 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 169 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 175 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 181 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 187 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 193 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 199 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 205 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 211 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 217 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 223 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 229 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 235 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 241 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 247 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 253 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 259 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 265 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 271 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 277 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 283 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 289 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 295 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 301 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 307 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 313 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 319 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 325 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 331 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 337 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 343 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 349 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 355 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 361 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 367 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 373 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 379 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 385 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 391 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 397 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 403 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 409 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 415 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 421 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 427 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 433 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 439 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 445 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 451 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 457 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 463 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 469 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 475 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 481 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 487 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> SystemView::ConcurrentScans [GOOD] >> SystemView::Describe >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> test.py::test[select-trivial_between-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_between-default.txt-Plan] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:142:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:144:2166] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:148:2057] recipient: [5:144:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:147:2167] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:217:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:149:2171] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:153:2057] recipient: [7:149:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:152:2172] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:222:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:150:2171] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:154:2057] recipient: [8:150:2171] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:153:2172] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:223:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] >> test.py::test[pg-numeric_to_pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Results] |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2024-11-21T08:50:41.677776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:41.677911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:41.677944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:41.678083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:41.678121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:41.678144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001615/r3tmp/tmpSKq4uz/pdisk_1.dat 2024-11-21T08:50:41.868539Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:41.977720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:42.067522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:42.067560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:42.068445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:42.068464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:42.079883Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:50:42.080085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:42.080316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:42.491982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:43.114376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1332:2801], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:43.114413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1343:2806], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:43.114427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:43.115618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:50:43.702197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1346:2809], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:50:43.992244Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yn8s9avhh9mzxg7f01nt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI0MTQ2NDQtNDE4MGMzYi1mZmU1MWIwYi00NDkyZmMyNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2024-11-21T08:50:44.416793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yn9n75rfxxaymeng1088g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ0ZWMyM2MtZDU2NTQ3MDktZDEyZGI0NGItNDNlMDVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1545:2929] -> [2:1501:2404] -- EvScanData from [2:1550:2411]: pass 2024-11-21T08:50:44.634533Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yn9n75rfxxaymeng1088g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ0ZWMyM2MtZDU2NTQ3MDktZDEyZGI0NGItNDNlMDVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T08:50:44.635363Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T08:50:46.093927Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:50:46.097238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:46.097316Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:46.097427Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:46.097686Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:46.097727Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001615/r3tmp/tmpnT5HVu/pdisk_1.dat 2024-11-21T08:50:46.281540Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:46.383690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:46.480938Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:46.480974Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:46.483646Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:46.483696Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:46.504763Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T08:50:46.504918Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:46.505059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:46.868540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:47.439963Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1327:2799], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:47.439991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1338:2804], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:47.440062Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:47.448244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:50:48.067182Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1341:2807], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:50:48.247926Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ynd0ffjtdv55hstnndf7y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmYxZDNhYzAtYjQ2OTljNWEtZjkxYjc4Y2MtMWRjNTI0MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2024-11-21T08:50:48.734609Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yndt7fvckqvbqzma5k13r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjM4ZTBhN2ItZTg0NTA2OC05MzliNDliZC1mZjJlZDg0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1544:2930] -> [4:1498:2402] -- EvScanData from [4:1548:2409]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2024-11-21T08:50:48.739518Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[bigdate-round-default.txt-Results] [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--Debug] >> KqpScan::ScanPg [GOOD] |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> test.py::test[pg-numeric_to_pg-default.txt-Results] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Analyze] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> 
TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard >> test.py::test[window-win_multiaggr_library--Results] [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2024-11-21T08:50:38.560142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:38.560302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:38.560336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:38.560461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:38.560495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:38.560517Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dfc/r3tmp/tmpDf0Qgz/pdisk_1.dat 2024-11-21T08:50:38.674902Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:38.835681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:38.953164Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:38.953790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:38.953818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:38.954711Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:50:38.954817Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:38.960574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:38.960609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:38.960909Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T08:50:38.981055Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:50:38.981336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:38.981483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:39.387693Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T08:50:39.387722Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:50:39.387758Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1165:2710] 2024-11-21T08:50:39.407196Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:50:39.407610Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:50:39.407626Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] 
txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:50:39.407710Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:39.407765Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:50:39.407782Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T08:50:39.408296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:39.408443Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:50:39.409361Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:50:39.409390Z node 1 :TX_PROXY DEBUG: Actor# [1:1165:2710] txid# 281474976715657 SEND to# [1:1072:2650] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:50:39.473969Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1216:2358] 2024-11-21T08:50:39.474039Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:39.500411Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:39.500648Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:50:39.500906Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:50:39.500934Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:50:39.500952Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:50:39.501056Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:50:39.509008Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:50:39.509133Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:50:39.509169Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1240:2373] 2024-11-21T08:50:39.509174Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:50:39.509179Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:50:39.509184Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:39.509492Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:50:39.509521Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:50:39.509566Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:50:39.509574Z node 2 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:39.509583Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:50:39.509588Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:50:39.580664Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1197:2738], serverId# [2:1244:2374], sessionId# [0:0:0] 2024-11-21T08:50:39.580797Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:39.580863Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:50:39.580906Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:50:39.581403Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:50:39.593558Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:39.593614Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:50:39.618682Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2024-11-21T08:50:39.618739Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2024-11-21T08:50:39.618768Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T08:50:39.942743Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1273:2759], serverId# [2:1276:2384], sessionId# [0:0:0] 2024-11-21T08:50:39.943834Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 980 RawX2: 4294969869 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:50:39.943857Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:39.943934Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:50:39.943943Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:50:39.943955Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:50:39.944026Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:50:39.944062Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:50:39.944328Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:50:39.944348Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:50:39.944468Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:50:39.944554Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:39.944899Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:50:39.944909Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:39.952920Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, ... 8:50:45.869685Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2024-11-21T08:50:45.869688Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2024-11-21T08:50:45.869692Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2024-11-21T08:50:45.869695Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2024-11-21T08:50:45.869698Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2024-11-21T08:50:45.869701Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:50:45.869705Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2024-11-21T08:50:45.869709Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2941], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd6ynawfdfxccbsdb44geyt7. SessionId : ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:50:45.869720Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2024-11-21T08:50:45.869737Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:50:45.869769Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T08:50:45.869805Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2902] TxId: 281474976715664. Ctx: { TraceId: 01jd6ynawfdfxccbsdb44geyt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1572:2941], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 393 Tasks { TaskId: 1 CpuTimeUs: 136 FinishTimeMs: 1732179045869 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 132 HostName: "ghrun-qcxhsi27zq" NodeId: 3 StartTimeMs: 1732179045869 } MaxMemoryUsage: 1048576 } 2024-11-21T08:50:45.869810Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jd6ynawfdfxccbsdb44geyt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1572:2941] 2024-11-21T08:50:45.869825Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2902] TxId: 281474976715664. Ctx: { TraceId: 01jd6ynawfdfxccbsdb44geyt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:50:45.869831Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1569:2902] TxId: 281474976715664. Ctx: { TraceId: 01jd6ynawfdfxccbsdb44geyt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T08:50:45.869836Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2902] TxId: 281474976715664. Ctx: { TraceId: 01jd6ynawfdfxccbsdb44geyt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yzk1Mjc4ZjktNGZmOWZkNTQtZWY1MjIxYzItMjFkNTViYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000393s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T08:50:45.870126Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T08:50:45.870145Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Handle TEvProposeTransaction 2024-11-21T08:50:45.870150Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] TxId# 0 ProcessProposeTransaction 2024-11-21T08:50:45.870169Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1574:2942] SnapshotReq marker# P0 2024-11-21T08:50:45.870301Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2942] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2024-11-21T08:50:45.870465Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2942] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2024-11-21T08:50:45.870493Z node 3 :TX_PROXY DEBUG: Actor# [3:1574:2942] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2024-11-21T08:50:47.327470Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:47.327566Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:47.327575Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:47.327628Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:47.327674Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:47.327697Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dfc/r3tmp/tmp2tuZQg/pdisk_1.dat 2024-11-21T08:50:47.522414Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:47.637446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:47.745063Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:47.745101Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:47.746067Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:47.746086Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:47.761217Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T08:50:47.761362Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:47.761465Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:48.115035Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:48.702041Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1327:2796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.702072Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1337:2801], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.702083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.703180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:50:49.271817Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1341:2804], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:50:49.533301Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yne7x7zmcq017rsvkrfyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NWQ2MGU2NmMtYjIxNTg2YmMtZDJlZTllZDctNWRhMGI0Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:49.984796Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ynf2p78thbghdz611nje7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRhMjljZTktY2JlODE5NzctNjhlNDJlNjctOWEyYzBlNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:50.171799Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ynf2p78thbghdz611nje7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRhMjljZTktY2JlODE5NzctNjhlNDJlNjctOWEyYzBlNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:50.172896Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Plan] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Plan] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Analyze] >> test.py::test[key_filter-complex-default.txt-Plan] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> SystemView::Describe [GOOD] >> SystemView::DescribeAccessDenied >> TSequenceReboots::CopyTableWithSequence >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Plan] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: 2024-11-21T08:50:28.859622Z node 1 :BS_PROXY_GET ERROR: [47ad982f08e135f5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] DEADLINE Size# 0 RequestedSize# 5} ErrorReason# "status# DEADLINE from# [0:1:0:0:0]"} Marker# BPG29 2024-11-21T08:50:28.859659Z node 1 :BS_VDISK_PATCH ERROR: VDISK[0:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [72057594037927937:2:1:2:1:5:0] PatchedBlobId# [72057594037927937:2:1:2:4:5:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [0:1:0:0:0] Marker# BSVSP01 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: 
[3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:143:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:145:2167] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:149:2057] recipient: [5:145:2167] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:148:2168] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:218:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2172] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:154:2057] recipient: [7:150:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2173] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:153:2174] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:157:2057] recipient: [8:153:2174] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:156:2175] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:156:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:155:2176] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:159:2057] recipient: [10:155:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:158:2177] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:228:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:156:2176] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:160:2057] recipient: [11:156:2176] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:159:2177] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:229:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:157:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:160:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:159:2179] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:163:2057] recipient: [12:159:2179] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:162:2180] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:215:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:161:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:163:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:165:2057] recipient: [13:164:2183] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:167:2057] recipient: [13:164:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:166:2184] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:219:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:166:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:168:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:172:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:171:2189] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:241:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard >> test.py::test[in-in_ansi_join--ForceBlocks] [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore >> test.py::test[in-in_ansi_join--Plan] [GOOD] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[pg-range_function_multi-default.txt-Analyze] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Debug] |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |86.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> test.py::test[select-two_selects_with_diff_fields-default.txt-Analyze] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Debug] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> 
test.py::test[blocks-date_not_equals--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Analyze] >> TSequenceReboots::CreateDropRecreate >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Analyze] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Analyze] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Debug] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--ForceBlocks] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Plan] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScriptingQueries >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:149:2057] recipient: [4:147:2169] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:151:2057] recipient: [4:147:2169] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:150:2170] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:220:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:148:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:149:2057] recipient: [12:147:2169] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:151:2057] recipient: [12:147:2169] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:150:2170] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:220:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Analyze] >> test.py::test[blocks-decimal_multiplicative_ops--Analyze] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Debug] >> test.py::test[pg-range_function_multi-default.txt-Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 25177, MsgBus: 8777 2024-11-21T08:50:17.153449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652230116539068:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:17.185080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb8/r3tmp/tmpyUCiVx/pdisk_1.dat 2024-11-21T08:50:17.254595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25177, node 1 2024-11-21T08:50:17.272925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:17.272935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:17.272937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:17.272971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:17.304666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:17.304690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2024-11-21T08:50:17.305638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8777 TClient is connected to server localhost:8777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:17.759606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:17.782445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:17.791661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:17.977642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.131146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.183474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:18.388680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652234411507796:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.388707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.486900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.516893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.557082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.567865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.585407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.615114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:18.640127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652234411508320:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.640153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.640334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652234411508325:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:18.641150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:18.646628Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:50:18.652869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652234411508327:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T08:50:18.861725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:19.383609Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652238706477433:2681], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01jd6ymhk9cma9k5qdc6h44pgr. SessionId : ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InputTransform[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } } 2024-11-21T08:50:19.384661Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652238706477433:2681], TxId: 281474976715706, task: 2. Ctx: { TraceId : 01jd6ymhk9cma9k5qdc6h44pgr. SessionId : ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } }. 2024-11-21T08:50:19.385088Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652238706477434:2682], TxId: 281474976715706, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=. TraceId : 01jd6ymhk9cma9k5qdc6h44pgr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439652238706477308:2515], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T08:50:19.385353Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=, ActorId: [1:7439652234411509322:2515], ActorState: ExecuteState, TraceId: 01jd6ymhk9cma9k5qdc6h44pgr, Create QueryResponse for error on request, msg: 2024-11-21T08:50:22.153163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652230116539068:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:22.153223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:50:29.055318Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652281656164327:2515] TxId: 281474976716796. Ctx: { TraceId: 01jd6ymv08deaagabmmvpk8awe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting data TxId 281474976716796 because datashard 72075186224037927: is in process of split opId 281474976710658 state SplitSrcWaitForNoTxInFlight (wrong shard state); 2024-11-21T08:50:29.057384Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwZWE2N2QtYjcyMmRhNDUtYzE3OGI3M2UtMmZmYTdiODA=, ActorId: [1:7439652234411509322:2515], ActorState: ExecuteState, TraceId: 01jd6ymv08deaagabmmvpk8awe, Create QueryResponse for error on request, msg: 2024-11-21T08:50:29.085140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:50:29.085154Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T08:50:29.085156Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2024-11-21T08:50:29.100580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2024-11-21T08:50:29.100594Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2024-11-21T08:50:29.123054Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T08:50:29.123067Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T08:50:29.134717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2024-11-21T08:50:29.134729Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2024-11-21T08:50:29.134731Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T08:50:29.134733Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T08:50:29.134734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2024-11-21T08:50:29.134821Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037933 not found 2024-11-21T08:50:32.256288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:50:32.256304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:39.166581Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2024-11-21T08:50:39.179669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2024-11-21T08:50:39.179681Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T08:50:39.217574Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2024-11-21T08:50:39.217590Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2024-11-21T08:50:39.217592Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2024-11-21T08:50:39.221269Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found Trying to start YDB, gRPC: 31210, MsgBus: 19864 2024-11-21T08:50:43.589225Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652340865732805:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:43.591606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb8/r3tmp/tmpCD3k31/pdisk_1.dat 2024-11-21T08:50:43.620517Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31210, node 2 2024-11-21T08:50:43.640587Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:43.640598Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:43.640600Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:43.640640Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19864 2024-11-21T08:50:43.696671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:43.696703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:43.704694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19864 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:43.788831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:43.800536Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:43.809389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:43.829017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:43.864657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:43.890130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:44.066464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652345160701503:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:44.066491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:44.073562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.081868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.097987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.167563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.184346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.202806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:44.221398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652345160702015:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:44.221424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:44.221526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652345160702020:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:44.222357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:44.228767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652345160702022:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:44.445988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:48.592273Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652340865732805:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:48.592732Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[expr-longint_builtins-default.txt-ForceBlocks] [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> test.py::test[select-two_selects_with_diff_fields-default.txt-Debug] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] >> SystemView::CollectScriptingQueries [GOOD] >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Analyze] >> test.py::test[blocks-string_as_agg_key--Debug] [GOOD] >> test.py::test[blocks-string_as_agg_key--Plan] [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> test.py::test[window-current/aggregations--Analyze] [GOOD] >> test.py::test[window-current/aggregations--Debug] |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> TPQCachingProxyTest::TestPublishAndForget >> IncrementalBackup::SimpleBackup >> IncrementalBackup::SimpleRestore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2024-11-21T08:50:47.650793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652360674898603:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:47.694527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00359e/r3tmp/tmpl1uwfw/pdisk_1.dat 2024-11-21T08:50:47.799179Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:47.821454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:47.821481Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:47.827759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11528, node 1 2024-11-21T08:50:47.888445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:47.888458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:47.888461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:47.888501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:48.060400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:48.072772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:50:48.125147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:48.420144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866735:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866814:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866815:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866804:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866805:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866806:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866807:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866808:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866809:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866810:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866811:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866812:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866816:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866817:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866818:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866819:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866813:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866823:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866820:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866821:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.420735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652364969866822:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:48.422533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710676:3, at schemeshard: 72057594046644480 2024-11-21T08:50:48.457349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866860:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866852:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866854:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866847:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866861:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652364969866846:2401], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710676 completed, doublechecking } 2024-11-21T08:50:48.457427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [Wo ... ] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:52.417369Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439652381008671362:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:52.418270Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T08:50:52.449132Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652381008671364:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T08:50:52.555173Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ynhw0d9f8rm5rmgxc8xw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZGFlYzQzMzQtODJkYzFlOWYtNjc2MmJmOWMtNjkwNDViMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:52.560275Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:52.655938Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ynj2w847zqr2v3d26b3d5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZGFlYzQzMzQtODJkYzFlOWYtNjc2MmJmOWMtNjkwNDViMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:52.667680Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T08:50:52.759383Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6ynj6c50nkf19gjcr8rdm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZGFlYzQzMzQtODJkYzFlOWYtNjc2MmJmOWMtNjkwNDViMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:52.770888Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied 2024-11-21T08:50:52.771796Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied 2024-11-21T08:50:52.776124Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
: Error: Access denied 2024-11-21T08:50:52.779290Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied 2024-11-21T08:50:52.780267Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied 2024-11-21T08:50:52.781105Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2024-11-21T08:50:52.797767Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T08:50:52.797914Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:50:52.797942Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2024-11-21T08:50:52.797963Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:50:52.798138Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 14 2024-11-21T08:50:52.798183Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:50:52.798648Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T08:50:52.798717Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:50:52.799923Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7439652375118911962:2106], Type=268959746 2024-11-21T08:50:52.799934Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7439652375118911962:2106], Type=268959746 2024-11-21T08:50:52.799937Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7439652375118911962:2106], Type=268959746 2024-11-21T08:50:52.799940Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7439652375118911962:2106], Type=268959746 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00359e/r3tmp/tmpriMrUM/pdisk_1.dat 2024-11-21T08:50:54.164420Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:54.164596Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 30106, node 16 2024-11-21T08:50:54.184395Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:54.184411Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:54.184415Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:54.184461Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:50:54.251864Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:54.251904Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:54.252341Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:54.255109Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:54.255323Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:54.260995Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:54.528787Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439652389205783267:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:54.528819Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:54.528900Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439652389205783279:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:54.529691Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:50:54.532145Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:50:54.532197Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439652389205783281:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:50:54.604804Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ynky09hsaqp4kh3jxd5tt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NWExMmIwZTktNGExN2I3NDktOGY0MzQ2NDEtZjY1YjRiNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:54.660258Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ynm0tfrm8yhwkjqx7t8dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Y2I1ZWJkNTQtYjk2NDg4YTctODBjYzljZWEtMTdjOGU5MmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:54.671963Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179054704, txId: 281474976715662] shutting down 2024-11-21T08:50:54.733476Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ynm2p69ndha3q74hcspjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NGJlMDU4NWMtNWJhYjA0NzItZDM1ZmMxNy00N2VkOThmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:54.734534Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439652389205783451:2335], owner: [16:7439652389205783448:2333], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:50:54.756740Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439652389205783451:2335], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T08:50:54.756869Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439652389205783451:2335], row count: 2, finished: 1 2024-11-21T08:50:54.756881Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439652389205783451:2335], owner: [16:7439652389205783448:2333], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:50:54.762012Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179054719, txId: 281474976715664] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2024-11-21T08:50:39.591433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:50:39.596619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:39.596775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:39.596843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:39.597310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:50:39.597329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001626/r3tmp/tmpli9KBG/pdisk_1.dat 2024-11-21T08:50:39.729829Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:39.840673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:39.945730Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:39.946149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:39.946172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:39.946926Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:50:39.947024Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:39.947331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:39.947351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:39.952666Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T08:50:39.965279Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:50:39.965467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:39.965581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:40.361558Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T08:50:40.361584Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:50:40.361618Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1161:2705] 2024-11-21T08:50:40.371547Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:50:40.371859Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:50:40.371876Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:50:40.371965Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:40.372023Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:50:40.372038Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T08:50:40.372589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:40.372723Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:50:40.373702Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:50:40.373719Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 SEND to# [1:1071:2648] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:50:40.419920Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1240:2764] 2024-11-21T08:50:40.420009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:40.437600Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1242:2765] 2024-11-21T08:50:40.437672Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:40.445090Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:40.445458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:50:40.445617Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:50:40.445627Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:50:40.445635Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:50:40.445685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:50:40.454795Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:50:40.454915Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:50:40.454947Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:1333:2812] 2024-11-21T08:50:40.454954Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 
72075186224037889 2024-11-21T08:50:40.454958Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:50:40.454964Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:50:40.455479Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:50:40.455506Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:50:40.455561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:50:40.455568Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:40.455577Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:50:40.455584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:50:40.455710Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:1263:2777], serverId# [1:1301:2795], sessionId# [0:0:0] 2024-11-21T08:50:40.455759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:50:40.455820Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:50:40.455839Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:50:40.456655Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1244:2766] 2024-11-21T08:50:40.456707Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:40.458084Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:40.458196Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:50:40.458343Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2024-11-21T08:50:40.458351Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2024-11-21T08:50:40.458358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2024-11-21T08:50:40.458397Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:50:40.458406Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2024-11-21T08:50:40.458419Z node 1 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:50:40.458428Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [1:1352:2823] 2024-11-21T08:50:40.458433Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2024-11-21T08:50:40.458436Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2024-11-21T08:50:40.458443Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T08:50:40.458630Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2024-11-21T08:50:40.458641Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2024-11-21T08:50:40.458773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T08:50:40.458780Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:40.458789Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2024-11-21T08:50:40.458794Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2024-11-21T08:50:40.458820Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [1:1268:2782], serverId# [1:1329:2809], sessionId# [0:0:0] 2024-11-21T08:50:40.459026Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2024-11-21T08:50:40.459077Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:50:40.459092Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037894 2024-11-21T08:50:40.460187Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 720575 ... base : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2024-11-21T08:50:54.970167Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2024-11-21T08:50:54.970170Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:50:54.970177Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:50:54.970194Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:50:54.970197Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2024-11-21T08:50:54.970201Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2024-11-21T08:50:54.970204Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Resume execution, run status: Finished 2024-11-21T08:50:54.970206Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2024-11-21T08:50:54.970209Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2024-11-21T08:50:54.970212Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:50:54.970215Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:50:54.970237Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1895:3120], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 135 Tasks { TaskId: 1 CpuTimeUs: 66 FinishTimeMs: 1732179054969 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 57 HostName: "ghrun-qcxhsi27zq" NodeId: 5 StartTimeMs: 1732179054969 } MaxMemoryUsage: 1048576 } 2024-11-21T08:50:54.970243Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1895:3120], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2024-11-21T08:50:54.972131Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1896:3120] 2024-11-21T08:50:54.972164Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2024-11-21T08:50:54.972176Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2024-11-21T08:50:54.972179Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2024-11-21T08:50:54.972243Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. 
SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:50:54.972251Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2024-11-21T08:50:54.972255Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2024-11-21T08:50:54.972263Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2024-11-21T08:50:54.972267Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2024-11-21T08:50:54.972270Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2024-11-21T08:50:54.972276Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:50:54.972281Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2024-11-21T08:50:54.972285Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1895:3120], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd6ynjjeb0gg2sx8da1866kn. SessionId : ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:50:54.972311Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2024-11-21T08:50:54.972347Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:50:54.972419Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T08:50:54.972478Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1895:3120], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 535 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 69 FinishTimeMs: 1732179054972 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 57 HostName: "ghrun-qcxhsi27zq" NodeId: 5 StartTimeMs: 1732179054969 } MaxMemoryUsage: 1048576 } 2024-11-21T08:50:54.972486Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1895:3120] 2024-11-21T08:50:54.972518Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:50:54.972523Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T08:50:54.972530Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1892:2933] TxId: 281474976715667. Ctx: { TraceId: 01jd6ynjjeb0gg2sx8da1866kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjQ3YWU5ZjMtNTkxNzJiMDYtYmRlZGJhMzItODY1ODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000535s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 182 >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |86.8%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:156:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:156:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:156:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:157:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:157:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:159:2179] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:163:2057] recipient: [11:159:2179] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:162:2180] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:215:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:163:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:165:2057] recipient: [12:164:2183] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:167:2057] recipient: [12:164:2183] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:166:2184] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:219:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:166:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:169:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:170:2057] recipient: [13:168:2188] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:172:2057] recipient: [13:168:2188] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:171:2189] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:241:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:167:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:171:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:173:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! 
new actor is[15:172:2189] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:242:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:31.150789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:31.150819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.150825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:31.150830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:31.150842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:31.150846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:31.150855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.150937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.173773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:31.173790Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.184794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.185612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.185650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.186672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.186794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.186872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.186915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.187555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.187794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T08:50:31.187803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.187836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.187842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.187848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.187858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.188890Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.219455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.219512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.219563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.219597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.219604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.224434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.224461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:31.224493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.224503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.224507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.224512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.224941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.224952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.224956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.225284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.225293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.225299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.225305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.225917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.228032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.228086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.228273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.228298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.228305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.228355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.228363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.228391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.228403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.228782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.228790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.228826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.228831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.228907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.228914Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.228925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.228929Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.228935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.228939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.228944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.228948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.228958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:31.228963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.228967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.229251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.229263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.229268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.229273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.229277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.229288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
74976720762 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976720762 2024-11-21T08:50:55.005052Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T08:50:55.005058Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T08:50:55.005065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2024-11-21T08:50:55.005089Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:55.005165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.005174Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.005178Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T08:50:55.005181Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:50:55.005185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:50:55.005298Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.005307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.005311Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T08:50:55.005314Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T08:50:55.005317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:50:55.005325Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2024-11-21T08:50:55.005809Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T08:50:55.005859Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T08:50:55.005864Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2024-11-21T08:50:55.005868Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion 
transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T08:50:55.006054Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2024-11-21T08:50:55.006078Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2024-11-21T08:50:55.006132Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.006152Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:55.006169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 17179871338 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:55.006177Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2024-11-21T08:50:55.006198Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T08:50:55.006205Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2024-11-21T08:50:55.006209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T08:50:55.006216Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:55.006223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:50:55.006228Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2024-11-21T08:50:55.006233Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T08:50:55.006237Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2024-11-21T08:50:55.006240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2024-11-21T08:50:55.006246Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:50:55.006250Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2024-11-21T08:50:55.006254Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:50:55.006257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:50:55.006849Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.007147Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:55.007157Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:55.007182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:50:55.007201Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:55.007205Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2024-11-21T08:50:55.007258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2024-11-21T08:50:55.007395Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.007405Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.007412Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T08:50:55.007416Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:50:55.007421Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:50:55.007509Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.007518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.007522Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T08:50:55.007525Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:50:55.007529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:50:55.007537Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2024-11-21T08:50:55.007541Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:285:2273] 2024-11-21T08:50:55.007897Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.008078Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T08:50:55.008095Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2024-11-21T08:50:55.008104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2024-11-21T08:50:55.008110Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T08:50:55.008114Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2024-11-21T08:50:55.008118Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2024-11-21T08:50:55.008440Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T08:50:55.008458Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:50:55.008463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:697:2641] TestWaitNotification: OK eventTxId 102 |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> test.py::test[blocks-decimal_multiplicative_ops--Debug] [GOOD] >> TReplicationTests::Create >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] >> test.py::test[like-ilike_clause-default.txt-Analyze] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Debug] |86.8%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Debug] >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Plan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2024-11-21T08:50:56.269147Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:50:56.269171Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:50:56.272862Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:50:56.272899Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T08:50:56.272918Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T08:50:56.272925Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T08:50:56.272938Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |86.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate >> DataShardVolatile::TwoAppendsMustBeVolatile [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::Alter >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |86.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> TSubscriberCombinationsTest::MigratedPathRecreation >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TSubscriberTest::NotifyDelete >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> TSubscriberTest::InvalidNotification >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> test.py::test[like-ilike_clause-default.txt-Debug] [GOOD] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] >> TSubscriberTest::NotifyDelete [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Plan] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> test.py::test[window-current/aggregations--Debug] [GOOD] >> test.py::test[window-current/aggregations--ForceBlocks] >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderSimple >> TSubscriberTest::InvalidNotification [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 27487, MsgBus: 25999 2024-11-21T08:50:48.852178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652362735609512:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:48.852419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00445e/r3tmp/tmpaB3VfT/pdisk_1.dat 2024-11-21T08:50:48.958884Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27487, node 1 2024-11-21T08:50:48.983052Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:48.983064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:48.983066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:48.983106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:50:49.020814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:49.020840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:49.024910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25999 TClient is connected to server localhost:25999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:49.201092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.205274Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:49.212307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.267185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.313631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.332888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.517855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652367030578371:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:49.526732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:49.594128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.610177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.631707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.648986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.664927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.728481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:49.749650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652367030578898:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:49.749682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:49.749783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652367030578903:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:49.750665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:49.756242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652367030578905:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:50.094803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.124352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62684, MsgBus: 14059 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00445e/r3tmp/tmpyoED1f/pdisk_1.dat 2024-11-21T08:50:50.576746Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652371090005385:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:50.581507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:50.605972Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62684, node 2 2024-11-21T08:50:50.628468Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:50.628482Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:50.628486Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:50.628531Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14059 TClient is connected to server localhost:14059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:50:50.680916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:50.680944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:50.681262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.684025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:50.684537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:50.696799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:50.708784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.737244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:50.759426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:50.962710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [Workloa ... part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:55.107191Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:55.405897Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439652394868931970:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:55.409700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.414769Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:55.420828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.435265Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.460387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.472381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.489889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.556801Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439652394868932491:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:55.556822Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:55.556887Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439652394868932496:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:55.557816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:55.560049Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439652394868932498:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:55.886984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:55.976372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9242, MsgBus: 27019 2024-11-21T08:50:56.467816Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439652400148600408:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00445e/r3tmp/tmpj3rVFA/pdisk_1.dat 2024-11-21T08:50:56.470470Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:50:56.482598Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9242, node 6 2024-11-21T08:50:56.499419Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:56.499433Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:56.499437Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:56.499481Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27019 TClient is connected to server localhost:27019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:56.566286Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:56.566332Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:56.567403Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:56.569678Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:56.572461Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:56.578518Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:56.597935Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:56.618944Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:56.637274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:56.801330Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652400148601809:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:56.801356Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:56.809267Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.820552Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.843215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.852476Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.863187Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.876365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.900794Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652400148602311:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:56.900824Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:56.900906Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652400148602316:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:56.901856Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:56.905188Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652400148602318:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:57.149687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:57.178536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TSubscriberTest::Boot [GOOD] |86.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:143:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:147:2057] recipient: [12:145:2167] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:149:2057] recipient: [12:145:2167] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:148:2168] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:218:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:148:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:152:2057] recipient: [13:151:2172] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:154:2057] recipient: [13:151:2172] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:153:2173] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:223:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:148:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:152:2057] recipient: [14:151:2172] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:154:2057] recipient: [14:151:2172] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:153:2173] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:224:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:154:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:155:2057] recipient: [15:153:2174] Leader for TabletID 72057594037927937 is [15:156:2175] sender: [15:157:2057] recipient: [15:153:2174] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] 2024-11-21T08:50:58.249988Z node 17 :BS_PROXY_GET ERROR: [47ad982f08e135f5] Response# TEvGetResult {Status# BLOCKED ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] BLOCKED Size# 0 RequestedSize# 5} ErrorReason# "status# BLOCKED from# [0:1:0:0:0]"} Marker# BPG29 2024-11-21T08:50:58.250028Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 24 SentAt# 1970-01-01T00:00:00.024000Z GotAt# 24 ErrorReason# status# BLOCKED from# [0:1:0:0:0] 2024-11-21T08:50:58.250914Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2024-11-21T08:50:58.250929Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3, Details: Status# ALREADY From# [0:1:0:0:0] NodeId# 17 QuorumTracker# {Erroneous# 1 Successful# 0} Marker# TSYS31 >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestore [GOOD] Test command err: 2024-11-21T08:50:56.547088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:56.547739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:56.547775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa8/r3tmp/tmp7SLJpe/pdisk_1.dat 2024-11-21T08:50:56.676903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T08:50:56.676985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:50:56.677088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:56.677105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:56.677409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:56.677425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:56.677430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:56.677530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:56.677598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.677608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.677614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.678147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:56.678263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:56.678309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:50:56.678531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.678537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:50:56.678542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.696719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:50:56.696747Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:56.744789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:56.744826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:56.755399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:56.869206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.869279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:50:56.869305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.869370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:56.869394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.869431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:56.869444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:50:56.869652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:56.869660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:50:56.869698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:56.869704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, 
path id: 1 2024-11-21T08:50:56.869771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.869778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T08:50:56.869790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:56.869795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.869803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:56.869808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.869814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:56.869819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:56.869827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:50:56.869833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:50:56.869838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:50:56.870284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:56.870301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:56.870306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T08:50:56.870312Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:50:56.870317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:56.870332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:50:56.870337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T08:50:56.870483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T08:50:56.871495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:50:56.871557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.871573Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.871670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T08:50:56.871683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T08:50:56.871689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T08:50:56.871711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] wa ... geExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:50:57.946869Z node 1 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037889, generation# 1, at tablet# 72075186224037888 2024-11-21T08:50:57.960643Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:945:2754] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T08:50:57.960687Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:50:57.960708Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [3] } 2024-11-21T08:50:57.960752Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2024-11-21T08:50:57.960769Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 4 BodySize: 18 }] } 2024-11-21T08:50:57.960786Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 0 LockId: 0 LockOffset: 0 
}] } 2024-11-21T08:50:57.960822Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:945:2754] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:50:57.960859Z node 1 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037889, generation# 1, at tablet# 72075186224037888 2024-11-21T08:50:57.973250Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186224037889:1][72075186224037888][1:945:2754] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 4 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 4 2024-11-21T08:50:57.973299Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:50:57.973322Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [4] } 2024-11-21T08:50:57.973340Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Exhausted 2024-11-21T08:50:57.973352Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:939:2754] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData 2024-11-21T08:50:57.973394Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2024-11-21T08:50:57.973410Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][281474976715663][[OwnerId: 72057594046644480, LocalPathId: 7]][[OwnerId: 72057594046644480, LocalPathId: 2]][1:938:2752] Finish 0 2024-11-21T08:50:57.973481Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T08:50:57.973487Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037889 2024-11-21T08:50:57.973538Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:50:57.973548Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:50:57.973558Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715663] at 72075186224037889 for CreateIncrementalRestoreSrc 2024-11-21T08:50:57.973646Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:57.984510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:50:57.984554Z node 1 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2024-11-21T08:50:57.984575Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037889 2024-11-21T08:50:57.984708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2024-11-21T08:50:57.984721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715663, tablet: 72075186224037889, partId: 2 2024-11-21T08:50:57.984753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715663:2, at schemeshard: 72057594046644480, message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2024-11-21T08:50:57.984763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715663:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:50:57.984771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715663:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 840 RawX2: 4294969969 } Origin: 72075186224037889 State: 2 TxId: 281474976715663 Step: 0 Generation: 1 2024-11-21T08:50:57.984785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715663:2, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:57.984789Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2024-11-21T08:50:57.984794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715663:2, datashard: 72075186224037889, at schemeshard: 72057594046644480 2024-11-21T08:50:57.984801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715663:2 129 -> 240 2024-11-21T08:50:57.984984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2024-11-21T08:50:57.985023Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037889 state Ready 2024-11-21T08:50:57.985036Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:50:57.985076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715663:2, at schemeshard: 72057594046644480 2024-11-21T08:50:57.985085Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715663:2 ProgressState 2024-11-21T08:50:57.985098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:2 progress is 5/5 2024-11-21T08:50:57.985103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 5/5 2024-11-21T08:50:57.985110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715663, ready parts: 5/5, is published: true 2024-11-21T08:50:57.985122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715663 2024-11-21T08:50:57.985129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 5/5 2024-11-21T08:50:57.985134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715663:0 2024-11-21T08:50:57.985139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:0 2024-11-21T08:50:57.985149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 5 2024-11-21T08:50:57.985154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:1 2024-11-21T08:50:57.985157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:1 2024-11-21T08:50:57.985162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T08:50:57.985166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:2 2024-11-21T08:50:57.985169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:2 2024-11-21T08:50:57.985184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 4 2024-11-21T08:50:57.985188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:3 2024-11-21T08:50:57.985192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:3 2024-11-21T08:50:57.985197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T08:50:57.985200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:4 2024-11-21T08:50:57.985203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715663:4 2024-11-21T08:50:57.985207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2024-11-21T08:50:58.341279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ynqmn0gepyt8a1j2dhgx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg5YzAwMzgtYzVmMWFlMzUtOTY1NjQzYzMtODA3NjJlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2024-11-21T08:50:58.126938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:50:58.127368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T08:50:58.127392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T08:50:58.127399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T08:50:58.127410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T08:50:58.127438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T08:50:58.127452Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.127463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T08:50:58.127469Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.127541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] 2024-11-21T08:50:58.127547Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:32:2064][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2024-11-21T08:50:58.014103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:50:58.014564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T08:50:58.014600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T08:50:58.014608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T08:50:58.014620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: 
[OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T08:50:58.014629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T08:50:58.014646Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.014677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T08:50:58.014686Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.014772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2024-11-21T08:50:58.014782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2024-11-21T08:50:58.014791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2024-11-21T08:50:58.014802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:34:2065] 2024-11-21T08:50:58.014811Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Path was updated to new version: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.014819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:35:2065] 2024-11-21T08:50:58.014825Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.014834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2065] 2024-11-21T08:50:58.014840Z node 1 
:SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2024-11-21T08:50:57.795969Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T08:50:57.796002Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T08:50:57.796030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T08:50:57.796036Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T08:50:57.796044Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T08:50:57.796047Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T08:50:57.796076Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T08:50:57.796080Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T08:50:57.796096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:50:57.796156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T08:50:57.796162Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2024-11-21T08:50:57.796196Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T08:50:57.796237Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T08:50:57.796241Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2024-11-21T08:50:57.796247Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T08:50:57.796261Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T08:50:57.796264Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2024-11-21T08:50:57.796268Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T08:50:57.796279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2024-11-21T08:50:57.796289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T08:50:57.796295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2024-11-21T08:50:57.796301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T08:50:57.796306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2024-11-21T08:50:57.796311Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T08:50:57.796321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:36:2067] 2024-11-21T08:50:57.796353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:37:2067] 2024-11-21T08:50:57.796370Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:57.796378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2067] 2024-11-21T08:50:57.796385Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/db/dir_inside] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T08:50:57.796471Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 118 2024-11-21T08:50:57.796478Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2024-11-21T08:50:57.797426Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T08:50:57.797499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2024-11-21T08:50:57.797510Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T08:50:57.797520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: 
sender# [1:36:2067] 2024-11-21T08:50:57.797532Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2024-11-21T08:50:57.797574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:33:2065], cookie# 0, event size# 117 2024-11-21T08:50:57.797579Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2024-11-21T08:50:57.797585Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T08:50:57.797598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2024-11-21T08:50:57.797604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:40:2067] 2024-11-21T08:50:57.797620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:37:2067] 2024-11-21T08:50:57.797627Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Path was updated to new version: owner# [1:34:2066], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.214719Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:50:58.214851Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2024-11-21T08:50:58.214869Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2024-11-21T08:50:58.214881Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2024-11-21T08:50:58.214893Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:33:2064] 2024-11-21T08:50:58.214913Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:34:2064] 2024-11-21T08:50:58.214924Z node 3 
:SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:32:2064][path] Set up state: owner# [3:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:50:58.214933Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2064] 2024-11-21T08:50:58.214941Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:32:2064][path] Ignore empty state: owner# [3:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> IncrementalRestoreScan::Empty [GOOD] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Plan] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> DataShardVolatile::VolatileCommitOnBlobStorageFailure [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD] >> IncrementalBackup::SimpleBackup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2024-11-21T08:50:59.093924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:59.094527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:59.094561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fc5/r3tmp/tmptg8xn6/pdisk_1.dat 2024-11-21T08:50:59.250132Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Exhausted 2024-11-21T08:50:59.250167Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2024-11-21T08:50:59.250174Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Finish 0 >> TReplicationTests::CopyReplicatedTable [GOOD] >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackup [GOOD] Test command err: 2024-11-21T08:50:56.654572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:56.655110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:56.655141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fb5/r3tmp/tmp4mZfCG/pdisk_1.dat 2024-11-21T08:50:56.782397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T08:50:56.782466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:50:56.782539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:56.782553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:56.782804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:56.782813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:56.782816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:56.782864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:56.782905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.782912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.782916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.783355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:56.783421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:56.783451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:50:56.783622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.783627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:50:56.783630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.798863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:50:56.798884Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:56.845707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:56.845752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:56.856294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:56.963343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.963407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:50:56.963450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.963528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:56.963539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:56.963577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:56.963590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:50:56.963844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:56.963856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:50:56.963899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:56.963905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, 
path id: 1 2024-11-21T08:50:56.963966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.963975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T08:50:56.963987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:56.964003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.964010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:56.964016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.964020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:56.964024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:56.964035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:50:56.964040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:50:56.964044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:50:56.964533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:56.964558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:56.964563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T08:50:56.964569Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:50:56.964574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:56.964591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:50:56.964597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T08:50:56.964783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T08:50:56.965828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:50:56.965895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:56.965912Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:56.965998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T08:50:56.966012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T08:50:56.966018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T08:50:56.966038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] wa ... HEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715664 2024-11-21T08:50:58.563352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 4/4 2024-11-21T08:50:58.563362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T08:50:58.563367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:0 2024-11-21T08:50:58.563380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2024-11-21T08:50:58.563385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2024-11-21T08:50:58.563388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:1 2024-11-21T08:50:58.563409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T08:50:58.563413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:2 2024-11-21T08:50:58.563416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:2 2024-11-21T08:50:58.563425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 9] was 3 2024-11-21T08:50:58.563429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:3 2024-11-21T08:50:58.563432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:3 2024-11-21T08:50:58.563443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 4 2024-11-21T08:50:58.816554Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupImpl TableId: [72057594046644480:9:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: 
KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:50:58.816660Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:50:58.816688Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1064:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T08:50:58.816695Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:1064:2820] Handshake with writer: sender# [1:1066:2820] 2024-11-21T08:50:58.816720Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 1 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 57b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:50:58.816764Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 71 },{ Order: 1 BodySize: 71 },{ Order: 2 BodySize: 71 },{ Order: 3 BodySize: 57 },{ Order: 4 BodySize: 57 }] } 2024-11-21T08:50:58.816814Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:50:58.816828Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037891 } 2024-11-21T08:50:58.816850Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 1732179057790922 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 1 Group: 1732179057790922 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 2 Group: 1732179057790922 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 3 Group: 1732179057814156 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 4 Group: 1732179057814156 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2024-11-21T08:50:58.816985Z 
node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [1:1161:2893], serverId# [1:1162:2894], sessionId# [0:0:0] 2024-11-21T08:50:58.828653Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:50:58.828702Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037891 } 2024-11-21T08:50:58.828725Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0,1,2,3,4] } 2024-11-21T08:50:58.828759Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1064:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:50:58.828773Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:904:2712]][0][1:1065:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:50:58.828814Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:50:58.828828Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T08:50:58.828883Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:50:58.828900Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 3 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T08:50:58.828908Z node 1 :PERSQUEUE DEBUG: waiting read cookie 3 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T08:50:59.044856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ynram5fnt1xyxtk3nb23t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNjJjZmUtM2NhNTA5Y2YtMTIyOWJhMWUtYjhjMTU2MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { null_flag_value: NULL_VALUE } }, { items { uint32_value: 5 } items { uint32_value: 200 } }, { items { uint32_value: 6 } items { null_flag_value: NULL_VALUE } } 2024-11-21T08:50:59.059033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ynrb71k347ygct99mh8jc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkOGU5NDktNzc5OWQzYTctNjhlMjlkYjEtMmJiOTY2ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:50:59.059237Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:59.070870Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:59.070941Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:59.084241Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ynrc097jm3nyrr1j83890, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0MzNkMzctN2I2MzkwNjktODkyMGZkZmYtODYyMTI3YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:50:59.084451Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:59.095403Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:59.095479Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:59.295389Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:50:59.295462Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 3 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T08:50:59.295519Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:50:59.295647Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:904:2712]][0][1:1065:2820] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T08:50:59.295690Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:50:59.295700Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T08:50:59.295735Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:50:59.295748Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 4 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T08:50:59.295754Z node 1 :PERSQUEUE DEBUG: waiting read cookie 4 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T08:50:59.636329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6ynrwn0s8k12nav38xkfm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA5NmQ0MTQtNTQ4YzA3YmEtNDcwNzlkMDgtYzhjMDQxNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { null_flag_value: NULL_VALUE } }, { items { uint32_value: 5 } items { uint32_value: 200 } }, { items { uint32_value: 6 } items { null_flag_value: NULL_VALUE } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD] Test command err: iteration# 3 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 9 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 15 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 21 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 27 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 33 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 39 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 45 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 51 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 57 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 63 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 69 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 75 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 81 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 87 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 93 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 99 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 105 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 111 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 117 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 123 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 129 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 135 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 141 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 147 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 153 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 159 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 165 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 171 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 177 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 183 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 189 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 195 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 201 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 207 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 213 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 219 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 225 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 231 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 237 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 243 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 249 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 255 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 261 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 267 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 273 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 279 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 285 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 291 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 297 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 303 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 309 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 315 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 321 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 327 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 333 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 339 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 345 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 351 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 357 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 363 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 369 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 375 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 381 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 387 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 393 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 399 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 405 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 411 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 417 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 423 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 429 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 435 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 441 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 447 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 453 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 459 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 465 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 471 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 477 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 483 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 489 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 495 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 501 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 507 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 513 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 519 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 525 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 531 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 537 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 543 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 549 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 555 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 561 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 567 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 573 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 579 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 585 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 591 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 597 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 603 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 609 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 615 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 621 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 627 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 633 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 639 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 645 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 651 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 657 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 663 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 669 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 675 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 681 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 687 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1365 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1371 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1377 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1383 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1389 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1395 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1401 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1407 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1413 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1419 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1425 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1431 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1437 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1443 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1449 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1455 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1461 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1467 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1473 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1479 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1485 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1491 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1497 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1503 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1509 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1515 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1521 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1527 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1533 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1539 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1545 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1551 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1557 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1563 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1569 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1575 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1581 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1587 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1593 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1599 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1605 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1611 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1617 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1623 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1629 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1635 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1641 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1647 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1653 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1659 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1665 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1671 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1677 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1683 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1689 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1695 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1701 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1707 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1713 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1719 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1725 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1731 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1737 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1743 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1749 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1755 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1761 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1767 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1773 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1779 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1785 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1791 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1797 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1803 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1809 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1815 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1821 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1827 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1833 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1839 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1845 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1851 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1857 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1863 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1869 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1875 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1881 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1887 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1893 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1899 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1905 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1911 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1917 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1923 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1929 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1935 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1941 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1947 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1953 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1959 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1965 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1971 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1977 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1983 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1989 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1995 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2001 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2007 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2013 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2019 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2025 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2031 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2037 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[blocks-interval_div_scalar--Analyze] >> TSubscriberTest::SyncWithOutdatedReplica ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2024-11-21T08:50:59.019583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:59.020289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:59.020331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fbc/r3tmp/tmpEKiCHb/pdisk_1.dat 2024-11-21T08:50:59.162249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T08:50:59.162329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:50:59.162426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:50:59.162443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:50:59.162730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:50:59.162743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:59.162748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:59.162822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:59.162880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.162891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:59.162897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T08:50:59.163505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:59.163639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:59.163676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:50:59.163880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:59.163889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:50:59.163893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:59.179801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:50:59.179828Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:59.227305Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:59.227559Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:50:59.227597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:59.227614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:59.240637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:59.360799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:50:59.360864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:50:59.360876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:59.360931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:59.360939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:50:59.360996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:59.361009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:50:59.361211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:50:59.361219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T08:50:59.361256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:50:59.361261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T08:50:59.361336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.361344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T08:50:59.361360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:59.361364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:59.361370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:59.361386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:59.361392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:59.361395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:59.361404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:50:59.361410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:50:59.361414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:50:59.361831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:59.361843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:50:59.361849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T08:50:59.361854Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:50:59.361858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:50:59.361872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:50:59.361877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T08:50:59.362016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T08:50:59.362093Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:50:59.362100Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:50:59.362122Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T08:50:59.363465Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:50:59.363611Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:50:59.363623Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:50:59.363679Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:59.363708Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... 024-11-21T08:50:59.911100Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037889 state Ready 2024-11-21T08:50:59.911105Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:50:59.911124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:50:59.911129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:50:59.911137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:50:59.911141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T08:50:59.911146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2024-11-21T08:50:59.911155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715658 2024-11-21T08:50:59.911160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T08:50:59.911165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:50:59.911168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2024-11-21T08:50:59.911183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T08:50:59.911287Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T08:50:59.911301Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T08:50:59.911581Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:59.911604Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2024-11-21T08:50:59.911825Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] Handle 
TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T08:50:59.911971Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:789:2644], serverId# [1:790:2645], sessionId# [0:0:0] 2024-11-21T08:50:59.912074Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:50:59.912105Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:50:59.912139Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:50:59.912166Z node 1 
:CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2024-11-21T08:50:59.912182Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:50:59.915718Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvGetProxyServicesRequest 2024-11-21T08:50:59.915757Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:50:59.915895Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:796:2650], serverId# [1:797:2651], sessionId# [0:0:0] 2024-11-21T08:50:59.965270Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T08:50:59.965309Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:50:59.965340Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:50:59.965351Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:50:59.965402Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> TSubscriberTest::NotifyUpdate >> ColumnBuildTest::BaseCase >> ColumnBuildTest::CancelBuild >> TExportToS3Tests::CancelledExportEndTime [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:56.852483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:56.852510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:56.852516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:56.852521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:56.852527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:56.852531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:56.852540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:56.852631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:56.864650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:56.864672Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:56.867299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:56.868124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:56.868153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:56.869262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:56.869444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:56.869536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:56.869593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:56.870387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:56.870647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:56.870656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:56.870696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:56.870703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:56.870708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:56.870721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.871750Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:56.889594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:56.889675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:50:56.889742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:56.889794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:56.889802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.890602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:56.890639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:56.890678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.890687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:56.890692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:56.890696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:56.891029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.891038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:56.891042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:56.891311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.891318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.891323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:56.891330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.891908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:56.892197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:56.892261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:56.892436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:50:56.892459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:56.892469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:56.892522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:56.892528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:56.892557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:56.892569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:56.892902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:56.892910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:56.892950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:56.892956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:56.893032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:56.893039Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:56.893050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:56.893054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.893060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:56.893065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:56.893069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:56.893073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:56.893083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:56.893089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:56.893094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:56.893371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:56.893397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:56.893402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:56.893407Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:56.893411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:56.893425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:00.033767Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:00.033770Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:51:00.033777Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:00.033785Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T08:51:00.035353Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:00.035378Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:00.035611Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 299 } } 2024-11-21T08:51:00.035619Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T08:51:00.035637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 299 } } 2024-11-21T08:51:00.035650Z node 8 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 299 } } 2024-11-21T08:51:00.035749Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.035754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T08:51:00.035765Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, 
message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.035771Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:00.035777Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.035788Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:00.035792Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T08:51:00.036249Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.036325Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.066640Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.066673Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:00.066715Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.066725Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:00.066732Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:51:00.066745Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:00.066750Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.066754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:00.066760Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:51:00.066766Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:00.067682Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at 
schemeshard: 72057594046678944 2024-11-21T08:51:00.067782Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.067794Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T08:51:00.067803Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:51:00.067808Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2024-11-21T08:51:00.067819Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T08:51:00.067824Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T08:51:00.068672Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.068688Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:00.068703Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:00.068708Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:00.068715Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:00.068735Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:338:2313] message: TxId: 102 2024-11-21T08:51:00.068745Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:00.068751Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:00.068755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:00.068788Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:00.068792Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:00.069203Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:00.069215Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:428:2392] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:00.069330Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:00.069407Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 85us result status StatusSuccess 2024-11-21T08:51:00.069521Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 
CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Analyze] >> test.py::test[window-current/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations--Plan] [GOOD] >> test.py::test[window-current/aggregations--Results] >> IncrementalRestoreScan::ChangeSenderEmpty >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberTest::NotifyUpdate [GOOD] |86.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |86.9%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2024-11-21T08:51:00.744146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:51:00.744641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2024-11-21T08:51:00.744663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2024-11-21T08:51:00.744672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: 
[OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2024-11-21T08:51:00.744684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:34:2065] 2024-11-21T08:51:00.744694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:35:2065] 2024-11-21T08:51:00.744710Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:51:00.744750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:36:2065] 2024-11-21T08:51:00.744760Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:51:00.744820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T08:51:00.744837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 2024-11-21T08:51:00.744845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T08:51:00.744851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T08:51:00.744860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T08:51:00.744865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T08:51:00.744870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T08:51:00.744877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:34:2065], cookie# 1 2024-11-21T08:51:00.744884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:51:00.744890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T08:51:00.744897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, 
faulires# 0, partial# 0 2024-11-21T08:51:00.744903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T08:51:00.744907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2024-11-21T08:51:00.804172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:51:00.804748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T08:51:00.804785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T08:51:00.804793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T08:51:00.804806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T08:51:00.804834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T08:51:00.804850Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:51:00.804862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T08:51:00.804870Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:51:00.805084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T08:51:00.805099Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:33:2064] 2024-11-21T08:51:00.805109Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> test.py::test[blocks-interval_div_scalar--Analyze] [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> test.py::test[blocks-interval_div_scalar--Debug] >> 
test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:30.996465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:30.996485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.996490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:30.996495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:30.996505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:30.996508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:30.996524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:30.996591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.007732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:31.007751Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.009820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.010407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.010443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.011295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.011396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.011476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.011527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.016275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.016542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.016553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.016593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.016600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.016607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.016618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.018010Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.036418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.036514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.036590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.036640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.036649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.041764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.041795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:31.041844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.041855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.041859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.041864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.044928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.044952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.044960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.048654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.048675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.048682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.048690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.049481Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.051245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.051325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.051561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.051598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.051610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.051675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.051682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.051712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.051725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.052877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.052889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.052942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.052968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.053065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.053074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.053091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.053097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.053105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.053112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.053119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.053125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.053142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:31.053152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.053158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.053564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.053587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.053593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.053600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.053605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.053625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... rrier }, at tablet# 72057594046678944 2024-11-21T08:51:00.787943Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 240 -> 240 2024-11-21T08:51:00.788578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.788592Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2024-11-21T08:51:00.788608Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2024-11-21T08:51:00.788613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T08:51:00.788618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2024-11-21T08:51:00.788637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710758 2024-11-21T08:51:00.788643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T08:51:00.788649Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2024-11-21T08:51:00.788653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2024-11-21T08:51:00.788688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:00.788692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:00.789098Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2024-11-21T08:51:00.789113Z node 
3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2024-11-21T08:51:00.789581Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2024-11-21T08:51:00.807710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:51:00.807729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:51:00.808685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:27960" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" 
SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:00.808750Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.808775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:00.808851Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:00.808858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.809096Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:00.809103Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:00.809408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:00.809434Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T08:51:00.809476Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T08:51:00.809480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2024-11-21T08:51:00.809531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.809537Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T08:51:00.809541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T08:51:00.809544Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T08:51:00.809977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2024-11-21T08:51:00.809987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T08:51:00.810056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T08:51:00.810445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810483Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T08:51:00.810542Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:00.810553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T08:51:00.810662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:51:00.810688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T08:51:00.811005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:51:00.811191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:00.811198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:544:2506] TestWaitNotification: OK eventTxId 102 >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] >> test.py::test[like-like_clause-default.txt-Analyze] [GOOD] >> test.py::test[like-like_clause-default.txt-Debug] >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates |86.9%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] |86.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] >> ColumnBuildTest::CancelBuild [GOOD] >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2024-11-21T08:51:01.686060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:01.686682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:01.686719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa4/r3tmp/tmpreuKBt/pdisk_1.dat 2024-11-21T08:51:01.950009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T08:51:01.950099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:51:01.950217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:01.950234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:01.950605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:01.950619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:01.950624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:01.950706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:01.950767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:01.950779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:51:01.950785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.951373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:01.951480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:01.951517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:51:01.951766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:01.951775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:51:01.951780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:01.975529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:51:01.975561Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:02.029754Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:51:02.030058Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:51:02.030110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:02.030128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:02.041144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:02.160911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:02.160991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:51:02.161005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:51:02.161078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:02.161088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T08:51:02.161126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:51:02.161140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:51:02.161358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:02.161366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T08:51:02.161428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:02.161433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T08:51:02.161514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:02.161522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T08:51:02.161535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:02.161539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:02.161546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:02.161552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:02.161557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:02.161561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:02.161572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:51:02.161579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:51:02.161583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:51:02.162072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:51:02.162090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T08:51:02.162097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T08:51:02.162103Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:02.162109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:51:02.162125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:51:02.162130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T08:51:02.162301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T08:51:02.162388Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:51:02.162395Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:51:02.162425Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T08:51:02.163801Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:51:02.164001Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:51:02.164016Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:51:02.164074Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:02.164108Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... .TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2024-11-21T08:51:02.743112Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T08:51:02.743124Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T08:51:02.743202Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:02.743212Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2024-11-21T08:51:02.743344Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] Handle TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T08:51:02.743437Z node 1 :CHANGE_EXCHANGE 
DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:51:02.743467Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:51:02.743508Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:51:02.743528Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[bigdate-round-default.txt-Results] [GOOD] |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[pg-range_function_multi-default.txt-Debug] [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T08:51:00.984043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:00.984069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:00.984075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:00.984080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:00.984086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:00.984090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:00.984100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:00.984302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:00.995009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:00.995036Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:00.997985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:00.998773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:00.998813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:01.000125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:01.000305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:01.000412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:01.000487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:01.001407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.001630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:01.001638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.001668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:01.001674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:01.001678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:01.001689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.002736Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:01.017131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:01.017221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.017290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:01.017339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:01.017348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.018311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:01.018343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:01.018397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.018409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:01.018414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:01.018419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:01.018938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.018954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:01.018959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:01.019449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.019465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.019471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.019477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.020058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:01.020474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:01.020532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:01.020724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:01.020747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:01.020756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.020809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:01.020816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.020846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:01.020859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:01.021237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:01.021244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:01.021291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.021296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:01.021401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.021408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:01.021420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:01.021424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.021430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:01.021435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.021440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:01.021445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:01.021455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:01.021460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:01.021464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T08:51:01.021739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:01.021752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:01.021757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:01.021762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:01.021766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:01.021781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... usAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:51:03.342161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T08:51:03.342188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:51:03.342222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T08:51:03.342227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T08:51:03.342232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2024-11-21T08:51:03.342296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:03.342317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:03.342324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T08:51:03.342329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2024-11-21T08:51:03.342721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.342732Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2024-11-21T08:51:03.342744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T08:51:03.342748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T08:51:03.342754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2024-11-21T08:51:03.342765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710761 2024-11-21T08:51:03.342770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T08:51:03.342775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T08:51:03.342779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T08:51:03.342795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T08:51:03.343161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T08:51:03.343175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T08:51:03.343185Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2024-11-21T08:51:03.343197Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:51:03.343531Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:51:03.343546Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, 
InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:51:03.343553Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2024-11-21T08:51:03.343889Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:51:03.343904Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:51:03.343911Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T08:51:03.343932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:03.343937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1159:3023] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:03.344310Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T08:51:03.344392Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2024-11-21T08:51:03.344592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:03.344643Z node 1 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 56us result status StatusSuccess 2024-11-21T08:51:03.344787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Analyze] >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] >> ColumnBuildTest::BaseCase [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore >> test.py::test[like-like_clause-default.txt-Debug] [GOOD] >> test.py::test[like-like_clause-default.txt-ForceBlocks] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> test.py::test[blocks-interval_div_scalar--Debug] [GOOD] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:01.072028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:01.072057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:01.072062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:01.072067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:01.072073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:01.072077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:01.072085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:01.072179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:01.091591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:01.091617Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:01.100969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:01.101902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:01.101940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:01.103412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:01.103591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:01.103686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T08:51:01.103757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:01.109117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.109454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:01.109468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.109508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:01.109516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:01.109522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:01.109540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.110929Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:01.135205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:01.135277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.135340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:01.135384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:01.135392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.136304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:01.136334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:01.136386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.136397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:01.136401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:01.136406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:01.136888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.136900Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:01.136906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:01.137249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.137261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.137267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.137273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.137914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:01.138327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:01.138382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:01.138557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:01.138580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:01.138591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.138646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:01.138652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:01.138684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:01.138696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:01.139089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:01.139097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:01.139139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:01.139144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:01.139228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:01.139235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:01.139246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:01.139250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.139255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:01.139260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:01.139265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:01.139269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:01.139281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:01.139286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:01.139290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:01.139582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:01.139597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:01.139601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:01.139606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:01.139610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:01.139626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
G: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2024-11-21T08:51:04.201003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T08:51:04.201012Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2024-11-21T08:51:04.201021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2024-11-21T08:51:04.201048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2024-11-21T08:51:04.204548Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T08:51:04.204595Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T08:51:04.204639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2024-11-21T08:51:04.204673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2024-11-21T08:51:04.204722Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T08:51:04.204729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2024-11-21T08:51:04.204734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T08:51:04.220826Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1814:3681], Recipient [1:753:2645]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:04.220852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:04.289776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2024-11-21T08:51:04.289831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 665 RawX2: 4294969873 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T08:51:04.289844Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2024-11-21T08:51:04.289852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2024-11-21T08:51:04.290711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T08:51:04.290732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2024-11-21T08:51:04.290749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2024-11-21T08:51:04.290754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T08:51:04.290761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2024-11-21T08:51:04.290779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:569:2509] message: TxId: 281474976725761 2024-11-21T08:51:04.290788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T08:51:04.290792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2024-11-21T08:51:04.290797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2024-11-21T08:51:04.290813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T08:51:04.291598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2024-11-21T08:51:04.291622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2024-11-21T08:51:04.291641Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2024-11-21T08:51:04.291669Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: 
[OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T08:51:04.294363Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T08:51:04.294403Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T08:51:04.294422Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T08:51:04.295007Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T08:51:04.295033Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T08:51:04.295040Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2024-11-21T08:51:04.295074Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:51:04.295081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1161:3036] TestWaitNotification: OK eventTxId 106 2024-11-21T08:51:04.295487Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2024-11-21T08:51:04.295588Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } |86.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |86.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD] |86.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:04.266554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:04.266581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.266587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:04.266591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:04.266604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:04.266608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:04.266617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.266702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:04.277397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:04.277424Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.280457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:04.280559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:04.280588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:04.284485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:04.284576Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2024-11-21T08:51:04.284713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.284912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.285760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.286057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.286070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.286084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:04.286091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.286097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:04.286144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:04.287936Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.310144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:04.310233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.310299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:04.310350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:04.310360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.311222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.311247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:04.311296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.311305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:04.311309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:04.311314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:04.311923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.311937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:04.311942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:04.312398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.312409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.312416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.312423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.313004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:04.313462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:04.313517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:04.313715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.313744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:04.313751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.313813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:04.313819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.313851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:04.313864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.314349Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.314360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.314400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.314405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:04.314487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.314494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:04.314504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:04.314508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.314513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:04.314518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.314523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:04.314527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:04.314538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:04.314543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:04.314547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
yToNotify, TxId: 281474976715657, ready parts: 1/3, is published: false 2024-11-21T08:51:04.851282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.851289Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:04.851353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.851357Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:04.851365Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T08:51:04.851368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T08:51:04.851372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T08:51:04.851411Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.851421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.851424Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T08:51:04.851429Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:51:04.851434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:04.851510Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.851517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.851520Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T08:51:04.851523Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:04.851526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:04.851532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true FAKE_COORDINATOR: Erasing txId 281474976715657 2024-11-21T08:51:04.851893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 
5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } 2024-11-21T08:51:04.851901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:04.851918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } 2024-11-21T08:51:04.851933Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 191 } } 2024-11-21T08:51:04.852282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:04.852293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:04.852309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:04.852315Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:04.852321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:04.852331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.852336Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.852340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:04.852346Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T08:51:04.852539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.852854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:04.852876Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.853107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.853164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.853169Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2024-11-21T08:51:04.853179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T08:51:04.853184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:51:04.853191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T08:51:04.853195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:51:04.853200Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:04.853203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:51:04.853212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:04.853215Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T08:51:04.853218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T08:51:04.853229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:04.853233Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T08:51:04.853235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T08:51:04.853239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:04.853731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:04.853773Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 50us result status StatusSuccess 2024-11-21T08:51:04.853872Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: 
true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[window-current/ansi_current_mixed--Analyze] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Debug] >> KqpWorkloadService::TestQueueSizeSimple |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[produce-reduce_lambda-default.txt-Plan] [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> DataShardVolatile::UpsertNoLocksArbiter [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD] |86.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |86.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool |86.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |86.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> test.py::test[like-like_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause-default.txt-Plan] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:31.462318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:31.462344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.462349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:31.462354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:31.462366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:31.462370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:31.462378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:31.462447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.477798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:31.477817Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.480511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.481272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:31.481316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:31.482499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:31.482653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:31.482742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.482837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:31.483689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.483926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.483937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.483969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:31.483975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.483981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:31.483992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.485049Z 
node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:31.510061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:31.510141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.510212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:31.510253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:31.510262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:31.513101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:31.513115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:31.513119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:31.513562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:31.513910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.513927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.513933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.514519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:31.514890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:31.514943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:31.515120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:31.515143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:31.515150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.515203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:31.515209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:31.515237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.515248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:31.515624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:31.515631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:31.515679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:31.515685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:31.515784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:31.515790Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:31.515804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:31.515808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.515813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:31.515819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:31.515823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:31.515827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:31.515836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T08:50:31.515842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:31.515845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:31.516122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.516136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:31.516141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:31.516147Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:31.516151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:31.516162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T08:50:56.970611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T08:50:56.970628Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T08:50:56.970672Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T08:50:56.970679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T08:50:56.970684Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T08:50:56.984333Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:50:59.648461Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.001 2024-11-21T08:50:59.688463Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.001 2024-11-21T08:50:59.737766Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T08:50:59.737863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T08:50:59.737894Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T08:50:59.737908Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T08:50:59.737957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T08:50:59.737966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T08:50:59.737972Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T08:50:59.748389Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:51:02.367025Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.001 2024-11-21T08:51:02.388463Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.001 2024-11-21T08:51:02.411695Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T08:51:02.411790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T08:51:02.411815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T08:51:02.411826Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T08:51:02.411875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T08:51:02.411881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T08:51:02.411885Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T08:51:02.422144Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:51:05.168349Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [3:560:2521], attempt# 1 2024-11-21T08:51:05.171568Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [3:559:2520] 2024-11-21T08:51:05.177139Z 
node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [3:560:2521], sender# [3:559:2520] 2024-11-21T08:51:05.177175Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [3:559:2520] 2024-11-21T08:51:05.177208Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [3:560:2521], sender# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } 2024-11-21T08:51:05.177272Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [3:560:2521], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13281 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DE4EDCE1-B340-4252-9D9E-3C1B636E877C amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T08:51:05.184281Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [3:560:2521], result# 2024-11-21T08:51:05.184382Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:05.192938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:05.193158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T08:51:05.193190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:05.193205Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:05.193221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:05.193226Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.193231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 
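The export fragment above is the datashard backup actor (aws-sdk-cpp, per the user-agent header) finishing a multipart upload of data_00.csv against the S3 mock on localhost:13281: it POSTs an application/xml CompleteMultipartUpload body listing the three part ETags and then receives TEvCompleteMultipartUploadResponse. For orientation only, the same request expressed with boto3; the bucket name and credentials are placeholders, and the real test drives this through the C++ SDK rather than Python:

import boto3

# Endpoint, key, upload id and ETags are taken from the trace; the bucket
# name and credentials are assumed placeholders (the mock accepts anything).
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:13281",
    aws_access_key_id="test",
    aws_secret_access_key="test",
    region_name="us-east-1",
)

parts = [
    {"PartNumber": 1, "ETag": "6e3e0a41fdab8add833862f1bd2954c3"},
    {"PartNumber": 2, "ETag": "1d8dd09e584ce6a47582a31b591900e2"},
    {"PartNumber": 3, "ETag": "d41d8cd98f00b204e9800998ecf8427e"},
]

# Equivalent of the "POST /data_00.csv?uploadId=1" request in the log.
s3.complete_multipart_upload(
    Bucket="backup",          # assumed name, not present in the trace
    Key="data_00.csv",
    UploadId="1",
    MultipartUpload={"Parts": parts},
)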
2024-11-21T08:51:05.193239Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T08:51:05.193297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:05.200841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.201034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.201045Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T08:51:05.201067Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T08:51:05.201071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:51:05.201079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T08:51:05.201107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T08:51:05.201116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T08:51:05.201121Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T08:51:05.201127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T08:51:05.201167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:05.201892Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T08:51:05.201908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T08:51:05.208708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:05.208734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:578:2535] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:50:57.830724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:57.830755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T08:50:57.830761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:57.830767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:57.830773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:57.830778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:57.830786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:57.830875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:57.842952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:57.842978Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:57.846738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:57.847570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:57.847620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:57.849134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:57.849313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:57.849432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:57.849518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:57.850476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:57.850753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:57.850764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:57.850804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:57.850812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:57.850818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:57.850831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.852142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:50:57.870337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:57.870435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.870503Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:57.870549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:57.870557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.871462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:57.871493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:57.871542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.871553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:57.871558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:57.871564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:57.871997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.872009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:57.872014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:57.872400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.872413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.872419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:57.872426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:57.873001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:57.873429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:57.873486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:57.873673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:57.873699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:57.873708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:57.873764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:57.873770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:57.873802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:57.873814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:57.874216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:57.874224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:57.874265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:57.874270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:50:57.874346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:57.874352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:57.874364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:57.874368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:57.874375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:57.874380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:57.874384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:57.874388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:57.874399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:57.874406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:57.874410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:50:57.874676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:50:57.874690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2024-11-21T08:50:57.874695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:50:57.874700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:50:57.874704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:57.874718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... schemeshard: 72057594046678944 2024-11-21T08:51:06.176047Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:06.176055Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:06.176062Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:06.176079Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.177744Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:123:2149] sender: [27:234:2058] recipient: [27:15:2062] 2024-11-21T08:51:06.180183Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:06.180264Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.180330Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:06.180393Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:06.180400Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.182209Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:06.182242Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:06.182291Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.182303Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:06.182309Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:06.182316Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:06.184552Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:51:06.184572Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:06.184581Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:06.185050Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.185062Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.185069Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:06.185076Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:06.185118Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:06.186411Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:06.186485Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:06.186707Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:06.186738Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 115964119145 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:06.186751Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:06.186820Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:06.186829Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:06.186864Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:06.186878Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:06.192686Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:06.192708Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:06.192769Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:06.192777Z node 27 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:06.192870Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.192879Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:06.192901Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:06.192906Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:06.192914Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:06.192922Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:06.192927Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:06.192932Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:06.192957Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:06.192964Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:06.192968Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:06.193127Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:06.193139Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:06.193144Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:06.193150Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:06.193156Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:06.193171Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:51:06.194124Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:51:06.194239Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T08:51:06.194437Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Bootstrap 2024-11-21T08:51:06.195862Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Become StateWork (SchemeCache [27:269:2261]) 2024-11-21T08:51:06.196591Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { 
Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:06.196669Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:51:06.196686Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T08:51:06.196773Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T08:51:06.196988Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:51:06.197751Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:06.197784Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T08:51:06.198637Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> KqpWorkloadServiceDistributed::TestDistributedQueue >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] Leader for TabletID 
72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:141:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:144:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:143:2166] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:147:2057] recipient: [5:143:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:146:2167] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:216:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:142:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:145:2057] recipient: [6:144:2166] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:146:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:148:2057] recipient: [6:144:2166] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:147:2167] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:217:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:144:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:146:2168] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:150:2057] recipient: [7:146:2168] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:149:2169] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:219:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:144:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:147:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:146:2168] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:150:2057] recipient: [8:146:2168] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:149:2169] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:219:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:145:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:149:2057] recipient: [9:147:2168] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:151:2057] recipient: [9:147:2168] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:150:2169] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:220:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:150:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:152:2173] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:156:2057] recipient: [10:152:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:155:2174] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:225:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:150:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:152:2173] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:156:2057] recipient: [11:152:2173] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:155:2174] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:225:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:151:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:154:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:155:2057] recipient: [12:153:2173] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:157:2057] recipient: [12:153:2173] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:156:2174] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:226:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> KqpWorkloadServiceActors::TestPoolFetcher >> DataShardVolatile::UpsertBrokenLockArbiter [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Analyze] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_div_scalar--Plan] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] |86.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
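Interleaved with the captured traces, the runner emits its own markers: ">> Suite::Test" when a test is scheduled, ">> Suite::Test [GOOD]" when it passes, "|NN.N%| [LD]/[AR]" build-progress lines, and "------- [TM] ..." headers that introduce the stderr of a finished test. A small sketch, assuming the log has been saved to a plain text file, that tallies those verdict markers; note that the "------- [TM]" headers repeat the ">> ... [GOOD]" marker, so passed tests with captured stderr are counted twice by this simplification:

import re
import sys
from collections import Counter

# ">> <Suite::Test or test.py::test[...]>" optionally followed by a verdict
# such as "[GOOD]". Test names contain no spaces, so \S+ is sufficient.
MARKER = re.compile(r">> (\S+)(?:\s+\[([A-Z]+)\])?")

def tally(text: str) -> Counter:
    verdicts = Counter()
    for _name, verdict in MARKER.findall(text):
        verdicts[verdict or "in progress"] += 1
    return verdicts

if __name__ == "__main__":
    for verdict, count in tally(sys.stdin.read()).most_common():
        print(f"{verdict}: {count}")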
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:146:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:146:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:152:2173] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:156:2057] recipient: [7:152:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:155:2174] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:225:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:211:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:160:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:161:2057] recipient: [9:159:2180] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:163:2057] recipient: [9:159:2180] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:162:2181] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:215:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:162:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:165:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:166:2057] recipient: [10:164:2185] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:168:2057] recipient: [10:164:2185] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:167:2186] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:237:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:165:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:166:2057] recipient: [11:164:2185] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:168:2057] recipient: [11:164:2185] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:167:2186] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:237:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 720 ... 37 is [15:146:2167] sender: [15:216:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:147:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:148:2057] recipient: [17:146:2168] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:150:2057] recipient: [17:146:2168] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:149:2169] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:219:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:149:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:152:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:153:2057] recipient: [18:151:2173] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:155:2057] recipient: [18:151:2173] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:154:2174] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:149:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:152:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:153:2057] recipient: [19:151:2173] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:155:2057] recipient: [19:151:2173] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:154:2174] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:224:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:153:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:154:2057] recipient: [20:152:2173] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:156:2057] recipient: [20:152:2173] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:155:2174] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:225:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:153:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:156:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:157:2057] recipient: [21:155:2176] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:159:2057] recipient: [21:155:2176] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:158:2177] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:211:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:157:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:160:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:161:2057] recipient: [22:159:2180] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:163:2057] recipient: [22:159:2180] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:162:2181] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:215:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:162:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:165:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:166:2057] recipient: [23:164:2185] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:168:2057] recipient: [23:164:2185] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:167:2186] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:237:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:162:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:165:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:166:2057] recipient: [24:164:2185] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:168:2057] recipient: [24:164:2185] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:167:2186] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:237:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:165:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:168:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:169:2057] recipient: [25:167:2187] Leader for TabletID 72057594037927937 is [25:170:2188] sender: [25:171:2057] recipient: [25:167:2187] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:170:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] |86.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 11 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 17 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 23 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 29 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 35 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 41 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 47 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 53 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 
666 blobsUnwritten# 1218 iteration# 59 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 65 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 71 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 77 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 83 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 89 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 95 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 101 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 107 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 113 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 119 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 125 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 131 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 137 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 143 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 149 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 155 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 161 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 167 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 173 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 179 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 185 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 191 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 197 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 203 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 209 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 215 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 221 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 227 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 233 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 239 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 245 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 251 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 257 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 263 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 269 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 275 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 281 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 287 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 293 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 299 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 305 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 311 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 317 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 323 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 329 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 335 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 341 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 347 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 353 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 359 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 365 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 371 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 377 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 383 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 389 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 395 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 401 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 407 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 413 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 419 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 425 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 431 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 437 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 443 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 449 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 455 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 461 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 467 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 473 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 479 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 485 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 491 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 497 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 503 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 509 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 515 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 521 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 527 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 533 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 539 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 545 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 551 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 557 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 563 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 569 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 575 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 581 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 587 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 593 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 599 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 605 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 611 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 617 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 623 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 629 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 635 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 641 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 647 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 653 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 659 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 665 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 671 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 677 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 683 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 689 BlobsWritten# 2041 blobsWrittenF ... blobsUnwritten# 1218 iteration# 1367 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1373 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1379 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1385 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1391 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1397 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1403 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1409 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1415 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1421 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1427 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1433 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1439 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1445 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1451 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1457 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1463 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1469 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1475 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1481 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1487 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1493 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1499 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1505 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1511 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1517 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1523 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1529 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1535 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 1541 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1547 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1553 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1559 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1565 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1571 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1577 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1583 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1589 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1595 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1601 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1607 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1613 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1619 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1625 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1631 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1637 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1643 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1649 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1655 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1661 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1667 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1673 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1679 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1685 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1691 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1697 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1703 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1709 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1715 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1721 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1727 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1733 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1739 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1745 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1751 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1757 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1763 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1769 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1775 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1781 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1787 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1793 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1799 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1805 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1811 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1817 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1823 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1829 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1835 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1841 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1847 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1853 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1859 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1865 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1871 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1877 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1883 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1889 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1895 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1901 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1907 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1913 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1919 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1925 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1931 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1937 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 1943 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1949 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1955 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1961 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1967 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1973 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1979 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1985 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1991 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1997 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2003 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2009 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2015 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2021 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2027 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2033 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2039 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> test.py::test[window-current/ansi_current_mixed--Debug] [GOOD] >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] Test command err: 127.0.0.1 - - [21/Nov/2024 08:48:18] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 08:48:19] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 08:48:20] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 08:48:21] "GET /nested_library.sql.txt HTTP/1.1" 200 - >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> 
test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-minmax_tuple--Analyze] >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] [GOOD] >> BsControllerTest::SelfHealMirror3dc >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart |87.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> test.py::test[insert-append_sorted-to_sorted_calc-Analyze] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Debug] >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[select-two_selects_with_diff_fields-default.txt-Plan] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> test.py::test[blocks-minmax_tuple--Analyze] [GOOD] >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> test.py::test[blocks-minmax_tuple--Debug] |87.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> BsControllerTest::TestLocalBrokenRelocation >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> BsControllerTest::TestLocalSelfHeal >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> test.py::test[window-current/ansi_current_mixed--Plan] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> DataShardVolatile::UpsertBrokenLockArbiterRestart [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart >> KqpWorkloadServiceActors::TestCreateDefaultPool 
|87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2024-11-21T08:51:11.038536Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T08:51:11.038558Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:11.038576Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T08:51:11.038580Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:11.038587Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T08:51:11.038593Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:11.038600Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T08:51:11.038604Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:11.038610Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T08:51:11.038614Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:11.038620Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T08:51:11.038623Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:11.038630Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T08:51:11.038664Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:11.038671Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T08:51:11.038675Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:11.038682Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T08:51:11.038686Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:11.038693Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T08:51:11.038698Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:11.038704Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T08:51:11.038708Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:11.038715Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T08:51:11.038719Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T08:51:11.038728Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T08:51:11.038732Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:11.038739Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T08:51:11.038743Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:11.038749Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T08:51:11.038754Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:11.038761Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T08:51:11.038765Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T08:51:11.038770Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T08:51:11.038774Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T08:51:11.038779Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T08:51:11.038783Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T08:51:11.038789Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T08:51:11.038793Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T08:51:11.038803Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T08:51:11.038807Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T08:51:11.038813Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T08:51:11.038817Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T08:51:11.038822Z 22 
00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T08:51:11.038826Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T08:51:11.038832Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T08:51:11.038837Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T08:51:11.038843Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T08:51:11.038847Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T08:51:11.038857Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T08:51:11.038861Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T08:51:11.038867Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T08:51:11.038871Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T08:51:11.038877Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T08:51:11.038882Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T08:51:11.038889Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T08:51:11.038894Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T08:51:11.038900Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T08:51:11.038904Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T08:51:11.038910Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T08:51:11.038914Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T08:51:11.038920Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T08:51:11.038924Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T08:51:11.038929Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T08:51:11.038934Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T08:51:11.038939Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T08:51:11.038943Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T08:51:11.038949Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T08:51:11.038953Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T08:51:11.038958Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T08:51:11.038963Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T08:51:11.038972Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T08:51:11.038976Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T08:51:11.043426Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T08:51:11.043683Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T08:51:11.043692Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T08:51:11.043699Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T08:51:11.043705Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T08:51:11.043712Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T08:51:11.043718Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] 
PipeClient# [7:2719:41] 2024-11-21T08:51:11.043724Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T08:51:11.043730Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T08:51:11.043737Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T08:51:11.043743Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T08:51:11.043749Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T08:51:11.043756Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T08:51:11.043762Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T08:51:11.043771Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T08:51:11.043778Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T08:51:11.043784Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T08:51:11.043790Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T08:51:11.043796Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T08:51:11.043803Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T08:51:11.043809Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T08:51:11.043815Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T08:51:11.043822Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T08:51:11.043828Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T08:51:11.043834Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T08:51:11.043840Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 
2024-11-21T08:51:11.043847Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T08:51:11.043853Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T08:51:11.043860Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T08:51:11.043867Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T08:51:11.043873Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T08:51:11.043880Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T08:51:11.043886Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T08:51:11.043893Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T08:51:11.043899Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2024-11-21T08:51:11.707129Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2024-11-21T08:51:11.707135Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2024-11-21T08:51:11.707141Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2024-11-21T08:51:11.707147Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2024-11-21T08:51:11.707218Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:11.707229Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2024-11-21T08:51:11.707236Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2024-11-21T08:51:11.707243Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2024-11-21T08:51:11.707249Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2024-11-21T08:51:11.707255Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2024-11-21T08:51:11.707261Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2024-11-21T08:51:11.707267Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2024-11-21T08:51:11.707274Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2024-11-21T08:51:11.707280Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2024-11-21T08:51:11.707286Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2024-11-21T08:51:11.707292Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# 
[80000022:1:1:2:0] -> [80000022:2:1:2:0] 2024-11-21T08:51:11.707298Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2024-11-21T08:51:11.707304Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2024-11-21T08:51:11.707310Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2024-11-21T08:51:11.707316Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2024-11-21T08:51:11.707322Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2024-11-21T08:51:11.707384Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T08:51:11.707395Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2024-11-21T08:51:11.707402Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2024-11-21T08:51:11.707408Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2024-11-21T08:51:11.707415Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2024-11-21T08:51:11.707421Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2024-11-21T08:51:11.707427Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2024-11-21T08:51:11.707433Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2024-11-21T08:51:11.707439Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2024-11-21T08:51:11.707445Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2024-11-21T08:51:11.707452Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2024-11-21T08:51:11.707458Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2024-11-21T08:51:11.707506Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2024-11-21T08:51:11.707515Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2024-11-21T08:51:11.707522Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2024-11-21T08:51:11.707528Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2024-11-21T08:51:11.707533Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2024-11-21T08:51:11.707539Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2024-11-21T08:51:11.707545Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2024-11-21T08:51:11.707552Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2024-11-21T08:51:11.707558Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2024-11-21T08:51:11.708305Z 10 01h25m01.783560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2024-11-21T08:51:11.708389Z 8 01h25m01.812560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2024-11-21T08:51:11.708454Z 2 01h25m02.027560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.708520Z 7 01h25m02.375560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] 
status changed to REPLICATING 2024-11-21T08:51:11.708576Z 5 01h25m02.581560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.708633Z 4 01h25m02.816560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.708689Z 7 01h25m03.033560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2024-11-21T08:51:11.708738Z 4 01h25m03.733560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.708787Z 10 01h25m03.935560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2024-11-21T08:51:11.708843Z 2 01h25m04.376560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.708892Z 4 01h25m04.729560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.709278Z 5 01h25m05.022560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.709365Z 4 01h25m05.132560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2024-11-21T08:51:11.709433Z 7 01h25m05.421560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2024-11-21T08:51:11.709487Z 10 01h25m05.716560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2024-11-21T08:51:11.709540Z 7 01h25m05.824560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2024-11-21T08:51:11.709602Z 7 01h25m08.303560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2024-11-21T08:51:11.711443Z 1 01h25m08.304072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.711462Z 1 01h25m08.304072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2024-11-21T08:51:11.711498Z 8 01h25m09.873560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2024-11-21T08:51:11.713244Z 1 01h25m09.874072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.713260Z 1 01h25m09.874072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2024-11-21T08:51:11.713373Z 7 01h25m12.110560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2024-11-21T08:51:11.715001Z 1 01h25m12.111072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.715015Z 1 01h25m12.111072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2024-11-21T08:51:11.715045Z 2 01h25m13.658560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2024-11-21T08:51:11.716720Z 1 01h25m13.659072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.716737Z 1 01h25m13.659072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2024-11-21T08:51:11.716914Z 10 01h25m18.027560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2024-11-21T08:51:11.718570Z 1 01h25m18.028072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.718586Z 1 01h25m18.028072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2024-11-21T08:51:11.718609Z 7 01h25m18.450560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2024-11-21T08:51:11.720194Z 1 01h25m18.451072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.720231Z 1 01h25m18.451072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2024-11-21T08:51:11.720316Z 10 01h25m20.431560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 
2024-11-21T08:51:11.721939Z 1 01h25m20.432072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.721953Z 1 01h25m20.432072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2024-11-21T08:51:11.721982Z 5 01h25m22.881560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2024-11-21T08:51:11.723632Z 1 01h25m22.882072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.723648Z 1 01h25m22.882072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2024-11-21T08:51:11.723733Z 5 01h25m24.207560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2024-11-21T08:51:11.725370Z 1 01h25m24.208072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.725399Z 1 01h25m24.208072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2024-11-21T08:51:11.725605Z 4 01h25m28.798560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2024-11-21T08:51:11.727209Z 1 01h25m28.799072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.727222Z 1 01h25m28.799072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2024-11-21T08:51:11.727427Z 7 01h25m33.240560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2024-11-21T08:51:11.729092Z 1 01h25m33.241072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.729108Z 1 01h25m33.241072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2024-11-21T08:51:11.729183Z 4 01h25m33.640560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2024-11-21T08:51:11.730834Z 1 01h25m33.641072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.730849Z 1 01h25m33.641072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2024-11-21T08:51:11.730878Z 4 01h25m34.251560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2024-11-21T08:51:11.732514Z 1 01h25m34.252072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.732529Z 1 01h25m34.252072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2024-11-21T08:51:11.732551Z 4 01h25m34.256560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2024-11-21T08:51:11.734190Z 1 01h25m34.257072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.734204Z 1 01h25m34.257072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2024-11-21T08:51:11.734233Z 10 01h25m34.818560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2024-11-21T08:51:11.735915Z 1 01h25m34.819072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.735929Z 1 01h25m34.819072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2024-11-21T08:51:11.736133Z 2 01h25m38.351560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2024-11-21T08:51:11.737823Z 1 01h25m38.352072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:11.737838Z 1 01h25m38.352072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed |87.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> BsControllerTest::TestLocalSelfHeal [GOOD] >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> BsControllerTest::SelfHealMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2024-11-21T08:51:11.979620Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T08:51:11.979641Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:11.979655Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T08:51:11.979660Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:11.979667Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T08:51:11.979671Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:11.979676Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T08:51:11.979680Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:11.979685Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T08:51:11.979689Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:11.979695Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T08:51:11.979700Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:11.979706Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T08:51:11.979713Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:11.979720Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T08:51:11.979724Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:11.979729Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T08:51:11.979734Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:11.979740Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T08:51:11.979745Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:11.979751Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T08:51:11.979755Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:11.979762Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T08:51:11.979766Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T08:51:11.979775Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T08:51:11.979779Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:11.979785Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T08:51:11.979789Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:11.979795Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T08:51:11.979798Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:11.979805Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T08:51:11.979809Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T08:51:11.979814Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T08:51:11.979818Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T08:51:11.979824Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T08:51:11.979828Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T08:51:11.979834Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T08:51:11.979839Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T08:51:11.979847Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T08:51:11.979852Z 20 00h00m00.000000s :BS_NODE DEBUG: 
[20] Connect 2024-11-21T08:51:11.979858Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T08:51:11.979862Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T08:51:11.979868Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T08:51:11.979873Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T08:51:11.979879Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T08:51:11.979883Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T08:51:11.979889Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T08:51:11.979894Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T08:51:11.979904Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T08:51:11.979908Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T08:51:11.979915Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T08:51:11.979919Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T08:51:11.979926Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T08:51:11.979933Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T08:51:11.979939Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T08:51:11.979943Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T08:51:11.979950Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T08:51:11.979954Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T08:51:11.979961Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T08:51:11.979965Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T08:51:11.979971Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T08:51:11.979976Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T08:51:11.979981Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T08:51:11.979985Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T08:51:11.979992Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T08:51:11.979996Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T08:51:11.980002Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T08:51:11.980007Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T08:51:11.980012Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T08:51:11.980017Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T08:51:11.980025Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T08:51:11.980030Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T08:51:11.983369Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T08:51:11.983825Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T08:51:11.983837Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T08:51:11.983845Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T08:51:11.983852Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T08:51:11.983859Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] 
ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T08:51:11.983865Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T08:51:11.983872Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T08:51:11.983878Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T08:51:11.983886Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T08:51:11.983893Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T08:51:11.983899Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T08:51:11.983906Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T08:51:11.983913Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T08:51:11.983922Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T08:51:11.983929Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T08:51:11.983935Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T08:51:11.983942Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T08:51:11.983949Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T08:51:11.983955Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T08:51:11.983960Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T08:51:11.983967Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T08:51:11.983973Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T08:51:11.983979Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T08:51:11.983985Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# 
[25:2737:41] 2024-11-21T08:51:11.983993Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T08:51:11.983999Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T08:51:11.984005Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T08:51:11.984012Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T08:51:11.984019Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T08:51:11.984026Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T08:51:11.984033Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T08:51:11.984040Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T08:51:11.984048Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T08:51:11.984055Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
0.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.439278Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000002d:1:2:2:0] -> [8000002d:2:2:2:0] 2024-11-21T08:51:12.439289Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:12.439295Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000002d:1:1:1:0] -> [8000002d:2:1:1:0] 2024-11-21T08:51:12.439313Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T08:51:12.439320Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000001d:1:1:2:0] -> [8000001d:2:1:2:0] 2024-11-21T08:51:12.439333Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T08:51:12.439340Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000001d:1:2:0:0] -> [8000001d:2:2:0:0] 2024-11-21T08:51:12.439352Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:12.439358Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000001d:1:0:1:0] -> [8000001d:2:0:1:0] 2024-11-21T08:51:12.439369Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:12.439375Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000001d:1:0:2:0] -> [8000001d:2:0:2:0] 2024-11-21T08:51:12.439387Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:12.439393Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000001d:1:1:0:0] -> [8000001d:2:1:0:0] 2024-11-21T08:51:12.439405Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.439410Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000001d:2:2:1:0] PDiskId# 1001 VSlotId# 1009 created 2024-11-21T08:51:12.439420Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000001d:2:2:1:0] status changed to INIT_PENDING 2024-11-21T08:51:12.439430Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T08:51:12.439436Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000001d:1:0:0:0] -> [8000001d:2:0:0:0] 2024-11-21T08:51:12.439447Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.439454Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000001d:1:2:2:0] -> [8000001d:2:2:2:0] 2024-11-21T08:51:12.439464Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:12.439471Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000001d:1:1:1:0] -> [8000001d:2:1:1:0] 2024-11-21T08:51:12.439489Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T08:51:12.439495Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000000d:1:1:2:0] -> [8000000d:2:1:2:0] 2024-11-21T08:51:12.439505Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T08:51:12.439510Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000000d:1:2:0:0] -> [8000000d:2:2:0:0] 2024-11-21T08:51:12.439521Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:12.439527Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000000d:1:0:1:0] -> [8000000d:2:0:1:0] 2024-11-21T08:51:12.439538Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:12.439544Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000000d:1:0:2:0] -> [8000000d:2:0:2:0] 2024-11-21T08:51:12.439555Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:12.439561Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000000d:1:1:0:0] -> [8000000d:2:1:0:0] 2024-11-21T08:51:12.439574Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.439579Z 27 
00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000000d:2:2:1:0] PDiskId# 1003 VSlotId# 1009 created 2024-11-21T08:51:12.439586Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000000d:2:2:1:0] status changed to INIT_PENDING 2024-11-21T08:51:12.439597Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T08:51:12.439602Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000000d:1:0:0:0] -> [8000000d:2:0:0:0] 2024-11-21T08:51:12.439612Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.439617Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000000d:1:2:2:0] -> [8000000d:2:2:2:0] 2024-11-21T08:51:12.439627Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:12.439632Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000000d:1:1:1:0] -> [8000000d:2:1:1:0] 2024-11-21T08:51:12.439648Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T08:51:12.439653Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000007d:1:1:2:0] -> [8000007d:2:1:2:0] 2024-11-21T08:51:12.439665Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T08:51:12.439671Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000007d:1:2:0:0] -> [8000007d:2:2:0:0] 2024-11-21T08:51:12.439680Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:12.439686Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000007d:1:0:1:0] -> [8000007d:2:0:1:0] 2024-11-21T08:51:12.439697Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:12.439703Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000007d:1:0:2:0] -> [8000007d:2:0:2:0] 2024-11-21T08:51:12.439714Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:12.439720Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000007d:1:1:0:0] -> [8000007d:2:1:0:0] 2024-11-21T08:51:12.439731Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.439736Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000007d:2:2:1:0] PDiskId# 1000 VSlotId# 1010 created 2024-11-21T08:51:12.439744Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000007d:2:2:1:0] status changed to INIT_PENDING 2024-11-21T08:51:12.439756Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T08:51:12.439761Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000007d:1:0:0:0] -> [8000007d:2:0:0:0] 2024-11-21T08:51:12.439770Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.439776Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000007d:1:2:2:0] -> [8000007d:2:2:2:0] 2024-11-21T08:51:12.439785Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:12.439791Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000007d:1:1:1:0] -> [8000007d:2:1:1:0] 2024-11-21T08:51:12.439808Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T08:51:12.439812Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000006d:1:1:2:0] -> [8000006d:2:1:2:0] 2024-11-21T08:51:12.439822Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T08:51:12.439828Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000006d:1:2:0:0] -> [8000006d:2:2:0:0] 2024-11-21T08:51:12.439837Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:12.439842Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000006d:1:0:1:0] -> [8000006d:2:0:1:0] 2024-11-21T08:51:12.439852Z 6 00h05m00.102048s :BS_NODE 
DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:12.439857Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000006d:1:0:2:0] -> [8000006d:2:0:2:0] 2024-11-21T08:51:12.439866Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:12.439871Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000006d:1:1:0:0] -> [8000006d:2:1:0:0] 2024-11-21T08:51:12.439881Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.439885Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000006d:2:2:1:0] PDiskId# 1001 VSlotId# 1010 created 2024-11-21T08:51:12.439893Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000006d:2:2:1:0] status changed to INIT_PENDING 2024-11-21T08:51:12.439903Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T08:51:12.439908Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000006d:1:0:0:0] -> [8000006d:2:0:0:0] 2024-11-21T08:51:12.439919Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.439925Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000006d:1:2:2:0] -> [8000006d:2:2:2:0] 2024-11-21T08:51:12.439935Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:12.439941Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000006d:1:1:1:0] -> [8000006d:2:1:1:0] 2024-11-21T08:51:12.441097Z 27 00h05m02.113048s :BS_NODE DEBUG: [27] VDiskId# [8000007d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441214Z 27 00h05m03.121048s :BS_NODE DEBUG: [27] VDiskId# [8000000d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441291Z 27 00h05m03.511048s :BS_NODE DEBUG: [27] VDiskId# [8000004d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441413Z 27 00h05m03.558048s :BS_NODE DEBUG: [27] VDiskId# [8000006d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441502Z 27 00h05m03.679048s :BS_NODE DEBUG: [27] VDiskId# [8000002d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441593Z 27 00h05m03.831048s :BS_NODE DEBUG: [27] VDiskId# [8000001d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.441694Z 27 00h05m04.265048s :BS_NODE DEBUG: [27] VDiskId# [8000005d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.442079Z 27 00h05m05.629048s :BS_NODE DEBUG: [27] VDiskId# [8000003d:2:2:1:0] status changed to REPLICATING 2024-11-21T08:51:12.442343Z 27 00h05m11.531048s :BS_NODE DEBUG: [27] VDiskId# [8000006d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.444466Z 27 00h05m11.531560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.444486Z 27 00h05m11.531560s :BS_NODE DEBUG: [27] VDiskId# [8000006d:1:2:1:0] destroyed 2024-11-21T08:51:12.444534Z 27 00h05m11.979048s :BS_NODE DEBUG: [27] VDiskId# [8000004d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.446439Z 27 00h05m11.979560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.446458Z 27 00h05m11.979560s :BS_NODE DEBUG: [27] VDiskId# [8000004d:1:2:1:0] destroyed 2024-11-21T08:51:12.446505Z 27 00h05m13.588048s :BS_NODE DEBUG: [27] VDiskId# [8000005d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.448339Z 27 00h05m13.588560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.448357Z 27 00h05m13.588560s :BS_NODE DEBUG: [27] VDiskId# [8000005d:1:2:1:0] destroyed 2024-11-21T08:51:12.448404Z 27 00h05m14.926048s :BS_NODE DEBUG: [27] VDiskId# [8000001d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.450285Z 27 00h05m14.926560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.450304Z 27 
00h05m14.926560s :BS_NODE DEBUG: [27] VDiskId# [8000001d:1:2:1:0] destroyed 2024-11-21T08:51:12.450530Z 27 00h05m18.426048s :BS_NODE DEBUG: [27] VDiskId# [8000003d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.452726Z 27 00h05m18.426560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.452745Z 27 00h05m18.426560s :BS_NODE DEBUG: [27] VDiskId# [8000003d:1:2:1:0] destroyed 2024-11-21T08:51:12.453117Z 27 00h05m31.312048s :BS_NODE DEBUG: [27] VDiskId# [8000000d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.454953Z 27 00h05m31.312560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.454969Z 27 00h05m31.312560s :BS_NODE DEBUG: [27] VDiskId# [8000000d:1:2:1:0] destroyed 2024-11-21T08:51:12.455006Z 27 00h05m32.212048s :BS_NODE DEBUG: [27] VDiskId# [8000007d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.456794Z 27 00h05m32.212560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.456813Z 27 00h05m32.212560s :BS_NODE DEBUG: [27] VDiskId# [8000007d:1:2:1:0] destroyed 2024-11-21T08:51:12.460015Z 27 00h05m34.939048s :BS_NODE DEBUG: [27] VDiskId# [8000002d:2:2:1:0] status changed to READY 2024-11-21T08:51:12.462311Z 27 00h05m34.939560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T08:51:12.462335Z 27 00h05m34.939560s :BS_NODE DEBUG: [27] VDiskId# [8000002d:1:2:1:0] destroyed >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] >> test.py::test[blocks-minmax_tuple--Debug] [GOOD] >> test.py::test[blocks-minmax_tuple--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23834, MsgBus: 31951 2024-11-21T08:50:49.449559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652367567963647:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:49.449590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004456/r3tmp/tmp8YsQm1/pdisk_1.dat 2024-11-21T08:50:49.602357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23834, node 1 2024-11-21T08:50:49.633827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:49.633844Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:49.633846Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:49.633885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31951 TClient is connected to server localhost:31951 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:49.704251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.708760Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:49.751136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:49.751167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:49.756586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:49.789600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.880978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.908704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.980316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:50.074391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652371862932375:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:50.074418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:50.108957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.128334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.193651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.211232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.227758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.239770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.255436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652371862932893:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:50.255466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:50.255739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652371862932898:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:50:50.256679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:50:50.260979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652371862932900:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:50:50.586800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.596095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.610229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.626786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.640857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.683390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.700422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.712103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.725004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.740944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.756001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.764963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.782959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.937581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T08:50:50.947602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.954853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.972051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.982747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:50:50.996948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T08:50:51.057637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T08:50:51.113557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:51:10.821842Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:51:10.821846Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:51:10.821861Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:51:10.821865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:51:10.821875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:51:10.821879Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:51:10.821893Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:51:10.821897Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:51:10.821906Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:51:10.821909Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:51:10.822096Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:51:10.822100Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:51:10.822108Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:51:10.822112Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:51:10.822126Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:51:10.822130Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:51:10.822137Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:51:10.822142Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:51:10.822149Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:51:10.822153Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:51:10.822158Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:51:10.822162Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:51:10.822187Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:51:10.822191Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:51:10.822206Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:51:10.822210Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:51:10.822221Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:51:10.822224Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:51:10.822239Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:51:10.822243Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:51:10.822253Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:51:10.822256Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:51:10.826611Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:51:10.826636Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:51:10.826649Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:51:10.826655Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:51:10.826674Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:51:10.826680Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:51:10.826700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:51:10.826706Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:51:10.826715Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:51:10.826720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:51:10.826727Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:51:10.826733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:51:10.826769Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:51:10.826776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:51:10.826795Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:51:10.826801Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:51:10.826814Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:51:10.826819Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:51:10.826835Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:51:10.826841Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:51:10.826852Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:51:10.826857Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038694;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2024-11-21T08:51:09.619312Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T08:51:09.619332Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:09.619345Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T08:51:09.619349Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:09.619355Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T08:51:09.619359Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:09.619365Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T08:51:09.619369Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:09.619374Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T08:51:09.619378Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:09.619384Z 6 
00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T08:51:09.619388Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:09.619394Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T08:51:09.619401Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:09.619406Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T08:51:09.619410Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:09.619416Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T08:51:09.619419Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:09.619425Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T08:51:09.619429Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:09.619434Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T08:51:09.619438Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:09.619443Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T08:51:09.619447Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T08:51:09.619457Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T08:51:09.619461Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:09.619467Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T08:51:09.619472Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:09.619477Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T08:51:09.619481Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:09.619487Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T08:51:09.619491Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T08:51:09.619497Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T08:51:09.619501Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T08:51:09.619506Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T08:51:09.619510Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T08:51:09.619516Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T08:51:09.619519Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T08:51:09.619528Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T08:51:09.619532Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T08:51:09.619537Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T08:51:09.619541Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T08:51:09.619547Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T08:51:09.619551Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T08:51:09.619556Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T08:51:09.619560Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T08:51:09.619566Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T08:51:09.619569Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T08:51:09.619578Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T08:51:09.619582Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T08:51:09.619587Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T08:51:09.619591Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T08:51:09.619597Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T08:51:09.619604Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T08:51:09.619610Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T08:51:09.619613Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 
2024-11-21T08:51:09.619619Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T08:51:09.619623Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T08:51:09.619629Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T08:51:09.619634Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T08:51:09.619639Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T08:51:09.619643Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T08:51:09.619648Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T08:51:09.619652Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T08:51:09.619658Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T08:51:09.619662Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T08:51:09.619667Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T08:51:09.619671Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T08:51:09.619676Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T08:51:09.619680Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T08:51:09.619689Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T08:51:09.619693Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T08:51:09.622781Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T08:51:09.622993Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T08:51:09.623001Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T08:51:09.623008Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T08:51:09.623014Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T08:51:09.623020Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T08:51:09.623025Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T08:51:09.623031Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T08:51:09.623037Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T08:51:09.623044Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T08:51:09.623051Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T08:51:09.623057Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T08:51:09.623064Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR 
ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T08:51:09.623070Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T08:51:09.623079Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T08:51:09.623086Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T08:51:09.623092Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T08:51:09.623098Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T08:51:09.623105Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T08:51:09.623111Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T08:51:09.623117Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T08:51:09.623124Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T08:51:09.623130Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T08:51:09.623136Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T08:51:09.623143Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T08:51:09.623150Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T08:51:09.623156Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T08:51:09.623163Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T08:51:09.623170Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T08:51:09.623176Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T08:51:09.623183Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T08:51:09.623190Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# 
[32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T08:51:09.623196Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T08:51:09.623202Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T08:51:09.623209Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 2] NodeServiceSetUpdate 2024-11-21T08:51:12.844639Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:12.844645Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000036:4:0:2:0] -> [80000036:5:0:2:0] 2024-11-21T08:51:12.844655Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T08:51:12.844660Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000036:4:1:0:0] -> [80000036:5:1:0:0] 2024-11-21T08:51:12.844671Z 5 05h45m00.119456s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:12.844678Z 5 05h45m00.119456s :BS_NODE DEBUG: [5] VDiskId# [80000036:4:0:1:0] -> [80000036:5:0:1:0] 2024-11-21T08:51:12.844690Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T08:51:12.844697Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [80000036:4:1:1:0] -> [80000036:5:1:1:0] 2024-11-21T08:51:12.844710Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:12.844716Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000036:4:0:0:0] -> [80000036:5:0:0:0] 2024-11-21T08:51:12.844726Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T08:51:12.844732Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000036:4:2:2:0] -> [80000036:5:2:2:0] 2024-11-21T08:51:12.844742Z 29 05h45m00.119456s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T08:51:12.844748Z 29 05h45m00.119456s :BS_NODE DEBUG: [29] VDiskId# [80000036:4:2:1:0] -> [80000036:5:2:1:0] 2024-11-21T08:51:12.844758Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:12.844763Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000036:4:1:2:0] -> [80000036:5:1:2:0] 2024-11-21T08:51:12.844771Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.844782Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T08:51:12.844800Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] VDiskId# [80000036:5:2:0:0] PDiskId# 1000 VSlotId# 1008 created 2024-11-21T08:51:12.844814Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] VDiskId# [80000036:5:2:0:0] status changed to INIT_PENDING 2024-11-21T08:51:12.844836Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T08:51:12.844842Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [80000026:4:2:1:0] -> [80000026:5:2:1:0] 2024-11-21T08:51:12.844853Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:12.844858Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000026:4:0:2:0] -> [80000026:5:0:2:0] 2024-11-21T08:51:12.844869Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T08:51:12.844875Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000026:4:1:0:0] -> [80000026:5:1:0:0] 2024-11-21T08:51:12.844885Z 24 05h45m00.119456s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:12.844893Z 24 05h45m00.119456s :BS_NODE DEBUG: [24] VDiskId# [80000026:4:1:1:0] -> [80000026:5:1:1:0] 
2024-11-21T08:51:12.844906Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:12.844911Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000026:4:0:0:0] -> [80000026:5:0:0:0] 2024-11-21T08:51:12.844928Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T08:51:12.844934Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000026:4:2:2:0] -> [80000026:5:2:2:0] 2024-11-21T08:51:12.844944Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2024-11-21T08:51:12.844953Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] VDiskId# [80000026:4:0:1:0] -> [80000026:5:0:1:0] 2024-11-21T08:51:12.844967Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2024-11-21T08:51:12.844974Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000026:5:2:0:0] PDiskId# 1000 VSlotId# 1009 created 2024-11-21T08:51:12.844984Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000026:5:2:0:0] status changed to INIT_PENDING 2024-11-21T08:51:12.844999Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:12.845004Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000026:4:1:2:0] -> [80000026:5:1:2:0] 2024-11-21T08:51:12.845012Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.845030Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:12.845036Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000016:5:0:2:0] -> [80000016:6:0:2:0] 2024-11-21T08:51:12.845045Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T08:51:12.845051Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000016:5:1:0:0] -> [80000016:6:1:0:0] 2024-11-21T08:51:12.845061Z 22 05h45m00.119456s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T08:51:12.845067Z 22 05h45m00.119456s :BS_NODE DEBUG: [22] VDiskId# [80000016:5:1:1:0] -> [80000016:6:1:1:0] 2024-11-21T08:51:12.845077Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:12.845083Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000016:5:0:0:0] -> [80000016:6:0:0:0] 2024-11-21T08:51:12.845093Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T08:51:12.845099Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000016:5:2:2:0] -> [80000016:6:2:2:0] 2024-11-21T08:51:12.845109Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2024-11-21T08:51:12.845115Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] VDiskId# [80000016:5:0:1:0] -> [80000016:6:0:1:0] 2024-11-21T08:51:12.845125Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.845131Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [80000016:5:2:1:0] -> [80000016:6:2:1:0] 2024-11-21T08:51:12.845141Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:12.845147Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000016:5:1:2:0] -> [80000016:6:1:2:0] 2024-11-21T08:51:12.845154Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.845164Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T08:51:12.845169Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] VDiskId# [80000016:6:2:0:0] PDiskId# 1001 VSlotId# 1008 created 2024-11-21T08:51:12.845178Z 33 05h45m00.119456s :BS_NODE DEBUG: [33] VDiskId# [80000016:6:2:0:0] status changed to INIT_PENDING 2024-11-21T08:51:12.845197Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2024-11-21T08:51:12.845202Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] VDiskId# 
[80000006:5:2:1:0] -> [80000006:6:2:1:0] 2024-11-21T08:51:12.845214Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:12.845220Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000006:5:0:2:0] -> [80000006:6:0:2:0] 2024-11-21T08:51:12.845230Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T08:51:12.845235Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000006:5:1:0:0] -> [80000006:6:1:0:0] 2024-11-21T08:51:12.845246Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T08:51:12.845252Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [80000006:5:1:1:0] -> [80000006:6:1:1:0] 2024-11-21T08:51:12.845262Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:12.845268Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000006:5:0:0:0] -> [80000006:6:0:0:0] 2024-11-21T08:51:12.845278Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T08:51:12.845284Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000006:5:2:2:0] -> [80000006:6:2:2:0] 2024-11-21T08:51:12.845294Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2024-11-21T08:51:12.845300Z 10 05h45m00.119456s :BS_NODE DEBUG: [10] VDiskId# [80000006:5:0:1:0] -> [80000006:6:0:1:0] 2024-11-21T08:51:12.845310Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T08:51:12.845315Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [80000006:6:2:0:0] PDiskId# 1000 VSlotId# 1011 created 2024-11-21T08:51:12.845325Z 30 05h45m00.119456s :BS_NODE DEBUG: [30] VDiskId# [80000006:6:2:0:0] status changed to INIT_PENDING 2024-11-21T08:51:12.845336Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:12.845341Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000006:5:1:2:0] -> [80000006:6:1:2:0] 2024-11-21T08:51:12.845349Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.846500Z 32 05h45m01.963456s :BS_NODE DEBUG: [32] VDiskId# [80000076:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846628Z 30 05h45m01.964456s :BS_NODE DEBUG: [30] VDiskId# [80000066:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846706Z 28 05h45m02.900456s :BS_NODE DEBUG: [28] VDiskId# [80000026:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846761Z 33 05h45m03.512456s :BS_NODE DEBUG: [33] VDiskId# [80000036:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846816Z 32 05h45m03.931456s :BS_NODE DEBUG: [32] VDiskId# [80000056:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846926Z 30 05h45m04.098456s :BS_NODE DEBUG: [30] VDiskId# [80000046:5:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.846997Z 30 05h45m04.156456s :BS_NODE DEBUG: [30] VDiskId# [80000006:6:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.847290Z 33 05h45m05.219456s :BS_NODE DEBUG: [33] VDiskId# [80000016:6:2:0:0] status changed to REPLICATING 2024-11-21T08:51:12.847358Z 30 05h45m09.272456s :BS_NODE DEBUG: [30] VDiskId# [80000046:5:2:0:0] status changed to READY 2024-11-21T08:51:12.849720Z 32 05h45m09.272968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.849739Z 32 05h45m09.272968s :BS_NODE DEBUG: [32] VDiskId# [80000046:4:2:0:0] destroyed 2024-11-21T08:51:12.849856Z 33 05h45m11.239456s :BS_NODE DEBUG: [33] VDiskId# [80000036:5:2:0:0] status changed to READY 2024-11-21T08:51:12.851916Z 32 05h45m11.239968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.851932Z 32 05h45m11.239968s :BS_NODE DEBUG: [32] 
VDiskId# [80000036:4:2:0:0] destroyed 2024-11-21T08:51:12.852120Z 30 05h45m18.484456s :BS_NODE DEBUG: [30] VDiskId# [80000006:6:2:0:0] status changed to READY 2024-11-21T08:51:12.854179Z 32 05h45m18.484968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.854195Z 32 05h45m18.484968s :BS_NODE DEBUG: [32] VDiskId# [80000006:5:2:0:0] destroyed 2024-11-21T08:51:12.854281Z 30 05h45m23.043456s :BS_NODE DEBUG: [30] VDiskId# [80000066:5:2:0:0] status changed to READY 2024-11-21T08:51:12.856336Z 32 05h45m23.043968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.856351Z 32 05h45m23.043968s :BS_NODE DEBUG: [32] VDiskId# [80000066:4:2:0:0] destroyed 2024-11-21T08:51:12.856659Z 28 05h45m31.187456s :BS_NODE DEBUG: [28] VDiskId# [80000026:5:2:0:0] status changed to READY 2024-11-21T08:51:12.858527Z 32 05h45m31.187968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.858540Z 32 05h45m31.187968s :BS_NODE DEBUG: [32] VDiskId# [80000026:4:2:0:0] destroyed 2024-11-21T08:51:12.858572Z 32 05h45m32.952456s :BS_NODE DEBUG: [32] VDiskId# [80000056:5:2:0:0] status changed to READY 2024-11-21T08:51:12.860617Z 32 05h45m32.952968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.860633Z 32 05h45m32.952968s :BS_NODE DEBUG: [32] VDiskId# [80000056:4:2:0:0] destroyed 2024-11-21T08:51:12.860735Z 33 05h45m34.831456s :BS_NODE DEBUG: [33] VDiskId# [80000016:6:2:0:0] status changed to READY 2024-11-21T08:51:12.862709Z 32 05h45m34.831968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.862722Z 32 05h45m34.831968s :BS_NODE DEBUG: [32] VDiskId# [80000016:5:2:0:0] destroyed 2024-11-21T08:51:12.862825Z 32 05h45m35.579456s :BS_NODE DEBUG: [32] VDiskId# [80000076:5:2:0:0] status changed to READY 2024-11-21T08:51:12.864911Z 32 05h45m35.579968s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T08:51:12.864926Z 32 05h45m35.579968s :BS_NODE DEBUG: [32] VDiskId# [80000076:4:2:0:0] destroyed >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> BsControllerTest::SelfHealBlock4Plus2 >> test.py::test[insert-append_sorted-to_sorted_calc-Debug] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] Test command err: iteration# 2 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 8 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 14 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 20 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 26 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 32 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 38 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 44 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 50 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 56 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 62 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 68 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 74 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 80 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 86 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 92 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 98 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 104 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 110 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 116 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 122 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 128 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 134 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 140 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 146 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 152 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 158 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 164 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 170 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 176 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 182 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 188 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 194 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 200 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 206 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 212 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 218 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 224 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 230 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 236 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 242 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 248 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 254 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 260 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 266 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 272 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 278 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 284 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 290 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 296 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 302 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 308 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 314 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 320 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 326 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 332 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 338 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 344 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 350 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 356 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 362 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 368 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 374 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 380 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 386 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 392 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 398 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 404 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 410 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 416 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 422 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 428 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 434 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 440 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 446 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 452 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 458 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 464 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 470 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 476 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 482 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 488 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 494 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 500 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 506 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 512 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 518 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 524 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 530 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 536 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 542 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 548 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 554 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 560 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 566 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 572 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 578 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 584 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 590 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 596 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 602 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 608 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 614 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 620 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 626 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 632 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 638 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 644 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 650 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 656 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 662 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 668 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 674 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 680 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 686 BlobsWritten# 2041 blobsWrittenFul ... blobsUnwritten# 1218 iteration# 1364 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1370 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1376 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1382 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1388 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1394 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1400 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1406 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1412 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1418 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1424 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1430 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1436 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1442 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1448 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1454 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1460 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1466 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1472 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1478 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1484 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1490 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1496 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1502 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1508 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1514 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1520 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1526 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1532 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1538 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1544 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1550 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1556 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1562 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1568 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1574 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1580 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1586 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1592 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1598 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1604 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1610 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1616 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1622 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1628 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1634 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1640 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1646 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1652 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1658 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1664 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1670 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1676 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1682 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1688 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1694 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1700 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1706 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1712 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1718 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1724 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1730 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1736 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1742 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1748 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1754 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1760 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1766 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1772 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1778 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1784 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1790 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1796 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1802 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1808 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1814 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1820 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1826 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1832 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1838 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1844 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1850 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1856 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1862 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1868 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1874 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1880 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1886 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1892 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1898 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1904 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1910 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1916 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1922 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1928 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1934 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1940 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1946 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1952 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1958 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1964 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1970 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1976 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1982 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1988 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1994 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2000 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2006 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2012 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2018 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2024 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2030 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2036 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Analyze] >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Analyze] >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is 
[1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:02.652810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:02.652839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:02.652845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:02.652850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:02.652867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:02.652871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:02.652881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:02.652965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:02.678393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:02.678420Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:02.684658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:02.684816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:02.684862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:02.693472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:02.693581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:02.693755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:02.693978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:02.694696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:02.695025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:02.695038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:02.695052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:02.695060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:02.695066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:02.695110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is 
[0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:02.696604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:02.750664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:02.750779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.750854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:02.750905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:02.750914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.762171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:02.762226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:02.762296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.762310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:02.762319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:02.762325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:02.763283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.763299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:02.763304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:02.763694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.763704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.763710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:02.763717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:02.764478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:02.764903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:02.764961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:02.765192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:02.765219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:02.765227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:02.765294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:02.765301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:02.765337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:02.765350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:02.765789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:02.765800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:02.765852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:02.765857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:02.765943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:02.765949Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:02.765961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:02.765966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:02.765971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:02.765976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:02.765981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:51:02.765985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:02.765996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:02.766003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:02.766007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... age: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:51:14.073533Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:14.073540Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:14.073562Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.073632Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.073638Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:14.073644Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:51:14.073650Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:14.073789Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.073962Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.073969Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:14.073974Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:51:14.073979Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:14.074107Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:14.074124Z node 22 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
2024-11-21T08:51:14.074133Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:14.074145Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:14.074149Z node 22 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:14.074155Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:14.074161Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:51:14.074282Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.074287Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:14.074292Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:14.074299Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:14.074309Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:14.074374Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.074379Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:14.074383Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:14.074944Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:14.074976Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.074988Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:14.075445Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.075464Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:51:14.075511Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:14.075599Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:14.075605Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1004 2024-11-21T08:51:14.075635Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:14.075642Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:14.075673Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 3/3 2024-11-21T08:51:14.075678Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:14.075685Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:14.075691Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:14.075696Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:14.075701Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:14.075729Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:14.075735Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:14.075739Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:14.075744Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:14.075748Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:14.075751Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:14.075762Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:14.075818Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:14.075823Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:51:14.075836Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:51:14.075843Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:51:14.075848Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:14.076391Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:14.076461Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:14.076467Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:14.076533Z node 22 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 
72057594046678944 2024-11-21T08:51:14.076552Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:14.076557Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:747:2651] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:14.076627Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:14.076661Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 45us result status StatusPathDoesNotExist 2024-11-21T08:51:14.076704Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[blocks-string_as_agg_key--Plan] [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] Test command err: 2024-11-21T08:51:07.549981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652446278883179:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:07.550104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fb9/r3tmp/tmpM5c4wo/pdisk_1.dat 2024-11-21T08:51:07.664596Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:07.666871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.666895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.672581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20008, node 1 2024-11-21T08:51:07.688372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.688382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.688383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.688410Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration TClient is connected to server localhost:62030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.727191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:07.732393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.738319Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:08.015061Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:08.015082Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652450573850912:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:08.015677Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY= 2024-11-21T08:51:08.015775Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [1:7439652450573850913:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:08.017775Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:08.017780Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:08.017789Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:08.017817Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652450573850912:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:08.017824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652450573850912:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:08.017828Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652450573850912:2298], Successfully finished 2024-11-21T08:51:08.017866Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:08.018416Z node 1 :KQP_WORKLOAD_SERVICE 
DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:08.019076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:08.019518Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:08.019995Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:08.020939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:08.106154Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:08.107476Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652450573850938:2285], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:08.115565Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:08.115584Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:08.115620Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [1:7439652450573850913:2299], ActorState: ReadyState, TraceId: 01jd6yp16k59mfvzefvga1c4zv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL default WITH ( CONCURRENT_QUERY_LIMIT=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:08.117596Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652450573850998:2301], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:08.187062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652450573850998:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:08.190273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:08.193771Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [1:7439652450573850913:2299], ActorState: ExecuteState, TraceId: 01jd6yp16k59mfvzefvga1c4zv, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439652450573850999:2299] WorkloadServiceCleanup: 0 2024-11-21T08:51:08.194532Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [1:7439652450573850913:2299], ActorState: CleanupState, TraceId: 01jd6yp16k59mfvzefvga1c4zv, EndCleanup, isFinal: 0 2024-11-21T08:51:08.194569Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjNjAyMDItN2VlM2ZjOWQtZDMyZWQ2NzYtNGE0NzkzZWY=, ActorId: [1:7439652450573850913:2299], ActorState: CleanupState, TraceId: 01jd6yp16k59mfvzefvga1c4zv, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439652446278883257:2256] 2024-11-21T08:51:08.200590Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTBlNzIxZWItZDQ2OGE1YjQtMTNkZDkyN2MtMTg0OTQxYTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTBlNzIxZWItZDQ2OGE1YjQtMTNkZDkyN2MtMTg0OTQxYTY= 2024-11-21T08:51:08.200655Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTBlNzIxZWItZDQ2OGE1YjQtMTNkZDkyN2MtMTg0OTQxYTY=, ActorId: [1:7439652450573851010:2302], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:08.200698Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:08.200714Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652450573851012:2303], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:08.200761Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTBlNzIxZWItZDQ2OGE1YjQtMTNkZDkyN2MtMTg0OTQxYTY=, ActorId: [1:7439652450573851010:2302], ActorState: ReadyState, TraceId: 01jd6yp198e7veak53c8qjs37g, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652450573851009:2326] database: Root databaseId: /Root pool id: default 2024-11-21T08:51:08.200780Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652450573851010:2302], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=1&id=ZTBlNzIxZWItZDQ2OGE1YjQtMTNkZDkyN2MtMTg0OTQxYTY= 2024-11-21T08:51:08.200797Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652450573851013:2304], Database: /Root, Start database fetching 2024-11-21T08:51:08.201031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652450573851012:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:08.201047Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652450573851013:2304], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:08.201055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:08.201063Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, Databa ... Id: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2024-11-21T08:51:14.208488Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2024-11-21T08:51:14.208493Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, TExecPhysicalRequest, tx has commit locks 2024-11-21T08:51:14.208504Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, Sending to Executer TraceId: 0 8 2024-11-21T08:51:14.208524Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, Created new KQP executer: [5:7439652476528978565:2409] isRollback: 0 2024-11-21T08:51:14.213508Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:14.213572Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, txInfo Status: Committed Kind: ReadWrite TotalDuration: 14.944 ServerDuration: 14.895 QueriesCount: 2 2024-11-21T08:51:14.213611Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:14.213627Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:14.213630Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, EndCleanup, isFinal: 0 2024-11-21T08:51:14.213641Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp74p3hj7rvc1t3scwan9, Sent query response back to proxy, proxyRequestId: 18, proxyId: 
[5:7439652472234010098:2060] 2024-11-21T08:51:14.213896Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, TxId: 2024-11-21T08:51:14.213914Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T08:51:14.214017Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, TraceId: 01jd6yp7563vjfpht63bke5dc5, received request, proxyRequestId: 19 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [5:7439652476528978574:2417] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:14.214020Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, TraceId: 01jd6yp7563vjfpht63bke5dc5, request placed into pool from cache: default 2024-11-21T08:51:14.214032Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Sending CompileQuery request 2024-11-21T08:51:14.214250Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, ExecutePhyTx, tx: 0x00005518BB93E218 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:14.214262Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Sending to Executer TraceId: 0 8 2024-11-21T08:51:14.214275Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Created new KQP executer: 
[5:7439652476528978577:2409] isRollback: 0 2024-11-21T08:51:14.218295Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T08:51:14.218331Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, ExecutePhyTx, tx: 0x00005518BB91A918 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:14.218533Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:14.218576Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, txInfo Status: Committed Kind: ReadOnly TotalDuration: 4.362 ServerDuration: 4.326 QueriesCount: 2 2024-11-21T08:51:14.218622Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:14.218633Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:14.218636Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, EndCleanup, isFinal: 0 2024-11-21T08:51:14.218649Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ExecuteState, TraceId: 01jd6yp7563vjfpht63bke5dc5, Sent query response back to proxy, proxyRequestId: 19, proxyId: [5:7439652472234010098:2060] 2024-11-21T08:51:14.218848Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, TxId: 2024-11-21T08:51:14.218870Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, TxId: 2024-11-21T08:51:14.218911Z node 5 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [5:7439652472234010818:2306], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T08:51:14.218951Z node 5 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:14.218957Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:14.218959Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:14.218962Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:14.218976Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDU1ZTkzMDAtYWE2YjZiZGUtNjIzYTE1MzItOWI4Yzk5NmU=, ActorId: [5:7439652476528978542:2409], ActorState: unknown state, Session actor destroyed |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] |87.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] Test command err: 2024-11-21T08:50:31.559766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:31.563832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:31.563903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a3/r3tmp/tmprwnqmt/pdisk_1.dat 2024-11-21T08:50:31.723989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:31.756606Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.804455Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:31.804720Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:50:31.804759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:31.804778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:31.816633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:32.108119Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:50:32.108147Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:50:32.108180Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:610:2519] 2024-11-21T08:50:32.118545Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:50:32.118810Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:50:32.118825Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:50:32.118870Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:32.118922Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:50:32.118939Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T08:50:32.119416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:32.119558Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:50:32.119686Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:50:32.119696Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:50:32.134803Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:50:32.134983Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:50:32.135062Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:635:2540] 2024-11-21T08:50:32.135105Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:32.135996Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:50:32.144640Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:32.144684Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:50:32.144801Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:50:32.144816Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:50:32.144821Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:50:32.144859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:50:32.148160Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:50:32.148265Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:50:32.148296Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:651:2549] 2024-11-21T08:50:32.148300Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:50:32.148304Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:50:32.148307Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:32.148447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:635:2540], Recipient [1:635:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:32.148456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:32.148520Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:50:32.148548Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:50:32.148656Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269877761, Sender [1:642:2544], Recipient [1:635:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:32.148664Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:32.148672Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:642:2544], sessionId# [0:0:0] 2024-11-21T08:50:32.148682Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:50:32.148689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:32.148697Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:50:32.148702Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:50:32.148706Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:50:32.148712Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:50:32.148718Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:50:32.148731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:642:2544] 2024-11-21T08:50:32.148736Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:50:32.148762Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:32.148828Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:50:32.148840Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:50:32.148859Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:50:32.148877Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:50:32.148882Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:50:32.148887Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:50:32.148892Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:50:32.148942Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:50:32.148949Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:50:32.148953Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:50:32.148956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:32.148968Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:50:32.148972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:50:32.148975Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to 
execution unit WaitForPlan 2024-11-21T08:50:32.148978Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:50:32.148983Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:50:32.149253Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:652:2550], Recipient [1:635:2540]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:50:32.149264Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:50:32.160162Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:32.160201Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:50:32.160227Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:32.160241Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224 ... count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:51:14.181484Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T08:51:14.181494Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [17:1091:2865] 2024-11-21T08:51:14.181498Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T08:51:14.181503Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T08:51:14.181508Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:51:14.181557Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1002:2802], Recipient [17:976:2789]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037890 OperationCookie: 281474976715664 2024-11-21T08:51:14.181565Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037890 for split OpId 281474976715664 2024-11-21T08:51:14.181593Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1002:2802], Recipient [17:1002:2802]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:14.181598Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:14.181650Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] received poison pill [17:1085:2859] 2024-11-21T08:51:14.181657Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] notify reset [17:1085:2859] 2024-11-21T08:51:14.181677Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1085:2859], Recipient [17:976:2789]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:14.181682Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:14.181688Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:51:14.181694Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:14.181701Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T08:51:14.181706Z node 17 :TX_DATASHARD TRACE: 
TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:51:14.181710Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:51:14.181715Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T08:51:14.181721Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:14.181728Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:723:2071] 2024-11-21T08:51:14.181732Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:723:2071] 2024-11-21T08:51:14.181745Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:726:2071] 2024-11-21T08:51:14.181749Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:726:2071] 2024-11-21T08:51:14.181803Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1002:2802]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 3000} 2024-11-21T08:51:14.181807Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T08:51:14.181814Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 3000 2024-11-21T08:51:14.181819Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:51:14.181827Z node 17 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 2024-11-21T08:51:14.181833Z node 17 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2024-11-21T08:51:14.181843Z node 17 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:51:14.181852Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T08:51:14.181859Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [17:1092:2866] 2024-11-21T08:51:14.181862Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2024-11-21T08:51:14.181866Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2024-11-21T08:51:14.181870Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T08:51:14.181895Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1007:2805], Recipient [17:976:2789]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037891 OperationCookie: 281474976715664 2024-11-21T08:51:14.181900Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2024-11-21T08:51:14.181923Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1087:2861], Recipient [17:1002:2802]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:14.181928Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:14.181933Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [17:1085:2859], serverId# [17:1087:2861], sessionId# [0:0:0] 2024-11-21T08:51:14.181942Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1007:2805], Recipient [17:1007:2805]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:14.181961Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:14.181988Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] received poison pill [17:1086:2860] 2024-11-21T08:51:14.181993Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] notify reset [17:1086:2860] 2024-11-21T08:51:14.182017Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1086:2860], Recipient [17:976:2789]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:14.182021Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:14.182047Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T08:51:14.182052Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:14.182056Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2024-11-21T08:51:14.182059Z node 17 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2024-11-21T08:51:14.182063Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2024-11-21T08:51:14.182067Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T08:51:14.182071Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T08:51:14.182077Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:723:2071] 2024-11-21T08:51:14.182081Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:723:2071] 2024-11-21T08:51:14.182098Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1007:2805]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 3000} 2024-11-21T08:51:14.182102Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T08:51:14.182105Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2024-11-21T08:51:14.182109Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T08:51:14.182128Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:726:2071] 2024-11-21T08:51:14.182132Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:726:2071] 2024-11-21T08:51:14.182151Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1088:2862], Recipient [17:1007:2805]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:14.182155Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:14.182159Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [17:1086:2860], serverId# [17:1088:2862], sessionId# [0:0:0] 2024-11-21T08:51:14.182207Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1002:2802]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2024-11-21T08:51:14.182212Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T08:51:14.182217Z node 17 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 3000 2024-11-21T08:51:14.182223Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037890: waitStep# 3000 readStep# 3000 observedStep# 3000 2024-11-21T08:51:14.182229Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037890 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T08:51:14.182265Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1007:2805]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2024-11-21T08:51:14.182270Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T08:51:14.182273Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 3000 2024-11-21T08:51:14.182280Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2024-11-21T08:51:14.182284Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T08:51:14.192661Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2024-11-21T08:51:14.193474Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [17:1059:2839] 2024-11-21T08:51:14.193488Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [17:1059:2839] 2024-11-21T08:51:14.193605Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [17:391:2384], Recipient [17:1061:2840] 2024-11-21T08:51:14.193629Z node 17 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2024-11-21T08:51:14.193997Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2024-11-21T08:51:14.194014Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 2 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T08:51:14.194138Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [17:964:2782], Recipient [17:976:2789]: NKikimr::TEvTablet::TEvFollowerGcApplied ... 
split finished >> test.py::test[window-win_func_aggr_with_qualified_all--Analyze] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] >> test.py::test[blocks-minmax_tuple--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_tuple--Plan] >> test.py::test[aggregate-group_by_cube_expr_trio--Analyze] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Debug] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> test.py::test[blocks-minmax_tuple--Plan] [GOOD] >> test.py::test[blocks-minmax_tuple--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:476:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:479:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:480:2057] recipient: [4:478:2500] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:482:2057] recipient: [4:478:2500] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:481:2501] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:551:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:481:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:483:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:485:2057] recipient: [5:484:2505] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:487:2057] recipient: [5:484:2505] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:486:2506] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:556:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:481:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:483:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:485:2057] recipient: [6:484:2505] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:487:2057] recipient: [6:484:2505] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:486:2506] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:556:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:482:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:485:2057] recipient: [7:484:2505] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:486:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:488:2057] recipient: [7:484:2505] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:487:2506] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:557:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:484:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:487:2057] recipient: [8:486:2507] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:488:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:490:2057] recipient: [8:486:2507] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:489:2508] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:559:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:484:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:487:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:488:2057] recipient: [9:486:2507] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:490:2057] recipient: [9:486:2507] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:489:2508] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:559:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:485:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:488:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:489:2057] recipient: [10:487:2507] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:491:2057] recipient: [10:487:2507] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:490:2508] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:560:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:487:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:490:2057] recipient: [11:489:2509] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:491:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:493:2057] recipient: [11:489:2509] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:492:2510] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:562:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 927937 is [13:105:2137] sender: [13:492:2057] recipient: [13:490:2509] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:494:2057] recipient: [13:490:2509] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:493:2510] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:563:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:476:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:479:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:480:2057] recipient: [18:478:2500] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:482:2057] recipient: [18:478:2500] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:481:2501] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:551:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:481:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:484:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:485:2057] recipient: [19:483:2505] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:487:2057] recipient: [19:483:2505] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:486:2506] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:556:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:481:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:484:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:485:2057] recipient: [20:483:2505] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:487:2057] recipient: [20:483:2505] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:486:2506] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:556:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:482:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:485:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:486:2057] recipient: [21:484:2505] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:488:2057] recipient: [21:484:2505] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:487:2506] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:535:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:484:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:487:2057] recipient: [22:486:2507] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:488:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:490:2057] recipient: [22:486:2507] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:489:2508] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:560:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:484:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:487:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:488:2057] recipient: [23:486:2507] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:490:2057] recipient: [23:486:2507] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! 
new actor is[23:489:2508] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:559:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:485:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:488:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:489:2057] recipient: [24:487:2507] Leader for TabletID 72057594037927937 is [24:490:2508] sender: [24:491:2057] recipient: [24:487:2507] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:490:2508] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2024-11-21T08:51:06.791262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652443270599829:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:06.791308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fcb/r3tmp/tmppZnecX/pdisk_1.dat 2024-11-21T08:51:06.950187Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13687, node 1 2024-11-21T08:51:06.984430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:06.984444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:06.984446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:06.984486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.052640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:07.064829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.079759Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:07.110534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.110566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.116680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.433232Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU= 2024-11-21T08:51:07.433356Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.433365Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2024-11-21T08:51:07.435284Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.436233Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI= 2024-11-21T08:51:07.436293Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.436359Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ReadyState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652447565567570:2280] database: Root databaseId: /Root pool id: 
2024-11-21T08:51:07.436386Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ReadyState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Sending CompileQuery request 2024-11-21T08:51:07.472404Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, ExecutePhyTx, tx: 0x000005367B847A98 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:07.472425Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Sending to Executer TraceId: 0 8 2024-11-21T08:51:07.472455Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Created new KQP executer: [1:7439652447565567575:2297] isRollback: 0 2024-11-21T08:51:07.477582Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Forwarded TEvStreamData to [1:7439652447565567570:2280] 2024-11-21T08:51:07.480536Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:51:07.480636Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, txInfo Status: Committed Kind: Pure TotalDuration: 8.313 ServerDuration: 8.269 QueriesCount: 2 2024-11-21T08:51:07.480670Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:07.480730Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:07.480737Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, EndCleanup, isFinal: 1 2024-11-21T08:51:07.480748Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: ExecuteState, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439652443270599917:2256] 2024-11-21T08:51:07.480751Z node 1 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: unknown state, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Cleanup temp tables: 0 2024-11-21T08:51:07.480801Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDdlZGMxOTUtNGNjMjY0NmMtOTljZWIxMjctOGM2MDIwMGI=, ActorId: [1:7439652447565567571:2297], ActorState: unknown state, TraceId: 01jd6yp0hc7fwwn4fsyhvq7d25, Session actor destroyed 2024-11-21T08:51:07.482239Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:07.482254Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:07.482257Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:07.482260Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:07.482272Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGVjODdhZWYtMjk5YjFiZWMtYzQ2YjNjNzYtMWM2N2NlMzU=, ActorId: [1:7439652447565567569:2296], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:07.827815Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652446672602072:2062];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fcb/r3tmp/tmp7LNiPh/pdisk_1.dat 2024-11-21T08:51:07.844571Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:07.850205Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13254, node 2 2024-11-21T08:51:07.884367Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.884379Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.884381Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.884427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" ... 
seId: /Root pool id: default 2024-11-21T08:51:14.124521Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:14.124531Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7439652474737377456:2320], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk= 2024-11-21T08:51:14.124538Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439652474737377459:2322], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:14.124554Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7439652474737377460:2323], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, Start pool fetching 2024-11-21T08:51:14.124558Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439652474737377461:2324], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:14.127531Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439652474737377459:2322], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2024-11-21T08:51:14.127546Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439652474737377461:2324], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2024-11-21T08:51:14.127553Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2024-11-21T08:51:14.127569Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7439652474737377460:2323], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, Pool info successfully resolved 2024-11-21T08:51:14.127585Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk= 2024-11-21T08:51:14.127604Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439652474737377424:2310], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7439652474737377456:2320], session id: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk= 2024-11-21T08:51:14.127612Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439652474737377424:2310], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7439652474737377456:2320], session id: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, local in flight: 1 2024-11-21T08:51:14.127618Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk= 2024-11-21T08:51:14.127632Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, continue request, pool id: default 2024-11-21T08:51:14.127645Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Sending CompileQuery request 2024-11-21T08:51:14.127817Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2024-11-21T08:51:14.222668Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, ExecutePhyTx, tx: 0x000005365FD27098 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:14.222695Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Sending to Executer TraceId: 0 8 2024-11-21T08:51:14.222711Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Created new KQP executer: [8:7439652474737377471:2320] isRollback: 0 2024-11-21T08:51:14.234141Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T08:51:14.234175Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, ExecutePhyTx, tx: 0x000005365FD26D98 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:14.234305Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:14.234340Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, txInfo Status: Committed Kind: ReadOnly TotalDuration: 11.695 ServerDuration: 11.675 QueriesCount: 2 2024-11-21T08:51:14.234373Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:14.234386Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ExecuteState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2024-11-21T08:51:14.234457Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439652474737377424:2310], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7439652474737377456:2320], session id: 
ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, duration: 0.109822s, cpu consumed: 0.000407s 2024-11-21T08:51:14.234464Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439652474737377424:2310], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7439652474737377456:2320], session id: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, local in flight: 0 2024-11-21T08:51:14.234476Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: CleanupState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, EndCleanup, isFinal: 0 2024-11-21T08:51:14.234488Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: CleanupState, TraceId: 01jd6yp72c5q6nm5dfx9tbfcww, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7439652470442409388:2256] 2024-11-21T08:51:14.234523Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.109822s, CpuConsumed: 0.000407s, AdjustCpuQuota: 0 2024-11-21T08:51:14.234605Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, TxId: 2024-11-21T08:51:14.234634Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, TxId: 2024-11-21T08:51:14.236343Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:14.236356Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:14.236360Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:14.236362Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:14.236378Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWE4OGUyYTgtZmVjNmQ4MDItNDcxMGFiNWQtZWNmZjk0Njk=, ActorId: [8:7439652474737377456:2320], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:14.236654Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MzljZDU5M2MtMjg1YjZlNGQtODc1YjI4Y2MtMzE1YTE4YmE=, ActorId: [8:7439652470442409981:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:14.236658Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MzljZDU5M2MtMjg1YjZlNGQtODc1YjI4Y2MtMzE1YTE4YmE=, ActorId: [8:7439652470442409981:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 
WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:14.236660Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzljZDU5M2MtMjg1YjZlNGQtODc1YjI4Y2MtMzE1YTE4YmE=, ActorId: [8:7439652470442409981:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:14.236661Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzljZDU5M2MtMjg1YjZlNGQtODc1YjI4Y2MtMzE1YTE4YmE=, ActorId: [8:7439652470442409981:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:14.236667Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MzljZDU5M2MtMjg1YjZlNGQtODc1YjI4Y2MtMzE1YTE4YmE=, ActorId: [8:7439652470442409981:2299], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:14.425428Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:14.430521Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:14.432576Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> TExternalTableTest::DropExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2024-11-21T08:51:14.115110Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T08:51:14.115131Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:14.115150Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T08:51:14.115155Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:14.115161Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T08:51:14.115166Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:14.115173Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T08:51:14.115177Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:14.115183Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T08:51:14.115187Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:14.115193Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T08:51:14.115197Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:14.115203Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T08:51:14.115207Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:14.115213Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T08:51:14.115217Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:14.115223Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T08:51:14.115227Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:14.115234Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T08:51:14.115238Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:14.115245Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T08:51:14.115250Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:14.115257Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T08:51:14.115261Z 12 00h00m00.000000s :BS_NODE DEBUG: 
[12] Connect 2024-11-21T08:51:14.115283Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T08:51:14.115288Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:14.115295Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T08:51:14.115299Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:14.115306Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T08:51:14.115309Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:14.115315Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T08:51:14.115319Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T08:51:14.115324Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T08:51:14.115329Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T08:51:14.115338Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T08:51:14.115343Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T08:51:14.115352Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T08:51:14.115357Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T08:51:14.115370Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T08:51:14.115375Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T08:51:14.115384Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T08:51:14.115389Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T08:51:14.115396Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T08:51:14.115402Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T08:51:14.115410Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T08:51:14.115415Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T08:51:14.115422Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T08:51:14.115427Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T08:51:14.115435Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T08:51:14.115440Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T08:51:14.115447Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T08:51:14.115452Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T08:51:14.115459Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T08:51:14.115467Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T08:51:14.115474Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T08:51:14.115479Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T08:51:14.115485Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T08:51:14.115490Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T08:51:14.115496Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T08:51:14.115500Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T08:51:14.115506Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T08:51:14.115510Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T08:51:14.115517Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T08:51:14.115535Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T08:51:14.118400Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2024-11-21T08:51:14.118614Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 2024-11-21T08:51:14.118625Z 3 00h00m00.000000s :BS_NODE 
DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2024-11-21T08:51:14.118630Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2024-11-21T08:51:14.118637Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2024-11-21T08:51:14.118643Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2024-11-21T08:51:14.118649Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2024-11-21T08:51:14.118656Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2024-11-21T08:51:14.118661Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2024-11-21T08:51:14.118667Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2024-11-21T08:51:14.118673Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2024-11-21T08:51:14.118679Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2024-11-21T08:51:14.118685Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2024-11-21T08:51:14.118691Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2024-11-21T08:51:14.118697Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2024-11-21T08:51:14.118703Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2024-11-21T08:51:14.118708Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2024-11-21T08:51:14.118714Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2024-11-21T08:51:14.118723Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2024-11-21T08:51:14.118729Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2024-11-21T08:51:14.118735Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2024-11-21T08:51:14.118741Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] 
Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2024-11-21T08:51:14.118747Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2024-11-21T08:51:14.118753Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2024-11-21T08:51:14.118759Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2024-11-21T08:51:14.118765Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2024-11-21T08:51:14.118771Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2024-11-21T08:51:14.118777Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2024-11-21T08:51:14.118783Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2024-11-21T08:51:14.118788Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2024-11-21T08:51:14.118793Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2024-11-21T08:51:14.118799Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2024-11-21T08:51:14.149485Z 1 00h00m00.002048s :BS_NODE DEBUG: [1] CheckState from [1:2253:71] expected 1 current 0 2024-11-21T08:51:14.149513Z 2 00h00m00.002048s :BS_NODE DEBUG: [2] CheckState from [2:2254:38] expected 1 current 0 2024-11-21T08:51:14.149523Z 3 00h00m00.002048s :BS_NODE DEBUG: [3] CheckState from [3:2255:38] expected 1 current 0 2024-11-21T08:51:14.149532Z 4 00h00m00.002048s :BS_NODE DEBUG: [4] CheckState from [4:2256:38] expected 1 current 0 2024-11-21T08:51:14.149540Z 5 00h00m00.002048s :BS_NODE DEBUG: [5] CheckState from [5:2257:38] expected 1 current 0 2024-11-21T08:51:14.149549Z 6 00h00m00.002048s :BS_NODE DEBUG: [6] CheckState from [6:2258:38] expected 1 current 0 2024-11-21T08:51:14.149556Z 7 00h00m00.002048s :BS_NODE DEBUG: [7] CheckState from [7:2259:38] expected 1 current 0 2024-11-21T08:51:14.149563Z 8 00h00m00.002048s :BS_NODE DEBUG: [8] CheckState from [8:2260:38] expected 1 current 0 2024-11-21T08:51:14.149571Z 9 00h00m00.002048s :BS_NODE DEBUG: [9] CheckState from [9:2261:38] expected 1 current 0 2024-11-21T08:51:14.149578Z 10 00h00m00.002048s :BS_NODE DEBUG: [10] CheckState from [10:2262 ... 
ODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T08:51:15.729833Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] VDiskId# [80000010:5:0:6:0] -> [80000010:6:0:6:0] 2024-11-21T08:51:15.729843Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:15.729848Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000010:6:0:3:0] PDiskId# 1001 VSlotId# 1012 created 2024-11-21T08:51:15.729858Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000010:6:0:3:0] status changed to INIT_PENDING 2024-11-21T08:51:15.729877Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:15.729883Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2024-11-21T08:51:15.729893Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:15.729899Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2024-11-21T08:51:15.729906Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.729916Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T08:51:15.729925Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2024-11-21T08:51:15.729936Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:15.729942Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:0:4:0] -> [80000000:4:0:4:0] 2024-11-21T08:51:15.729951Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:15.729957Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:0:5:0] -> [80000000:4:0:5:0] 2024-11-21T08:51:15.729967Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:15.729972Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] VDiskId# [80000000:3:0:7:0] -> [80000000:4:0:7:0] 2024-11-21T08:51:15.729982Z 12 05h15m00.117920s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T08:51:15.729988Z 12 05h15m00.117920s :BS_NODE DEBUG: [12] VDiskId# [80000000:3:0:6:0] -> [80000000:4:0:6:0] 2024-11-21T08:51:15.730015Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:15.730021Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000000:4:0:3:0] PDiskId# 1001 VSlotId# 1013 created 2024-11-21T08:51:15.730029Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000000:4:0:3:0] status changed to INIT_PENDING 2024-11-21T08:51:15.730052Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:15.730057Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000038:4:0:1:0] -> [80000038:5:0:1:0] 2024-11-21T08:51:15.730067Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:15.730073Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] VDiskId# [80000038:4:0:2:0] -> [80000038:5:0:2:0] 2024-11-21T08:51:15.730084Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T08:51:15.730094Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000038:4:0:6:0] -> [80000038:5:0:6:0] 2024-11-21T08:51:15.730103Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.730112Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:15.730118Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000038:4:0:4:0] -> [80000038:5:0:4:0] 2024-11-21T08:51:15.730128Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T08:51:15.730137Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000038:4:0:0:0] -> [80000038:5:0:0:0] 2024-11-21T08:51:15.730150Z 6 
05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:15.730155Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000038:4:0:5:0] -> [80000038:5:0:5:0] 2024-11-21T08:51:15.730165Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:15.730170Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] VDiskId# [80000038:4:0:7:0] -> [80000038:5:0:7:0] 2024-11-21T08:51:15.730181Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:15.730186Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000038:5:0:3:0] PDiskId# 1001 VSlotId# 1014 created 2024-11-21T08:51:15.730195Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000038:5:0:3:0] status changed to INIT_PENDING 2024-11-21T08:51:15.730213Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T08:51:15.730220Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] VDiskId# [80000028:4:0:6:0] -> [80000028:5:0:6:0] 2024-11-21T08:51:15.730233Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:15.730239Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000028:4:0:1:0] -> [80000028:5:0:1:0] 2024-11-21T08:51:15.730250Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:15.730256Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] VDiskId# [80000028:4:0:2:0] -> [80000028:5:0:2:0] 2024-11-21T08:51:15.730265Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.730276Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:15.730282Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000028:4:0:4:0] -> [80000028:5:0:4:0] 2024-11-21T08:51:15.730292Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:15.730297Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000028:4:0:5:0] -> [80000028:5:0:5:0] 2024-11-21T08:51:15.730306Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T08:51:15.730314Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000028:4:0:0:0] -> [80000028:5:0:0:0] 2024-11-21T08:51:15.730327Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:15.730334Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] VDiskId# [80000028:4:0:7:0] -> [80000028:5:0:7:0] 2024-11-21T08:51:15.730345Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:15.730350Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000028:5:0:3:0] PDiskId# 1001 VSlotId# 1015 created 2024-11-21T08:51:15.730358Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000028:5:0:3:0] status changed to INIT_PENDING 2024-11-21T08:51:15.730375Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:15.730381Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000018:4:0:1:0] -> [80000018:5:0:1:0] 2024-11-21T08:51:15.730390Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:15.730395Z 3 05h15m00.117920s :BS_NODE DEBUG: [3] VDiskId# [80000018:4:0:2:0] -> [80000018:5:0:2:0] 2024-11-21T08:51:15.730403Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.730412Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:15.730418Z 5 05h15m00.117920s :BS_NODE DEBUG: [5] VDiskId# [80000018:4:0:4:0] -> [80000018:5:0:4:0] 2024-11-21T08:51:15.730428Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:15.730437Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000018:4:0:5:0] -> [80000018:5:0:5:0] 2024-11-21T08:51:15.730447Z 8 
05h15m00.117920s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:15.730453Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] VDiskId# [80000018:4:0:7:0] -> [80000018:5:0:7:0] 2024-11-21T08:51:15.730464Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T08:51:15.730474Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000018:4:0:0:0] -> [80000018:5:0:0:0] 2024-11-21T08:51:15.730487Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:15.730493Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000018:5:0:3:0] PDiskId# 1001 VSlotId# 1016 created 2024-11-21T08:51:15.730500Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000018:5:0:3:0] status changed to INIT_PENDING 2024-11-21T08:51:15.730511Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:15.730518Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [80000018:4:0:6:0] -> [80000018:5:0:6:0] 2024-11-21T08:51:15.731727Z 13 05h15m02.418920s :BS_NODE DEBUG: [13] VDiskId# [80000010:6:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.731866Z 13 05h15m03.322920s :BS_NODE DEBUG: [13] VDiskId# [80000038:5:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.731961Z 13 05h15m03.349920s :BS_NODE DEBUG: [13] VDiskId# [80000000:4:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.732059Z 13 05h15m03.481920s :BS_NODE DEBUG: [13] VDiskId# [80000020:6:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.732171Z 1 05h15m03.884920s :BS_NODE DEBUG: [1] VDiskId# [80000021:3:0:6:0] status changed to REPLICATING 2024-11-21T08:51:15.732264Z 13 05h15m04.503920s :BS_NODE DEBUG: [13] VDiskId# [80000018:5:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.732371Z 13 05h15m04.831920s :BS_NODE DEBUG: [13] VDiskId# [80000030:4:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.732693Z 13 05h15m05.457920s :BS_NODE DEBUG: [13] VDiskId# [80000008:4:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.732847Z 13 05h15m05.616920s :BS_NODE DEBUG: [13] VDiskId# [80000028:5:0:3:0] status changed to REPLICATING 2024-11-21T08:51:15.733109Z 13 05h15m17.716920s :BS_NODE DEBUG: [13] VDiskId# [80000020:6:0:3:0] status changed to READY 2024-11-21T08:51:15.734476Z 4 05h15m17.717432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.734495Z 4 05h15m17.717432s :BS_NODE DEBUG: [4] VDiskId# [80000020:5:0:3:0] destroyed 2024-11-21T08:51:15.734592Z 13 05h15m23.078920s :BS_NODE DEBUG: [13] VDiskId# [80000010:6:0:3:0] status changed to READY 2024-11-21T08:51:15.735852Z 4 05h15m23.079432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.735869Z 4 05h15m23.079432s :BS_NODE DEBUG: [4] VDiskId# [80000010:5:0:3:0] destroyed 2024-11-21T08:51:15.735950Z 13 05h15m29.706920s :BS_NODE DEBUG: [13] VDiskId# [80000000:4:0:3:0] status changed to READY 2024-11-21T08:51:15.737206Z 4 05h15m29.707432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.737224Z 4 05h15m29.707432s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:0:3:0] destroyed 2024-11-21T08:51:15.737261Z 13 05h15m29.823920s :BS_NODE DEBUG: [13] VDiskId# [80000008:4:0:3:0] status changed to READY 2024-11-21T08:51:15.738494Z 4 05h15m29.824432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.738511Z 4 05h15m29.824432s :BS_NODE DEBUG: [4] VDiskId# [80000008:3:0:3:0] destroyed 2024-11-21T08:51:15.738658Z 13 05h15m30.065920s :BS_NODE DEBUG: [13] VDiskId# [80000038:5:0:3:0] status changed to READY 2024-11-21T08:51:15.739820Z 4 05h15m30.066432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 
2024-11-21T08:51:15.739836Z 4 05h15m30.066432s :BS_NODE DEBUG: [4] VDiskId# [80000038:4:0:3:0] destroyed 2024-11-21T08:51:15.739878Z 13 05h15m31.347920s :BS_NODE DEBUG: [13] VDiskId# [80000028:5:0:3:0] status changed to READY 2024-11-21T08:51:15.741033Z 4 05h15m31.348432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.741054Z 4 05h15m31.348432s :BS_NODE DEBUG: [4] VDiskId# [80000028:4:0:3:0] destroyed 2024-11-21T08:51:15.741197Z 1 05h15m35.198920s :BS_NODE DEBUG: [1] VDiskId# [80000021:3:0:6:0] status changed to READY 2024-11-21T08:51:15.744707Z 4 05h15m35.199432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.744727Z 4 05h15m35.199432s :BS_NODE DEBUG: [4] VDiskId# [80000021:2:0:6:0] destroyed 2024-11-21T08:51:15.744771Z 13 05h15m36.436920s :BS_NODE DEBUG: [13] VDiskId# [80000030:4:0:3:0] status changed to READY 2024-11-21T08:51:15.745997Z 4 05h15m36.437432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.746015Z 4 05h15m36.437432s :BS_NODE DEBUG: [4] VDiskId# [80000030:3:0:3:0] destroyed 2024-11-21T08:51:15.746053Z 13 05h15m36.924920s :BS_NODE DEBUG: [13] VDiskId# [80000018:5:0:3:0] status changed to READY 2024-11-21T08:51:15.747241Z 4 05h15m36.925432s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:15.747257Z 4 05h15m36.925432s :BS_NODE DEBUG: [4] VDiskId# [80000018:4:0:3:0] destroyed >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TExternalTableTest::ParallelCreateExternalTable >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::Decimal >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TExternalTableTest::CreateExternalTable >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:16.561703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:16.561728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.561734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:16.561740Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:16.561746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:16.561751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:16.561762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.561861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.574132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:16.574155Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.587411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.588356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:16.588406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:16.591839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:16.593150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:16.593298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.593421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:16.597096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.597520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.597538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.597592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:16.597601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.597609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:16.597631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.599084Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:16.616963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:16.617041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.617103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:16.617150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:16.617158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.619155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.619220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:16.619293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.619309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:16.619315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:16.619321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:16.620386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.620407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:16.620414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:16.621315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.621331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.621338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.621346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.622050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:16.622512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:16.622579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:16.622783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.622812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T08:51:16.622819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.622882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:16.622889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.622925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.622940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:16.623336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.623344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.623412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.623419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:16.623524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.623532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:16.623547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:16.623552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.623558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:16.623563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.623568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:16.623573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:16.623585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:16.623591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:16.623595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:16.623932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.623950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.623955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:16.623961Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:16.623966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.623980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... xId 125: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.642700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:367:2359] 2024-11-21T08:51:16.642713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2024-11-21T08:51:16.642737Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2024-11-21T08:51:16.642743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.642747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:367:2359] 2024-11-21T08:51:16.642761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.642765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:367:2359] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2024-11-21T08:51:16.642835Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.642863Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 40us result status StatusSuccess 2024-11-21T08:51:16.642940Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } 
PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643031Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643045Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 16us result status StatusSuccess 2024-11-21T08:51:16.643085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643161Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643176Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 16us result status StatusSuccess 2024-11-21T08:51:16.643226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643296Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 15us result status StatusSuccess 2024-11-21T08:51:16.643330Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.643397Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 30us result status StatusSuccess 2024-11-21T08:51:16.643434Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TExternalTableTest::ParallelCreateSameExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:16.473073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:16.473103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.473108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:16.473114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:16.473122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:16.473126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:16.473136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T08:51:16.473236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.487518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:16.487550Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.496286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.497330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:16.497394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:16.499704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:16.499952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:16.500089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.500192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:16.501742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.502178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.502195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.502248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:16.502259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.502267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:16.502292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.504158Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:16.522050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:16.522142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.522215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:16.522257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:16.522263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.523326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:51:16.523362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:16.523422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.523434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:16.523439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:16.523444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:16.523965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.523976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:16.523981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:16.524533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.524545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.524551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.524557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.525098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:16.525651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:16.525714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:16.525938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.525965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.525975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.526030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:16.526036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.526067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.526080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:16.526531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.526541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.526593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.526608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:16.526704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.526711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:16.526724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:16.526728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.526733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:16.526739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.526743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:16.526747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:16.526759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:16.526765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:16.526769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:16.527056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.527073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.527077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:16.527083Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:16.527087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.527104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
MESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T08:51:16.758406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.758428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.758436Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2024-11-21T08:51:16.758458Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T08:51:16.758487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:16.758496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:16.758502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:16.758915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:16.758944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:51:16.759030Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.759035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.759056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:16.759069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:16.759077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:16.759095Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.759099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:51:16.759103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T08:51:16.759107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T08:51:16.759110Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T08:51:16.759161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.759166Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:51:16.759179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:51:16.759183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:51:16.759189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:51:16.759195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:51:16.759200Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:51:16.759203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:51:16.759213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:16.759217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:16.759222Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2024-11-21T08:51:16.759226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:51:16.759229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:51:16.759232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:51:16.759362Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759376Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:16.759380Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:51:16.759384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:16.759491Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759505Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:16.759508Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:51:16.759512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:16.759807Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:16.759824Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:16.759828Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:51:16.759832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:16.759843Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:51:16.760230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:16.760520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:16.760784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:51:16.760837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:51:16.760843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:51:16.760911Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:51:16.760927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.760931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:331:2323] TestWaitNotification: OK eventTxId 101 2024-11-21T08:51:16.760994Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.761020Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 34us result status StatusSuccess 2024-11-21T08:51:16.761109Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 
PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:16.832839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:16.832872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.832878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:16.832883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:16.832889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:16.832894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:16.832903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.833008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.845287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:16.845310Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.848629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.849505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:16.849545Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:16.851164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:16.851379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:16.851504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.851603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:16.852809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.853133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.853148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.853189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:16.853199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.853206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:16.853225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.854235Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:16.872725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:16.872812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.872880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:16.872933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:16.872941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.873830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.873856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:16.873901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.873910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
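In the block above, the ESchemeOpAlterSubDomain sub-operation moves through the numeric states the log prints: 2 -> 3 when TCreateParts finds no shards to create, 3 -> 128 after NSubDomainState::TConfigureParts, and 128 -> 240 once the (fake) coordinator answers with TEvOperationPlan, after which TDone reports progress 1/1. The following tiny Python rendering replays just that observed progression; the numeric codes and phase names are copied from this trace, and nothing beyond what the log shows is implied.

```python
# State codes and the phases this trace associates with them.
OBSERVED_STATES = {
    2:   "TCreateParts",     # "ProgressState no shards to create, do next state"
    3:   "TConfigureParts",  # "Change state for txid 1:0 3 -> 128"
    128: "TPropose",         # waits for TEvOperationPlan from the coordinator
    240: "TDone",            # "Part operation is done id#1:0 progress is 1/1"
}

def replay(transitions):
    """Print the progression exactly as the log reports it."""
    for src, dst in transitions:
        print(f"Change state {src} ({OBSERVED_STATES[src]}) -> {dst} ({OBSERVED_STATES[dst]})")

replay([(2, 3), (3, 128), (128, 240)])
```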
2024-11-21T08:51:16.873915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:16.873920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:16.874312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.874322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:16.874326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:16.874644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.874655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.874661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.874668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.875361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:16.875746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:16.875797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:16.875985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.876012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.876022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.876075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:16.876081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.876110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.876122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:16.876580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.876590Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.876633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.876639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:16.876737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.876744Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:16.876757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:16.876762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.876768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:16.876773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.876778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:16.876782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:16.876795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:16.876801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:16.876805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:16.877131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.877147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.877152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:16.877157Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:16.877162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.877178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
08:51:16.887522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:16.887578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:16.887922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.887931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.887960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:16.887973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:16.887986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.887991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:51:16.887996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T08:51:16.887999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T08:51:16.888009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.888016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:16.888028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:16.888032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:16.888040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T08:51:16.888045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:16.888050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:16.888055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:16.888066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:16.888072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T08:51:16.888075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2024-11-21T08:51:16.888079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:51:16.888300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:16.888316Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:16.888320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:16.888325Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:51:16.888329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:16.888549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:16.888562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:16.888566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:16.888570Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:51:16.888574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:16.888587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T08:51:16.889102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:16.889438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:51:16.889495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:51:16.889502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:51:16.889571Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:16.889592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.889597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:325:2317] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:16.889668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:16.889697Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 39us result status StatusSuccess 2024-11-21T08:51:16.889773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T08:51:16.890528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:16.890584Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2024-11-21T08:51:16.890598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists:1 2024-11-21T08:51:16.890623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, at schemeshard: 72057594046678944 2024-11-21T08:51:16.891081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2024-11-21T08:51:16.891114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:51:16.891166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:51:16.891173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:51:16.891231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:51:16.891247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:51:16.891252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:333:2325] TestWaitNotification: OK eventTxId 103 >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Plan] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:16.731132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:16.731158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.731161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:16.731165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:16.731170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:16.731172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
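The describe results and the StatusNameConflict above trace the external-table DDL path: an ObjectStorage data source at https://s3.cloud.net/my_bucket, an external table over it, and a replacing create (ReplaceIfExists: true) that is rejected because /MyRoot/UniqueName is already a view (EPathTypeView). A hedged sketch of the equivalent client-side calls with the YDB Python SDK follows; the object names, location, and column types are taken from the trace, while the exact YQL keywords and SDK usage are an approximation on my part, not something this log asserts.

```python
import ydb

# Endpoint and database are placeholders for illustration only.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
driver.wait(timeout=5)

create_source = """
CREATE EXTERNAL DATA SOURCE ExternalDataSource WITH (
    SOURCE_TYPE = "ObjectStorage",
    LOCATION = "https://s3.cloud.net/my_bucket",
    AUTH_METHOD = "NONE"           -- the trace shows Auth { None { } }
);
"""

create_table = """
CREATE EXTERNAL TABLE ExternalTable (
    key Decimal(35, 9)             -- column and type as reported by DescribeScheme
) WITH (
    DATA_SOURCE = "ExternalDataSource",
    LOCATION = "/"
);
"""

pool = ydb.SessionPool(driver)
try:
    for statement in (create_source, create_table):
        # execute_scheme runs DDL; a replacing create against a path that is
        # already a VIEW fails with a name conflict, as the trace shows.
        pool.retry_operation_sync(lambda session, q=statement: session.execute_scheme(q))
finally:
    pool.stop()
    driver.stop()
```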
2024-11-21T08:51:16.731179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:16.731256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.743164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:16.743185Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.754690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.755580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:16.755619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:16.757512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:16.757674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:16.757768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.757839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:16.758684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.758929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.758939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.758973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:16.758981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.758987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:16.758999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.760330Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:16.776417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:16.776501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.776576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:16.776624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:16.776631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.779332Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.779363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:16.779431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.779441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:16.779446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:16.779452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:16.779870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.779878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:16.779882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:16.780174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.780180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.780186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.780193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.780773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:16.781122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:16.781178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:16.781368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.781399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.781409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.781479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:16.781485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:16.781519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.781533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:16.782199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.782209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.782262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.782267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:16.782368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.782375Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:16.782387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:16.782392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.782397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:16.782402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:16.782407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:16.782411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:16.782422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:16.782428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:16.782432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:16.782733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.782744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:16.782748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:16.782754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:16.782758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:16.782768Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... 4 2024-11-21T08:51:16.915767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2024-11-21T08:51:16.915783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:16.915867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:51:16.915879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:16.916779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2024-11-21T08:51:16.916818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2024-11-21T08:51:16.916884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:16.916890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.916938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:51:16.916958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.916964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T08:51:16.916969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 5 2024-11-21T08:51:16.916981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.916988Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:16.916998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2024-11-21T08:51:16.917029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:16.917248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.917261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.917268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2024-11-21T08:51:16.917274Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T08:51:16.917280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T08:51:16.917708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.917725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.917730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2024-11-21T08:51:16.917735Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:16.917739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:16.917753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2024-11-21T08:51:16.917963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2024-11-21T08:51:16.918005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2024-11-21T08:51:16.918461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T08:51:16.918684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:16.918709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:16.918722Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:51:16.918753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2024-11-21T08:51:16.918788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:51:16.918797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:16.918881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T08:51:16.919395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
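The tail of the ReadOnlyMode trace is a plain MkDir (txId 129, AUDIT: CREATE DIRECTORY /MyRoot/SubDirBBBB) going through the same propose, coordinator plan step, and scheme-board publication cycle. At the client level this corresponds to the scheme API rather than YQL; the short sketch below uses the public YDB Python SDK scheme client as I understand it, with placeholder connection settings, and is not derived from this log.

```python
import ydb

# Placeholder connection settings for illustration only.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
driver.wait(timeout=5)

# Creating a directory triggers a MkDir operation in schemeshard:
# propose, coordinator plan step, then publication to the scheme board.
driver.scheme_client.make_directory("/MyRoot/SubDirBBBB")

# Describing the path reads back the entry once publication has completed.
entry = driver.scheme_client.describe_path("/MyRoot/SubDirBBBB")
print(entry.name, entry.owner)

driver.stop()
```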
2024-11-21T08:51:16.919413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:16.919478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:51:16.919496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:16.919501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T08:51:16.919506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2024-11-21T08:51:16.919592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2024-11-21T08:51:16.919600Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2024-11-21T08:51:16.919615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2024-11-21T08:51:16.919621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T08:51:16.919627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2024-11-21T08:51:16.919633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T08:51:16.919638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2024-11-21T08:51:16.919642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2024-11-21T08:51:16.919658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:16.919665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2024-11-21T08:51:16.919670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:51:16.919673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2024-11-21T08:51:16.919804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.919816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.919821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2024-11-21T08:51:16.919826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:51:16.919830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T08:51:16.919981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.919992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T08:51:16.919995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2024-11-21T08:51:16.919998Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T08:51:16.920005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:16.920015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2024-11-21T08:51:16.921059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T08:51:16.921164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::SchemeErrors >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:17.367898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:17.367927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.367932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:17.367938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:17.367944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:17.367949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:17.367958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.368046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.379929Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:17.379952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:17.383481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.384412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:17.384463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:17.390345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:17.390626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:17.390769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.390878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:17.392148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.392550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.392566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.392619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:17.392628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.392638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:17.392655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.395093Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:17.412847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.412936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.413017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:17.413072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:17.413080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.413993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.414023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:17.414076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.414087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:17.414092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:17.414097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:17.414577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.414592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:17.414596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:17.415045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.415059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.415066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.415075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.415657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:17.416084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:17.416141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:17.416353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.416378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.416389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.416456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:17.416463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.416495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.416507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:17.416889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.416897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.416945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.416951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:17.417048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.417055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:17.417067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:17.417071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.417077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:17.417082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.417087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:17.417091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:17.417101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:17.417108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:17.417111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:17.417394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.417409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.417414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:17.417419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:17.417424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.417439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
678944, txId: 125, subscribers: 0 2024-11-21T08:51:17.430797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2024-11-21T08:51:17.431181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2024-11-21T08:51:17.431218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2024-11-21T08:51:17.431309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431339Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 35us result status StatusSuccess 2024-11-21T08:51:17.431406Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431473Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 13us result status StatusSuccess 2024-11-21T08:51:17.431520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: 
EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2024-11-21T08:51:17.431551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2024-11-21T08:51:17.431557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2024-11-21T08:51:17.431572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2024-11-21T08:51:17.431575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2024-11-21T08:51:17.431583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2024-11-21T08:51:17.431586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2024-11-21T08:51:17.431663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.431688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:338:2330] 2024-11-21T08:51:17.431704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.431707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:338:2330] 2024-11-21T08:51:17.431722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.431734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter 
[1:338:2330] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2024-11-21T08:51:17.431784Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.431796Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 14us result status StatusSuccess 2024-11-21T08:51:17.431830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2024-11-21T08:51:17.432391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.432448Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2024-11-21T08:51:17.432458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2024-11-21T08:51:17.432474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, 
request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131, at schemeshard: 72057594046678944 2024-11-21T08:51:17.432940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2024-11-21T08:51:17.432965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131, operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:17.017482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:17.017512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.017516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:17.017523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:17.017530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:17.017534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:17.017544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.017645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.028739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:17.028769Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:17.032119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.032990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:17.033031Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:17.034535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:17.034797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:17.034917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.035026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:17.038515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.038824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.038834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.038868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:17.038873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.038878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:17.038895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.040521Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:17.057315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.057423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.057504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:17.057571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:17.057581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.058630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.058665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:17.058721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.058733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T08:51:17.058739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:17.058745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:17.059206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.059217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:17.059222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:17.059572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.059581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.059587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.059598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.060228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:17.060616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:17.060676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:17.060880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.060904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.060914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.060972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:17.060980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.061029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.061044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:17.061610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.061623Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.061677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.061684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:17.061790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.061799Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:17.061813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:17.061818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.061823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:17.061829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.061834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:17.061838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:17.061853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:17.061859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:17.061863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:17.062178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.062194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.062199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:17.062205Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:17.062209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.062225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
302892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:51:17.303021Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303035Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:17.303040Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:51:17.303044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:17.303149Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303161Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:17.303164Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:51:17.303168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:17.303397Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303413Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:17.303417Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:51:17.303421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:17.303432Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T08:51:17.303779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:17.303968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:17.304138Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:51:17.304184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:51:17.304192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:51:17.304288Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:17.304306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.304310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:331:2323] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:17.304380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.304404Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 35us result status StatusSuccess 2024-11-21T08:51:17.304472Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T08:51:17.305129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.305177Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2024-11-21T08:51:17.305188Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2024-11-21T08:51:17.305209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131, at schemeshard: 72057594046678944 2024-11-21T08:51:17.305744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:17.305797Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:51:17.305862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:51:17.305867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:51:17.305931Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:51:17.305948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.305952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:339:2331] TestWaitNotification: OK eventTxId 103 2024-11-21T08:51:17.306014Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.306037Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 28us result status StatusSuccess 2024-11-21T08:51:17.306093Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name:
"ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TExternalTableTest::SchemeErrors [GOOD]
>> test.py::test[aggregate-group_by_cube_expr_trio--Debug] [GOOD]
>> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:04.600678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:04.600703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.600708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:04.600713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:04.600728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2024-11-21T08:51:04.600732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:04.600741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.600821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:04.615241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:04.615266Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.624625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:04.624733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:04.624757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:04.627397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:04.627476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:04.627608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.627755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.628373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.628650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.628662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.628674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:04.628682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.628688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:04.628722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:04.630078Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.648042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:04.648108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.648160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:04.648474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:04.648490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.652630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.652667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:04.652710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.652723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:04.652728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:04.652737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:04.656444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.656470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:04.656478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:04.656920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.656931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.656937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.656945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.657655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:04.658098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:04.658148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:04.658342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:51:04.658367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:04.658374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.658446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:04.658456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.658490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:04.658503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.659027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.659039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.659073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.659077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:04.659157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.659164Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:04.659175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:04.659179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.659184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:04.659190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.659194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:04.659198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:04.659210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:04.659217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:04.659221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ssState at tablet: 72057594046678944 2024-11-21T08:51:17.536182Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536195Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536199Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:17.536227Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:17.536234Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:17.536408Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 241 } } 2024-11-21T08:51:17.536417Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:17.536433Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 241 } } 2024-11-21T08:51:17.536446Z node 34 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 241 } } 2024-11-21T08:51:17.536551Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536560Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536563Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:17.536567Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:17.536571Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:17.536649Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 
Step: 0 Generation: 2 2024-11-21T08:51:17.536657Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:17.536669Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:17.536674Z node 34 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:17.536681Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:17.536691Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.536694Z node 34 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.536698Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:17.536705Z node 34 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:51:17.536809Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536819Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.536823Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:17.536826Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:17.536830Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:17.536838Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:51:17.538095Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.538952Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.539007Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.539025Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.539133Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 
2024-11-21T08:51:17.539142Z node 34 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:51:17.539154Z node 34 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:51:17.539162Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:17.539167Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:51:17.539180Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [34:396:2370] message: TxId: 1003 2024-11-21T08:51:17.539185Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:17.539191Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:17.539196Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:17.539207Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:17.539211Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:17.539215Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:17.539229Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:17.539232Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:17.539236Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:17.539245Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:17.539342Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:17.542301Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:17.542319Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [34:599:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:17.543196Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:17.543259Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 73us result status StatusSuccess 2024-11-21T08:51:17.543355Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:17.805373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:17.805423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.805429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:17.805435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:17.805441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:17.805445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:17.805455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:17.805558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.817334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:17.817359Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:17.825099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.826063Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:17.826103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:17.836871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:17.837540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:17.837667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.837775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:17.840793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.841124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.841135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.841222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:17.841232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.841239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:17.841258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.842666Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:17.862287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.862367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.862428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:17.862480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:17.862488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.863340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.863367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:17.863412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.863422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:17.863427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:17.863432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:17.863879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.863892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:17.863897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:17.864286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.864297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.864302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.864309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.864955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:17.865402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:17.865456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:17.865642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:17.865669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.865676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.865731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:17.865738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:17.865772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.865784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:17.866161Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:17.866169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:17.866213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:17.866218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:17.866314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:17.866321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:17.866333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:17.866337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.866343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:17.866348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:17.866353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:17.866357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:17.866367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:17.866373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:17.866377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:17.866695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.866712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:17.866717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:17.866722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:17.866727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:17.866741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2024-11-21T08:51:17.879226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.879278Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2024-11-21T08:51:17.879289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.879354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2024-11-21T08:51:17.879754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.879780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2024-11-21T08:51:17.880372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.880410Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2024-11-21T08:51:17.880418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.880432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2024-11-21T08:51:17.882646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2024-11-21T08:51:17.882684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2024-11-21T08:51:17.883553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.883636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2024-11-21T08:51:17.883645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.883664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2024-11-21T08:51:17.884158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.884188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2024-11-21T08:51:17.884741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.884782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2024-11-21T08:51:17.884790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.884806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2024-11-21T08:51:17.885302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing 
Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.885325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2024-11-21T08:51:17.885843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.885884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2024-11-21T08:51:17.885893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.885934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2024-11-21T08:51:17.886368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.886389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2024-11-21T08:51:17.886865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:17.886902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2024-11-21T08:51:17.886910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2024-11-21T08:51:17.886929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, 
reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:165, at schemeshard: 72057594046678944 2024-11-21T08:51:17.887358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:165" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:17.887380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:165, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> test.py::test[blocks-minmax_tuple--Results] [GOOD] >> test.py::test[blocks-pg--Analyze] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TExternalTableTest::ReplaceExternalTableIfNotExists >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-keepmeta-with_read_udf_fail-Analyze] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> test.py::test[insert-keepmeta-with_read_udf_fail-Analyze] [SKIPPED] >> test.py::test[insert-keepmeta-with_read_udf_fail-Debug] [SKIPPED] >> test.py::test[insert-keepmeta-with_read_udf_fail-ForceBlocks] [SKIPPED] >> test.py::test[insert-keepmeta-with_read_udf_fail-Plan] [SKIPPED] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2024-11-21T08:51:15.925709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:15.926079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:15.926094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042fc/r3tmp/tmpIgiSKS/pdisk_1.dat 2024-11-21T08:51:16.029658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.047782Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.090134Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:51:16.090399Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:51:16.090434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:16.090447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:16.101000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:16.209765Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:51:16.209787Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:51:16.209811Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:51:16.219161Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:51:16.219412Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:51:16.219425Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:51:16.219495Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:16.219530Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:51:16.219543Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:51:16.219608Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:51:16.220006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.220254Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:51:16.220265Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:51:16.234205Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:16.234424Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:16.234496Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T08:51:16.234544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.235443Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:16.242925Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:16.243223Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.243260Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:16.243398Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:16.243413Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:16.243421Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:16.243460Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:16.247115Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:16.247168Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:16.247190Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:666:2557] 2024-11-21T08:51:16.247195Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:16.247199Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:51:16.247204Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.247230Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:16.247342Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:641:2542] 2024-11-21T08:51:16.247383Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.248252Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:641:2542]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:16.248509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.248515Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.248599Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:16.248616Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:16.248692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.248699Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:16.248706Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:16.248711Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:16.248715Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:16.248720Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:16.248725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:16.248751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.248755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.248761Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T08:51:16.248843Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T08:51:16.248848Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:16.248866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:16.248907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:16.248916Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:16.248931Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:16.248938Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:16.248946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:16.248951Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:16.248955Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:16.248996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:16.249000Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit 
StoreSchemeTx 2024-11-21T08:51:16.249003Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:16.249007Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:16.249016Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:16.249020Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:16.249023Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:16.249027Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:16.249031Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:16.249120Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.249144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:16.249256Z node 1 :TX_DATASHARD DEBUG: LoadChangeRec ... 2024-11-21T08:51:18.488543Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:18.488552Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:51:18.488567Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2024-11-21T08:51:18.488571Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2024-11-21T08:51:18.488573Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:18.488575Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:51:18.488578Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:51:18.488584Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:51:18.488593Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T08:51:18.488637Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:51:18.488644Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:18.488646Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:51:18.488648Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:18.488651Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:18.488666Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:18.488669Z node 2 :TX_DATASHARD 
TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:18.488671Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:18.488673Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:18.488680Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2024-11-21T08:51:18.488682Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:18.488685Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2024-11-21T08:51:18.499021Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:18.499050Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:18.499061Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:51:18.499089Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:18.499659Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2024-11-21T08:51:18.499674Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715661 ProcessProposeTransaction 2024-11-21T08:51:18.499686Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:819:2656] DataReq marker# P0 2024-11-21T08:51:18.499707Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2024-11-21T08:51:18.499765Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2024-11-21T08:51:18.499803Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T08:51:18.499824Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2024-11-21T08:51:18.499901Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 819 RawX2: 8589937248 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t3\003\000\000\000\000\000\000\021`\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2024-11-21T08:51:18.499911Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:18.499938Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:18.499976Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2024-11-21T08:51:18.499992Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:51:18.500003Z node 
2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T08:51:18.500007Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:51:18.500011Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:18.500013Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:18.500020Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T08:51:18.500029Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2024-11-21T08:51:18.500032Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T08:51:18.500034Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:18.500037Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2024-11-21T08:51:18.500042Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2024-11-21T08:51:18.500046Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T08:51:18.500048Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T08:51:18.500050Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T08:51:18.500052Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:51:18.500059Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:819:2656] for [0:281474976715661] at 72075186224037888 2024-11-21T08:51:18.500061Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2024-11-21T08:51:18.500072Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:18.500084Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2024-11-21T08:51:18.500089Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2024-11-21T08:51:18.500093Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2024-11-21T08:51:18.500110Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2024-11-21T08:51:18.500113Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T08:51:18.500124Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2024-11-21T08:51:18.500127Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T08:51:18.500136Z node 2 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:18.500138Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:18.500143Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:18.500148Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:51:18.500151Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T08:51:18.500155Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:51:18.500160Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2024-11-21T08:51:18.500164Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T08:51:18.500168Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T08:51:18.500172Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T08:51:18.500175Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2024-11-21T08:51:18.500290Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2024-11-21T08:51:18.500296Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:51:18.500300Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:18.500304Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:18.500306Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:18.500312Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:18.500411Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:825:2661], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:51:18.500417Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:18.657757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:18.657786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:18.657791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:18.657797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:18.657802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:18.657807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:18.657816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:18.657900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:18.670625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:18.670649Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:18.674257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:18.675126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:18.675166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:18.676927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:18.677145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:18.677250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.677331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:18.678510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.678803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:18.678814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.678850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:18.678857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:18.678864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:18.678879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.680540Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:18.697775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:18.697844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.697919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:18.697962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:18.697969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.698735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.698756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:18.698796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.698805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:18.698809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:18.698814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:18.699187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.699196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:18.699200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:18.699549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.699557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.699563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:18.699569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.700133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:18.700537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:18.700585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:18.700748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.700770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:18.700780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:18.700826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:18.700832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:18.700857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:18.700869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:18.701246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:18.701253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:18.701287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.701292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:18.701363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.701369Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:18.701392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:18.701396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.701402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:18.701408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.701412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:18.701415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:18.701425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:18.701431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:18.701435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:18.701721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:18.701733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:18.701737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:18.701742Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:18.701747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:18.701760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... T_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:18.707131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.707135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:51:18.707140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T08:51:18.707144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:51:18.707192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.707198Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:51:18.707208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:51:18.707212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:51:18.707217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:51:18.707222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:51:18.707227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:51:18.707231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:51:18.707243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:18.707248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:51:18.707252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:51:18.707256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:51:18.707388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 101 2024-11-21T08:51:18.707399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:18.707404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:18.707408Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:51:18.707412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:18.707656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:18.707665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:18.707669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:18.707673Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:51:18.707676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:18.707685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:51:18.708058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:18.708319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:51:18.708362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:51:18.708368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:51:18.708435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:51:18.708452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:51:18.708457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:300:2292] TestWaitNotification: OK eventTxId 101 2024-11-21T08:51:18.708518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:18.708542Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 34us result status StatusSuccess 2024-11-21T08:51:18.708618Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T08:51:18.709212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:18.709255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2024-11-21T08:51:18.709264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T08:51:18.709270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T08:51:18.709704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:18.709726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag 
EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:51:18.709768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:51:18.709772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:51:18.709818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:51:18.709832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:18.709836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:18.709885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:18.709903Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 22us result status StatusPathDoesNotExist 2024-11-21T08:51:18.709933Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1392:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1395:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1396:2057] recipient: [4:1394:3416] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1398:2057] recipient: [4:1394:3416] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:1397:3417] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1467:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1397:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1399:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1401:2057] recipient: [5:1400:3421] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1403:2057] recipient: [5:1400:3421] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:1402:3422] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1472:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1397:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1399:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1401:2057] recipient: [6:1400:3421] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1403:2057] recipient: [6:1400:3421] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:1402:3422] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1472:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1400:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1403:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1404:2057] recipient: [7:1402:3423] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1406:2057] recipient: [7:1402:3423] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:1405:3424] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1475:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1402:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1404:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1406:2057] recipient: [8:1405:3425] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1408:2057] recipient: [8:1405:3425] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:1407:3426] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1477:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1402:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1405:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1406:2057] recipient: [9:1404:3425] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1408:2057] recipient: [9:1404:3425] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:1407:3426] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1477:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1405:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1408:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1409:2057] recipient: [10:1407:3427] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1411:2057] recipient: [10:1407:3427] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:1410:3428] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1480:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:141:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:144:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:145:2057] recipient: [13:143:2166] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:147:2057] recipient: [13:143:2166] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:146:2167] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:216:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:141:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:144:2057] recipient: [14:143:2166] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:145:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:147:2057] recipient: [14:143:2166] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:146:2167] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:216:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:268:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:271:2057] recipient: [15:270:2292] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:272:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:274:2057] recipient: [15:270:2292] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:273:2293] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:343:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:273:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:276:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:277:2057] recipient: [16:275:2297] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:279:2057] recipient: [16:275:2297] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:278:2298] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:348:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:273:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:275:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:277:2057] recipient: [17:276:2297] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:279:2057] recipient: [17:276:2297] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:278:2298] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:348:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:276:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:278:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:280:2057] recipient: [18:279:2299] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:282:2057] recipient: [18:279:2299] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:281:2300] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:329:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:278:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:280:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:282:2057] recipient: [19:281:2301] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:284:2057] recipient: [19:281:2301] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:283:2302] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:353:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:278:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:280:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:282:2057] recipient: [20:281:2301] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:284:2057] recipient: [20:281:2301] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:283:2302] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:353:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:281:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:284:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:285:2057] recipient: [21:283:2303] Leader for TabletID 72057594037927937 is [21:286:2304] sender: [21:287:2057] recipient: [21:283:2303] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:286:2304] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:18.899108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:18.899136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:18.899141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:18.899146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:18.899152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:18.899156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:18.899165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:18.899251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:18.910319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:18.910341Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:18.913450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:18.914267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:18.914306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:18.915852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:18.915990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:18.916086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.916156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:18.917090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.917398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:18.917411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.917457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:18.917467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:18.917474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:18.917489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.918667Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:18.935102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:18.935178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.935245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:18.935287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:18.935294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:18.936084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:18.936098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:18.936102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:18.936478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:18.936781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.936796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:18.936801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.937330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:18.937671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:18.937721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:18.937880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.937903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:18.937911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:18.937958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:18.937963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:51:18.937985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:18.937993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:18.938271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:18.938276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:18.938305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.938309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:18.938372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.938376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:18.938384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:18.938386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.938389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:18.938393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:18.938395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:18.938397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:18.938404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:18.938408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:18.938411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:18.938587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:18.938598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:18.938601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:18.938606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:18.938610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:18.938623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
O: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:51:18.956816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:51:18.956820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:18.958201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:51:18.958217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:51:18.958222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:51:18.958226Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:51:18.958231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:18.958245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T08:51:18.958681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T08:51:18.958711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T08:51:18.958810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:18.958832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:18.958839Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 103:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T08:51:18.958860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T08:51:18.958908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:18.958917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:18.959071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:51:18.959391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T08:51:18.961169Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:18.961177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:18.961206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:18.961216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:18.961227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:18.961230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T08:51:18.961233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T08:51:18.961236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T08:51:18.961286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:51:18.961292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T08:51:18.961302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T08:51:18.961304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:51:18.961309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T08:51:18.961313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:51:18.961316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:51:18.961319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:51:18.961329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:18.961332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:18.961336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T08:51:18.961339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:51:18.961341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:51:18.961488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:51:18.961496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 
72057594046678944, cookie: 103 2024-11-21T08:51:18.961499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:51:18.961502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:51:18.961505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:18.961707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:51:18.961716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:51:18.961718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:51:18.961721Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:51:18.961724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:18.961732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T08:51:18.962336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:51:18.962384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:51:18.962420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:51:18.962425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:51:18.962480Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:51:18.962494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:51:18.962497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:361:2353] TestWaitNotification: OK eventTxId 103 2024-11-21T08:51:18.962548Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:18.962572Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 32us result status StatusSuccess 2024-11-21T08:51:18.962631Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> test.py::test[blocks-pg--Analyze] [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets >> test.py::test[blocks-pg--Debug] >> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Plan] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:149:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:151:2173] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:155:2057] recipient: [7:151:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:154:2174] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:149:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:151:2173] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:155:2057] recipient: [8:151:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:154:2174] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:224:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:151:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:153:2175] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:157:2057] recipient: [9:153:2175] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:156:2176] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:226:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2175] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:157:2057] recipient: [10:153:2175] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2176] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2177] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:159:2057] recipient: [11:155:2177] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:158:2178] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipie ... ] sender: [18:169:2057] recipient: [18:165:2185] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:168:2186] Leader for TabletID 72057594037927937 is [18:168:2186] sender: [18:238:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:141:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:144:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:145:2057] recipient: [21:143:2166] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:147:2057] recipient: [21:143:2166] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:146:2167] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:216:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:141:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:144:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:145:2057] recipient: [22:143:2166] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:147:2057] recipient: [22:143:2166] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:146:2167] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:216:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:142:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:144:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:146:2057] recipient: [23:145:2166] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:148:2057] recipient: [23:145:2166] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:147:2167] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:217:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:147:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:149:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:150:2171] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:153:2057] recipient: [24:150:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:152:2172] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:222:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:147:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:151:2057] recipient: [25:149:2171] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:153:2057] recipient: [25:149:2171] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:152:2172] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:222:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:148:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:151:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:152:2057] recipient: [26:150:2171] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:154:2057] recipient: [26:150:2171] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:153:2172] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:223:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:153:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:156:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:155:2176] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:159:2057] recipient: [27:155:2176] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:158:2177] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:228:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2176] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:159:2057] recipient: [28:155:2176] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2177] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:154:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:158:2057] recipient: [29:156:2176] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:160:2057] recipient: [29:156:2176] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:159:2177] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:229:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2024-11-21T08:51:16.830631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:16.831212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:16.831247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042fd/r3tmp/tmpK3iq95/pdisk_1.dat 2024-11-21T08:51:16.943965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.963392Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:17.006128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:17.006164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:17.016799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:17.120961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:17.136914Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:17.137153Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:17.137238Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:17.137280Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.144347Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:17.144521Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.144548Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:17.144668Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:17.144688Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:17.144695Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:17.144735Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:17.148280Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:17.148360Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:17.148386Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:17.148391Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:17.148395Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:17.148400Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:17.148537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:17.148544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:17.148691Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:17.148714Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:17.148728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:17.148733Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:17.148739Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:17.148746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:17.148751Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:17.148758Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:17.148763Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:17.148767Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:17.148771Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:17.148777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:17.148795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:17.148800Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:17.148827Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:17.148874Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:17.148883Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:17.148900Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:17.148909Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:17.148913Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:17.148918Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:17.148922Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:17.148969Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:17.148973Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:17.148976Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:17.148979Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:17.148990Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:17.148993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:17.148996Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:17.149000Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:17.149004Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:17.149256Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:17.149265Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:17.159628Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:17.159657Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:17.159664Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:17.159677Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:17.159690Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:17.335302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:17.335328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:17.335337Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:17.335358Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:17.335364Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:17.335391Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:17.335401Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:17.335407Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:17.335413Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:51:17.336388Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:17.336416Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:17.336588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:17.336596Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:17.336604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:17.336612Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:17.336618Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:17.336628Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... e execution plan for [3000:281474976715667] at 72075186224037890 executing on unit ReadTableScan 2024-11-21T08:51:18.391661Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T08:51:18.391665Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T08:51:18.391715Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2024-11-21T08:51:18.391720Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T08:51:18.391724Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:51:18.391727Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:51:18.391733Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2024-11-21T08:51:18.391739Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:51:18.391744Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2024-11-21T08:51:18.391747Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:18.391751Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T08:51:18.391755Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:51:18.391758Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:51:18.402708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:18.402744Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:18.402756Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T08:51:18.402781Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 
72075186224037890 at tablet 72075186224037890 send result to client [1:1076:2874], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:51:18.402794Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:51:18.967574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:18.967612Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:51:18.967633Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042fd/r3tmp/tmpaZpnoc/pdisk_1.dat 2024-11-21T08:51:19.050108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.063435Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:19.105462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:19.105494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:19.116081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:19.223387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.235780Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T08:51:19.235836Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:19.244961Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:19.245060Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:19.245222Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:19.245233Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:19.245240Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:19.245287Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:19.245300Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:19.245322Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:19.245344Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T08:51:19.245350Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:19.245354Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:51:19.245358Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.245577Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:19.245593Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:19.245606Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# 
[0:0:0] 2024-11-21T08:51:19.245616Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.245621Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.245629Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:19.245635Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.245669Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:19.245725Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:19.245741Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:19.246054Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:19.256353Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:19.256399Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:19.430785Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T08:51:19.430956Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:19.430963Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.431153Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.431162Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:19.431171Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:51:19.431234Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:51:19.431266Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:19.431325Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.431338Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:51:19.431427Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:51:19.431505Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.431792Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:51:19.431801Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.431971Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:51:19.431979Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:51:19.431987Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.432220Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.432231Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:19.432237Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:51:19.432253Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:51:19.432263Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:51:19.432274Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.432396Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:19.432656Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:51:19.432666Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:51:19.432796Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:51:19.433678Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T08:51:19.433703Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2024-11-21T08:51:15.693483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:15.694062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:15.694096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00432d/r3tmp/tmpVhMyFu/pdisk_1.dat 2024-11-21T08:51:15.806907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:15.825588Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:15.869710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:15.869750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:15.880483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:15.992791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.007427Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:16.007499Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.016250Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.016293Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:16.016462Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:16.016482Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:16.016489Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:16.016534Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:16.020307Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:16.020381Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:16.020410Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:16.020415Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:16.020420Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:51:16.020425Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.020707Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:16.020724Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:16.020737Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T08:51:16.020745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.020752Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:16.020760Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:16.020766Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:16.020801Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:16.020854Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:16.020870Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:16.021171Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:16.031525Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:16.031579Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:16.222115Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:16.222915Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:16.222935Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.223055Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.223064Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:16.223076Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:51:16.223146Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:51:16.223182Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:16.223319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.223330Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:51:16.223665Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:51:16.223824Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:16.224197Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:51:16.224220Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.224357Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:51:16.224364Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:51:16.224373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:16.224615Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:16.224629Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:16.224635Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:51:16.224652Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:51:16.224661Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:51:16.224672Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.225247Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:16.225583Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:51:16.225624Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:51:16.225632Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:51:16.227807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:16.227834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:16.227844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:16.228549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:16.229523Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:16.439551Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:16.439991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:16.520137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yp943fccwwy4dnpgczxmr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNlZGI5NGUtMWQyNDY3ZWMtZmE2MzkyYTMtODhmMzBiMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:16.521282Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T08:51:16.521399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:16.532195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:16.532276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.557672Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yp9dq4fxz52v8jmetfh2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ3YmMwMWUtYzFhNzRmOWEtODE2ZTI1Yi1kOTczMjM3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:16.558221Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# ... ode 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T08:51:19.611512Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2024-11-21T08:51:19.611519Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:51:19.611841Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 904 RawX2: 12884904620 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\210\003\000\000\000\000\000\000\021\254\n\000\000\003\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T08:51:19.611853Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:19.611872Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:19.611917Z node 3 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2024-11-21T08:51:19.611931Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:51:19.611940Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.611944Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:51:19.611949Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:19.611953Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:19.611962Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} 
ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T08:51:19.611972Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T08:51:19.611977Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.611981Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:19.611985Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2024-11-21T08:51:19.611990Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2024-11-21T08:51:19.611995Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.611998Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T08:51:19.612002Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T08:51:19.612006Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:51:19.612014Z node 3 :TX_DATASHARD TRACE: Requested stream clearance from [3:904:2732] for [0:281474976715665] at 72075186224037888 2024-11-21T08:51:19.612019Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T08:51:19.612058Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2024-11-21T08:51:19.612063Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T08:51:19.612077Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2024-11-21T08:51:19.612082Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T08:51:19.612096Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.612100Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.612106Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.612111Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:51:19.612116Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2024-11-21T08:51:19.612120Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:51:19.612125Z node 3 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2024-11-21T08:51:19.612129Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.612133Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 
72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T08:51:19.612137Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T08:51:19.612141Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T08:51:19.612184Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T08:51:19.612189Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:51:19.612193Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:19.612198Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:19.612201Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:19.612331Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [3:911:2737], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:51:19.612337Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:51:19.612373Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2024-11-21T08:51:19.612488Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:51:19.612508Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2024-11-21T08:51:19.612514Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2024-11-21T08:51:19.612601Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:19.612607Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.612642Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T08:51:19.612647Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2024-11-21T08:51:19.612679Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:897:2725], Recipient [3:631:2536]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:19.612684Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:51:19.612694Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.612700Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.612706Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.612710Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:51:19.612716Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2024-11-21T08:51:19.612720Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T08:51:19.612726Z node 3 :TX_DATASHARD TRACE: ReadTable scan complete 
for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2024-11-21T08:51:19.612731Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.612736Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2024-11-21T08:51:19.612740Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:19.612744Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:19.612753Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:51:19.612766Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T08:51:19.612770Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:19.612773Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:19.612777Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:19.612786Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:51:19.612790Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:19.612794Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T08:51:19.612798Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.612802Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:19.612806Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:19.612809Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:19.612816Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.612821Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:19.612827Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] Test command err: 2024-11-21T08:50:30.767463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:50:30.768032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:50:30.768057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00334d/r3tmp/tmp9JDOVI/pdisk_1.dat 2024-11-21T08:50:31.008157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:50:31.045984Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:31.100575Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:50:31.101029Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:50:31.101097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:31.101120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:31.116782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:31.367782Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:50:31.367811Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:50:31.367845Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:610:2519] 2024-11-21T08:50:31.375798Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:50:31.376045Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:50:31.376061Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:50:31.376117Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:50:31.376166Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:50:31.376184Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:50:31.376619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:50:31.376764Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:50:31.376867Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:50:31.376875Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:50:31.391426Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:50:31.391654Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:50:31.391741Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:635:2540] 2024-11-21T08:50:31.391791Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:31.392870Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:50:31.402704Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:31.402758Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:50:31.402948Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:50:31.402967Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:50:31.402974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:50:31.403026Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:50:31.407757Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:50:31.407858Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:50:31.407903Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:651:2549] 2024-11-21T08:50:31.407909Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:50:31.407915Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:50:31.407921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:50:31.408090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:635:2540], Recipient [1:635:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:31.408097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:50:31.408162Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:50:31.408188Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:50:31.408320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:635:2540]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:31.408327Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:50:31.408335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:642:2544], sessionId# [0:0:0] 2024-11-21T08:50:31.408343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:50:31.408350Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:50:31.408358Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:50:31.408363Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:50:31.408367Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:50:31.408373Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:50:31.408379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:50:31.408391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:642:2544] 2024-11-21T08:50:31.408395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:50:31.408423Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:50:31.408489Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:50:31.408499Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:50:31.408519Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:50:31.408535Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:50:31.408540Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:50:31.408545Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:50:31.408549Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:50:31.408602Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:50:31.408607Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:50:31.408613Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:50:31.408616Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:31.408627Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:50:31.408630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:50:31.408634Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:50:31.408637Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:50:31.408643Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:50:31.408913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:652:2550], Recipient [1:635:2540]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:50:31.408922Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:50:31.419972Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:50:31.420009Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:50:31.420017Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:50:31.420030Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status ... ipient [16:990:2803]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.145295Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.145307Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:985:2800], serverId# [16:1009:2814], sessionId# [0:0:0] 2024-11-21T08:51:19.145341Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [16:740:2609], Recipient [16:990:2803]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:51:19.145347Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:51:19.145353Z node 16 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T08:51:19.145391Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:51:19.145530Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.156325Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T08:51:19.156388Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:990:2803], Recipient [16:740:2609]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T08:51:19.156397Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:51:19.156405Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 ... 
reading final result 2024-11-21T08:51:19.203178Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T08:51:19.203210Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2024-11-21T08:51:19.203517Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:1031:2833], Recipient [16:990:2803]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.203532Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.203541Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:1030:2832], serverId# [16:1031:2833], sessionId# [0:0:0] 2024-11-21T08:51:19.214015Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ypc01aa1fg2rfkh77x3en, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=N2JlMDU1MmQtZDQ2NzE5MmItNTRjY2UyM2QtZmMyOGIzMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:19.215201Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1042:2836], Recipient [16:990:2803]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:51:19.215246Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:51:19.215266Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CheckRead 2024-11-21T08:51:19.215290Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T08:51:19.215296Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:51:19.215303Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:19.215307Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:19.215323Z node 16 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037888 2024-11-21T08:51:19.215329Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T08:51:19.215333Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:19.215337Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:51:19.215341Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:51:19.215358Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T08:51:19.215419Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v6000/18446744073709551615 2024-11-21T08:51:19.215427Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[16:1042:2836], 0} after executionsCount# 1 
2024-11-21T08:51:19.215436Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1042:2836], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:19.215452Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1042:2836], 0} finished in read 2024-11-21T08:51:19.215463Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T08:51:19.215467Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:51:19.215475Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:19.215478Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:19.215490Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T08:51:19.215493Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:19.215497Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037888 has finished 2024-11-21T08:51:19.215502Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:51:19.215522Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:51:19.215587Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1044:2837], Recipient [16:740:2609]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:51:19.215597Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T08:51:19.215603Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2024-11-21T08:51:19.215610Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:19.215614Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2024-11-21T08:51:19.215617Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:19.215621Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:51:19.215627Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2024-11-21T08:51:19.215631Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:19.215634Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:19.215637Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T08:51:19.215641Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2024-11-21T08:51:19.215650Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 
Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T08:51:19.215669Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v6000/18446744073709551615 2024-11-21T08:51:19.215674Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[16:1044:2837], 0} after executionsCount# 1 2024-11-21T08:51:19.215679Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1044:2837], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:19.215687Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1044:2837], 0} finished in read 2024-11-21T08:51:19.215692Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:19.215696Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T08:51:19.215702Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:51:19.215706Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:51:19.215711Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:19.215714Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:51:19.215717Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2024-11-21T08:51:19.215720Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T08:51:19.215730Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T08:51:19.215945Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1042:2836], Recipient [16:990:2803]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:19.215955Z node 16 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T08:51:19.216102Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1044:2837], Recipient [16:740:2609]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:19.216111Z node 16 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:19.942082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T08:51:19.942109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:19.942114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:19.942119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:19.942125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:19.942129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:19.942137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:19.942220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:19.953297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:19.953316Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:19.956350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:19.957185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:19.957239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:19.958900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:19.959120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:19.959238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:19.959340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:19.960699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:19.961000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:19.961012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:19.961051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:19.961057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:19.961064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:19.961079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.962638Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:19.978930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T08:51:19.979022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.979088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:19.979139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:19.979146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.980552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:19.980586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:19.980649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.980660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:19.980665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:19.980670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:19.981179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.981192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:19.981197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:19.981678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.981696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.981703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:19.981710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:19.982323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:19.982842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:19.982902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:19.983106Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:19.983133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:19.983155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:19.983236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:19.983243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:19.983273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:19.983300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:19.983788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:19.983800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:19.983841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:19.983846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:19.983942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.983949Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:19.983961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:19.983965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:19.983970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:19.983988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:19.983992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:19.983996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:19.984008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:19.984015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:19.984019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:19.984323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:19.984342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:19.984346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:19.984351Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:19.984355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:19.984374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d: 1], version: 4 2024-11-21T08:51:19.988251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:19.988339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.988348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.988351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:51:19.988373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2024-11-21T08:51:19.988376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:19.988385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T08:51:19.989255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.989269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId#100:0 at tablet72057594046678944 2024-11-21T08:51:19.989274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T08:51:19.989709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:51:19.989746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:51:19.990151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.990165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:19.990173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T08:51:19.990201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:19.990689Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T08:51:19.990722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T08:51:19.990788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:19.990808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:19.990815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2024-11-21T08:51:19.990833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T08:51:19.990859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:19.990869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T08:51:19.991299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:19.991308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:19.991339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:19.991354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:19.991359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T08:51:19.991363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T08:51:19.991425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:51:19.991432Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T08:51:19.991442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T08:51:19.991445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:51:19.991451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T08:51:19.991456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:51:19.991460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 
2024-11-21T08:51:19.991463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T08:51:19.991474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:19.991479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T08:51:19.991483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:51:19.991489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:51:19.991601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.991619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.991624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:51:19.991628Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:51:19.991631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:19.991717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.991726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:51:19.991730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:51:19.991733Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:51:19.991736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:19.991744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T08:51:19.992432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:51:19.992483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T08:51:19.992545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:51:19.992552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T08:51:19.992625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:51:19.992642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:51:19.992647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 100 2024-11-21T08:51:19.992709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:19.992734Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 34us result status StatusSuccess 2024-11-21T08:51:19.992819Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2024-11-21T08:51:16.293398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:16.293993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:16.294025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004311/r3tmp/tmplClHUP/pdisk_1.dat 2024-11-21T08:51:16.429553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.464540Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.512843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:16.512882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:16.523528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:16.629118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.642650Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:16.642873Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:16.642946Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:16.642986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:16.649437Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:16.649581Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:16.649604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:16.649708Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:16.649723Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:16.649729Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:16.649758Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:16.652185Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:16.652357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:16.652380Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:16.652384Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:16.652388Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:16.652393Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.652504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.652510Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.652623Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:16.652635Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:16.652644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.652648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.652653Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:16.652658Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.652662Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:16.652668Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:16.652671Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:16.652677Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:16.652681Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:16.652684Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:16.652697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:16.652700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:16.652720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:16.652760Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:16.652767Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:16.652787Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:16.652797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:16.652801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:16.652806Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:16.652810Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:16.652846Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:16.652848Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:16.652850Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:16.652852Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:16.652860Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:16.652862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:16.652864Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:16.652866Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:16.652869Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:16.653069Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:16.653075Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:16.664548Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:16.664584Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:16.664593Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:16.664606Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:16.664622Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:16.842754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.842779Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:16.842789Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:16.842808Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:16.842814Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:16.842853Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:16.842865Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:16.842870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:16.842875Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:51:16.843650Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:16.843665Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:16.843783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.843790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:16.843796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:16.843803Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:16.843808Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:16.843816Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 3Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T08:51:19.865047Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2024-11-21T08:51:19.865131Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:921:2745], Recipient [2:921:2745]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.865135Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.865140Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:51:19.865144Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:19.865149Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2024-11-21T08:51:19.865152Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2024-11-21T08:51:19.865157Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2024-11-21T08:51:19.865163Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T08:51:19.865167Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2024-11-21T08:51:19.865171Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:51:19.865174Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:51:19.865217Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2024-11-21T08:51:19.865221Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:51:19.865225Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:51:19.865229Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit 
CompletedOperations 2024-11-21T08:51:19.865236Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T08:51:19.865240Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:51:19.865244Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2024-11-21T08:51:19.865247Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.865251Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:51:19.865254Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:51:19.865256Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:51:19.877360Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:51:19.877409Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:51:19.877420Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:51:19.877447Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:51:19.877460Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:51:19.878733Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1109:2906], Recipient [2:926:2747]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2024-11-21T08:51:19.878750Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T08:51:19.878791Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 3500 txid# 281474976715668} 2024-11-21T08:51:19.878798Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2024-11-21T08:51:19.878806Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:19.878811Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:19.878895Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.878901Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.878916Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:51:19.878924Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:19.878935Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2024-11-21T08:51:19.878941Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2024-11-21T08:51:19.878948Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2024-11-21T08:51:19.878955Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T08:51:19.878977Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2024-11-21T08:51:19.878984Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2024-11-21T08:51:19.878989Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T08:51:19.879120Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2024-11-21T08:51:19.879125Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:19.879131Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T08:51:19.879137Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2024-11-21T08:51:19.879141Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:51:19.879445Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1131:2925], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:51:19.879452Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:51:19.879526Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2024-11-21T08:51:19.879694Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:51:19.879778Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2024-11-21T08:51:19.879785Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2024-11-21T08:51:19.879812Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T08:51:19.879817Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2024-11-21T08:51:19.879850Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.879854Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.879859Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:51:19.879864Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:19.879869Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2024-11-21T08:51:19.879873Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T08:51:19.879878Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2024-11-21T08:51:19.879884Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T08:51:19.879888Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2024-11-21T08:51:19.879892Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T08:51:19.879896Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T08:51:19.879950Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2024-11-21T08:51:19.879954Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T08:51:19.879958Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:51:19.879961Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:51:19.879967Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T08:51:19.879972Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:51:19.879977Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2024-11-21T08:51:19.879981Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.879984Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T08:51:19.879987Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:51:19.879991Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:51:19.890364Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:19.890395Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:51:19.890406Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T08:51:19.890432Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:51:19.890446Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Analyze] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> test.py::test[blocks-pg--Debug] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] Test command err: 2024-11-21T08:51:07.117481Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652446798009164:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:07.162811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fce/r3tmp/tmpmdCDS9/pdisk_1.dat 2024-11-21T08:51:07.243356Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32408, node 1 2024-11-21T08:51:07.272391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.272401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.272403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.272442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:07.272700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.272719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.282432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.346282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:51:07.349273Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.361256Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:07.596279Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:07.596879Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE= 2024-11-21T08:51:07.597083Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446798009576:2296], Start check tables existence, number paths: 2 2024-11-21T08:51:07.599088Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [1:7439652446798009577:2297], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.599169Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.599172Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:07.599178Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:07.599208Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446798009576:2296], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:07.599215Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446798009576:2296], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:07.599220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446798009576:2296], Successfully finished 2024-11-21T08:51:07.599259Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:07.601581Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:07.602423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:07.602969Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:07.603503Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:07.604666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:07.683772Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:07.684623Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652446798009604:2285], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:07.688283Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:07.688294Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:07.688314Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [1:7439652446798009577:2297], ActorState: ReadyState, TraceId: 01jd6yp0s85nh9r2er368zs060, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:07.688315Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652446798009664:2301], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:07.688945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652446798009664:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:07.688967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:07.713566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:51:07.714894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:51:07.715544Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [1:7439652446798009577:2297], ActorState: ExecuteState, TraceId: 01jd6yp0s85nh9r2er368zs060, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439652446798009673:2297] WorkloadServiceCleanup: 0 2024-11-21T08:51:07.715970Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [1:7439652446798009577:2297], ActorState: CleanupState, TraceId: 01jd6yp0s85nh9r2er368zs060, EndCleanup, isFinal: 0 2024-11-21T08:51:07.715996Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWFhMzJiNzgtNWI5MGFjZjYtMmJkMTYyOGQtYzE5MzRkNTE=, ActorId: [1:7439652446798009577:2297], ActorState: CleanupState, TraceId: 01jd6yp0s85nh9r2er368zs060, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439652446798009009:2060] 2024-11-21T08:51:07.717132Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTA3YmVlN2QtOTFjOWQ0MzktNzIzMzg5NDgtMmVjMDA2MWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTA3YmVlN2QtOTFjOWQ0MzktNzIzMzg5NDgtMmVjMDA2MWI= 2024-11-21T08:51:07.717207Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:07.717219Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTA3YmVlN2QtOTFjOWQ0MzktNzIzMzg5NDgtMmVjMDA2MWI=, ActorId: [1:7439652446798009703:2304], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.717253Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTA3YmVlN2QtOTFjOWQ0MzktNzIzMzg5NDgtMmVjMDA2MWI=, ActorId: [1:7439652446798009703:2304], ActorState: ReadyState, TraceId: 01jd6yp0t5a1wc5j15dy816p9y, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7439652446798009702:2343] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:07.717261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652446798009705:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:07.717279Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652446798009703:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTA3YmVlN2QtOTFjOWQ0MzktNzIzMzg5NDgtMmVjMDA2MWI= 2024-11-21T08:51:07.717287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652446798009706:2306], Database: /Root, Start database fetching 2024-11-21T08:51:07.717439Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652446798009705:2305], DatabaseId: /Root, 
PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:07.717449Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] A ... ctorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2024-11-21T08:51:20.406384Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2024-11-21T08:51:20.406387Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, TExecPhysicalRequest, tx has commit locks 2024-11-21T08:51:20.406394Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, Sending to Executer TraceId: 0 8 2024-11-21T08:51:20.406404Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, Created new KQP executer: [7:7439652503287076227:2471] isRollback: 0 2024-11-21T08:51:20.415448Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:20.415502Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, txInfo Status: Committed Kind: ReadWrite TotalDuration: 11.311 ServerDuration: 11.251 QueriesCount: 2 2024-11-21T08:51:20.415529Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:20.415544Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:20.415550Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, EndCleanup, isFinal: 0 2024-11-21T08:51:20.415558Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6k9xj8r4sxtt8rxx6w, Sent query response back to proxy, proxyRequestId: 31, proxyId: [7:7439652494697140292:2060] 2024-11-21T08:51:20.415763Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: 
[TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, TxId: 2024-11-21T08:51:20.415783Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T08:51:20.415885Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, received request, proxyRequestId: 32 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7439652503287076237:2479] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:20.415895Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, request placed into pool from cache: default 2024-11-21T08:51:20.415905Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Sending CompileQuery request 2024-11-21T08:51:20.416053Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, ExecutePhyTx, tx: 0x0000176CFC39E018 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:20.416068Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Sending to Executer TraceId: 0 8 2024-11-21T08:51:20.416080Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Created new KQP executer: [7:7439652503287076240:2471] isRollback: 0 2024-11-21T08:51:20.417505Z node 7 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T08:51:20.417528Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, ExecutePhyTx, tx: 0x0000176CFC3BD898 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:20.417656Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:20.417696Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, txInfo Status: Committed Kind: ReadOnly TotalDuration: 1.674 ServerDuration: 1.644 QueriesCount: 2 2024-11-21T08:51:20.417730Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:20.417744Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:20.417747Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, EndCleanup, isFinal: 0 2024-11-21T08:51:20.417760Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ExecuteState, TraceId: 01jd6ypd6zc7a3detbdsnwzsva, Sent query response back to proxy, proxyRequestId: 32, proxyId: [7:7439652494697140292:2060] 2024-11-21T08:51:20.417993Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, TxId: 2024-11-21T08:51:20.418017Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, TxId: 2024-11-21T08:51:20.418045Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7439652494697140962:2305], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T08:51:20.418064Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, 
ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:20.418074Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:20.418076Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:20.418078Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:20.418097Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTM4ZDE5NDMtNzY0MjFmNTAtOWI0NjdmYmUtN2ZhNTg0Yw==, ActorId: [7:7439652503287076203:2471], ActorState: unknown state, Session actor destroyed >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> TTopicApiDescribes::GetPartitionDescribe >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> test.py::test[window-win_func_first_last_with_part--Analyze] [GOOD] >> TTopicApiDescribes::DescribeConsumer >> test.py::test[window-win_func_first_last_with_part--Debug] >> CompatibilityInfo::VDiskCompatible |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest >> CompatibilityInfo::VDiskCompatible [GOOD] >> CompatibilityInfo::VDiskIncompatible |87.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize1000 >> TTopicApiDescribes::GetLocalDescribe >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] [GOOD] >> CompatibilityInfo::VDiskIncompatible [GOOD] >> CompatibilityInfo::VDiskIncompatibleWithDefault [GOOD] >> CompatibilityInfo::VDiskSuppressCompatibilityCheck [GOOD] >> CompatibilityInfo::VDiskMigration >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests2Inflight2BlobSize1000 >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup >> CompatibilityInfo::VDiskMigration [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 >> GroupReconfiguration::BsControllerDoesNotDisableGroup >> IncorrectQueries::VeryBigBlob >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize1000 >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> BlobPatching::StressMirror3of4 >> IncorrectQueries::VeryBigBlob [GOOD] >> IncorrectQueries::WrongDataSize ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2024-11-21T08:51:16.780186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:16.780772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:16.780801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004300/r3tmp/tmp5ajcHh/pdisk_1.dat 2024-11-21T08:51:16.890866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:16.908734Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:16.952560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:16.952602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:16.963221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:17.077049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:17.092866Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:51:17.092946Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.100986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.101054Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:17.101202Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:17.101221Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:17.101228Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:17.101271Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:17.104787Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:17.104864Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:17.104892Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:51:17.104897Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:17.104902Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:51:17.104907Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:17.105231Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:51:17.105258Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:17.106166Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:17.106190Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:51:17.106272Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:51:17.106307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:17.106312Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:17.106318Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:17.106322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:17.106356Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:17.106398Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:17.106410Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:17.106577Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:17.106592Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:17.106662Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:51:17.106667Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:51:17.106671Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:51:17.106693Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:17.106698Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:51:17.106713Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:17.106723Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:51:17.106726Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:51:17.106729Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:51:17.106733Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:51:17.106833Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:51:17.106840Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:51:17.106853Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:51:17.106856Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:17.106863Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:51:17.106868Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:51:17.106918Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:51:17.106935Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:51:17.106961Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:51:17.106968Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:51:17.107052Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:17.107059Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:51:17.118284Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:17.118331Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:17.118494Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:51:17.118503Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:17.303971Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:51:17.304075Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:51:17.304768Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:51:17.304786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:51:17.304839Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:51:17.304846Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:17.304854Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:51:17.304910Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:51:17.304935Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:17.305022Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:17.305025Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:17.305037Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:51:17.305047Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:51:17.305312Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:51:17.305410Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
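The trace above shows the schemeshard proposing ESchemeOpCreateTable (txId 281474976715657) and both datashards (72075186224037888 / 72075186224037889) reaching "Trying to CREATE TABLE" at plan step 1000. A rough client-side equivalent of creating such a two-shard table, assuming the Python ydb SDK and a hypothetical local endpoint; the exact DDL, including the Uint32 key/value layout (taken from the read-back at the end of this dump) and the UNIFORM_PARTITIONS setting name, is an assumption, since the test builds the scheme transaction directly in C++:

```python
import ydb

# Hypothetical connection details; the unit test runs against an in-process server instead.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_table(session):
    # Two uniform partitions mirror the two datashards created in the trace above.
    session.execute_scheme(
        """
        CREATE TABLE `/Root/table-1` (
            key   Uint32,
            value Uint32,
            PRIMARY KEY (key)
        ) WITH (UNIFORM_PARTITIONS = 2);
        """
    )

pool.retry_operation_sync(create_table)
pool.stop()
driver.stop()
```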
2024-11-21T08:51:17.305564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:17.305570Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:17.305577Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:51:17.305608Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:51:17.305623Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:51:17.305669Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:51:17.305674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:51:17.305705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:17.305714Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:21.763161Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2024-11-21T08:51:21.763165Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T08:51:21.763172Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T08:51:21.763204Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:21.763208Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2024-11-21T08:51:21.763212Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:21.763216Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:21.763226Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:51:21.763229Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:21.763233Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T08:51:21.773616Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2024-11-21T08:51:21.773648Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T08:51:21.773659Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2024-11-21T08:51:21.789264Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T08:51:21.789287Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715662 ProcessProposeKqpTransaction 2024-11-21T08:51:21.789482Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jd6ypehe4gyp7zjrak3k395t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTJiNDQ4NjgtZDcyN2Q0NWYtZWE4ODkyY2EtODVjOGRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:21.790022Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T08:51:21.790068Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:51:21.790081Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2024-11-21T08:51:21.790089Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T08:51:21.790097Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2024-11-21T08:51:21.790117Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:51:21.790121Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:51:21.790124Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:21.790127Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:21.790137Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2024-11-21T08:51:21.790141Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:51:21.790143Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:21.790145Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:51:21.790148Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:51:21.790159Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T08:51:21.790199Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1068:2869], 0} after executionsCount# 1 2024-11-21T08:51:21.790204Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:21.790226Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} finished in read 2024-11-21T08:51:21.790233Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:51:21.790235Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:51:21.790238Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:21.790240Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:21.790247Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T08:51:21.790249Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:21.790251Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2024-11-21T08:51:21.790255Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:51:21.790270Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:51:21.790649Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:21.790664Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2024-11-21T08:51:21.805076Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T08:51:21.805105Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715663 ProcessProposeKqpTransaction 2024-11-21T08:51:21.805316Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ypehz1na6tqnaq29bwdcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OThjNzg5NTAtYjViMDc2ZjAtMzE3NDhmOTEtNGFmNDQ0MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:21.806056Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2024-11-21T08:51:21.806094Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T08:51:21.806105Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2024-11-21T08:51:21.806112Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T08:51:21.806122Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2024-11-21T08:51:21.806139Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:21.806144Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2024-11-21T08:51:21.806149Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:21.806152Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:51:21.806165Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2024-11-21T08:51:21.806170Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
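The read-iterator traces above (TEvRead against TableId 2 and the index implementation table TableId 8) both return the rows (1,2) and (3,4) that the test bulk-upserted. A minimal client-side sketch of that upsert/read-back pattern, assuming the Python ydb SDK and a hypothetical endpoint; the test itself drives TEvUploadRows and TEvRead from C++, so this is only an approximation of the same flow:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
driver.wait(timeout=5)

# Bulk upsert the two rows seen in the trace; the column layout mirrors the Uint32 key/value schema.
columns = (
    ydb.BulkUpsertColumns()
    .add_column("key", ydb.OptionalType(ydb.PrimitiveType.Uint32))
    .add_column("value", ydb.OptionalType(ydb.PrimitiveType.Uint32))
)
rows = [{"key": 1, "value": 2}, {"key": 3, "value": 4}]
driver.table_client.bulk_upsert("/Root/table-1", rows, columns)

# Read the rows back; on the server this is served by the read-iterator path traced above.
pool = ydb.SessionPool(driver)

def read_back(session):
    result_sets = session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT key, value FROM `/Root/table-1` ORDER BY key;", commit_tx=True
    )
    return result_sets[0].rows

for row in pool.retry_operation_sync(read_back):
    print(row.key, row.value)  # expected: 1 2 and 3 4

pool.stop()
driver.stop()
```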
2024-11-21T08:51:21.806174Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:21.806178Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T08:51:21.806182Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2024-11-21T08:51:21.806195Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T08:51:21.806270Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1097:2892], 0} after executionsCount# 1 2024-11-21T08:51:21.806278Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} sends rowCount# 2, bytes# 48, quota rows left# 32765, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:21.806303Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} finished in read 2024-11-21T08:51:21.806312Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:21.806319Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T08:51:21.806323Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:51:21.806327Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:51:21.806336Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T08:51:21.806339Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:51:21.806343Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2024-11-21T08:51:21.806348Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T08:51:21.806366Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T08:51:21.806797Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:21.806812Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> IncorrectQueries::WrongDataSize [GOOD] >> IncorrectQueries::WrongVDiskID >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob1Slow >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> GroupReconfiguration::BsControllerDoesNotDisableGroup [GOOD] >> GroupReconfiguration::BsControllerDoesNotDisableGroupNoRequestsToNodesWVDisks >> IncorrectQueries::WrongVDiskID [GOOD] >> IncorrectQueries::ProtoQueryGet [GOOD] >> IncorrectQueries::WrongPartId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DisableStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:03.729793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:03.729823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:03.729829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:03.729835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:03.729851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:03.729856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:03.729867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:03.729954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:03.748792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:03.748819Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:03.753034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:03.753199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:03.753242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:03.756400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:03.756495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:03.756635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:03.756830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:03.757670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:03.757979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:03.757991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T08:51:03.758004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:03.758012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:03.758019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:03.758061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:03.759635Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:03.778530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:03.778612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.778666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:03.778713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:03.778722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.779455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:03.779483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:03.779523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.779534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:03.779538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:03.779543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:03.779930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.779942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:03.779948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:03.780288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.780301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:03.780307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:03.780313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:03.781076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:03.781486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:03.781536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:03.781745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:03.781774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:03.781781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:03.781840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:03.781847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:03.781887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:03.781899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:03.782298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:03.782309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:03.782357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:03.782363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:03.782449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
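This dump belongs to TCdcStreamWithRebootsTests::DisableStream; the DescribeScheme result near its end reports the stream with Mode: ECdcStreamModeKeysOnly and State: ECdcStreamStateDisabled. For reference, a keys-only changefeed of this kind is normally declared and dropped with YQL roughly as below (a sketch via the Python ydb SDK with an assumed endpoint; the sketch uses FORMAT = 'JSON' whereas the test uses the internal proto format, and disabling a stream in place goes through internal schemeshard operations rather than this DDL):

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")  # assumed endpoint
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def add_changefeed(session):
    # Keys-only changefeed, analogous to Mode: ECdcStreamModeKeysOnly in the describe output.
    session.execute_scheme(
        "ALTER TABLE `/MyRoot/Table` ADD CHANGEFEED Stream "
        "WITH (MODE = 'KEYS_ONLY', FORMAT = 'JSON');"
    )

def drop_changefeed(session):
    session.execute_scheme("ALTER TABLE `/MyRoot/Table` DROP CHANGEFEED Stream;")

pool.retry_operation_sync(add_changefeed)
pool.retry_operation_sync(drop_changefeed)
pool.stop()
driver.stop()
```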
2024-11-21T08:51:03.782455Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:03.782465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:03.782470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:03.782475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:03.782480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:03.782484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:03.782488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:03.782498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:03.782503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:03.782508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... chemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:51:21.832919Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:51:21.833166Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:21.833178Z node 42 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:21.833197Z node 42 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/2 2024-11-21T08:51:21.833202Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/2 2024-11-21T08:51:21.833208Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: false 2024-11-21T08:51:21.833356Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.833364Z node 42 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:21.833512Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:21.833521Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:21.833541Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:21.833555Z node 42 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: 
TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:21.833645Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.833657Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.833661Z node 42 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:21.833667Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:51:21.833674Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:21.833810Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.833821Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.833825Z node 42 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:21.833829Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:21.833833Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:21.833842Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: true 2024-11-21T08:51:21.833919Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 180388628747 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:21.833925Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:21.833939Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 180388628747 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:21.833945Z node 42 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:21.833952Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 180388628747 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:21.833962Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: 
CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.833966Z node 42 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.833971Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:21.833977Z node 42 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T08:51:21.842273Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.842815Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.842844Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:21.842860Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.842986Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:21.842999Z node 42 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T08:51:21.843020Z node 42 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 2/2 2024-11-21T08:51:21.843025Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2024-11-21T08:51:21.843033Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/2, is published: true 2024-11-21T08:51:21.843039Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2024-11-21T08:51:21.843045Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:21.843051Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:21.843068Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:21.843073Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:21.843076Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:21.843090Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1004 2024-11-21T08:51:21.843696Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:21.843706Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:21.843785Z node 42 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:21.843807Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:21.843812Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: 
satisfy waiter [42:711:2616] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:21.843893Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:21.843968Z node 42 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 95us result status StatusSuccess 2024-11-21T08:51:21.844085Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateDisabled SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Acceleration::TestThresholdPutMirror3dc1Slow >> IncorrectQueries::WrongPartId [GOOD] >> IncorrectQueries::ProtobufBlob >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize1000 >> IncorrectQueries::ProtobufBlob [GOOD] >> IncorrectQueries::SameBlob ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2024-11-21T08:51:19.174836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:19.175387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:19.175417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042e7/r3tmp/tmpeol060/pdisk_1.dat 2024-11-21T08:51:19.289529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.308586Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:19.354609Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:51:19.354871Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:51:19.354908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:19.354922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:19.365548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:19.471936Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:51:19.471962Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:51:19.471991Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:51:19.482732Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:51:19.482997Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:51:19.483014Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:51:19.483075Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:19.483108Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:51:19.483123Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:51:19.483203Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:51:19.483630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.483944Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:51:19.483960Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:51:19.499733Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:19.500010Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:19.500100Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T08:51:19.500151Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:19.501165Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:19.508322Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:19.508682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:19.508728Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:19.508874Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:19.508892Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:19.508899Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:19.508940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:19.512565Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:19.512630Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:19.512654Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:666:2557] 2024-11-21T08:51:19.512660Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:19.512664Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:51:19.512670Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.512699Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:19.512854Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:641:2542] 2024-11-21T08:51:19.512898Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:19.513759Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:641:2542]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:19.514035Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.514043Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.514143Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:19.514162Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:19.514251Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.514260Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.514267Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:19.514272Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:19.514276Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:19.514281Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:19.514287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.514315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.514320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.514326Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T08:51:19.514421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T08:51:19.514427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:19.514447Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:19.514491Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:19.514501Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:19.514515Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:19.514522Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:19.514529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:19.514534Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:19.514538Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:19.514579Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:19.514583Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit 
StoreSchemeTx 2024-11-21T08:51:19.514587Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:19.514590Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:19.514600Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:19.514603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:19.514607Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:19.514610Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:19.514615Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:19.514721Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:19.514748Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:19.514884Z node 1 :TX_DATASHARD DEBUG: LoadChangeRec ... 2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T08:51:22.449664Z node 2 :KQP_EXECUTER INFO: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:51:22.449669Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T08:51:22.449675Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T08:51:22.449682Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T08:51:22.449687Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. 
Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T08:51:22.449773Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:931:2745], Recipient [2:888:2712]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:22.449780Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:22.449789Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:930:2744], serverId# [2:931:2745], sessionId# [0:0:0] 2024-11-21T08:51:22.449828Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:927:2728], Recipient [2:888:2712]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 927 RawX2: 8589937320 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\237\003\000\000\000\000\000\000\021\250\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000 2024-11-21T08:51:22.449837Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:22.449872Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:888:2712], Recipient [2:888:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:51:22.449877Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:51:22.449895Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:22.449960Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2024-11-21T08:51:22.449969Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2024-11-21T08:51:22.449976Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T08:51:22.450030Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:51:22.450043Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T08:51:22.450048Z node 2 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:51:22.450052Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:22.450057Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:22.450065Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T08:51:22.450078Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2024-11-21T08:51:22.450083Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T08:51:22.450086Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:22.450090Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:51:22.450093Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:51:22.450103Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T08:51:22.450114Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T08:51:22.450170Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:51:22.450181Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:22.450185Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:51:22.450188Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:22.450192Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:22.450211Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:22.450214Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:22.450218Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:22.450222Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:22.450231Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T08:51:22.450234Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:22.450238Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2024-11-21T08:51:22.460821Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:22.460852Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:22.460864Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:51:22.460884Z node 2 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T08:51:22.460903Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:22.461196Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [2:24:2071], Recipient [2:888:2712]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2001} 2024-11-21T08:51:22.461204Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2024-11-21T08:51:22.461209Z node 2 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T08:51:22.461215Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:22.461234Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T08:51:22.461275Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:51:22.461282Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T08:51:22.461290Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd6ypf688w81sqg4k1a2zxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYyNDE4NDEtNzZlMjU0MC0zYzYxMjFlOS1jMjZmMDZmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> GroupReconfiguration::BsControllerDoesNotDisableGroupNoRequestsToNodesWVDisks [GOOD] >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesMirror3dc >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 >> TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard [GOOD] >> Deadlines::TestPutMirror3of4 >> IncorrectQueries::SameBlob [GOOD] >> IncorrectQueries::WrongCrc >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Analyze] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> Deadlines::TestPutMirror3of4 [GOOD] >> Decommit3dc::Test >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests1Inflight1BlobSize1000 >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize1000 >> BlobPatching::StressMirror3of4 [GOOD] >> BlobPatching::StressMirror3dc >> test.py::test[window-win_func_first_last_with_part--Debug] [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob1Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob1Slow |87.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |87.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> SelfHealActorTest::SingleErrorDisk >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] Test command err: iteration# 0 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 6 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 12 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 18 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 24 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 30 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 36 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 42 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 48 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 54 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 60 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 66 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 72 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 78 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 84 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 90 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 96 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 102 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 108 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 114 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 120 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 126 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 132 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 138 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 144 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 150 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 156 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 162 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 168 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 174 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 180 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 186 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 192 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 198 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 204 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 210 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 216 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 222 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 228 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 234 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 240 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 246 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 252 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 258 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 264 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 270 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 276 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 282 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 288 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 294 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 300 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 306 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 312 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 318 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 324 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 330 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 336 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 342 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 348 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 354 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 360 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 366 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 372 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 378 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 384 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 390 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 396 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 402 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 408 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 414 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 420 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 426 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 432 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 438 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 444 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 450 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 456 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 462 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 468 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 474 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 480 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 486 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 492 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 498 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 504 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 510 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 516 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 522 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 528 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 534 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 540 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 546 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 552 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 558 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 564 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 570 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 576 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 582 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 588 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 594 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 600 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 606 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 612 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 618 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 624 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 630 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 636 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 642 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 648 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 654 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 660 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 666 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 672 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 678 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 684 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1368 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1374 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1380 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1386 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1392 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1398 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1404 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1410 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1416 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1422 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1428 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1434 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1440 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1446 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1452 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1458 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1464 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1470 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1476 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1482 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1488 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1494 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1500 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1506 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1512 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1518 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1524 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1530 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1536 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1542 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1548 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1554 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1560 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1566 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1572 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1578 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1584 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1590 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1596 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1602 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1608 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1614 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1620 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1626 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1632 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1638 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1644 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1650 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1656 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1662 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1668 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1674 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1680 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1686 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1692 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1698 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1704 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1710 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1716 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1722 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1728 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1734 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1740 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1746 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1752 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1758 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1764 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1770 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1776 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1782 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1788 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1794 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1800 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1806 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1812 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1818 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1824 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1830 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1836 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1842 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1848 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1854 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1860 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1866 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1872 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1878 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1884 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1890 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1896 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1902 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1908 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1914 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1920 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1926 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1932 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1938 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1944 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1950 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1956 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1962 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1968 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1974 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1980 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1986 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1992 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1998 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2004 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2010 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2016 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2022 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2028 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2034 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2040 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2024-11-21T08:51:19.254616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:19.255062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:19.255082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f2/r3tmp/tmpgkUYWo/pdisk_1.dat 2024-11-21T08:51:19.359446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.377241Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:19.419795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:19.419830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:19.430369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:19.534135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.751257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:20.010747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:20.010777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:20.010786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:20.011588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:51:20.188933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:51:20.254492Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ypcta07y5cy1rg9h39bdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQzYjhlOGItOGFkZGEwOTQtY2FhODY5ZWUtYzExNzI2YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:20.274341Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ypd2bbgah2ha2pbbfm1m5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkMWY4MzYtNWE0MzMwNzItZjBkZTNkYmUtZDA5N2U0ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2024-11-21T08:51:20.596478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ypdb2c9bqm178d5nc5ch5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM0Njg2MGUtZjJiMzFkZmYtZmZmNzkzMzAtYTI1OWIwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T08:51:20.644972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ypdcr6c6d749btjez2wad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNjYTkzMzEtMTdhMzk3NS1lOTQ5Njc3Yi03NGNhOTExZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2024-11-21T08:51:20.662342Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ypde83hveys0mr0yvmby2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNjYTkzMzEtMTdhMzk3NS1lOTQ5Njc3Yi03NGNhOTExZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2024-11-21T08:51:21.057551Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ypdtr66xqtzsr46t78qr4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA4MzQ5MTItYmVkZjQ0MDUtZjRiNWI0ZDctOGE2NTEzYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2024-11-21T08:51:21.078642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ypdvc17783jjmkwpgj3wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM0Njg2MGUtZjJiMzFkZmYtZmZmNzkzMzAtYTI1OWIwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2024-11-21T08:51:21.090544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6ypdvrevyd63bmevv1fa7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM0Njg2MGUtZjJiMzFkZmYtZmZmNzkzMzAtYTI1OWIwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2024-11-21T08:51:21.099438Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWM0Njg2MGUtZjJiMzFkZmYtZmZmNzkzMzAtYTI1OWIwZGE=, ActorId: [1:930:2747], ActorState: ExecuteState, TraceId: 01jd6ypdw372vb5ma2e15wt6e7, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T08:51:21.110059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6ypdw372vb5ma2e15wt6e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM0Njg2MGUtZjJiMzFkZmYtZmZmNzkzMzAtYTI1OWIwZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:21.537975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:21.538015Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:51:21.538039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f2/r3tmp/tmpcUVVFv/pdisk_1.dat 2024-11-21T08:51:21.629075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:21.657767Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:21.703752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:21.703786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:21.714796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:21.818717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:21.830681Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:21.830864Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:21.830937Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T08:51:21.830977Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:21.838458Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:21.838629Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:21.838657Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:21.838810Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:21.838821Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:21.838829Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:21.838874Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:21.838887Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:21.838906Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:21.838922Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T08:51:21.838926Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:21.838931Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:21.838935Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:21.839016Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:21.839023Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:21.839134Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:21.839146Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:21.839158Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:638:2540], Recipient [2:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:21.839163Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:21.839168Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T08:51:21.839177Z node 2 :TX_DAT ... :984:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:51:22.875330Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:22.875560Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T08:51:22.875574Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T08:51:22.886053Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T08:51:22.886111Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:630:2536], Recipient [2:718:2598]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T08:51:22.886120Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:51:22.886127Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T08:51:22.886154Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T08:51:22.886177Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:718:2598], Recipient [2:630:2536]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T08:51:22.886181Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:51:22.886185Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2024-11-21T08:51:23.307013Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd6ypfzk79amvk1x065c1ep3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmRkNjcxNWYtYjBlZDFhNWQtYTk0ODhmZTItZDRjMzE5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.307788Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T08:51:23.307823Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:51:23.307839Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2024-11-21T08:51:23.307854Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:51:23.307859Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:51:23.307864Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:23.307869Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:23.307881Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2024-11-21T08:51:23.307887Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:51:23.307890Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:23.307895Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:51:23.307898Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:51:23.307914Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T08:51:23.307962Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:51:23.307968Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T08:51:23.307974Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1051:2841], 0} after executionsCount# 1 2024-11-21T08:51:23.307981Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:23.307995Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} finished in read 2024-11-21T08:51:23.308006Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:51:23.308010Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 
executing on unit ExecuteRead 2024-11-21T08:51:23.308013Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:23.308017Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:51:23.308027Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:51:23.308031Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:23.308039Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T08:51:23.308043Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:51:23.308061Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:51:23.308126Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T08:51:23.308145Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T08:51:23.308156Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T08:51:23.308164Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2024-11-21T08:51:23.308170Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T08:51:23.308173Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2024-11-21T08:51:23.308177Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:23.308181Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:51:23.308188Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2024-11-21T08:51:23.308194Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T08:51:23.308197Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:23.308201Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T08:51:23.308233Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2024-11-21T08:51:23.308248Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T08:51:23.308270Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 
2024-11-21T08:51:23.308274Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T08:51:23.308279Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1053:2842], 0} after executionsCount# 1 2024-11-21T08:51:23.308284Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:23.308291Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} finished in read 2024-11-21T08:51:23.308299Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T08:51:23.308302Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T08:51:23.308306Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:51:23.308312Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:51:23.308317Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T08:51:23.308320Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:51:23.308324Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2024-11-21T08:51:23.308327Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T08:51:23.308337Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T08:51:23.308361Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T08:51:23.308532Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:23.308541Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T08:51:23.308871Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:23.308881Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> Acceleration::TestThresholdPutMirror3dc1Slow [GOOD] >> Acceleration::TestThresholdPut4Plus2Block1Slow >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> 
TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:50:52.200747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:52.200771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:52.200776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:52.200781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:52.200796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:52.200800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:52.200809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:52.200896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:52.228331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:52.228351Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:50:52.230956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:52.231068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:52.231104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:52.233777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:52.233852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:52.233981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:52.234202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:52.234854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2024-11-21T08:50:52.235162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:52.235177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:52.235192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:52.235200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:52.235206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:52.235252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:50:52.236740Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:50:52.255773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:52.255863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.255926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:52.255973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:52.255983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.256718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:52.256746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:52.256791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.256810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:52.256814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:52.256820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:52.257157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.257167Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:52.257172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:52.257470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.257479Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.257485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:52.257493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:52.258104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:52.258436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:52.258492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:52.258716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:52.258741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:52.258749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:52.258804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:52.258811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:52.258845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:52.258857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:52.259212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:52.259221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:52.259269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:52.259274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:52.259369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:52.259376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:52.259388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:52.259393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:52.259399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:52.259405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:52.259411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:52.259415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:52.259426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:52.259432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:52.259437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:23.112351Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.112360Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.112364Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:23.112368Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:51:23.112372Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2024-11-21T08:51:23.112385Z node 83 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:23.112515Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [83:203:2206], Recipient [83:127:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 6] Version: 2 } 2024-11-21T08:51:23.112521Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:23.112530Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.112537Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 
PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.112541Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:23.112545Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2024-11-21T08:51:23.112549Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:51:23.112560Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:51:23.112565Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [83:352:2333] 2024-11-21T08:51:23.112570Z node 83 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:23.113004Z node 83 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:23.113112Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.113117Z node 83 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:23.113334Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:23.113340Z node 83 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:23.113352Z node 83 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [83:352:2333] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2024-11-21T08:51:23.113366Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:23.113370Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:426:2406] 2024-11-21T08:51:23.113406Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [83:430:2410], Recipient [83:127:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:23.113410Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:23.113414Z node 83 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:23.113505Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [83:479:2459], Recipient [83:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:23.113510Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:23.113525Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:23.113556Z node 83 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq1" took 28us result status StatusSuccess 2024-11-21T08:51:23.113638Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq1" PathDescription { Self { Name: "seq1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1005 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:23.113724Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [83:480:2460], Recipient [83:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:23.113728Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:23.113735Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:23.113748Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq2" took 13us result status StatusSuccess 2024-11-21T08:51:23.113781Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq2" PathDescription { Self { Name: "seq2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:23.113843Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [83:481:2461], Recipient [83:127:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:23.113846Z node 83 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:23.113853Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:23.113865Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq3" took 12us result status StatusSuccess 2024-11-21T08:51:23.113896Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq3" PathDescription { Self { Name: "seq3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq3" PathId { OwnerId: 72057594046678944 LocalId: 6 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[aggregate-group_by_full_path-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Debug] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BlobPatching::StressMirror3dc [GOOD] >> BlobPatching::StressMirror3 >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight1BlobSize1000 >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CountingEvents::Put_Mirror3of4 >> CountingEvents::Put_Mirror3of4 [GOOD] >> 
CountingEvents::Put_Mirror3dc >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> CountingEvents::Put_Mirror3dc [GOOD] >> CountingEvents::Put_Block42 >> BsControllerTest::DecommitRejected >> CountingEvents::Put_Block42 [GOOD] >> CountingEvents::Put_None >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] >> BlobPatching::StressMirror3 [GOOD] >> BlobPatching::StressNone >> IncorrectQueries::WrongCrc [GOOD] >> IncorrectQueries::ProtoHasVDiskAndExtQueue [GOOD] >> IndexRestoreGet::BlobRecovery >> CountingEvents::Put_None [GOOD] >> CountingEvents::Get_Mirror3of4 [GOOD] >> CountingEvents::Get_Mirror3dc >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] >> BsControllerTest::DecommitRejected [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] Test command err: 2024-11-21T08:51:19.632564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:19.633173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:19.633211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f7/r3tmp/tmp3O9UEW/pdisk_1.dat 2024-11-21T08:51:19.756880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.774027Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:19.822410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:19.822451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:19.836725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:19.941365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:19.956120Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:19.956358Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:19.956449Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:19.956498Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:19.964510Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:19.964719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:19.964752Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:19.964945Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:19.964972Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:19.964980Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:19.965032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:19.968486Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:19.968558Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:19.968587Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:19.968593Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:19.968598Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:19.968603Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:19.968740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.968748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:19.968888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:19.968911Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:19.968926Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.968931Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:19.968938Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:19.968945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:19.968952Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:19.968959Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:19.968965Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:19.968969Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:19.968975Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:19.968980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:19.969003Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:19.969008Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:19.969031Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:19.969077Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:19.969088Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:19.969105Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:19.969112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:19.969116Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:19.969122Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:19.969126Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:19.969163Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:19.969166Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:19.969170Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:19.969174Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:19.969185Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:19.969189Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:19.969193Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:19.969196Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:19.969201Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:19.969449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:19.969458Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:19.980477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:19.980509Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:19.980518Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:19.980531Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:19.980549Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:20.166316Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:20.166341Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:20.166350Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:20.166370Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:20.166375Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:20.166399Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:20.166409Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:20.166414Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:20.166420Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:51:20.167274Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:20.167298Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:20.167442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:20.167451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:20.167458Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:20.167466Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:20.167471Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:20.167480Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 474976715672. Resolved key sets: 1 2024-11-21T08:51:24.682004Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T08:51:24.682021Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T08:51:24.682068Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:51:24.682106Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715672. Shard resolve complete, resolved shards: 1 2024-11-21T08:51:24.682117Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T08:51:24.682125Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2024-11-21T08:51:24.682134Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:51:24.682142Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T08:51:24.682217Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:51:24.682227Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2024-11-21T08:51:24.682236Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2024-11-21T08:51:24.682241Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T08:51:24.682361Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T08:51:24.682369Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2024-11-21T08:51:24.682374Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2024-11-21T08:51:24.682476Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2024-11-21T08:51:24.682504Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:51:24.682515Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2024-11-21T08:51:24.682523Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T08:51:24.682533Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T08:51:24.682548Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:51:24.682553Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:51:24.682558Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:51:24.682563Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:51:24.682576Z node 2 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T08:51:24.682582Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:51:24.682588Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:51:24.682592Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:51:24.682597Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:51:24.682610Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T08:51:24.682651Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1164:2911], 0} after executionsCount# 1 2024-11-21T08:51:24.682658Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:51:24.682671Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} finished in read 2024-11-21T08:51:24.682680Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:51:24.682684Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:51:24.682688Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:51:24.682692Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit 
CompletedOperations 2024-11-21T08:51:24.682703Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:51:24.682707Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:51:24.682711Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T08:51:24.682715Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:51:24.682846Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:51:24.682853Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T08:51:24.682965Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 180 Tasks { TaskId: 1 CpuTimeUs: 39 FinishTimeMs: 1732179084682 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 30 WaitInputTimeUs: 367 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179084682 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:24.682973Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1162:2911] 2024-11-21T08:51:24.683004Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:51:24.683014Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd6yphbzfsgp4f4agbts65mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIxMmJmMTctZmZlNGM3OTgtZDJkNjY3YmItODgzYzY4ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000180s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:50:51.315926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:51.315951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:51.315957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:51.315962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:51.315976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:51.315980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:51.316005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:51.316103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:51.330981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:51.331009Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:50:51.336678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:51.336840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:51.336875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:51.339711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:51.339790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:51.339923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T08:50:51.340095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:51.340765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:51.341090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:51.341104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:51.341119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:51.341127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:51.341134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:51.341176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:50:51.342967Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:50:51.361054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:51.361148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.361232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:51.361282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:51.361290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.362177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:51.362210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:51.362263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.362283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:51.362290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T08:50:51.362296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:51.362732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.362746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:51.362751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:51.363114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.363125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.363131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.363138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.363766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:51.364227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:51.364293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:51.364516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:51.364541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:51.364549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.364604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:51.364611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.364651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:51.364664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:51.365105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:51.365117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:51.365168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:51.365173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:51.365307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.365315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:51.365327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:51.365332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.365338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:51.365343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.365349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:51.365353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:51.365365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:51.365385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:51.365389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... n: 11 } 2024-11-21T08:51:24.376040Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:24.376048Z node 97 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.376057Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.376060Z node 97 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:24.376064Z node 97 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:51:24.376068Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2024-11-21T08:51:24.376081Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:24.376173Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [97:204:2207], Recipient [97:125:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 2 } 2024-11-21T08:51:24.376178Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:24.376184Z node 97 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.376192Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.376195Z node 97 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:24.376198Z node 97 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:51:24.376201Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:24.376248Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:51:24.376251Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [97:375:2356] 2024-11-21T08:51:24.376255Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:24.378894Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:24.379168Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.379177Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:24.379208Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:24.379210Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:24.379229Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [97:375:2356] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2024-11-21T08:51:24.379249Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:24.379255Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:376:2357] 2024-11-21T08:51:24.379295Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [97:380:2361], Recipient [97:125:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:24.379300Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:24.379303Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:24.379374Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [97:450:2430], Recipient [97:125:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:24.379380Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:24.379393Z node 97 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:24.379438Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq1" took 36us result status StatusSuccess 2024-11-21T08:51:24.379504Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq1" PathDescription { Self { Name: "seq1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:24.379586Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [97:451:2431], Recipient [97:125:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:24.379594Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:24.379602Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:24.379613Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq2" took 12us result status StatusSuccess 2024-11-21T08:51:24.379633Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq2" PathDescription { Self { Name: "seq2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:24.379674Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [97:452:2432], Recipient [97:125:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:24.379677Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:24.379682Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:24.379693Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq3" took 10us result status StatusSuccess 2024-11-21T08:51:24.379724Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq3" PathDescription { Self { Name: "seq3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1002 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq3" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Plan] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] >> test.py::test[aggregate-group_by_full_path-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> 
TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1BlobSize1000 >> CountingEvents::Get_Mirror3dc [GOOD] >> CountingEvents::Get_Block42 >> IndexRestoreGet::BlobRecovery [GOOD] >> Mirror3dc::GcQuorum >> Acceleration::TestThresholdPut4Plus2Block1Slow [GOOD] >> Acceleration::TestThresholdPutMirror3dc2Slow |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2024-11-21T08:51:02.653973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652422316342701:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:02.654013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037c2/r3tmp/tmpj9ZYp1/pdisk_1.dat 2024-11-21T08:51:02.734120Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:02.755292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:02.755333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6688, node 1 2024-11-21T08:51:02.762204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:02.764435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:02.764451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:02.764453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:02.764496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:51:02.803857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:02.805000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:02.805020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:02.808931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:02.808995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:02.809000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:51:02.810715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:02.830734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:02.830747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:51:02.831562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:02.833717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179062880, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:02.833741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:51:02.833809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:51:02.834653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:02.834704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:02.834715Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:02.834725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:51:02.834733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:02.834747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:51:02.835553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:51:02.835566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:51:02.835571Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:02.835586Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:29285 2024-11-21T08:51:03.009138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652426611310773:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:03.009163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:03.068136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:03.072566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:51:03.072757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:03.072763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:03.073840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T08:51:03.073905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:03.073971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:03.073987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:51:03.074510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:03.074529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:03.074534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:51:03.074583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:03.074586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:03.074588Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:51:03.075042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:51:03.077624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:51:03.077663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T08:51:03.078447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:51:03.131521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:51:03.131532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:51:03.131561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T08:51:03.132153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:51:03.133206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179063181, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:03.133221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179063181 2024-11-21T08:51:03.133248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T08:51:03.133758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:03.133851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:03.133861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:51:03.134284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:03.134292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:03.134296Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 720575 ... ot 2024-11-21T08:51:23.185397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736827. Ctx: { TraceId: 01jd6ypfxf91cbt3dq3qhtdygb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEwNjA5MzEtYjA3ODM2YzYtNDRkNTdlMWYtOGMzNDM0ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.185466Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736830. Ctx: { TraceId: 01jd6ypfxfbzsjk9trneeynv2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThmYzRiYTEtY2IyY2YzZGQtMmZjMDk2YjQtY2M2ODE4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.185525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736829. Ctx: { TraceId: 01jd6ypfxf767namfsc6ffre3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA2NmQ4NGUtNDgxOTYzMDEtYmY0NDk0ZDctNzJmOTMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.185577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736831. Ctx: { TraceId: 01jd6ypfxfdse24pgr9d5mdk63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk1ZTc1MGQtNWExZTAyMTgtZjc2ZWI4ZTgtNjRhODMzYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.186224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736832. Ctx: { TraceId: 01jd6ypfxg8w7tt04e3ekr9kd3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMDUtZTJhY2MyZDMtYTJlYWM4M2ItM2VlNGM0MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.186570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736833. 
Ctx: { TraceId: 01jd6ypfxg2x2qc3tcks9fse6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYzE2N2QtNDFkYTlkZDAtMjcyZDcyNy1lNjk3MGE3Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.186739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736834. Ctx: { TraceId: 01jd6ypfxg6k8vkxhqxgj6qnnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI1ZTc2NTYtMjU0YTNiZTItMjg2ZjE1NDctM2E3OTk2Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.188695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736835. Ctx: { TraceId: 01jd6ypfxkb94ybkza9h62jbhn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJhZmY5MjMtZjdlMWJkODctZjE5NGQ2YzktNGZiZTYzMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.190106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736837. Ctx: { TraceId: 01jd6ypfxn15gzdzke71jka5zt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYzE2N2QtNDFkYTlkZDAtMjcyZDcyNy1lNjk3MGE3Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.190168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736838. Ctx: { TraceId: 01jd6ypfxn9p8b5pqpy1v2jtk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThmYzRiYTEtY2IyY2YzZGQtMmZjMDk2YjQtY2M2ODE4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.190213Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736839. Ctx: { TraceId: 01jd6ypfxn4xh8bqp6jeysvd1p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEwNjA5MzEtYjA3ODM2YzYtNDRkNTdlMWYtOGMzNDM0ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T08:51:23.190919Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736840. Ctx: { TraceId: 01jd6ypfxn6t8jnfmremwa49n9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA2NmQ4NGUtNDgxOTYzMDEtYmY0NDk0ZDctNzJmOTMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.190972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736836. Ctx: { TraceId: 01jd6ypfxndxa2cwdge30mk00a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMDUtZTJhY2MyZDMtYTJlYWM4M2ItM2VlNGM0MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.191011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736842. Ctx: { TraceId: 01jd6ypfxn6rhwvhfkta9yc38a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcwODk5NzMtZTkwMmMwZTMtYzVlNTUxOTYtZDFiMmMxNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.191022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736841. Ctx: { TraceId: 01jd6ypfxn93ex59mm8w6jfqdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk1ZTc1MGQtNWExZTAyMTgtZjc2ZWI4ZTgtNjRhODMzYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.191109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736843. 
Ctx: { TraceId: 01jd6ypfxn1g596r3n35xjfpxh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU3ZjljNmYtZWQ4Y2Q3MC1mMTkxMjBhNC0zZmQ1NTNlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.191157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736844. Ctx: { TraceId: 01jd6ypfxn0mzpqkjxd6z9d9tq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI1ZTc2NTYtMjU0YTNiZTItMjg2ZjE1NDctM2E3OTk2Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.191929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736845. Ctx: { TraceId: 01jd6ypfxp0sdzq2kshqz8cv6g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJhZmY5MjMtZjdlMWJkODctZjE5NGQ2YzktNGZiZTYzMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.192833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736846. Ctx: { TraceId: 01jd6ypfxq0gnybgt8g4eccnwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYzE2N2QtNDFkYTlkZDAtMjcyZDcyNy1lNjk3MGE3Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179063181 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T08:51:23.193490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736847. Ctx: { TraceId: 01jd6ypfxr9nbkdma3gnpbb1y4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI1ZTc2NTYtMjU0YTNiZTItMjg2ZjE1NDctM2E3OTk2Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.193792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736848. Ctx: { TraceId: 01jd6ypfxsc48ekwvvs6v2v4zx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRmZWRlMDUtZTJhY2MyZDMtYTJlYWM4M2ItM2VlNGM0MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.193847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736850. Ctx: { TraceId: 01jd6ypfxs3asp8gyjsrpdqxbp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThmYzRiYTEtY2IyY2YzZGQtMmZjMDk2YjQtY2M2ODE4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.193932Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736849. Ctx: { TraceId: 01jd6ypfxs2tpembh48qky52ej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEwNjA5MzEtYjA3ODM2YzYtNDRkNTdlMWYtOGMzNDM0ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.194677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736851. 
Ctx: { TraceId: 01jd6ypfxscf2d13w3xev0tm0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA2NmQ4NGUtNDgxOTYzMDEtYmY0NDk0ZDctNzJmOTMyNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.194720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736852. Ctx: { TraceId: 01jd6ypfxs5cqw7zc2n5qp93zr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk1ZTc1MGQtNWExZTAyMTgtZjc2ZWI4ZTgtNjRhODMzYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:23.194758Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976736853. Ctx: { TraceId: 01jd6ypfxscxqd955p0jb91bdp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcwODk5NzMtZTkwMmMwZTMtYzVlNTUxOTYtZDFiMmMxNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179063181 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T08:51:23.591934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 684364 rowCount 10773 cpuUsage 0 2024-11-21T08:51:23.591963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 658736 rowCount 10351 cpuUsage 0 2024-11-21T08:51:23.692093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T08:51:23.692176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 10773, DataSize 684364 2024-11-21T08:51:23.692244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 10351, DataSize 658736 2024-11-21T08:51:23.692567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> BlobPatching::StressNone [GOOD] >> BlobStorageBlockRace::Test >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] >> BlobStorageBlockRace::Test [GOOD] >> BlobStorageBlockRace::BlocksRacingViaSyncLog >> CountingEvents::Get_Block42 [GOOD] >> CountingEvents::Get_None ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2024-11-21T08:51:25.140001Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T08:51:25.140021Z 
1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:25.140031Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T08:51:25.140033Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:25.140037Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T08:51:25.140039Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:25.140042Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T08:51:25.140044Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:25.140047Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T08:51:25.140050Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:25.140053Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T08:51:25.140055Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:25.140058Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T08:51:25.140060Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:25.140065Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T08:51:25.140068Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:25.140073Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T08:51:25.140077Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:25.140082Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T08:51:25.140086Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:25.140090Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T08:51:25.140093Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:25.140096Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T08:51:25.140098Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T08:51:25.140104Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T08:51:25.140106Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:25.140111Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T08:51:25.140114Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:25.140117Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T08:51:25.140119Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:25.142211Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2024-11-21T08:51:25.142305Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2024-11-21T08:51:25.142313Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2024-11-21T08:51:25.142320Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20] 2024-11-21T08:51:25.142326Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2024-11-21T08:51:25.142332Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2024-11-21T08:51:25.142338Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2024-11-21T08:51:25.142344Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# 
ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2024-11-21T08:51:25.142350Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2024-11-21T08:51:25.142356Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2024-11-21T08:51:25.142366Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2024-11-21T08:51:25.142372Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2024-11-21T08:51:25.142378Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2024-11-21T08:51:25.142383Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 2024-11-21T08:51:25.142389Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2024-11-21T08:51:25.149436Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2024-11-21T08:51:25.149462Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2024-11-21T08:51:25.149471Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2024-11-21T08:51:25.149480Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2024-11-21T08:51:25.149488Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2024-11-21T08:51:25.149496Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2024-11-21T08:51:25.149504Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2024-11-21T08:51:25.149511Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2024-11-21T08:51:25.149519Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2024-11-21T08:51:25.149528Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2024-11-21T08:51:25.149534Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2024-11-21T08:51:25.149542Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2024-11-21T08:51:25.149549Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2024-11-21T08:51:25.149557Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2024-11-21T08:51:25.149566Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2024-11-21T08:51:25.149871Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:574:58] Status# OK ClientId# [1:574:58] ServerId# [1:603:59] PipeClient# [1:574:58] 2024-11-21T08:51:25.149875Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2024-11-21T08:51:25.150290Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:575:21] Status# OK ClientId# [2:575:21] ServerId# [1:604:60] PipeClient# [2:575:21] 2024-11-21T08:51:25.150295Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2024-11-21T08:51:25.150300Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:576:21] Status# OK ClientId# [3:576:21] ServerId# [1:605:61] PipeClient# [3:576:21] 2024-11-21T08:51:25.150303Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2024-11-21T08:51:25.150307Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:577:21] Status# OK ClientId# [4:577:21] ServerId# [1:606:62] PipeClient# [4:577:21] 2024-11-21T08:51:25.150309Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State 
switched from 0 to 1 2024-11-21T08:51:25.150312Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:578:21] Status# OK ClientId# [5:578:21] ServerId# [1:607:63] PipeClient# [5:578:21] 2024-11-21T08:51:25.150315Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2024-11-21T08:51:25.150320Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:579:21] Status# OK ClientId# [6:579:21] ServerId# [1:608:64] PipeClient# [6:579:21] 2024-11-21T08:51:25.150324Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2024-11-21T08:51:25.150329Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:580:21] Status# OK ClientId# [7:580:21] ServerId# [1:609:65] PipeClient# [7:580:21] 2024-11-21T08:51:25.150332Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2024-11-21T08:51:25.150342Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:581:21] Status# OK ClientId# [8:581:21] ServerId# [1:610:66] PipeClient# [8:581:21] 2024-11-21T08:51:25.150346Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2024-11-21T08:51:25.150351Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:582:21] Status# OK ClientId# [9:582:21] ServerId# [1:611:67] PipeClient# [9:582:21] 2024-11-21T08:51:25.150355Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2024-11-21T08:51:25.150361Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:583:21] Status# OK ClientId# [10:583:21] ServerId# [1:612:68] PipeClient# [10:583:21] 2024-11-21T08:51:25.150365Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2024-11-21T08:51:25.150371Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:584:21] Status# OK ClientId# [11:584:21] ServerId# [1:613:69] PipeClient# [11:584:21] 2024-11-21T08:51:25.150374Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2024-11-21T08:51:25.150380Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:585:21] Status# OK ClientId# [12:585:21] ServerId# [1:614:70] PipeClient# [12:585:21] 2024-11-21T08:51:25.150383Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2024-11-21T08:51:25.150389Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:586:21] Status# OK ClientId# [13:586:21] ServerId# [1:615:71] PipeClient# [13:586:21] 2024-11-21T08:51:25.150393Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2024-11-21T08:51:25.150430Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:587:21] Status# OK ClientId# [14:587:21] ServerId# [1:616:72] PipeClient# [14:587:21] 2024-11-21T08:51:25.150434Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2024-11-21T08:51:25.150439Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:588:21] Status# OK ClientId# [15:588:21] ServerId# [1:617:73] PipeClient# [15:588:21] 2024-11-21T08:51:25.150443Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2024-11-21T08:51:25.150765Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:25.150777Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T08:51:25.153275Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2024-11-21T08:51:25.153561Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T08:51:25.153573Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] 
VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T08:51:25.153588Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2024-11-21T08:51:25.153598Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:25.153602Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T08:51:25.153609Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2024-11-21T08:51:25.153631Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:25.153637Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T08:51:25.153644Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2024-11-21T08:51:25.153654Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:25.153661Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T08:51:25.153667Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2024-11-21T0 ... 13 00h01m06.094024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to REPLICATING 2024-11-21T08:51:25.215510Z 1 00h01m06.094024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215547Z 1 00h01m10.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215633Z 15 00h01m16.079512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2024-11-21T08:51:25.215679Z 1 00h01m16.079512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215701Z 12 00h01m16.412512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2024-11-21T08:51:25.215738Z 1 00h01m16.412512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215767Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] 
Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215779Z 14 00h01m20.793512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2024-11-21T08:51:25.215825Z 1 00h01m20.793512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215860Z 2 00h01m23.072512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2024-11-21T08:51:25.215913Z 1 00h01m23.072512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.215936Z 11 00h01m23.105512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2024-11-21T08:51:25.215979Z 1 00h01m23.105512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.216004Z 1 00h01m24.979512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2024-11-21T08:51:25.216036Z 1 00h01m24.979512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.216065Z 13 00h01m25.161024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2024-11-21T08:51:25.216126Z 1 00h01m25.161024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T08:51:25.216263Z 7 00h01m25.161536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2024-11-21T08:51:25.216274Z 7 00h01m25.161536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2024-11-21T08:51:25.216305Z 14 00h01m28.522536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2024-11-21T08:51:25.216356Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2024-11-21T08:51:25.216449Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216456Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: 
{BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2024-11-21T08:51:25.216494Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216499Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2024-11-21T08:51:25.216504Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216508Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2024-11-21T08:51:25.216513Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216517Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2024-11-21T08:51:25.216523Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216530Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2024-11-21T08:51:25.216536Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216540Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2024-11-21T08:51:25.216545Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216550Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2024-11-21T08:51:25.216555Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T08:51:25.216560Z 1 00h01m28.522536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2024-11-21T08:51:25.216900Z 1 00h01m28.523048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T08:51:25.216910Z 1 00h01m28.523048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2024-11-21T08:51:25.216976Z 1 00h01m28.523048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2024-11-21T08:51:25.216982Z 1 00h01m28.523048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2024-11-21T08:51:25.216999Z 8 00h01m28.523048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T08:51:25.217003Z 8 00h01m28.523048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2024-11-21T08:51:25.217012Z 2 00h01m28.523048s :BS_NODE DEBUG: [2] 
NodeServiceSetUpdate 2024-11-21T08:51:25.217016Z 2 00h01m28.523048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2024-11-21T08:51:25.217022Z 3 00h01m28.523048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T08:51:25.217025Z 3 00h01m28.523048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2024-11-21T08:51:25.217031Z 4 00h01m28.523048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T08:51:25.217035Z 4 00h01m28.523048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2024-11-21T08:51:25.217040Z 5 00h01m28.523048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T08:51:25.217044Z 5 00h01m28.523048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2024-11-21T08:51:25.217049Z 6 00h01m28.523048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T08:51:25.217053Z 6 00h01m28.523048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2024-11-21T08:51:25.217057Z 9 00h01m28.523048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T08:51:25.217063Z 13 00h01m28.523048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T08:51:25.217066Z 13 00h01m28.523048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2024-11-21T08:51:25.217072Z 14 00h01m28.523048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T08:51:25.217075Z 14 00h01m28.523048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2024-11-21T08:51:25.217081Z 15 00h01m28.523048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T08:51:25.217085Z 15 00h01m28.523048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2024-11-21T08:51:25.217091Z 15 00h01m28.523048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2024-11-21T08:51:25.217221Z 10 00h01m28.718512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2024-11-21T08:51:25.217325Z 15 00h01m31.381048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2024-11-21T08:51:25.217473Z 13 00h01m38.133512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2024-11-21T08:51:25.217541Z 3 00h01m38.166512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2024-11-21T08:51:25.217986Z 15 00h02m04.003048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2024-11-21T08:51:25.218121Z 9 00h02m04.003560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T08:51:25.218127Z 9 00h02m04.003560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> BlobStorageBlockRace::BlocksRacingViaSyncLog [GOOD] >> BlobStorageBlockRace::BlocksRacingViaSyncLog2 >> CountingEvents::Get_None [GOOD] >> CountingEvents::Collect_Mirror3of4 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:147:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:147:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:156:2057] recipient: [7:155:2175] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:158:2057] recipient: [7:155:2175] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:157:2176] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:205:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:160:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:159:2179] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:163:2057] recipient: [10:159:2179] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:162:2180] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:210:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader f ... 37927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:153:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:152:2173] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:156:2057] recipient: [24:152:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:155:2174] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:225:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:155:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:158:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:159:2057] recipient: [25:157:2178] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:161:2057] recipient: [25:157:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:160:2179] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:230:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:155:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:158:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:159:2057] recipient: [26:157:2178] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:161:2057] recipient: [26:157:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:160:2179] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:230:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:160:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:161:2057] recipient: [27:159:2179] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:163:2057] recipient: [27:159:2179] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:162:2180] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:232:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:162:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:165:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:166:2057] recipient: [28:164:2184] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:168:2057] recipient: [28:164:2184] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:167:2185] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:237:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:162:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:165:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:166:2057] recipient: [29:164:2184] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:168:2057] recipient: [29:164:2184] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:167:2185] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:237:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:167:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:169:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:171:2057] recipient: [30:170:2188] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:173:2057] recipient: [30:170:2188] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:172:2189] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:242:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:169:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:172:2057] recipient: [31:171:2190] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:173:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:175:2057] recipient: [31:171:2190] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:174:2191] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:244:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:169:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:172:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:173:2057] recipient: [32:171:2190] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:175:2057] recipient: [32:171:2190] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:174:2191] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:244:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:174:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:176:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:178:2057] recipient: [33:177:2194] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:180:2057] recipient: [33:177:2194] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! 
new actor is[33:179:2195] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:249:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] >> BlobStorageBlockRace::BlocksRacingViaSyncLog2 [GOOD] >> BlobStorageSync::TestSyncLogCuttingMirror3dc >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob1Slow [GOOD] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob2Slow >> CountingEvents::Collect_Mirror3of4 [GOOD] >> CountingEvents::Collect_Mirror3dc >> CountingEvents::Collect_Mirror3dc [GOOD] >> CountingEvents::Collect_Block42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fd6/r3tmp/tmp4yEOus/pdisk_1.dat 2024-11-21T08:51:07.124359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:07.175253Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:07.209675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.209699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21274, node 1 2024-11-21T08:51:07.213840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.224621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.224629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.224631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.224669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:51:07.247056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:07.260454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.289453Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:07.528327Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:07.528353Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446428101156:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:07.528930Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmNkODMxNzctY2RmYmI4YTAtMzNiNTc4YTAtOGI1MWFhMTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmNkODMxNzctY2RmYmI4YTAtMzNiNTc4YTAtOGI1MWFhMTY= 2024-11-21T08:51:07.529010Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmNkODMxNzctY2RmYmI4YTAtMzNiNTc4YTAtOGI1MWFhMTY=, ActorId: [1:7439652446428101157:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.530734Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.530738Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:07.530745Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:51:07.530765Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446428101156:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:07.530771Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446428101156:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:07.530774Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652446428101156:2298], Successfully finished 2024-11-21T08:51:07.530796Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:07.558315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:07.593826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.593851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.600909Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:51:07.601264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.623918Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:07.638735Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. 
EnableColumnStatistics=false 2024-11-21T08:51:07.669537Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.669553Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.684733Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.788274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:51:07.825731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.825747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.829703Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:07.830330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.845256Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:07.854071Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. EnableColumnStatistics=false 2024-11-21T08:51:07.891217Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.891251Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.892382Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.926013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:51:07.933860Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.933883Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.933890Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.933895Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.933900Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.933904Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:51:07.947597Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:08.019775Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:08.019805Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652448075473164:2313], Start check tables existence, number paths: 2 2024-11-21T08:51:08.019888Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:08.019890Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:08.023034Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:51:08.023057Z node 3 
:KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652448075473164:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:08.023065Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652448075473164:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:08.023070Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652448075473164:2313], Successfully finished 2024-11-21T08:51:08.023080Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:08.187869Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:08.187898Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652448447617175:2338], Start check tables existence, number paths: 2 2024-11-21T08:51:08.188014Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:08.188018Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:08.190907Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439652448447617194:2342], Database: /Root/test-serverless, Start database fetching 2024-11-21T08:51:08.192506Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:51:08.192544Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439652448447617194:2342], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2024-11-21T08:51:08.193004Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=ZTIxNDA2YWQtNmI2NDVkMDktOTQwMTJmYjQtYzNkOTJhOTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTIxNDA2YWQtNmI2NDVkMDktOTQwMTJmYjQtYzNkOTJhOTI= 2024-11-21T08:51:08.193082Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652448447617175:2338], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:08.193089Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652448447617175:2338], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2 ... 
/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ReadyState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, Sending CompileQuery request 2024-11-21T08:51:25.374186Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, acquire mvcc snapshot 2024-11-21T08:51:25.374555Z node 7 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, read snapshot result: UNAVAILABLE, step: 1732179085000, tx id: 18446744073709551615 2024-11-21T08:51:25.374575Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, ExecutePhyTx, tx: 0x000006BBDDCF6C18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:25.374585Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, Sending to Executer TraceId: 0 8 2024-11-21T08:51:25.374600Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, Created new KQP executer: [7:7439652523915154626:2669] isRollback: 0 2024-11-21T08:51:25.375306Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:51:25.375349Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1.183 QueriesCount: 2 2024-11-21T08:51:25.375391Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:25.375433Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:25.375440Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 01jd6ypj1k4fc2r8s7xhxqz4pf, EndCleanup, isFinal: 0 2024-11-21T08:51:25.375455Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ExecuteState, TraceId: 
01jd6ypj1k4fc2r8s7xhxqz4pf, Sent query response back to proxy, proxyRequestId: 56, proxyId: [7:7439652506735283652:2238] 2024-11-21T08:51:25.375603Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:25.375621Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T08:51:25.375635Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ReadyState, Created new KQP executer: [7:7439652523915154637:2669] isRollback: 1 2024-11-21T08:51:25.375650Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:25.375716Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T08:51:25.375725Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:25.375748Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NTcyNDdhOC05MjcyZDY4YS1mMWQ2ZGMwZC04ZWJlMGJiOQ==, ActorId: [7:7439652523915154612:2669], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:25.449107Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM= 2024-11-21T08:51:25.449160Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:25.449215Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ReadyState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, received request, proxyRequestId: 57 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [7:7439652523915154643:2921] database: Root databaseId: /Root pool id: default 2024-11-21T08:51:25.449224Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ReadyState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, request placed into pool from cache: default 2024-11-21T08:51:25.449234Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ReadyState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Sending CompileQuery request 2024-11-21T08:51:25.454470Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, ExecutePhyTx, tx: 0x000006BBDDCE9298 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:25.454490Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Sending to Executer TraceId: 0 8 2024-11-21T08:51:25.454507Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Created new KQP executer: [7:7439652523915154648:2678] isRollback: 0 2024-11-21T08:51:25.454897Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Forwarded TEvStreamData to [7:7439652523915154643:2921] 2024-11-21T08:51:25.455043Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:51:25.455085Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, txInfo Status: Committed Kind: Pure TotalDuration: 0.646 ServerDuration: 0.624 QueriesCount: 2 2024-11-21T08:51:25.455116Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:25.455140Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:25.455148Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, EndCleanup, isFinal: 1 2024-11-21T08:51:25.455157Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: ExecuteState, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Sent query response back to proxy, proxyRequestId: 57, proxyId: [7:7439652506735283652:2238] 2024-11-21T08:51:25.455160Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: [7:7439652523915154644:2678], ActorState: unknown state, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Cleanup temp tables: 0 2024-11-21T08:51:25.455186Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=Yjk5NDFmYmMtMTk4MDIxOWYtZWZlZWVlYmYtYjhiYTZkNmM=, ActorId: 
[7:7439652523915154644:2678], ActorState: unknown state, TraceId: 01jd6ypj498jfw3hty3asnb6w4, Session actor destroyed 2024-11-21T08:51:25.456143Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NWJjZjVmZmQtOGFjZGRhZjAtODliYjg2OTctOWI1NTIyYg==, ActorId: [7:7439652506735284040:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:25.456156Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NWJjZjVmZmQtOGFjZGRhZjAtODliYjg2OTctOWI1NTIyYg==, ActorId: [7:7439652506735284040:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:25.456159Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NWJjZjVmZmQtOGFjZGRhZjAtODliYjg2OTctOWI1NTIyYg==, ActorId: [7:7439652506735284040:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:25.456161Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NWJjZjVmZmQtOGFjZGRhZjAtODliYjg2OTctOWI1NTIyYg==, ActorId: [7:7439652506735284040:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:25.456172Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NWJjZjVmZmQtOGFjZGRhZjAtODliYjg2OTctOWI1NTIyYg==, ActorId: [7:7439652506735284040:2299], ActorState: unknown state, Session actor destroyed >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] >> CountingEvents::Collect_Block42 [GOOD] >> CountingEvents::Collect_None >> CountingEvents::Collect_None [GOOD] >> Deadlines::TestPutMirror3dc >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[insert-keepmeta-with_read_udf_fail-Plan] [SKIPPED] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 [GOOD] >> Deadlines::TestPutMirror3dc [GOOD] >> Deadlines::TestPut4Plus2Block >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob2Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob2Slow |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> Acceleration::TestThresholdPutMirror3dc2Slow [GOOD] >> Acceleration::TestThresholdPut4Plus2Block2Slow >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Analyze] >> Deadlines::TestPut4Plus2Block [GOOD] >> Deadlines::TestGetMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 10 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 16 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 22 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 28 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 34 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 40 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 46 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 52 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 58 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 64 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 70 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 76 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 82 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 88 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 94 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 100 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 106 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 112 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 118 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 124 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 130 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 136 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 142 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 148 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 154 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 160 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 166 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 172 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 178 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 184 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 190 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 196 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 202 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 208 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 214 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 220 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 226 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 
666 blobsUnwritten# 1218 iteration# 232 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 238 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 244 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 250 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 256 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 262 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 268 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 274 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 280 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 286 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 292 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 298 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 304 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 310 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 316 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 322 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 328 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 334 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 340 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 346 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 352 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 358 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 364 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 370 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 376 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 382 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 388 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 394 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 400 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 406 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 412 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 418 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 424 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 430 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 436 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 442 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 448 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 454 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 460 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 466 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 472 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 478 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 484 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 490 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 496 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 502 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 508 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 514 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 520 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 526 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 532 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 538 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 544 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 550 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 556 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 562 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 568 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 574 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 580 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 586 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 592 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 598 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 604 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 610 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 616 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 622 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 628 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 634 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 640 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 646 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 652 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 658 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 664 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 670 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 676 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 682 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 688 BlobsWritten# 2041 blobsWrittenF ... blobsUnwritten# 1218 iteration# 1366 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1372 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1378 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1384 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1390 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1396 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1402 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1408 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1414 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1420 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1426 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1432 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1438 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1444 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1450 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1456 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1462 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1468 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1474 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1480 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1486 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1492 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1498 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1504 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1510 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1516 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1522 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1528 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1534 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1540 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1546 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1552 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1558 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1564 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1570 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1576 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1582 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1588 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1594 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1600 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1606 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1612 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1618 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1624 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1630 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1636 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1642 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1648 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1654 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1660 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1666 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1672 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1678 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1684 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1690 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1696 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1702 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1708 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1714 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1720 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1726 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1732 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1738 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1744 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1750 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1756 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1762 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1768 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1774 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1780 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1786 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1792 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1798 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1804 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1810 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1816 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1822 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1828 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1834 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1840 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1846 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1852 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1858 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1864 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1870 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1876 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1882 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1888 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1894 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1900 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1906 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 1912 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1918 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1924 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1930 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1936 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1942 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1948 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1954 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1960 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1966 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1972 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1978 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1984 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1990 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1996 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2002 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2008 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2014 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2020 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2026 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2032 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2038 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> Deadlines::TestGetMirror3dc [GOOD] >> Deadlines::TestGet4Plus2Block >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> Deadlines::TestGet4Plus2Block [GOOD] >> Deadlines::TestGetMirror3of4 >> TFlatTableBackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableCold::ColdBorrowScan [GOOD] >> TFlatTableCompactionScan::TestCompactionScan >> Deadlines::TestGetMirror3of4 [GOOD] >> TFlatTableCompactionScan::TestCompactionScan [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_Default >> TIterator::Single >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Analyze] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TFlatTableExecutorResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutorResourceProfile::TestUpdateConfig >> 
TFlatTableExecutorResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutorSliceOverlapScan::TestSliceOverlapScan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> Deadlines::TestGetMirror3of4 [GOOD] Test command err: RandomSeed# 7815042083806888431 2024-11-21T08:51:26.741160Z 10 00h01m40.010512s :BS_PROXY_PUT ERROR: [9e16c3b09fbc15ca] Result# TEvPutResult {Id# [1:1:1:1:123:1000:0] Status# DEADLINE StatusFlags# { } ErrorReason# "Deadline timer hit" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T08:51:26.975787Z 9 00h01m40.010512s :BS_PROXY_PUT ERROR: [f04e3767180297a7] Result# TEvPutResult {Id# [1:1:1:1:123:1000:0] Status# DEADLINE StatusFlags# { } ErrorReason# "Deadline timer hit" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T08:51:27.260753Z 10 00h01m40.010512s :BS_PROXY_GET ERROR: [399551171eb9e902] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 2024-11-21T08:51:27.455841Z 9 00h01m40.010512s :BS_PROXY_GET ERROR: [774127b4464b0083] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 2024-11-21T08:51:27.640337Z 9 00h01m40.010512s :BS_PROXY_GET ERROR: [1523c811de5088e5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Analyze] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Debug] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2024-11-21T08:51:22.099930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652509280204466:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:22.099972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:22.106005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652511261853242:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00456b/r3tmp/tmpnLNt1n/pdisk_1.dat 2024-11-21T08:51:22.144443Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:22.152446Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:22.153750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:22.191141Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30310, node 1 2024-11-21T08:51:22.236618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.236648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.240812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00456b/r3tmp/yandexsyMOgE.tmp 2024-11-21T08:51:22.240827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00456b/r3tmp/yandexsyMOgE.tmp 2024-11-21T08:51:22.240896Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00456b/r3tmp/yandexsyMOgE.tmp 2024-11-21T08:51:22.240947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:22.245004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:22.247554Z INFO: TTestServer started on Port 11580 GrpcPort 30310 2024-11-21T08:51:22.253568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.253598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.256545Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:22.258114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11580 PQClient connected to localhost:30310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:22.275369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:22.281686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:22.297454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2024-11-21T08:51:22.492958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652509280205301:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.492981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652509280205295:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.493036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.493634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T08:51:22.493839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652509280205339:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.493863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.497353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652509280205310:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:51:22.521305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.521562Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652511261853462:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.521666Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWY2ZjI4ZmYtNTE1Nzk1NjQtN2I1ZGQ4YTUtZDNlZDhmNWM=, ActorId: [2:7439652511261853399:2279], ActorState: ExecuteState, TraceId: 01jd6ypf853rfhqv4dwbmpqkw5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.522391Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.594636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.602610Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652509280205578:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.603044Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmIyMDA5MWYtMmQwYTU2OWItMmY1M2E2M2EtYWIyMTBkMGE=, ActorId: [1:7439652509280205278:2300], ActorState: ExecuteState, TraceId: 01jd6ypf7t9azkh5xs4aaqmesd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.603284Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.697063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:51:22.741038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd6ypfewc3xcfp9y3ywv3b9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBjNWQ1YjMtZDkzNzA0ZTctMjZiZDY2YTgtNWVhNTk0NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652509280205825:3033] 2024-11-21T08:51:27.099185Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652509280204466:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:27.099248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:51:27.102256Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652511261853242:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:27.102293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` wit ... 
PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 5 PartitionIds: 10 TopicName: "rt3.dc1--topic-x" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "topic-x" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--topic-x" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 5 Status: Active CreateVersion: 1 TabletId: 0 } Partitions { PartitionId: 10 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 5 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 10 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T08:51:27.809831Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7439652530755043214:2469] 2024-11-21T08:51:27.809840Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [1:7439652530755043208:2467] 2024-11-21T08:51:27.810711Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7439652532736690590:2382] 2024-11-21T08:51:27.811039Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7439652532736690592:2384] 2024-11-21T08:51:27.811330Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7439652532736690592:2384] 2024-11-21T08:51:27.811683Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7439652532736690589:2381] 2024-11-21T08:51:27.811967Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7439652532736690589:2381] 2024-11-21T08:51:27.812175Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7439652532736690591:2383] 2024-11-21T08:51:27.812657Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7439652532736690591:2383] 2024-11-21T08:51:27.810861Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7439652530755043208:2467] 2024-11-21T08:51:27.811463Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7439652530755043220:2471] 2024-11-21T08:51:27.811990Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7439652530755043220:2471] 2024-11-21T08:51:27.812314Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: 
StateInit] bootstrapping 14 [1:7439652530755043204:2465] 2024-11-21T08:51:27.812831Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7439652530755043204:2465] 2024-11-21T08:51:27.813165Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7439652532736690593:2385] 2024-11-21T08:51:27.813147Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7439652530755043205:2466] 2024-11-21T08:51:27.813444Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7439652530755043214:2469] 2024-11-21T08:51:27.813749Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7439652530755043205:2466] 2024-11-21T08:51:27.813895Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7439652530755043213:2468] 2024-11-21T08:51:27.814079Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7439652530755043216:2470] 2024-11-21T08:51:27.813671Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7439652532736690593:2385] 2024-11-21T08:51:27.814024Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7439652532736690594:2386] 2024-11-21T08:51:27.814462Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7439652532736690594:2386] 2024-11-21T08:51:27.814663Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7439652532736690595:2387] 2024-11-21T08:51:27.814927Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7439652532736690595:2387] 2024-11-21T08:51:27.815152Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7439652532736690596:2388] 2024-11-21T08:51:27.815471Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7439652532736690596:2388] 2024-11-21T08:51:27.814639Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7439652530755043216:2470] 2024-11-21T08:51:27.816588Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7439652530755043213:2468] ===Query complete Create topic result: 1 2024-11-21T08:51:27.819256Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652530755043269:3660]: Request location 2024-11-21T08:51:27.819424Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043278:3663] connected; active server actors: 1 2024-11-21T08:51:27.819538Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2024-11-21T08:51:27.819549Z node 2 
:PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2024-11-21T08:51:27.819552Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2024-11-21T08:51:27.819554Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2024-11-21T08:51:27.819556Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2024-11-21T08:51:27.819559Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2024-11-21T08:51:27.819561Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2024-11-21T08:51:27.819564Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2024-11-21T08:51:27.819567Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2024-11-21T08:51:27.819569Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2024-11-21T08:51:27.819572Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2024-11-21T08:51:27.819574Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2024-11-21T08:51:27.819589Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2024-11-21T08:51:27.819591Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2024-11-21T08:51:27.819593Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2024-11-21T08:51:27.819651Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652530755043269:3660]: Got location 2024-11-21T08:51:27.819728Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043278:3663] disconnected; active server actors: 1 2024-11-21T08:51:27.819737Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043278:3663] disconnected no session 2024-11-21T08:51:27.819820Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652530755043279:3664]: Request location 2024-11-21T08:51:27.819877Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043281:3666] connected; active server actors: 1 2024-11-21T08:51:27.819946Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2024-11-21T08:51:27.819954Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2024-11-21T08:51:27.819956Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2024-11-21T08:51:27.819981Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652530755043279:3664]: Got location 2024-11-21T08:51:27.820036Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043281:3666] disconnected; active server actors: 1 2024-11-21T08:51:27.820044Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043281:3666] disconnected no session 2024-11-21T08:51:27.820085Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652530755043282:3667]: Request location 2024-11-21T08:51:27.820122Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652530755043284:3669] connected; active server actors: 1 >> TSequenceReboots::CreateDropRecreate [GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize1000 >> BuildStatsHistogram::Ten_Serial >> TFlatTableExecutorSliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutorStickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestSticky [GOOD] >> TFlatTableExecutorStickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestNonStickyGroup_BTreeIndex |87.1%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> Acceleration::TestThresholdPut4Plus2Block2Slow [GOOD] >> Acceleration::TestThresholdGetMirror3dc1Slow >> TFlatTableExecutorStickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyMain [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAll [GOOD] >> TFlatTableExecutorStickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutorStickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutorVersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRows >> TScreen::Sequential [GOOD] >> TScreen::Random >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TFlatTableExecutorVersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRowsSmallBlobs >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TTopicApiDescribes::DescribeConsumer [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Debug] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob2Slow [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead1Slow >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> TChargeBTreeIndex::NoNodes >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> 
TFlatTableExecutorCachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutorColumnGroups::TestManyRows |87.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TChargeBTreeIndex::NoNodes [GOOD] >> TIterator::Serial [GOOD] >> Acceleration::TestThresholdGetMirror3dc1Slow [GOOD] >> Acceleration::TestThresholdGet4Plus2Block1Slow >> BlobStorageSync::TestSyncLogCuttingMirror3dc [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize1000 >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Debug] [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> TBtreeIndexBuilder::NoNodes [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Debug] [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutorColumnGroups::TestManyRows [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead1Slow [GOOD] >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1BlobSize1000 [GOOD] >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TSharedPageCache::S3FIFO [GOOD] >> TIterator::SerialReverse >> BlobStorageSync::TestSyncLogCuttingMirror3of4 >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead1Slow >> BuildStatsHistogram::Ten_Serial_Log >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] >> TFlatTableExecutorCompressedSelectRows::TestCompressedSelectRows [GOOD] >> Memtable::Basics [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateDropRecreate [GOOD] >> TFlatTableExecutorFollower::BasicFollowerRead [GOOD] >> TSharedPageCache::ClockPro >> Memtable::BasicsReverse [GOOD] >> TIterator::SerialReverse [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRowsLargeBlobs Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] 
recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:50:53.769780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:53.769801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:53.769807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:53.769812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:53.769826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:53.769830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:53.769839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:53.769931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:53.782797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:53.782818Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:50:53.785276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:53.785397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:53.785433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:53.792682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:53.792812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:53.792943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:53.793336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:53.794119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:53.794424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:53.794437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:53.794452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:53.794460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:53.794466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:53.794510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: 
[1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:50:53.796418Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:50:53.833450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:53.833525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.833585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:53.833635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:53.833644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.838697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:53.838742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:53.838789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.838814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:53.838818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:53.838824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:53.839453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.839470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:53.839476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:53.839870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.839882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.839889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:53.839896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:53.840570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { 
TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:53.841101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:53.841162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:53.841363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:53.841405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:53.841413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:53.841472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:53.841480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:53.841515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:53.841530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:53.841912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:53.841923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:53.841978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:53.841984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:53.842066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:53.842073Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:53.842084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:53.842088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:53.842094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:53.842099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:53.842104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation 
and all the parts is done, operation id: 1:0 2024-11-21T08:50:53.842108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:53.842120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:53.842126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:53.842130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 9 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:28.592634Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#1004:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2024-11-21T08:51:28.592676Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T08:51:28.592700Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:28.592715Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:51:28.592727Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:28.592733Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 2024-11-21T08:51:28.593203Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:28.593212Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000005 first txId#1004 countTxs#1 2024-11-21T08:51:28.593219Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000005 2024-11-21T08:51:28.593224Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1004:0 2024-11-21T08:51:28.593255Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [97:125:2150], Recipient [97:125:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:51:28.593260Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:51:28.593279Z node 97 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:28.593285Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:28.593332Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:51:28.593353Z node 97 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:28.593358Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [97:204:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:51:28.593363Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [97:204:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:51:28.593461Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:28.593467Z node 97 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:28.593478Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:28.593482Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:51:28.593487Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:51:28.593492Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:51:28.593497Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:51:28.593502Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:28.593506Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:28.593529Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:28.593534Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 1 2024-11-21T08:51:28.593538Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:51:28.593542Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2024-11-21T08:51:28.593663Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [97:204:2207], Recipient [97:125:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2024-11-21T08:51:28.593669Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:28.593682Z node 97 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.593691Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.593696Z node 97 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:28.593701Z node 97 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:51:28.593707Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:51:28.593719Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:28.593799Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [97:204:2207], Recipient [97:125:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 2 } 2024-11-21T08:51:28.593803Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:51:28.593810Z node 97 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.593818Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.593821Z node 97 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:28.593825Z node 97 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:51:28.593828Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:28.593836Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2024-11-21T08:51:28.593840Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [97:355:2336] 2024-11-21T08:51:28.593846Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:51:28.594374Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:28.594469Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.594474Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:28.594487Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:28.594490Z node 97 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:51:28.594502Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [97:355:2336] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1004 at schemeshard: 72057594046678944 2024-11-21T08:51:28.594514Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:28.594519Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [97:431:2411] 2024-11-21T08:51:28.594555Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [97:433:2413], Recipient [97:125:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:28.594560Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:51:28.594564Z node 97 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:28.594648Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [97:453:2432], Recipient [97:125:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:51:28.594656Z node 97 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:51:28.594668Z node 97 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:28.594705Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq" took 33us result status StatusSuccess 2024-11-21T08:51:28.594774Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq" PathDescription { Self { Name: "seq" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[aggregate-group_by_rollup_rename-default.txt-ForceBlocks] >> TFlatTableExecutorFollower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> TSharedPageCache::ClockPro [GOOD] >> Memtable::Markers [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests2Inflight2BlobSize1000 >> TFlatTableExecutorIndexLoading::CalculateReadSize_FlatIndex >> Memtable::Overlap [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] Test command err: 2024-11-21T08:51:21.940499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652505354086035:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:21.940559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:21.945049Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652505093051469:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:21.970621Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:21.971965Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:21.973309Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004584/r3tmp/tmpu4iIN6/pdisk_1.dat 2024-11-21T08:51:22.015283Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5025, node 1 2024-11-21T08:51:22.040344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.040371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.042180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:22.064129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004584/r3tmp/yandexdI7ewZ.tmp 2024-11-21T08:51:22.064138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004584/r3tmp/yandexdI7ewZ.tmp 2024-11-21T08:51:22.069468Z INFO: TTestServer started on Port 4988 GrpcPort 5025 2024-11-21T08:51:22.076029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.076067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.077355Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:22.077655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4988 2024-11-21T08:51:22.092288Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004584/r3tmp/yandexdI7ewZ.tmp 2024-11-21T08:51:22.092409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration PQClient connected to localhost:5025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:22.120843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:51:22.133016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:51:22.208815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:22.329755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652509388018953:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.329789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.329867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652509388018980:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.331115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:51:22.339283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652509388018982:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:51:22.424643Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652509649054233:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.425252Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWE5YmVkZGYtOWRkZDJlMDctN2Q2NDBkOGQtYjViYmFlNmU=, ActorId: [1:7439652509649054207:2300], ActorState: ExecuteState, TraceId: 01jd6ypf4c93nxm7c05jamtxep, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.425955Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.427094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.432784Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652509388019025:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.432890Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2QwNTM2ZTUtN2E2M2Q2OTctMzAzZjNlN2MtYjI5NTUwYTc=, ActorId: [2:7439652509388018951:2280], ActorState: ExecuteState, TraceId: 01jd6ypf2r5pxv5prjvqen0z9d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.433164Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.450318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.520782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:51:22.569104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd6ypf9f9rdd0pqc8d489hy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UwMjkzNmUtOWQ0YWU5MDItNzE0OGJjMjItZjVjYWFlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652509649054653:3018] 2024-11-21T08:51:26.940441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652505354086035:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:26.940485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:51:26.944008Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652505093051469:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:26.944041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2024-11-21T08:51:27.621765Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T08:51:27.639797Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:27.639817Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2024-11-21T08:51:27.640138Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:27.640152Z node 1 :PERSQUEUE INFO: [P ... partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1732179087 nanos: 724000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732179087 nanos: 732000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1732179087 nanos: 733000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732179087 nanos: 732000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1732179087 nanos: 735000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732179087 nanos: 733000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1732179087 nanos: 736000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732179087 nanos: 732000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1732179087 nanos: 733000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } } } } 2024-11-21T08:51:28.658979Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2024-11-21T08:51:28.659015Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2024-11-21T08:51:28.659412Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652535418860261:2553]: Request location 2024-11-21T08:51:28.659617Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652535418860263:2554] connected; active server actors: 1 2024-11-21T08:51:28.659726Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2024-11-21T08:51:28.659735Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2024-11-21T08:51:28.659739Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2024-11-21T08:51:28.659741Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2024-11-21T08:51:28.659744Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2024-11-21T08:51:28.659746Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2024-11-21T08:51:28.659748Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2024-11-21T08:51:28.659752Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2024-11-21T08:51:28.659754Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2024-11-21T08:51:28.659757Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2024-11-21T08:51:28.659759Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2024-11-21T08:51:28.659762Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2024-11-21T08:51:28.659764Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2024-11-21T08:51:28.659768Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2024-11-21T08:51:28.659769Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2024-11-21T08:51:28.661933Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652535418860261:2553]: Got location 2024-11-21T08:51:28.662127Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652535418860263:2554] disconnected; active server actors: 1 2024-11-21T08:51:28.662143Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652535418860263:2554] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732179087737 tx_id: 281474976720677 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 
generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2024-11-21T08:51:28.663234Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2024-11-21T08:51:28.663270Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732179087737 tx_id: 281474976720677 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2024-11-21T08:51:28.664375Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2024-11-21T08:51:28.664396Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true >> TIterator::GetKeyWithVersionSkips [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> Memtable::Wreck [GOOD] >> TLegacy::IndexIter >> Memtable::Erased [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSwitchableCache::Touch [GOOD] >> TSwitchableCache::Erase [GOOD] >> TSwitchableCache::EvictNext [GOOD] >> TSwitchableCache::UpdateLimit [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> TLegacy::IndexIter [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_BTreeIndex >> NFwd_TBlobs::MemTableTest [GOOD] >> 
NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD] >> TSwitchableCache::Switch_RotatePages_Force [GOOD] >> TSwitchableCache::Switch_RotatePages_Evicts [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> BuildStatsMixedIndex::Single [GOOD] >> TPart::MassCheck >> TBtreeIndexTPart::External [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead1Slow [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize1000 [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> TSwitchableCache::Switch_Touch [GOOD] >> BuildStatsMixedIndex::Single_Slices [GOOD] >> TChargeBTreeIndex::FewNodes >> Acceleration::TestAccelerationMirror3dcGetAsyncRead2Slow >> TPart::MassCheck [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize1000 >> BuildStatsMixedIndex::Single_History [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> TSwitchableCache::Switch_Erase [GOOD] >> TPart::WreckPart >> TFlatTableExecutorVersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TSwitchableCache::Switch_EvictNext [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> TFlatTableLongTx::MemTableLongTx [GOOD] >> TFlatTableLongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableLongTx::CompactCommittedLongTx [GOOD] >> TFlatTableLongTx::CompactedLongTxRestart [GOOD] >> TFlatTableLongTx::CompactMultipleChanges [GOOD] >> TFlatTableLongTx::LongTxBorrow [GOOD] >> TFlatTableLongTx::MemTableLongTxRead [GOOD] >> TFlatTableLongTxAndBlobs::SmallValues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:27.072699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:27.072723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:27.072731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:27.072740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:27.072757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:27.072761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:27.072771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:27.072847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:27.084936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:27.084955Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:27.087007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:27.087091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:27.087111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:27.090157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:27.090246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:27.090395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:27.090646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:27.091387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:27.091654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:27.091664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:27.091677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:27.091684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:27.091690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T08:51:27.091731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:27.092977Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:27.110461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:27.110538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.110600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:27.110650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:27.110659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.111352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:27.111379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:27.111422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.111433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:27.111438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:27.111443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:27.111882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.111896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:27.111901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:27.112297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.112309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.112316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:27.112323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:27.112957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:27.113444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:27.113495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:27.113696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:27.113726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:27.113734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:27.113791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:27.113798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:27.113830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:27.113845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:27.114268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:27.114298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:27.114339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:27.114345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:27.114422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:27.114429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:27.114440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:27.114445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:27.114451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready 
parts: 1/1, is published: false 2024-11-21T08:51:27.114456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:27.114461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:27.114465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:27.114477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:27.114483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:27.114487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... erationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:30.327081Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:30.327094Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 2/3 2024-11-21T08:51:30.327097Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/3 2024-11-21T08:51:30.327101Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: false 2024-11-21T08:51:30.327213Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:30.327221Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:30.327339Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:30.327348Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:30.327365Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:30.327378Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T08:51:30.327449Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:30.327460Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:30.327464Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046678944, txId: 1004 2024-11-21T08:51:30.327470Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:51:30.327478Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:30.327627Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:30.327637Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:30.327651Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:30.327656Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:30.327662Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:30.327671Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:30.327675Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:30.327679Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:30.327684Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T08:51:30.327759Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:30.327769Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:30.327772Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:30.327776Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:30.327780Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:30.327791Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:30.329008Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at 
schemeshard: 72057594046678944 2024-11-21T08:51:30.329030Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:30.329045Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:30.329110Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:30.329120Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T08:51:30.329133Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 3/3 2024-11-21T08:51:30.329137Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:30.329145Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:30.329150Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:30.329155Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:30.329160Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:30.329169Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:30.329174Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:30.329176Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:30.329192Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:30.329196Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:30.329199Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:30.329205Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:30.329268Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:30.329815Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:30.329823Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:30.329880Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:30.329895Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:30.329899Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [12:719:2624] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:30.329981Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:30.330014Z node 12 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 43us result status StatusSuccess 2024-11-21T08:51:30.330123Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight10BlobSize1000 >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> TSwitchableCache::Switch_UpdateLimit [GOOD] >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_FlatIndex >> NFwd_TBlobs::Trace [GOOD] >> TPart::WreckPart [GOOD] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[blocks-pg--Debug] [GOOD] |87.1%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTableLongTxAndBlobs::SmallValues [GOOD] >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TVersions::WreckHead >> NFwd_TBlobs::Filtered [GOOD] >> TPart::PageFailEnv >> TFlatTableLongTxAndBlobs::OuterBlobValues [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> TFlatTableLongTxAndBlobs::ExternalBlobValues [GOOD] >> Acceleration::TestThresholdGet4Plus2Block1Slow [GOOD] >> TFlatTablePostponedScan::TestPostponedScan [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-ForceBlocks] [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 [GOOD] >> TChargeBTreeIndex::FewNodes [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] [GOOD] >> TVersions::WreckHead [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> TFlatTablePostponedScan::TestCancelFinishedScan [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 >> NFwd_TBTreeIndexCache::Basics [GOOD] >> TPart::PageFailEnv [GOOD] >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> TFlatTablePostponedScan::TestCancelRunningPostponedScan [GOOD] >> Acceleration::TestThresholdGetMirror3dc2Slow >> BuildStatsMixedIndex::Single_Groups_History_Slices >> TFlatTablePostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TIcNodeCache::GetNodesInfoTest [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead2Slow [GOOD] >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] [GOOD] >> Bloom::Conf [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize1000 [GOOD] >> TCdcStreamWithRebootsTests::MergeTable[TabletReboots] >> BlobStorageSync::TestSyncLogCuttingMirror3of4 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] >> TChargeBTreeIndex::NoNodes_History >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Plan] [GOOD] >> BlobStorageSync::TestSyncLogCuttingBlock4Plus2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TPart::ForwardEnv >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> TVersions::WreckHeadReverse >> Acceleration::TestThresholdGetMirror3dc2Slow [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> test.py::test[window-win_func_rank_by_opt_part--Plan] [GOOD] >> BuildStatsMixedIndex::Mixed [GOOD] >> Acceleration::TestThresholdGet4Plus2Block2Slow >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TPart::ForwardEnv [GOOD] >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Results] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> TVersions::WreckHeadReverse [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> 
NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> TPart::WreckPartColumnGroups >> test.py::test[window-win_func_rank_by_opt_part--Results] >> TFlatTableReschedule::TestExecuteReschedule [GOOD] >> TPart::WreckPartColumnGroups [GOOD] |87.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests100Inflight10BlobSize1000 >> TFlatTableSnapshotWithCommits::SnapshotWithCommits [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead2Slow >> TPart::PageFailEnvColumnGroups |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 >> Bloom::Hashes [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests1Inflight1BlobSize1000 >> TGenCompaction::OverloadFactorDuringForceCompaction >> Bloom::Rater >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TChargeBTreeIndex::NoNodes_History [GOOD] >> Acceleration::TestThresholdGet4Plus2Block2Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc1Slow >> TVersions::Wreck2 >> test.py::test[aggregate-group_by_rollup_rename-default.txt-Results] [GOOD] >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> TPart::PageFailEnvColumnGroups [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead2Slow [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc1Slow >> TPart::ForwardEnvColumnGroups [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> Bloom::Dipping [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TVersions::Wreck2 [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Analyze] >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc1Slow [GOOD] >> test.py::test[window-yql-14277-default.txt-Analyze] >> TPart::Versions [GOOD] >> TPart::ManyVersions >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2024-11-21T08:51:22.016439Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652510562999544:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:22.016560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:22.024305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652511966029154:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004563/r3tmp/tmpRhOaqG/pdisk_1.dat 2024-11-21T08:51:22.059510Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache 
created 2024-11-21T08:51:22.060645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:22.061500Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:22.089627Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26346, node 1 2024-11-21T08:51:22.115523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.115546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.117386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:22.136413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004563/r3tmp/yandexkafXYA.tmp 2024-11-21T08:51:22.136428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004563/r3tmp/yandexkafXYA.tmp 2024-11-21T08:51:22.136485Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004563/r3tmp/yandexkafXYA.tmp 2024-11-21T08:51:22.136522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:22.156359Z INFO: TTestServer started on Port 2450 GrpcPort 26346 2024-11-21T08:51:22.156197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:22.156237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:22.156661Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:22.156943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2450 PQClient connected to localhost:26346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:22.180572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:51:22.211363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
waiting... 2024-11-21T08:51:22.405222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652511966029356:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.405244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652511966029331:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.405281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:22.406602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T08:51:22.419179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652511966029360:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T08:51:22.447920Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652510563000479:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.448049Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2JkYmI3YjgtZWM4ZmY5ZTEtNzc3Y2M5MGQtZTFjMzZkNzQ=, ActorId: [1:7439652510563000429:2299], ActorState: ExecuteState, TraceId: 01jd6ypf5fadg4a3gtr38k22ax, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.448740Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.449569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.472769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.509071Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652511966029430:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:22.509250Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGVmMzU4MjktOWI4NjhlMDUtNTRhOWRiYzYtMWFlZTAzOGU=, ActorId: [2:7439652511966029329:2280], ActorState: ExecuteState, TraceId: 01jd6ypf512vayqgk3y80e2vr5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:22.509555Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:22.558307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:51:22.629630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ypfb9ddn483n0hgrchyea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk2NmQ3ZjctMjdiNTcxZmUtYjA2NWNjMzUtMjk0ZmYzNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652510563000894:3019] 2024-11-21T08:51:27.016868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652510562999544:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:27.016910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:51:27.024349Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652511966029154:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:27.024373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1000BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:25.285313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:25.285339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.285345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:25.285368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:25.285396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:25.285400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:25.285409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.285487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:25.294227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:25.294249Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.296157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:25.296264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:25.296288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:25.298961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:25.299026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:25.299168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.299347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.299895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.300138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.300147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.300157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:25.300162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.300166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:25.300201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:25.303174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.315093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:25.315158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.315206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:25.315241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:25.315246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.315827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.315848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:25.315885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.315892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:25.315895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:25.315899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:25.316166Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.316172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:25.316175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:25.316418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.316424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.316427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.316432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.316774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:25.317144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:25.317202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:25.317362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.317396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:25.317404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.317458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:25.317463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.317487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:25.317500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.317915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.317924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.317956Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.317959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:25.318025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.318031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:25.318042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:25.318047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.318052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:25.318057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.318060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:25.318063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:25.318072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:25.318075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:25.318078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214307Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214313Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T08:51:32.214318Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:32.214354Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214359Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214363Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T08:51:32.214367Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T08:51:32.214402Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214438Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214442Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214447Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:51:32.214451Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:32.214492Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214559Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214563Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214570Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:32.214575Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:32.214607Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214611Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214615Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:32.214619Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:32.214627Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:32.214654Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.214658Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:32.214661Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:32.215234Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.215255Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:32.215268Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.215317Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:51:32.215352Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.215365Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.215371Z node 23 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:32.215381Z node 23 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T08:51:32.215385Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:32.215390Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T08:51:32.215395Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:32.215400Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:32.215404Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:32.215426Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:32.215431Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:32.215434Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:32.215440Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:32.215446Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:32.215450Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:32.215455Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:51:32.215459Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T08:51:32.215463Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T08:51:32.215472Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T08:51:32.215548Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:32.215554Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:51:32.215565Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:51:32.215570Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:51:32.215576Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:51:32.216014Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.216034Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.216046Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.216056Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.216061Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:32.216550Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:32.216624Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:32.216630Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:32.216686Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:32.216703Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:32.216708Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [23:819:2713] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:32.216772Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:32.216802Z node 23 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 43us result status StatusPathDoesNotExist 2024-11-21T08:51:32.216846Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10Inflight1BlobSize1000 >> Bloom::Basics [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TVersions::Wreck2Reverse >> Bloom::Stairs >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> 
test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Analyze] [GOOD] >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> TIterator::Basics >> Acceleration::TestDelayMultiplierPut4Plus2Block1Slow >> test.py::test[window-yql-14277-default.txt-Analyze] [GOOD] >> TPart::ManyVersions [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 >> Bloom::Stairs [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Debug] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> Acceleration::TestDelayMultiplierPut4Plus2Block1Slow [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> TVersions::Wreck2Reverse [GOOD] >> TIterator::Basics [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc1Slow [GOOD] >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> BlobStorageSync::TestSyncLogCuttingBlock4Plus2 [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1000BlobSize1000 [GOOD] >> TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc2Slow >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> test.py::test[window-yql-14277-default.txt-Debug] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc2Slow >> TPart::ManyDeltas [GOOD] >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_BTreeIndex >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Debug] [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> BuildStatsBTreeIndex::Single [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight1BlobSize1000 >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TVersions::Wreck1 >> BuildStatsBTreeIndex::Single_History >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> BlobStorageSync::SyncWhenDiskGetsDown [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> BurstDetection::TestPutEvenly >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_History >> TCdcStreamWithRebootsTests::DropStream[PipeResets] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] >> test.py::test[window-yql-14277-default.txt-Debug] [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc2Slow [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize2000000 >> TChargeBTreeIndex::OneNode >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc2Slow [GOOD] >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TVersions::Wreck1 [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight1BlobSize1000 [GOOD] >> 
BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsBTreeIndex::Single_History [GOOD] >> TPartBtreeIndexIteration::NoNodes [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block2Slow >> TIterator::External [GOOD] >> TPartBtreeIndexIteration::FewNodes >> BurstDetection::TestPutEvenly [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> test.py::test[window-yql-14277-default.txt-ForceBlocks] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize2000000 [GOOD] >> Acceleration::TestDelayMultiplierPut4Plus2Block2Slow >> TFlatTableExecutorIndexLoading::Scan_FlatIndex >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] [GOOD] >> TVersions::Wreck1Reverse >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests2Inflight2BlobSize1000 >> BurstDetection::TestPutBurst >> TPartBtreeIndexIteration::FewNodes [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block2Slow [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BlobPatching::Mirror3of4 >> BuildStatsBTreeIndex::Single_Groups_Slices >> NFwd_TBTreeIndexCache::End [GOOD] >> test.py::test[window-yql-14277-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14277-default.txt-Plan] [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize2000000 >> Acceleration::TestDelayMultiplierPut4Plus2Block2Slow [GOOD] >> Acceleration::TestDelayMultiplierGetMirror3dc1Slow >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> TIterator::GetKey [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BurstDetection::TestPutBurst [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> test.py::test[window-yql-14277-default.txt-Results] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight1BlobSize2000000 >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10Inflight10BlobSize1000 >> BlobPatching::Mirror3of4 [GOOD] >> BurstDetection::TestOverlySensitive >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> test.py::test[window-yql-14277-default.txt-Results] [GOOD] >> Acceleration::TestDelayMultiplierGetMirror3dc1Slow [GOOD] >> Acceleration::TestDelayMultiplierGet4Plus2Block1Slow >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> BlobPatching::Mirror3dc >> BurstDetection::TestOverlySensitive [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight10BlobSize1000 >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> BlobPatching::Mirror3dc [GOOD] >> CompatibilityInfo::BSControllerCompatible [GOOD] >> CompatibilityInfo::BSControllerIncompatible >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> Charge::Lookups [GOOD] 
>> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> CompatibilityInfo::BSControllerIncompatible [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10000Inflight100BlobSize1000 >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BlobPatching::Mirror3 >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> CompatibilityInfo::BSControllerIncompatibleWithDefault [GOOD] >> CompatibilityInfo::BSControllerSuppressCompatibilityCheck >> BlobPatching::Mirror3 [GOOD] >> CompatibilityInfo::BSControllerSuppressCompatibilityCheck [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BlobPatching::Block42 >> DBase::Affects [GOOD] >> CompatibilityInfo::BSControllerMigration [GOOD] >> BuildStatsFlatIndex::Single [GOOD] >> BlobPatching::Block42 [GOOD] >> DBase::Annex [GOOD] >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BlobPatching::None >> BlobPatching::None [GOOD] >> BlobPatching::StressBlock42 >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> DBase::DropModifiedTable [GOOD] >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> BuildStatsHistogram::Single_History_Slices [GOOD] |87.1%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] Test command err: 2024-11-21T08:51:06.373015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652440231101028:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:06.373103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fe6/r3tmp/tmpIYPAKt/pdisk_1.dat 2024-11-21T08:51:06.440081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25837, node 1 2024-11-21T08:51:06.449387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:06.449398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:06.449400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:06.449434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:51:06.473962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:06.473992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:06.475480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:06.511875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:51:06.514755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:06.547230Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:06.744564Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:06.744590Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440231101465:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:06.745211Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWNlZGYyYTYtOGJkNjJkOS02MDIwNTQ3Ny0xMTc2YzA2OQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWNlZGYyYTYtOGJkNjJkOS02MDIwNTQ3Ny0xMTc2YzA2OQ== 2024-11-21T08:51:06.745300Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWNlZGYyYTYtOGJkNjJkOS02MDIwNTQ3Ny0xMTc2YzA2OQ==, ActorId: [1:7439652440231101466:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:06.747173Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:06.747178Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:06.748372Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:06.748396Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440231101465:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:06.748404Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440231101465:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:06.748408Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440231101465:2298], Successfully finished 2024-11-21T08:51:06.748444Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:06.752612Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.753431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:06.753980Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:06.754602Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:06.755731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:06.832262Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.833298Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440231101491:2284], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:06.834115Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy 2024-11-21T08:51:06.834168Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, ActorId: [1:7439652440231101549:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:06.834196Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:06.834200Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:06.834218Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440231101551:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:06.834227Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, ActorId: [1:7439652440231101549:2300], ActorState: ReadyState, TraceId: 01jd6ynzyj8376vhsgtv24y859, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652440231101548:2321] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:06.834243Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652440231101549:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy 2024-11-21T08:51:06.834254Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652440231101552:2302], Database: /Root, Start database fetching 2024-11-21T08:51:06.836841Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652440231101552:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:06.836862Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T08:51:06.836878Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652440231101562:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, Start pool fetching 2024-11-21T08:51:06.836886Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440231101563:2304], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:06.836923Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440231101551:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:06.836929Z node 1 :KQP_WORKLOAD_SERVICE 
DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T08:51:06.836932Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:06.836980Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440231101564:2305], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:06.840172Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440231101563:2304], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:06.840231Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440231101564:2305], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:06.840255Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652440231101562:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, Pool info successfully resolved 2024-11-21T08:51:06.840274Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy 2024-11-21T08:51:06.840310Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440231101564:2305], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439652440231101549:2300], session id: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy 2024-11-21T08:51:06.840321Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440231101564:2305], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7439652440231101549:2300], session id: ydb://session/3?node_id=1&id=NTc4NzAxZjUtZDgyZDU5ODEtNzhjM2E2LWJjYTk5ZjIy, local in flight: 1 2024-11-21T08:51:06.840326Z node 1 :KQP_WORKLOAD_SERV ... 
/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ReadyState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, Sending CompileQuery request 2024-11-21T08:51:31.154971Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, acquire mvcc snapshot 2024-11-21T08:51:31.155314Z node 5 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, read snapshot result: UNAVAILABLE, step: 1732179091000, tx id: 18446744073709551615 2024-11-21T08:51:31.155335Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, ExecutePhyTx, tx: 0x0000477C3BBD7198 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:31.155340Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, Sending to Executer TraceId: 0 8 2024-11-21T08:51:31.155353Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, Created new KQP executer: [5:7439652549526983778:2621] isRollback: 0 2024-11-21T08:51:31.156271Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:51:31.156323Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1.368 QueriesCount: 2 2024-11-21T08:51:31.156372Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:31.156439Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:31.156450Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 01jd6ypqp49sm7hedq4yyn4gvf, EndCleanup, isFinal: 0 2024-11-21T08:51:31.156466Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ExecuteState, TraceId: 
01jd6ypqp49sm7hedq4yyn4gvf, Sent query response back to proxy, proxyRequestId: 49, proxyId: [5:7439652532347112895:2192] 2024-11-21T08:51:31.156607Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:31.156622Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T08:51:31.156629Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ReadyState, Created new KQP executer: [5:7439652549526983789:2621] isRollback: 1 2024-11-21T08:51:31.156637Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:31.156702Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T08:51:31.156712Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:31.156730Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MzY4NWQ2YTEtYWM3YjY3OTItOTBhMjQ5OWItNDIyOTM0MWI=, ActorId: [5:7439652549526983763:2621], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:31.313183Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E= 2024-11-21T08:51:31.313255Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:31.313318Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ReadyState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, received request, proxyRequestId: 50 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [5:7439652549526983796:2887] database: Root databaseId: /Root pool id: default 2024-11-21T08:51:31.313336Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ReadyState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, request placed into pool from cache: default 2024-11-21T08:51:31.313350Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ReadyState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Sending CompileQuery request 2024-11-21T08:51:31.319171Z node 5 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, ExecutePhyTx, tx: 0x0000477C39C55898 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:31.319194Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Sending to Executer TraceId: 0 8 2024-11-21T08:51:31.319214Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Created new KQP executer: [5:7439652549526983801:2630] isRollback: 0 2024-11-21T08:51:31.319620Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Forwarded TEvStreamData to [5:7439652549526983796:2887] 2024-11-21T08:51:31.319750Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T08:51:31.319788Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, txInfo Status: Committed Kind: Pure TotalDuration: 0.671 ServerDuration: 0.637 QueriesCount: 2 2024-11-21T08:51:31.319812Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:31.319837Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:31.319844Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, EndCleanup, isFinal: 1 2024-11-21T08:51:31.319856Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: ExecuteState, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Sent query response back to proxy, proxyRequestId: 50, proxyId: [5:7439652532347112895:2192] 2024-11-21T08:51:31.319881Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: [5:7439652549526983797:2630], ActorState: unknown state, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Cleanup temp tables: 0 2024-11-21T08:51:31.319902Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YzVmYmNhZDAtOGRiOTc4NTYtNDVmY2ZkMzUtM2RhYmIxM2E=, ActorId: 
[5:7439652549526983797:2630], ActorState: unknown state, TraceId: 01jd6ypqvhcqv69czpj8j6g5th, Session actor destroyed 2024-11-21T08:51:31.320880Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWM1ZTBjOTItMWNiYjFhNDctOGU5NGNlY2QtNjQ2ZmRlNjY=, ActorId: [5:7439652532347113326:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:31.320894Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NWM1ZTBjOTItMWNiYjFhNDctOGU5NGNlY2QtNjQ2ZmRlNjY=, ActorId: [5:7439652532347113326:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:31.320897Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWM1ZTBjOTItMWNiYjFhNDctOGU5NGNlY2QtNjQ2ZmRlNjY=, ActorId: [5:7439652532347113326:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:31.320900Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWM1ZTBjOTItMWNiYjFhNDctOGU5NGNlY2QtNjQ2ZmRlNjY=, ActorId: [5:7439652532347113326:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:31.320916Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NWM1ZTBjOTItMWNiYjFhNDctOGU5NGNlY2QtNjQ2ZmRlNjY=, ActorId: [5:7439652532347113326:2299], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2024-11-21T08:51:21.480918Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652504248650439:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:21.480974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:21.485778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652504144545695:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:21.516864Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004596/r3tmp/tmpECSSH3/pdisk_1.dat 2024-11-21T08:51:21.523494Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:51:21.524504Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:21.554586Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17612, node 1 2024-11-21T08:51:21.567522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004596/r3tmp/yandexlxiWpZ.tmp 2024-11-21T08:51:21.567532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004596/r3tmp/yandexlxiWpZ.tmp 2024-11-21T08:51:21.571179Z INFO: TTestServer started on Port 6531 GrpcPort 17612 TClient is connected to server localhost:6531 PQClient connected to localhost:17612 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:21.589201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:21.591085Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004596/r3tmp/yandexlxiWpZ.tmp 2024-11-21T08:51:21.591169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration waiting... 2024-11-21T08:51:21.596791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:21.619091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:21.619123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:21.619904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:21.619924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:21.620649Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:21.620785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:21.621023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:21.634299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:51:21.842852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652504248651409:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.842877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652504248651421:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.842884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.842948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652504144545896:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.842980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.843067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652504144545908:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.844000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:51:21.846817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652504248651456:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.846845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:21.851785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T08:51:21.852060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652504248651423:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:51:21.852156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652504144545910:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:51:21.887159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:51:21.914818Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652504248651620:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:21.914929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjgyNTRkOTItYzgwMGE4ZDctNThmMzZiZGMtOTVkNDRlNTU=, ActorId: [1:7439652504248651406:2300], ActorState: ExecuteState, TraceId: 01jd6ypekj5gytvne7m7k09gkh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:21.915605Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:21.946621Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652504144545953:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:51:21.946696Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDU1OWU0NWQtMzVhZjYzNjMtZDQ1MTRiY2ItM2E0M2E1MGI=, ActorId: [2:7439652504144545894:2280], ActorState: ExecuteState, TraceId: 01jd6ypekhf6mg18p9t8y7f6h6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:51:21.946933Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:51:21.962798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:51:22.059598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:51:22.115928Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd6ypetz5h4axy0trh8mkt08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRjOTU0MDItYjZlZGQ2Zi1iMTU3YTdmZC03MzA2MGYyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: defau ... 
INFO: [PQ: 72075186224037898] doesn't have tx writes info 2024-11-21T08:51:28.207074Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7439652534313424413:2516] 2024-11-21T08:51:28.207806Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7439652534313424414:2517] 2024-11-21T08:51:28.208374Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7439652534313424416:2518] 2024-11-21T08:51:28.208943Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7439652534313424417:2519] 2024-11-21T08:51:28.210908Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:28.210923Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] doesn't have tx writes info 2024-11-21T08:51:28.210936Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:28.210940Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:51:28.211066Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:28.211076Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2024-11-21T08:51:28.211099Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:51:28.211107Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] doesn't have tx writes info 2024-11-21T08:51:28.211141Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7439652534209318023:2433] 2024-11-21T08:51:28.211154Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7439652534209318025:2435] 2024-11-21T08:51:28.211640Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7439652534209318024:2434] 2024-11-21T08:51:28.211766Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7439652534209318026:2436] 2024-11-21T08:51:28.212089Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [2:7439652534209318030:2438] 2024-11-21T08:51:28.209332Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [1:7439652534313424419:2520] 2024-11-21T08:51:28.209514Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [1:7439652534313424420:2521] 2024-11-21T08:51:28.210050Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7439652534313424422:2522] 2024-11-21T08:51:28.210081Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7439652534313424423:2523] 2024-11-21T08:51:28.210880Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7439652534313424413:2516] 2024-11-21T08:51:28.210881Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7439652534313424416:2518] 2024-11-21T08:51:28.211256Z node 1 :PERSQUEUE INFO: [PQ: 
72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7439652534313424417:2519] 2024-11-21T08:51:28.211306Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7439652534313424414:2517] 2024-11-21T08:51:28.211689Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [1:7439652534313424419:2520] 2024-11-21T08:51:28.211701Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [1:7439652534313424420:2521] 2024-11-21T08:51:28.211989Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7439652534313424423:2523] 2024-11-21T08:51:28.212061Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7439652534313424422:2522] 2024-11-21T08:51:28.212347Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7439652534209318029:2437] 2024-11-21T08:51:28.212578Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7439652534209318032:2439] 2024-11-21T08:51:28.212998Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2024-11-21T08:51:28.213008Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2024-11-21T08:51:28.213011Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 1, Generation 2 2024-11-21T08:51:28.213122Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7439652534209318023:2433] 2024-11-21T08:51:28.213496Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2024-11-21T08:51:28.213641Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7439652534209318025:2435] 2024-11-21T08:51:28.213653Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7439652534209318026:2436] 2024-11-21T08:51:28.213938Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7439652534209318029:2437] 2024-11-21T08:51:28.214012Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7439652534209318032:2439] 2024-11-21T08:51:28.214188Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [2:7439652534209318030:2438] 2024-11-21T08:51:28.214266Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 
'rt3.dc1--topic-x' partition 3 generation 2 [2:7439652534209318024:2434] 2024-11-21T08:51:28.214667Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2024-11-21T08:51:28.214676Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2024-11-21T08:51:28.214677Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 2, Generation 2 2024-11-21T08:51:28.214679Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2024-11-21T08:51:29.152839Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T08:51:29.152897Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2024-11-21T08:51:29.152915Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439652538608391982:2549]: Bootstrap 2024-11-21T08:51:29.153086Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652538608391982:2549]: Request location 2024-11-21T08:51:29.153258Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391984:2550] connected; active server actors: 1 2024-11-21T08:51:29.153341Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2024-11-21T08:51:29.153399Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652538608391982:2549]: Got location Got response: 2024-11-21T08:51:29.153711Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391984:2550] disconnected; active server actors: 1 2024-11-21T08:51:29.153721Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391984:2550] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2024-11-21T08:51:29.154651Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T08:51:29.154679Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2024-11-21T08:51:29.154695Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439652538608391985:2551]: Bootstrap 2024-11-21T08:51:29.154836Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652538608391985:2551]: Request location 2024-11-21T08:51:29.155002Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391988:2553] connected; active server actors: 1 2024-11-21T08:51:29.155104Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2024-11-21T08:51:29.155179Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439652538608391985:2551]: Got location 2024-11-21T08:51:29.155294Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391988:2553] disconnected; active server actors: 1 2024-11-21T08:51:29.155301Z node 2 :PERSQUEUE_READ_BALANCER INFO: 
[72075186224037900][rt3.dc1--topic-x] pipe [1:7439652538608391988:2553] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732179088 nanos: 211000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2024-11-21T08:51:29.156040Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T08:51:29.156065Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2024-11-21T08:51:29.156074Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439652538608391990:2554]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> BuildStatsHistogram::Ten_Mixed >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Crossed >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TFlatTableExecutorIndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_BTreeIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2024-11-21T08:51:05.713793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652435742041036:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00400a/r3tmp/tmpdjW8yb/pdisk_1.dat 2024-11-21T08:51:05.730897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 27918, node 1 2024-11-21T08:51:05.765016Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:05.773215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:05.773224Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:05.773226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:05.773268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4170 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:51:05.828886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:05.828914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:05.829983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:05.864942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:05.888308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:05.920805Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:06.188390Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:06.189143Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmMyNzNhZDYtOGEwMzY4YzYtNGJkYmQyNmUtMWRkZGQwY2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmMyNzNhZDYtOGEwMzY4YzYtNGJkYmQyNmUtMWRkZGQwY2Q= 2024-11-21T08:51:06.191815Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:06.191829Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440037008756:2297], Start check tables existence, number paths: 2 2024-11-21T08:51:06.191833Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:06.191842Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:06.191861Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmMyNzNhZDYtOGEwMzY4YzYtNGJkYmQyNmUtMWRkZGQwY2Q=, ActorId: [1:7439652440037008757:2298], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:06.192713Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440037008756:2297], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:06.192727Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440037008756:2297], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:06.192732Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: 
[WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652440037008756:2297], Successfully finished 2024-11-21T08:51:06.192760Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:06.193623Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.194673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:06.195680Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:06.196179Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:06.197458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:06.279596Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.280836Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652440037008783:2284], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:06.284707Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ= 2024-11-21T08:51:06.284800Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ=, ActorId: [1:7439652440037008842:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:06.284856Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ=, ActorId: [1:7439652440037008842:2300], ActorState: ReadyState, TraceId: 01jd6ynzdc6gvhwhhhjm1wf2d8, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652440037008841:2322] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:06.284881Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:06.284884Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:06.284897Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652440037008842:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ= 2024-11-21T08:51:06.284907Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440037008844:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:06.284929Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652440037008845:2302], Database: /Root, Start database fetching 2024-11-21T08:51:06.285460Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652440037008845:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:06.285500Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440037008844:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:06.285506Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T08:51:06.285514Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T08:51:06.285517Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:06.285580Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652440037008855:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: 
ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ=, Start pool fetching 2024-11-21T08:51:06.285584Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440037008857:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:06.285602Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440037008856:2304], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:06.285788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652440037008857:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:06.285795Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652440037008855:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ=, Pool info successfully resolved 2024-11-21T08:51:06.285800Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ= 2024-11-21T08:51:06.285810Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652440037008856:2304], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439652440037008842:2300], session id: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ= 2024-11-21T08:51:06.285821Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MWQ0YmRmMGQtNjFlNTJkMDAtMTk2OGU0ZGQtYmQyZjRkNTQ= 2024-11-21T08:51:06.285829Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:06.285833Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service tables creation 2024-11-21T08:51:06.285897Z node 1 ... 4976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:51:32.384435Z node 6 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:32.622666Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:32.622706Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439652552251736573:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:32.622761Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:32.622773Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:32.622779Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:32.623240Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY= 2024-11-21T08:51:32.623416Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:32.623473Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439652552251736573:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:32.623492Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439652552251736573:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:32.623496Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439652552251736573:2298], Successfully finished 2024-11-21T08:51:32.623839Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:32.624122Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:32.624862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:32.625175Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:32.625206Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:32.626674Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:32.709146Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:32.710067Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652552251736591:2283], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:32.710724Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI= 2024-11-21T08:51:32.710777Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:32.710815Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:32.710823Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:32.710840Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652552251736651:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:32.710853Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: ReadyState, TraceId: 01jd6yps763s8h5p3p410a4vak, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7439652552251736648:2320] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:32.710864Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7439652552251736649:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI= 2024-11-21T08:51:32.710882Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652552251736652:2302], Database: /Root, Start database fetching 2024-11-21T08:51:32.710949Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439652552251736652:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:32.710972Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T08:51:32.710986Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439652552251736661:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, Start pool fetching 2024-11-21T08:51:32.710997Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652552251736662:2304], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:32.711130Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652552251736662:2304], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:32.711130Z node 
6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652552251736651:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:32.711139Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T08:51:32.711142Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:32.711147Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439652552251736661:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, Pool info successfully resolved 2024-11-21T08:51:32.711185Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI= 2024-11-21T08:51:32.711191Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652552251736665:2305], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:32.711205Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI= 2024-11-21T08:51:32.711220Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: ExecuteState, TraceId: 01jd6yps763s8h5p3p410a4vak, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2024-11-21T08:51:32.711267Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: ExecuteState, TraceId: 01jd6yps763s8h5p3p410a4vak, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2024-11-21T08:51:32.711281Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7439652552251736649:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI= 2024-11-21T08:51:32.711289Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: CleanupState, TraceId: 01jd6yps763s8h5p3p410a4vak, EndCleanup, isFinal: 1 2024-11-21T08:51:32.711321Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: CleanupState, TraceId: 01jd6yps763s8h5p3p410a4vak, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7439652552251736208:2256] 2024-11-21T08:51:32.711329Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: unknown state, TraceId: 01jd6yps763s8h5p3p410a4vak, Cleanup temp tables: 0 2024-11-21T08:51:32.711340Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=NjdiZWM2ZjYtN2U3NmViMzAtYjI4ZmU2MzEtNzBhMWVlYzI=, ActorId: [6:7439652552251736649:2300], ActorState: unknown state, TraceId: 01jd6yps763s8h5p3p410a4vak, Session actor destroyed 2024-11-21T08:51:32.711353Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652552251736665:2305], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:32.712363Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:32.712377Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:32.712379Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:32.712383Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:32.712396Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTBmYTZhNTktYjlkYWY2OWEtMWQwYWQwNWUtMTRkNjA0ZTY=, ActorId: [6:7439652552251736589:2299], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::GetReadyStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:25.597129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:25.597153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.597158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:25.597163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2024-11-21T08:51:25.597178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:25.597183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:25.597191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.597264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:25.608373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:25.608398Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.616642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:25.616793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:25.616826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:25.623546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:25.623631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:25.623811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.624003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.631194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.631583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.631596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.631610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:25.631618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.631624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:25.631672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:25.633303Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.652754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:25.652846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain 
Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.652919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:25.652972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:25.652981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.653859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.653885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:25.653932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.653942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:25.653947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:25.653953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:25.654330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.654338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:25.654343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:25.654623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.654631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.654637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.654644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.655308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:25.655660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:25.655706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:25.655922Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.655950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:25.655957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.656012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:25.656019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.656052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:25.656064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.656436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.656446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.656490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.656496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:25.656577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.656584Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:25.656596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:25.656600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.656606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:25.656612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.656617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:25.656621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:25.656633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:25.656638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:25.656642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
tionProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:37.497830Z node 44 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:37.497836Z node 44 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 2/3 2024-11-21T08:51:37.497838Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/3 2024-11-21T08:51:37.497841Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: false 2024-11-21T08:51:37.497869Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.497873Z node 44 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:37.497985Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.497997Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.498001Z node 44 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:37.498006Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:51:37.498010Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:37.498148Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.498155Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.498157Z node 44 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:37.498160Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:37.498162Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:37.498169Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:37.498316Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 219 } } 2024-11-21T08:51:37.498321Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:37.498332Z node 44 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 219 } } 2024-11-21T08:51:37.498342Z node 44 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 219 } } 2024-11-21T08:51:37.498416Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 188978563338 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:37.498436Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:37.498447Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 188978563338 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:37.498452Z node 44 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:37.498459Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 188978563338 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:37.498467Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.498471Z node 44 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.498474Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:37.498479Z node 44 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T08:51:37.499218Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.499294Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:37.499308Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.499319Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.499362Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:37.499367Z node 44 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T08:51:37.499374Z node 44 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 3/3 2024-11-21T08:51:37.499376Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:37.499379Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:37.499382Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:37.499385Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:37.499388Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:37.499394Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:37.499398Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:37.499401Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:37.499414Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:37.499417Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:37.499419Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:37.499423Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1004 2024-11-21T08:51:37.499891Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:37.499897Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:37.499939Z node 44 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:37.499952Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:37.499955Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [44:718:2623] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:37.500002Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:37.500032Z node 44 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 36us result status StatusSuccess 2024-11-21T08:51:37.500124Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 
PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TIterator::GetKey [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:28.087505Z 00000.004 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.004 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.006 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.006 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| 
TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 9216b requested for data (10240b in total) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 10240b of static mem, Memory{10240 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 5120b of static, Memory{5120 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{static 5120b} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{6144 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{static 5120b} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} release captured by tx Res{static 5120b}, Memory{1024 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (5121b in total) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 5121b of static mem, Memory{5121 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 5121b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{1 20480b} type small_transaction 00000.007 DD| RESOURCE_BROKER: Submitted new unknown task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) priority=5 resources={0, 20480} 00000.007 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.007 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) from queue queue_default 00000.007 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.007 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{1 20480b}, Memory{0 dyn 20480} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update resource task 1 releasing 10240b, Memory{0 dyn 10240} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{1 10240b} 00000.007 DD| RESOURCE_BROKER: Update task Tx{5, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) (priority=5 type=small_transaction resources={0, 10240} resubmit=0) 00000.007 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.007 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.001192 (insert task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 10240} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{1 10240b} 00000.007 DD| TABLET_EXECUTOR: release 1024b of static tx data due to attached res 1, Memory{0 dyn 10240} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (10241b in total) 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} released on update Res{1 10240b}, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update Res{1 20480b} type small_transaction 00000.007 DD| RESOURCE_BROKER: Update cookie for task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) 00000.007 DD| RESOURCE_BROKER: Update task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) (priority=5 type=small_transaction resources={0, 20480} resubmit=1) 00000.007 EE| RESOURCE_BROKER: Assigning w ... 
00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.002 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.002 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:31.246568Z 00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.001 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.001 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 ...inserting rows 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 4832b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ...making snapshot and writing to table 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} hope 1 -> done Change{3, redo 256b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 2/8589934595, generation 0 00000.002 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, 
Compact{1.2.5, eph 1}} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 4 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch 2, 0 parts} step 5, product {1 parts epoch 2} done 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting for snapshot to complete ...borrowing snapshot 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} hope 1 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...checking rows 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.003 II| TABLET_EXECUTOR: Leader{1:2:8} suiciding, Waste{2:0, 3447b +(1, 892b), 7 trc, -892b acc} 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 349 bytes, 349 total, blobs: { [1:2:7:1:36864:38:0], [1:2:2:1:8192:209:0], [1:2:6:1:32768:102:0] } 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 1183 bytes, 1183 total, blobs: { [1:2:5:1:12288:161:0], [1:2:3:1:24576:892:0], [1:2:4:1:24576:130:0] } 00000.003 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.003 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 231b, wait} done, Waste{2:0, 3447b +(1, 892b), 7 trc} 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ...checking rows 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> retry Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} touch new 0b, 2820b lo load (2820b in total), 0b requested for data (4194304b in total) 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} postponed, 2820b, pages {1 wait, 1 load}, freshly touched 1 pages 00000.003 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:161:0] ok OK}, category 1 00000.004 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 2 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} ...restarting tablet 00000.004 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 3447b +(0, 0b), 1 trc, -892b acc} 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 231 bytes, 231 total, blobs: { [1:3:1:1:28672:231:0] } 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 247 bytes, 247 total, blobs: { [1:2:7:1:36864:38:0], [1:2:2:1:8192:209:0] } 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 291 bytes, 291 total, blobs: { [1:2:5:1:12288:161:0], [1:2:4:1:24576:130:0] } 00000.004 II| TABLET_EXECUTOR: Leader{1:4:0} activating executor 00000.004 II| TABLET_EXECUTOR: LSnap{1:4, on 4:1, 231b, wait} done, Waste{2:0, 3447b +(0, 0b), 1 trc} 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} commited cookie 2 for step 1 ...checking rows 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> retry Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} touch new 0b, 2820b lo load (2820b in total), 0b requested for data (4194304b in total) 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} 
postponed, 2820b, pages {1 wait, 1 load}, freshly touched 1 pages 00000.004 DD| TABLET_EXECUTOR: Leader{1:4:2} got result TEvResult{1 pages [1:2:5:1:12288:161:0] ok OK}, category 1 00000.005 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 2 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.005 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.005 II| TABLET_EXECUTOR: Leader{1:4:2} suiciding, Waste{2:0, 3447b +(0, 0b), 1 trc, -892b acc} 00000.005 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 4 reqs hit {4 5694b} miss {0 0b} 00000.005 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.005 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {602b, 12} 00000.005 II| FAKE_ENV: DS.1 gone, left {3678b, 6}, put {4938b, 10} 00000.005 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: All BS storage groups are stopped 00000.005 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.005 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 85}, stopped ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 
6:2167] sender: [29:216:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:142:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:146:2057] recipient: [30:144:2166] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:148:2057] recipient: [30:144:2166] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:147:2167] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:217:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:147:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:151:2057] recipient: [31:149:2171] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:153:2057] recipient: [31:149:2171] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:152:2172] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:222:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:147:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:151:2057] recipient: [32:149:2171] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:153:2057] recipient: [32:149:2171] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! 
new actor is[32:152:2172] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:222:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:148:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:152:2057] recipient: [33:150:2171] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:154:2057] recipient: [33:150:2171] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:153:2172] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:223:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:153:2057] recipient: [34:97:2132] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:156:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:157:2057] recipient: [34:155:2176] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:159:2057] recipient: [34:155:2176] !Reboot 72057594037927937 (actor [34:105:2137]) rebooted! !Reboot 72057594037927937 (actor [34:105:2137]) tablet resolver refreshed! new actor is[34:158:2177] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:228:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:153:2057] recipient: [35:97:2132] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:156:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:157:2057] recipient: [35:155:2176] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:159:2057] recipient: [35:155:2176] !Reboot 72057594037927937 (actor [35:105:2137]) rebooted! !Reboot 72057594037927937 (actor [35:105:2137]) tablet resolver refreshed! 
new actor is[35:158:2177] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:228:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:155:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:158:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:159:2057] recipient: [36:157:2177] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:161:2057] recipient: [36:157:2177] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! new actor is[36:160:2178] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:230:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:160:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:163:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:164:2057] recipient: [37:162:2182] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:166:2057] recipient: [37:162:2182] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! new actor is[37:165:2183] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:235:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:160:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:163:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:164:2057] recipient: [38:162:2182] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:166:2057] recipient: [38:162:2182] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! 
new actor is[38:165:2183] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:235:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:161:2057] recipient: [39:97:2132] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:164:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:165:2057] recipient: [39:163:2182] Leader for TabletID 72057594037927937 is [39:166:2183] sender: [39:167:2057] recipient: [39:163:2182] !Reboot 72057594037927937 (actor [39:105:2137]) rebooted! !Reboot 72057594037927937 (actor [39:105:2137]) tablet resolver refreshed! new actor is[39:166:2183] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:106:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:139:2057] recipient: [40:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> NFwd_TFlatIndexCache::End [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | 
| PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 
DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 
2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
0} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 
Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, 
[22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 
3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CompatibilityInfo::BSControllerMigration [GOOD] Test command err: RandomSeed# 7055193787264749354 2024-11-21T08:51:22.681432Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [f8fd830a4e917402] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.681472Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:1:0:35:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.682625Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [b52ceeee00c2b1f0] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.682644Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:2:0:11:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.683397Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [0b56dbe267de9c78] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.683415Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:3:0:2:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.684159Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [092d5eb40651898b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.684177Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:4:0:40:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.685059Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [4f04f1907a8955c5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.685076Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# 
[1:1:0:0:0:100:0] PatchedBlobId# [1:1:5:0:16:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.685913Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [49a74b5ec7626b6b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.685929Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:6:0:1:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.686652Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [24bc762ede064bb8] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:22.686668Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:7:0:48:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:22.687498Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [80fd0eda71a09d3e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:22.687515Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:8:0:24:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:22.688396Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [fac6c693f6236782] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.688412Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:9:0:0:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.689263Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [5eb7b9d6ff36dc0a] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.689278Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:10:0:0:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.690172Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [5befcd8d4696f228] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 
RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.690190Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:11:0:29:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.691125Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [f1f37264cbfe36e7] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.691144Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:12:0:5:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.691989Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [04919e0037c0c7ba] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.692006Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:13:0:2:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.692930Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [90770bc2a5ac2366] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.692948Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:14:0:34:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.693880Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [454fad1314a8f818] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.693896Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:15:0:10:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.694607Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [7aab5cc35d7b767f] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:22.694624Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:16:0:1:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE 
GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:22.695508Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [df77034456c44327] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:22.695525Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:17:0:39:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:22.696395Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [ee30bdfe7640f7de] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.696412Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:18:0:15:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:51:22.697286Z 7 00h00m41.410512s :BS_PROXY_GET ERROR: [dfcdd7318fcefabe] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:0:0:0:100:0] DEADLINE Size# 0 RequestedSize# 100} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:51:22.697303Z 7 00h00m41.410512s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:0:0:0:100:0] PatchedBlobId# [1:1:19:0:0:100:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE ... 
1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.887493Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.888901Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.890271Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 
Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.891639Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.893015Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.894412Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.895777Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule 
sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.897149Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2024-11-21T08:51:37.898548Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:241} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } >> TFlatTableExecutorIndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_History_FlatIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> DBase::DropModifiedTable [GOOD] Test command err: 10 parts: 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) 24000 rows, 1017 pages, 5 levels: 
(175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% 
(actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) key = (470221, 156748) value = 147190 (actual 141318 - 2% error) 10% (actual 11%) key = (564922, 188315) value = 172572 (actual 169665 - 1% error) 10% (actual 11%) key = (654781, 218268) value = 198052 (actual 196636 - 0% error) 10% (actual 11%) key = (744745, 248256) value = 223572 (actual 223623 - 0% error) 6% (actual 6%) DataSizeHistogram: 10% (actual 4%) key = (34876, 11633) value = 2051181 (actual 936371 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 4097308 (actual 3106844 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 6142376 (actual 5275531 - 4% error) 10% (actual 11%) key = (295510, 98511) value = 8302483 (actual 7560005 - 3% error) 10% (actual 11%) key = (385543, 128522) value = 10466012 (actual 9847986 - 3% error) 11% (actual 12%) key = (485089, 161704) value = 12745808 (actual 12376381 - 1% error) 10% (actual 11%) key = (574921, 191648) value = 14910864 (actual 14665877 - 1% error) 10% (actual 10%) key = (659821, 219948) value = 16952139 (actual 16831893 - 0% error) 10% (actual 11%) key = (749764, 249929) value = 19112817 (actual 19116870 - 0% error) 6% (actual 6%) 10 parts: 458 rows, 20 pages, 2 levels: (129757, 43260) (277777, 92600) (456538, 152187) (612028, 204017) (789193, 263072) 435 rows, 19 pages, 2 levels: (112543, 37522) (293158, 97727) (441385, 147136) (615934, 205319) (788878, 262967) 945 rows, 41 pages, 3 levels: (151600, 50541) (323350, 107791) (489703, 163242) (644053, 214692) (785131, 261718) 1833 rows, 78 pages, 3 levels: (158677, 52900) (306616, 102213) (475423, 158482) (640840, 213621) (793240, 264421) 3716 rows, 157 pages, 4 levels: (159202, 53075) (325612, 108545) (486964, 162329) (645289, 215104) (796189, 265404) 7459 rows, 317 pages, 4 levels: (161596, 53873) (319558, 106527) (472684, 157569) (627499, 209174) (797368, 265797) 14922 rows, 632 pages, 5 levels: (158647, 52890) (322783, 107602) (480616, 160213) (642370, 214131) (798358, 266127) 29978 rows, 1271 pages, 5 levels: (161923, 53982) (322141, 107388) (482926, 160983) (641770, 213931) (798970, 266331) 60277 rows, 2559 pages, 6 levels: (158503, 52842) (317770, 105931) (477016, 159013) (638782, 212935) (799282, 266435) 119977 rows, 5092 pages, 6 levels: (159940, 53321) (320017, 106680) (480043, 160022) (638971, 212998) (799345, 266456) Checking BTree: Touched 0% bytes, 23 pages RowCountHistogram: 11% (actual 10%) key = (80065, 26696) value = 26939 (actual 24349 - 1% error) 8% (actual 10%) key = (160273, 53432) value = 46494 (actual 48472 - 0% error) 10% (actual 9%) key = (238531, 79518) value = 71449 
(actual 71850 - 0% error) 11% (actual 10%) key = (321490, 107171) value = 97862 (actual 96825 - 0% error) 9% (actual 10%) key = (403054, 134359) value = 120541 (actual 121196 - 0% error) 9% (actual 10%) key = (482980, 161001) value = 142423 (actual 145274 - -1% error) 11% (actual 9%) key = (562504, 187509) value = 169510 (actual 169139 - 0% error) 8% (actual 10%) key = (642358, 214127) value = 189834 (actual 193159 - -1% error) 9% (actual 10%) key = (723937, 241320) value = 213159 (actual 217728 - -1% error) 11% (actual 9%) DataSizeHistogram: 9% (actual 10%) key = (78517, 26180) value = 1977474 (actual 2038537 - 0% error) 9% (actual 10%) key = (160273, 53432) value = 3961134 (actual 4131083 - 0% error) 10% (actual 9%) key = (238531, 79518) value = 6075748 (actual 6109008 - 0% error) 10% (actual 10%) key = (321490, 107171) value = 8316541 (actual 8227395 - 0% error) 9% (actual 10%) key = (403054, 134359) value = 10237629 (actual 10294360 - 0% error) 9% (actual 10%) key = (482980, 161001) value = 12091581 (actual 12337097 - -1% error) 11% (actual 9%) key = (562504, 187509) value = 14395756 (actual 14365428 - 0% error) 8% (actual 10%) key = (642358, 214127) value = 16125076 (actual 16407625 - -1% error) 9% (actual 10%) key = (723937, 241320) value = 18103285 (actual 18489967 - -1% error) 11% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79669, 26564) value = 24001 (actual 24253 - 0% error) 10% (actual 9%) key = (159577, 53200) value = 48001 (actual 48252 - 0% error) 10% (actual 10%) key = (239932, 79985) value = 72009 (actual 72267 - 0% error) 10% (actual 10%) key = (319726, 106583) value = 96023 (actual 96287 - 0% error) 10% (actual 10%) key = (400054, 133359) value = 120041 (actual 120298 - 0% error) 10% (actual 10%) key = (479776, 159933) value = 144046 (actual 144308 - 0% error) 10% (actual 10%) key = (559822, 186615) value = 168053 (actual 168314 - 0% error) 10% (actual 10%) key = (639604, 213209) value = 192056 (actual 192323 - 0% error) 10% (actual 10%) key = (719287, 239770) value = 216074 (actual 216340 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79483, 26502) value = 2038439 (actual 2060507 - 0% error) 10% (actual 10%) key = (159100, 53041) value = 4076738 (actual 4098819 - 0% error) 10% (actual 10%) key = (239569, 79864) value = 6115069 (actual 6136999 - 0% error) 10% (actual 10%) key = (319558, 106527) value = 8153647 (actual 8175425 - 0% error) 10% (actual 10%) key = (399883, 133302) value = 10192275 (actual 10214181 - 0% error) 10% (actual 10%) key = (479713, 159912) value = 12231174 (actual 12253015 - 0% error) 10% (actual 10%) key = (559510, 186511) value = 14269344 (actual 14291540 - 0% error) 10% (actual 10%) key = (639241, 213088) value = 16307225 (actual 16329282 - 0% error) 10% (actual 10%) key = (719065, 239696) value = 18345696 (actual 18367843 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 45 pages RowCountHistogram: 10% (actual 6%) key = (49852, 16625) value = 25345 (actual 15296 - 4% error) 10% (actual 11%) key = (138232, 46085) value = 49467 (actual 41866 - 3% error) 10% (actual 9%) key = (217993, 72672) value = 73622 (actual 65713 - 3% error) 10% (actual 10%) key = (301231, 100418) value = 97803 (actual 90730 - 2% error) 10% (actual 10%) key = (382450, 127491) value = 121925 (actual 115037 - 2% error) 10% (actual 11%) key = (472855, 157626) value = 146109 (actual 142266 - 1% error) 10% (actual 8%) key = (542308, 180777) value = 170141 (actual 163067 - 2% error) 10% (actual 
11%) key = (632302, 210775) value = 194246 (actual 190164 - 1% error) 10% (actual 10%) key = (718414, 239479) value = 219732 (actual 216100 - 1% error) ... (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (553, 192) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 
4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 
0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: 167 rows, 1 pages, 0 levels: () () () () () 166 rows, 1 pages, 0 levels: () () () () () 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), 
[12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] >> Acceleration::TestDelayMultiplierGet4Plus2Block1Slow [GOOD] >> Acceleration::TestDelayMultiplierGetMirror3dc2Slow >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesMirror3dc [GOOD] >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesMirror3of4 >> TFlatTableExecutorIndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_History_BTreeIndex >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] [GOOD] >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> BlobPatching::StressBlock42 [GOOD] >> BlobPatching::DiffsWithIncorectPatchedBlobPartId >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Analyze] >> BlobPatching::DiffsWithIncorectPatchedBlobPartId [GOOD] >> BlobPatching::PatchBlock42 >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> TFlatTableExecutorIndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_FlatIndex >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] >> KqpStreamLookup::ReadTableWithIndexDuringSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:32.929467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
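Aside on the BuildStatsHistogram figures above: the per-bucket "N% error" values appear to be the deviation of the estimated cumulative value from the actual one, expressed as a (truncated) percentage of the table total rather than of the bucket itself, e.g. |70 - 25| / 496 ~= 9% for the first RowCountHistogram bucket. A minimal illustrative sketch of that check, assuming a hypothetical helper that is not the test's actual code:

#include <cstdint>

// Illustrative only: relative error of a cumulative histogram estimate,
// as an integer percentage of the table total (truncated toward zero).
// This reproduces the "N% error" figures printed in the log above.
static uint32_t BucketErrorPercent(uint64_t estimated, uint64_t actual, uint64_t total) {
    const uint64_t diff = estimated > actual ? estimated - actual : actual - estimated;
    return total ? static_cast<uint32_t>(diff * 100 / total) : 0;
}

// Examples taken from the RowCountHistogram output above (total ~496 rows):
//   BucketErrorPercent(70, 25, 496)   == 9
//   BucketErrorPercent(212, 191, 496) == 4
//   BucketErrorPercent(354, 354, 496) == 0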
2024-11-21T08:51:32.929485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.929488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:32.929491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:32.929499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:32.929502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:32.929507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.929559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:32.938545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:32.938564Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:32.940147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:32.940233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:32.940260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:32.942586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:32.942725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:32.942927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.943171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.944059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.944363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.944373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.944384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:32.944388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.944393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:32.944420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:32.945563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 
2024-11-21T08:51:32.959629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:32.959703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.959749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:32.959786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:32.959793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.961470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.961502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:32.961540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.961549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:32.961554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:32.961559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:32.962050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.962064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:32.962068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:32.962443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.962454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.962460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.962467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.963078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:32.963468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:32.963526Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:32.963677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.963705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:32.963714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.963782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:32.963787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.963811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:32.963820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.964159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.964166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.964190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.964195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:32.964271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.964277Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:32.964286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:32.964289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.964293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:32.964297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.964300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:32.964302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:32.964311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:32.964315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:32.964318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:51:39.473302Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:51:39.473369Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 234 } } 2024-11-21T08:51:39.473374Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:39.473400Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 234 } } 2024-11-21T08:51:39.473411Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 234 } } 2024-11-21T08:51:39.473761Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.473777Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.473781Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:39.473785Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:51:39.473802Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:39.473919Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:39.473925Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:39.473936Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:39.473942Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 
72057594046678944 2024-11-21T08:51:39.473951Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:39.473961Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:39.473964Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:39.473969Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:39.473974Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:51:39.474150Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.474160Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.474164Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:39.474168Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:51:39.474172Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:51:39.474181Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:51:39.474884Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.474910Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.475662Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.475696Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:39.475713Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.475727Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:39.475787Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:39.475816Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:39.475823Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:51:39.475835Z node 26 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:51:39.475838Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:39.475843Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:51:39.475858Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:459:2424] message: TxId: 1003 2024-11-21T08:51:39.475864Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:39.475869Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:39.475874Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:39.475885Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:39.475889Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:39.475892Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:39.475897Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:39.475900Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:39.475903Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:39.475916Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:39.475919Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:51:39.475922Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:51:39.475947Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:51:39.476551Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:39.476566Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:653:2575] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:39.476667Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:39.476738Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 77us result status StatusSuccess 2024-11-21T08:51:39.476832Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTableExecutorIndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex >> Acceleration::TestDelayMultiplierGetMirror3dc2Slow [GOOD] >> Acceleration::TestDelayMultiplierGet4Plus2Block2Slow >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] >> Mirror3dc::GcQuorum [GOOD] >> Mirror3dcRestore::TestRestore >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:08.896004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:08.896030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:08.896036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:08.896041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:08.896055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:08.896060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:08.896069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:08.896137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:08.923869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:08.923897Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:08.944815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:08.944939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:08.944973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:08.948183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:08.948281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:08.948440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:08.948645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:08.949350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:08.949660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:08.949671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:08.949684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:08.949691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:08.949697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:08.949738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:08.951275Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:08.995412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools 
{ Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:08.995494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:08.995550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:08.995601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:08.995610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.000753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.000794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:09.000844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.000857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:09.000862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:09.000867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:09.001425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.001442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:09.001447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:09.001858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.001870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.001876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.001883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.002616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:09.007395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:09.007461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 
5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:09.007670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.007711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:09.007722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.007799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:09.007810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.007849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:09.007862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:09.008366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:09.008379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:09.008416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.008424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:09.008504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.008511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:09.008524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:09.008529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.008536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:09.008541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.008545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:09.008550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:09.008564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:09.008571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:09.008576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253306Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253312Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T08:51:39.253318Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:39.253368Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253410Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253414Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253417Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T08:51:39.253421Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T08:51:39.253473Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253477Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253481Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:51:39.253484Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:39.253527Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253596Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253600Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253604Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:39.253611Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:39.253645Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, 
cookie: 1004 2024-11-21T08:51:39.253648Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253652Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:39.253655Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:39.253663Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:39.253686Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.253690Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:39.253693Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:39.254367Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:39.254394Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:39.254411Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:51:39.254440Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:39.254587Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.254619Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:39.254625Z node 81 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:39.254636Z node 81 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T08:51:39.254639Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:39.254645Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T08:51:39.254650Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:39.254656Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:39.254660Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:39.254683Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:39.254687Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:39.254691Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:39.254696Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:39.254699Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:39.254705Z node 81 :FLAT_TX_SCHEMESHARD 
DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:39.254710Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:51:39.254714Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T08:51:39.254717Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T08:51:39.254725Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T08:51:39.254783Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.254843Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:39.254849Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:51:39.254860Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:51:39.254867Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:51:39.254872Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:51:39.255202Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.255225Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.255233Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.255237Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:39.255693Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:39.255746Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:39.255750Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:39.255801Z node 81 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:39.255815Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:39.255818Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [81:793:2699] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:39.255867Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:39.255898Z node 81 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 41us result status StatusPathDoesNotExist 2024-11-21T08:51:39.255940Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpStreamLookup::ReadTableDuringSplit >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutorKeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutorMoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutorMoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutorReboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutorRejectProbability::MaxedOutRejectProbability |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> TFlatTableExecutorRejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::SomeRejectProbability >> TFlatTableExecutorRejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::ZeroRejectProbabilityMultipleTables >> TFlatTableExecutorRejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorMemoryLimitExceeded >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TFlatTableExecutorResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxDataGC [GOOD] >> 
TFlatTableExecutorResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Analyze] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Debug] >> Acceleration::TestDelayMultiplierGet4Plus2Block2Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksGetMirror3dc1Slow >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] >> TCdcStreamWithRebootsTests::Attributes[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots] |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight1BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize2000000 >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> Acceleration::TestMaxNumOfSlowDisksGetMirror3dc1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksGet4Plus2Block1Slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorResourceProfile::TestExecutorTxHoldOnRelease [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:27.673402Z 00000.004 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.005 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:29:2061]) priority=200 resources={1, 0} 00000.007 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:29:2061]) to queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:29:2061]) from queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:29:2061]) to queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Updated 
planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.008 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:29:2061]) (release resources {1, 0}) 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.008 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.008 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.008 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.008 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.008 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.008 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.008 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.008 II| FAKE_ENV: All BS storage groups are stopped 00000.008 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.008 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:27.682811Z 00000.003 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.003 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 ...inserting rows 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 4832b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ...compacting 00000.004 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 
00000.004 II| TABLET_EXECUTOR: Leader{1:2:4} starting compaction 00000.004 II| TABLET_EXECUTOR: Leader{1:2:5} starting Scan{1 on 101, Compact{1.2.4, eph 1}} 00000.004 II| TABLET_EXECUTOR: Leader{1:2:5} started compaction 1 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.004 II| TABLET_EXECUTOR: Leader{1:2:5} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 4, product {1 parts epoch 2} done 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 4 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 3 for step 5 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} switch applied on followers, step 5 ...waiting until compacted ...making snapshot 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} hope 1 -> done Change{4, redo 64b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 2, edge 2/8589934597, generation 0 00000.004 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.004 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.004 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.004 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:8} cache hit for data request from: [2:48:2084], pageCollection [1:2:4:1:12288:161:0] 00000.005 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch 2, 1 parts} step 7, product {1 parts epoch 2} done 00000.005 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 6 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting for snapshot to complete ...borrowing snapshot 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} release 4194304b of static, 
Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 ...stopping the source tablet 00000.005 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 3377b +(3, 3962b), 9 trc, -3962b acc} ...starting the destination tablet 00000.005 II| TABLET_EXECUTOR: Leader{2:2:0} activating executor 00000.005 II| TABLET_EXECUTOR: LSnap{2:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:2} commited cookie 2 for step 1 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} hope 1 -> done Change{2, redo 0b alter 218b annex 0, ~{ } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{2:2:3} commited cookie 1 for step 2 ...loaning snapshot 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:4} commited cookie 1 for step 3 ...checking table only has cold parts 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} release 4194304b of static, Memory{0 dyn 0} ...starting scan 00000.006 II| TABLET_EXECUTOR: Leader{2:2:5} starting Scan{2 on 101, DummyScan} 00000.006 DD| TABLET_EXECUTOR: Leader{0:0:-} sending TEvGet batch 161 bytes, 161 total, blobs: { [1:2:7:1:12288:161:0] } 00000.006 DD| TABLET_EXECUTOR: Leader{2:2:5} commited cookie 8 for step 4 ...restarting tablet, iteration 1 00000.006 II| TABLET_EXECUTOR: Leader{2:2:5} suiciding, Waste{2:0, 256b +(0, 0b), 4 trc, -0b acc} 00000.006 DD| TABLET_EXECUTOR: Leader{2:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [2:2:1:1:28672:35:0] } 00000.006 DD| TABLET_EXECUTOR: 
Leader{2:3:-} sending TEvGet batch 358 bytes, 358 total, blobs: { [2:2:3:1:36864:38:0], [2:2:2:1:8192:218:0], [2:2:3:1:32768:102:0] } 00000.006 II| TABLET_EXECUTOR: Leader{2:3:0} activating executor 00000.006 II| TABLET_EXECUTOR: LSnap{2:3, on 3:1, 178b, wa ... ig MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.001 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.001 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.001 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.001 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in 
total), 19456b requested for data (20480b in total) 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{1 20480b} type small_transaction 00000.002 DD| RESOURCE_BROKER: Submitted new unknown task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) priority=5 resources={0, 20480} 00000.002 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) from queue queue_default 00000.002 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{1 20480b}, Memory{0 dyn 20480} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 10240b requested for data (30720b in total) 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{1 20480b} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update resource task 1 releasing 0b, Memory{0 dyn 20480} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{2 40960b} type small_transaction 00000.002 DD| RESOURCE_BROKER: Update task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) (priority=5 type=small_transaction resources={0, 20480} resubmit=0) 00000.002 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of 
unknown type 'small_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.002 DD| RESOURCE_BROKER: Submitted new unknown task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) priority=5 resources={0, 40960} 00000.002 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Allocate resources {0, 40960} for task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) from queue queue_default 00000.002 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.002384 to 0.007153 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{2 40960b}, Memory{0 dyn 61440} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 3 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{1 20480b} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} moving tx data from attached Res{1 20480b} to Res{2 ...} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (20481b in total) 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 4 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release Res{2 61440b}, Memory{0 dyn 0} 00000.002 DD| RESOURCE_BROKER: Update task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) (priority=5 type=medium_transaction resources={0, 61440} resubmit=0) 00000.002 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'medium_transaction' to default queue 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.002384 to 0.009537 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.002 DD| RESOURCE_BROKER: Finish task Tx{3, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) (release resources {0, 20480}) 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.009537 to 0.007153 (remove task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.002 DD| RESOURCE_BROKER: Finish task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) (release resources {0, 61440}) 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.007153 to 0.000000 (remove task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.002 II| TABLET_EXECUTOR: Leader{1:2:4} suiciding, Waste{2:0, 317b +(0, 0b), 3 trc, -0b acc} 00000.002 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.002 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.002 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.002 II| FAKE_ENV: DS.0 gone, left {180b, 3}, put {200b, 4} 00000.002 II| FAKE_ENV: DS.1 gone, left {352b, 3}, put {352b, 3} 00000.002 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.002 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.002 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 6 Left 67}, stopped >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> Acceleration::TestMaxNumOfSlowDisksGet4Plus2Block1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block1Slow >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> Mirror3dcRestore::TestRestore [GOOD] >> Mirror3of4::Compaction >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2024-11-21T08:51:40.418813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:40.419438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:40.419471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00464c/r3tmp/tmpJrGoJU/pdisk_1.dat 2024-11-21T08:51:40.524928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:40.545642Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:40.589175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:40.589217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:40.599957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:40.706782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:40.930116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:40.930142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:40.930185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:40.930845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:41.118286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:42.263935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yq181881tmpv8h8t4p734, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiMmIzYjYtMWFkYzkxNjgtOTM5NDllMzEtZjJkZWQ1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:42.276967Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yq181881tmpv8h8t4p734, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiMmIzYjYtMWFkYzkxNjgtOTM5NDllMzEtZjJkZWQ1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:42.280944Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yq181881tmpv8h8t4p734, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiMmIzYjYtMWFkYzkxNjgtOTM5NDllMzEtZjJkZWQ1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:42.326335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yq2k734w38cgzbry9tk60, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JiMDE2MmItM2Y3MWM0NjYtZTM4MzAwZjMtOGMzMDI5ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize2000000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2024-11-21T08:51:40.611939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:40.612513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:40.612549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004645/r3tmp/tmpXE5GDL/pdisk_1.dat 2024-11-21T08:51:40.726749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:40.746210Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:40.789677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:40.789721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:40.800336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:40.904878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:41.124896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:702:2586], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:41.124950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2591], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:41.124968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:41.126060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:41.318256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:716:2594], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:42.262344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yq1e41zy2a6e02cwqq6cx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzYTViMDgtZGRhMDk0ODktOGNmOTlkMzYtYzA2OWEwODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:42.333011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yq2jfb7zrc18ves4zpxpz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkxMmFkNGYtNmIwMmRjZWEtMWNlNzI4ODMtZGI3YjA5NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |87.2%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2024-11-21T08:51:07.547028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652445622989669:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:07.547091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fc4/r3tmp/tmpVewau6/pdisk_1.dat 2024-11-21T08:51:07.621822Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4060, node 1 2024-11-21T08:51:07.638300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.638312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.638314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.638359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:07.645789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.645826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.646891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.681991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:07.684828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.754533Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:07.952648Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:07.953337Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY= 2024-11-21T08:51:07.953479Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.953482Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:07.953486Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:07.954976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652445622990099:2297], Start check tables existence, number paths: 2 2024-11-21T08:51:07.955007Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.955213Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652445622990099:2297], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:07.955221Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652445622990099:2297], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:07.955226Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652445622990099:2297], Successfully finished 2024-11-21T08:51:07.955245Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:07.956046Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: 
sample_pool_id, Start pool creating 2024-11-21T08:51:07.956701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:07.957148Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:07.957659Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:07.958690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:08.008070Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:08.008939Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652445622990126:2286], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:08.009304Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652449917957480:2324], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:08.009596Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652449917957480:2324], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:08.010745Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:08.010767Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:08.010773Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:08.010774Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:08.010793Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTRmNWEzZjgtM2ZiMWM0MTUtZjdkZWE4MzktOWIyZWJjYzY=, ActorId: [1:7439652445622990100:2298], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:08.296550Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652449053672236:2073];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fc4/r3tmp/tmp91EBw0/pdisk_1.dat 2024-11-21T08:51:08.310454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:08.328756Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12202, node 2 2024-11-21T08:51:08.364874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:08.364889Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:08.364891Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:08.364931Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:08.388471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:08.388501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:08.392651Z node 
2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:08.413839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:08.416455Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:08.444944Z node 2 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:08.688263Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:08.688296Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652449053672770:2296], Start check tables existence, number paths: 2 2024-11-21T08:51:08.688690Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=NTVlYTg0ODEtMjdkNDkxYTItZjJiMmJ ... 
ession closed due to explicit close event 2024-11-21T08:51:42.470621Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NDE4NDllOTItYmRmMWVjNTQtZmYzMWQyODMtMTk0Yzk2NGU=, ActorId: [6:7439652463544520173:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:42.470624Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDE4NDllOTItYmRmMWVjNTQtZmYzMWQyODMtMTk0Yzk2NGU=, ActorId: [6:7439652463544520173:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:42.470626Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDE4NDllOTItYmRmMWVjNTQtZmYzMWQyODMtMTk0Yzk2NGU=, ActorId: [6:7439652463544520173:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:42.470638Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NDE4NDllOTItYmRmMWVjNTQtZmYzMWQyODMtMTk0Yzk2NGU=, ActorId: [6:7439652463544520173:2299], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:42.481127Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:42.481203Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, txInfo Status: Committed Kind: ReadWrite TotalDuration: 12.946 ServerDuration: 12.902 QueriesCount: 2 2024-11-21T08:51:42.481241Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:42.481277Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:42.481285Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, EndCleanup, isFinal: 0 2024-11-21T08:51:42.481300Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2r4ag4v3gh2pswqkmxd, Sent query response back to proxy, proxyRequestId: 464, proxyId: [6:7439652463544519542:2190] 2024-11-21T08:51:42.481423Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, TxId: 2024-11-21T08:51:42.481452Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- 
TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T08:51:42.481577Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, received request, proxyRequestId: 465 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [6:7439652596688515062:4749] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:42.481586Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, request placed into pool from cache: default 2024-11-21T08:51:42.481599Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Sending CompileQuery request 2024-11-21T08:51:42.481808Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, ExecutePhyTx, tx: 0x0000571721083718 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:42.481828Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Sending to Executer TraceId: 0 8 2024-11-21T08:51:42.481842Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Created new KQP executer: [6:7439652596688515065:4742] isRollback: 0 2024-11-21T08:51:42.483631Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T08:51:42.483654Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 
01jd6yq2rh5vbyjt711116a3pr, ExecutePhyTx, tx: 0x0000571721083798 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:42.483800Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:42.483841Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, txInfo Status: Committed Kind: ReadOnly TotalDuration: 2.092 ServerDuration: 2.039 QueriesCount: 2 2024-11-21T08:51:42.483878Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:42.483893Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:42.483898Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, EndCleanup, isFinal: 0 2024-11-21T08:51:42.483913Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ExecuteState, TraceId: 01jd6yq2rh5vbyjt711116a3pr, Sent query response back to proxy, proxyRequestId: 465, proxyId: [6:7439652463544519542:2190] 2024-11-21T08:51:42.483977Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, TxId: 2024-11-21T08:51:42.483996Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, TxId: 2024-11-21T08:51:42.484019Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652463544520568:2352], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T08:51:42.484034Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:42.484039Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:42.484040Z node 6 
:KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: ReadyState, EndCleanup, isFinal: 1
2024-11-21T08:51:42.484043Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: unknown state, Cleanup temp tables: 0
2024-11-21T08:51:42.484055Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWJiOTI2YzQtMWZlODFmOWMtYjdlMjQ4ZmItY2VjZjgxZmY=, ActorId: [6:7439652596688515029:4742], ActorState: unknown state, Session actor destroyed
2024-11-21T08:51:42.765155Z node 7 :BS_PROXY_PUT ERROR: [42e4d3d445937662] Result# TEvPutResult {Id# [72075186224037889:1:552:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
>> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 [GOOD]
>> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000
>> TCdcStreamWithRebootsTests::InitialScan[TabletReboots]
>> TVersions::Wreck0Reverse [GOOD]
>> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD]
>> test.py::test[ansi_idents-string_escaping-default.txt-Analyze]
>> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize2000000 [GOOD]
>> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize2000000
|87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest
>> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets]
>> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block1Slow [GOOD]
>> TCdcStreamWithRebootsTests::DropStream[PipeResets] [GOOD]
>> TChargeBTreeIndex::OneNode_History [GOOD]
>> TChargeBTreeIndex::OneNode_Groups_History
|87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|87.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log}
|87.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|87.2%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD]
Test command err:
SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29301
00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.029722Z
00000.004 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.004 II| FAKE_ENV: Starting storage for BS group 0
00000.004 II| FAKE_ENV: Starting storage for BS group 1
00000.004 II| FAKE_ENV: Starting storage for BS group 2
00000.004 II| FAKE_ENV: Starting storage for BS group 3
00000.005 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU
00000.005 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU
00000.005 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU
...compacting ...waiting until compacted
Counters: Active:5748446/8388608, Passive:131, MemLimit:0
Counters: Active:5748446/8388608, Passive:131, MemLimit:104857600
Counters: Active:5748446/6291456, Passive:131, MemLimit:6291456
00000.148 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.150 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 121 reqs hit {0 0b} miss {242 21422038b}
00000.150 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.150 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308}
00000.150 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314}
00000.157 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.157 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.157 II| FAKE_ENV: All BS storage groups are stopped
00000.157 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.157 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped
00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.189105Z
00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.001 II| FAKE_ENV: Starting storage for BS group 0
00000.001 II| FAKE_ENV: Starting storage for BS group 1
00000.001 II| FAKE_ENV: Starting storage for BS group 2
00000.001 II| FAKE_ENV: Starting storage for BS group 3
00000.002 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU
00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU
00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU
...compacting ...waiting until compacted
Counters: Active:5748446/8388608, Passive:131, MemLimit:0
00000.116 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 104857600 ReplacementPolicy: ThreeLeveledLRU
Counters: Active:5748446/8388608, Passive:131, MemLimit:0
00000.116 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 2097152 ReplacementPolicy: ThreeLeveledLRU
Counters: Active:5748446/2097152, Passive:131, MemLimit:0
00000.117 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 0 ReplacementPolicy: ThreeLeveledLRU
Counters: Active:1028127/0, Passive:131, MemLimit:1048576
00000.117 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.119 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 121 reqs hit {0 0b} miss {242 21422038b} 00000.119 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.119 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308} 00000.119 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314} 00000.119 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.119 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.119 II| FAKE_ENV: All BS storage groups are stopped 00000.119 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.120 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.310411Z 00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:8311925/8388608, Passive:131, MemLimit:0 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:8311925/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312827/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 00000.152 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.153 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57705781b} 00000.153 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.153 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.153 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.153 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.153 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.153 II| FAKE_ENV: All BS storage groups are stopped 00000.153 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.153 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.465929Z 00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:8313376/8388608, Passive:131, MemLimit:0 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313376/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 Counters: 
Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 00000.154 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.155 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 450 reqs hit {0 0b} miss {453 39564846b} 00000.156 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.156 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.156 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.156 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.156 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.156 II| FAKE_ENV: All BS storage groups are stopped 00000.156 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.156 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.623896Z 00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ClockPro 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro ...compacting ...waiting until compacted Counters: Active:8210829/8388608, Passive:131, MemLimit:0 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 00000.180 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.181 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57297397b} 00000.182 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.182 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00000.182 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00000.182 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.182 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.182 II| FAKE_ENV: All BS storage groups are stopped 00000.182 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00000.182 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T08:51:29.808026Z 00000.001 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.002 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until 
compacted 00000.038 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO 00000.038 II| TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.038 II| TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.039 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.039 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 12 reqs hit {0 0b} miss {13 616620b} 00000.039 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.039 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.039 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10305919b, 107} 00000.039 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.039 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.039 II| FAKE_ENV: All BS storage groups are stopped 00000.039 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.039 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block1Slow [GOOD] Test command err: RandomSeed# 1115358062888726937 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 
644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK 
ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog ... 
o_large>256b} BlockedGeneration# 0} 1970-01-01T00:02:42.960512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:3] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:42.960512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:4] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:43.860512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:5] NODATA} BlockedGeneration# 0} 1970-01-01T00:02:43.860512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:5] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:47.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:2] OK Size# 1024 FullDataSize# 1024 PayloadId# 0 Data# 1024b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:1] OK Size# 1024 FullDataSize# 1024 PayloadId# 0 Data# 1024b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:1] NODATA} {[1:1:1:1:2:1024:2] NODATA} {[1:1:1:1:2:1024:3] NODATA} {[1:1:1:1:2:1024:4] NODATA} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:1] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:2] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:3] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:4] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:1] NODATA} {[1:1:1:1:2:1024:2] NODATA} {[1:1:1:1:2:1024:3] NODATA} {[1:1:1:1:2:1024:4] NODATA} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:1] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:2] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:3] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVGetResult QueryResult Status# OK {[1:1:1:1:2:1024:4] OK Size# 256 FullDataSize# 1024 PayloadId# 0 Data# 256b} BlockedGeneration# 0} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# 
PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# 
[1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 
}}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:38.536274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:38.536294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:38.536299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:38.536303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:38.536314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:38.536317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:38.536323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:38.536395Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:38.545098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:38.545113Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:38.546687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:38.546752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:38.546770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:38.548652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:38.548703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:38.548793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.548925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:38.549398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.549575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:38.549580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.549588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:38.549592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:38.549595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:38.549622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:38.550445Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:38.563138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:38.563203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.563242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:38.563280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:38.563288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.563795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.563815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:38.563844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.563851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:38.563855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:38.563859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:38.564164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.564174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:38.564179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:38.564527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.564537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.564542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.564548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:38.565055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:38.565450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:38.565496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:38.565658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.565680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:38.565686Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.565733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:38.565740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.565785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:38.565796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:38.566186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:38.566195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:38.566237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.566242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:38.566305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.566312Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:38.566322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:38.566327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:38.566332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:38.566337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:38.566342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:38.566346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:38.566356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:38.566362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:38.566365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
age: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:51:44.551637Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:44.551644Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:44.551669Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.551728Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.551734Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:44.551740Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:51:44.551746Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:44.551920Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.552113Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.552125Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:44.552131Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:51:44.552137Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:44.552286Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:44.552294Z node 22 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:44.552302Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:44.552312Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left 
await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.552316Z node 22 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.552324Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:44.552330Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:51:44.552435Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.552439Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:44.552444Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:44.552448Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:44.552457Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:44.552510Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.552513Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:44.552515Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:44.552950Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.552984Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.552995Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:44.553362Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.553394Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:51:44.553429Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.553475Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.553479Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.553498Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.553503Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:44.553511Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 3/3 2024-11-21T08:51:44.553514Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready 
parts: 3/3 2024-11-21T08:51:44.553517Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:44.553520Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:44.553524Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:44.553527Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:44.553541Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:44.553547Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:44.553549Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:44.553552Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:44.553554Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:44.553556Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:44.553561Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:44.553596Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:44.553599Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:51:44.553607Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:51:44.553611Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:51:44.553614Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:44.554042Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:44.554084Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:44.554087Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:44.554127Z node 22 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:44.554141Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:44.554144Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:747:2651] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:44.554191Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:44.554215Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 32us result status StatusPathDoesNotExist 2024-11-21T08:51:44.554240Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets] [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[window-yql-14277-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Analyze] [GOOD] |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] Test command err: 2024-11-21T08:51:05.853353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652436875724152:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:05.853713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004007/r3tmp/tmpotrjOt/pdisk_1.dat 2024-11-21T08:51:05.934580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24759, node 1 2024-11-21T08:51:05.954348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:05.954367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:05.954369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:05.954409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:05.954720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2024-11-21T08:51:05.954747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:05.957852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:06.000968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:06.020540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:06.058731Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:06.330464Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:06.330516Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:06.330520Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:06.331186Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE= 2024-11-21T08:51:06.333606Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:06.333632Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652441170692029:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:06.333660Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [1:7439652441170692030:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:06.334414Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652441170692029:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:06.334423Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652441170692029:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:06.334428Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] 
ActorId: [1:7439652441170692029:2298], Successfully finished 2024-11-21T08:51:06.334448Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:06.338566Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.339306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:06.339838Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:06.340564Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:06.341598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:06.411345Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:06.412662Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652441170692048:2285], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:06.412797Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T08:51:06.412809Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2024-11-21T08:51:06.412840Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652441170692107:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:06.413222Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652441170692107:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:06.413239Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2024-11-21T08:51:06.413243Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:06.413291Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652441170692116:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:06.413488Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652441170692116:2301], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:06.414438Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:06.414448Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:06.414481Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [1:7439652441170692030:2299], ActorState: ReadyState, TraceId: 01jd6ynzhe33xkmgr8t545wy9g, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:06.420255Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652441170692128:2303], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:06.444377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652441170692128:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:06.447404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:06.448067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:51:06.449654Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652441170692116:2301], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:06.449676Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652441170692116:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool config has changed, queue size: -1, in flight limit: 42 2024-11-21T08:51:06.450025Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [1:7439652441170692030:2299], ActorState: ExecuteState, TraceId: 01jd6ynzhe33xkmgr8t545wy9g, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439652441170692129:2299] WorkloadServiceCleanup: 0 2024-11-21T08:51:06.450413Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [1:7439652441170692030:2299], ActorState: CleanupState, TraceId: 01jd6ynzhe33xkmgr8t545wy9g, EndCleanup, isFinal: 0 2024-11-21T08:51:06.450423Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTU0YzExNDAtYTQ0ZjAzZjItMTFkMDcyYmYtYjk3YmUyNWE=, ActorId: [1:7439652441170692030:2299], ActorState: CleanupState, TraceId: 01jd6ynzhe33xkmgr8t545wy9g, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439652436875724368:2256] 2024-11-21T08:51:06.450436Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got resign request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T08:51:06.450457Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652441170692116:2301], DatabaseId: Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2024-11-21T08:51:06.450652Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, ... 
WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, TxId: 2024-11-21T08:51:44.183202Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, TxId: Wait lease expiration 27.135522s: delayed = 1, running = 1 2024-11-21T08:51:44.183300Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, ActorId: [9:7439652603656029561:2740], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:44.183305Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, ActorId: [9:7439652603656029561:2740], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:44.183307Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, ActorId: [9:7439652603656029561:2740], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:44.183309Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, ActorId: [9:7439652603656029561:2740], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:44.183324Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ODFjYWY1OWQtYzMxMzc5NDUtOWZiOWEzOTEtYThhMDU4ODM=, ActorId: [9:7439652603656029561:2740], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:45.183594Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Bootstrap. 
Database: /Root 2024-11-21T08:51:45.184182Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI= 2024-11-21T08:51:45.184246Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:45.184465Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T08:51:45.184637Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, received request, proxyRequestId: 79 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [9:7439652607950996907:2752] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:45.184643Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, request placed into pool from cache: default 2024-11-21T08:51:45.184655Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Sending CompileQuery request 2024-11-21T08:51:45.184934Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, ExecutePhyTx, tx: 0x000044432764D618 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:45.184948Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Sending to Executer TraceId: 0 8 2024-11-21T08:51:45.184976Z node 9 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Created new KQP executer: [9:7439652607950996910:2751] isRollback: 0 2024-11-21T08:51:45.186724Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T08:51:45.186751Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, ExecutePhyTx, tx: 0x000044432764D718 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T08:51:45.186906Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T08:51:45.186949Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, txInfo Status: Committed Kind: ReadOnly TotalDuration: 2.055 ServerDuration: 2.017 QueriesCount: 2 2024-11-21T08:51:45.186995Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T08:51:45.187010Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:45.187018Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, EndCleanup, isFinal: 0 2024-11-21T08:51:45.187032Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ExecuteState, TraceId: 01jd6yq5d09cceaswxrwxcc4ba, Sent query response back to proxy, proxyRequestId: 79, proxyId: [9:7439652474807008239:2190] 2024-11-21T08:51:45.187149Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, TxId: 2024-11-21T08:51:45.187182Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, TxId: 2024-11-21T08:51:45.187278Z node 9 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:45.187289Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:45.187292Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:45.187309Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:45.187330Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZThhZGQ0YmEtODI4ZWJlYmItMzdmMjA4NDgtNDJkMmE3YjI=, ActorId: [9:7439652607950996906:2751], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:45.189380Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=N2M0NWYwMjAtYTM1OGJiNjktZTVlMGZmZDEtNjc0ODViYTc=, ActorId: [9:7439652474807008676:2297], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:45.189414Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=N2M0NWYwMjAtYTM1OGJiNjktZTVlMGZmZDEtNjc0ODViYTc=, ActorId: [9:7439652474807008676:2297], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:45.189417Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=N2M0NWYwMjAtYTM1OGJiNjktZTVlMGZmZDEtNjc0ODViYTc=, ActorId: [9:7439652474807008676:2297], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:45.189425Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=N2M0NWYwMjAtYTM1OGJiNjktZTVlMGZmZDEtNjc0ODViYTc=, ActorId: [9:7439652474807008676:2297], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:45.189454Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=N2M0NWYwMjAtYTM1OGJiNjktZTVlMGZmZDEtNjc0ODViYTc=, ActorId: [9:7439652474807008676:2297], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is 
[1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:09.051286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:09.051318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:09.051326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:09.051334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:09.051353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:09.051359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:09.051375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:09.051524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:09.065994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:09.066015Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:09.072650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:09.072771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:09.072794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:09.075280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:09.075366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:09.075506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.075698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:09.076392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.076697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:09.076711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.076726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:09.076734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:09.076741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:09.076782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is 
[0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:09.078617Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:09.102586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:09.102670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.102735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:09.102795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:09.102805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.103530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.103560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:09.103603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.103614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:09.103618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:09.103623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:09.104101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.104115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:09.104121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:09.104539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.104552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.104558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.104566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.105138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:09.105482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:09.105526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:09.105675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.105700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:09.105707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.105761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:09.105767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.105793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:09.105802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:09.106094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:09.106102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:09.106140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.106144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:09.106211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.106216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:09.106225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:09.106228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.106232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:09.106235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.106238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:51:09.106241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:09.106249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:09.106254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:09.106257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... xState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.779860Z node 52 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.779862Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:51:44.779865Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:51:44.779869Z node 52 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T08:51:44.783159Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.783204Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.783215Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:44.784097Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.784143Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.784157Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.784173Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.784330Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.784340Z node 52 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T08:51:44.784359Z node 52 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 3/3 2024-11-21T08:51:44.784363Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:44.784370Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:44.784387Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [52:403:2378] message: TxId: 1004 2024-11-21T08:51:44.784397Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:44.784403Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:44.784409Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:44.784426Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 
2024-11-21T08:51:44.784431Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:44.784434Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:44.784453Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:44.784457Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:44.784460Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:44.784469Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:44.785175Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:44.785189Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [52:409:2384] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:44.785804Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:44.785934Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 137us result status StatusSuccess 2024-11-21T08:51:44.786191Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\002\000\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:44.786295Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:44.786332Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 39us result status StatusSuccess 2024-11-21T08:51:44.786415Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { 
Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] >> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStream[PipeResets] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] >> TPartBtreeIndexIteration::FewNodes_History |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] [GOOD] |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 |87.2%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:40.125315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:40.125335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.125339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:40.125342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:40.125353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:40.125355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:40.125364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.125448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:40.135583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:40.135603Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.137634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:40.137726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:40.137753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:40.140129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:40.140193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:40.140331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:51:40.140483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.141059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.141290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.141298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.141309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:40.141315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.141321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:40.141351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:40.142447Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.155588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:40.155666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.155722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:40.155770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:40.155778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.156351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.156377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:40.156412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.156421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:40.156426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2024-11-21T08:51:40.156431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:40.156794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.156802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:40.156807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:40.157104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.157113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.157119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.157125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.157715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:40.158153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:40.158206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:40.158391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.158417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:40.158425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.158488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:40.158495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.158546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:40.158558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.158984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.158995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.159028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.159032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:40.159095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.159102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:40.159111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:40.159114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.159118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:40.159121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.159124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:40.159127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:40.159136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:40.159140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:40.159143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
6678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.359861Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.359867Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:47.359872Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:47.360088Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.360098Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.360102Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.360106Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:47.360110Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:51:47.364647Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T08:51:47.364664Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:47.364691Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T08:51:47.364706Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T08:51:47.364981Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.364995Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.365001Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.365006Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:47.365012Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:47.365030Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:51:47.365161Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.365168Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:47.365181Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.365187Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:47.365194Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.365204Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.365208Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:47.365212Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:47.365217Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:51:47.366300Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.367019Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.367053Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:47.367073Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.367088Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:47.367182Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:47.367191Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:51:47.367206Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:51:47.367211Z node 26 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:47.367216Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:51:47.367228Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T08:51:47.367234Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:47.367240Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:47.367245Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:47.367258Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:47.367262Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:47.367266Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:47.367271Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:47.367275Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:47.367278Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:47.367294Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:47.367298Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:51:47.367301Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:51:47.367311Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:47.367861Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:47.367874Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:597:2529] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:47.367983Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:47.368029Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 52us result status StatusSuccess 2024-11-21T08:51:47.368124Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |87.2%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize2000000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:40.293724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:40.293753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.293759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:40.293765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:40.293779Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:40.293784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:40.293795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.293887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:40.306576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:40.306601Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.308968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:40.309081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:40.309110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:40.312063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:40.312147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:40.312321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.312655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.313561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.313872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.313887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.313903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:40.313911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.313917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:40.313965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:40.315649Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.332870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:40.332945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:51:40.333006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:40.333052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:40.333060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.333728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.333750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:40.333789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.333797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:40.333801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:40.333806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:40.334111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.334118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:40.334122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:40.334423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.334430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.334435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.334441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.335036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:40.335363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:40.335397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:40.335562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.335584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:40.335591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.335644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:40.335649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.335677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:40.335689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.336036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.336045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.336080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.336084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:40.336156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.336162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:40.336173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:40.336178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.336183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:40.336188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.336193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:40.336197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:40.336224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:40.336231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:40.336235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
8944 2024-11-21T08:51:47.954310Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.954327Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.954331Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.954336Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:47.954341Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:47.954460Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.954470Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.954473Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.954477Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:47.954482Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:51:47.954657Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 160 } } 2024-11-21T08:51:47.954665Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:47.954683Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 160 } } 2024-11-21T08:51:47.954695Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 160 } } 2024-11-21T08:51:47.954828Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 
2024-11-21T08:51:47.954839Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.954846Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:47.954851Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:47.954855Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:47.954868Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:51:47.954939Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.954944Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:47.954957Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.954964Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:47.954971Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:47.954980Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.954984Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.954988Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:47.954994Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:51:47.957023Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.957064Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.957096Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.957110Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:47.957133Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 
2024-11-21T08:51:47.957216Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.957224Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:51:47.957236Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:51:47.957241Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:47.957251Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:51:47.957265Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T08:51:47.957270Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:47.957277Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:47.957282Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:47.957293Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:47.957297Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:47.957301Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:47.957315Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:47.957319Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:47.957322Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:47.957331Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:47.958121Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:47.958135Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:596:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:47.958222Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:47.958264Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 48us result status StatusSuccess 2024-11-21T08:51:47.958359Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] [GOOD] |87.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:41.266685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:41.266709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.266714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:41.266719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:41.266733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:41.266737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:41.266745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.266830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:41.278606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:41.278628Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.281095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:41.281204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:41.281235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:41.285676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:41.285749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:41.285860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.286023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.286606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.286814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.286821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.286830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:41.286835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.286839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:41.286870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:41.287898Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.300687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:41.300753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.300805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:41.300848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:41.300857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.301625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.301651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:41.301696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.301705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:41.301710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:41.301715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:41.302089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.302097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:41.302100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:41.302410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.302417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.302421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.302425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.302937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:41.303270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:41.303311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:41.303452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.303469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:41.303473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.303526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:41.303533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.303557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:41.303567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.303916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.303925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.303955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.303958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:41.304026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.304031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:41.304038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:41.304041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.304045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:41.304048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.304051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:41.304053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:41.304061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:41.304065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:41.304068Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [Own ... ep: 0 Generation: 2 2024-11-21T08:51:48.058682Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:48.058700Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059178Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059221Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059224Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.059228Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:51:48.059231Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:48.059290Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059293Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.059295Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:51:48.059299Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:48.059345Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:48.059350Z node 22 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:48.059354Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 94489282827 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:48.059360Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.059362Z node 22 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.059365Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: send schema 
changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:48.059369Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:51:48.059414Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059417Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.059419Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:48.059421Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:48.059427Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:48.059472Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059474Z node 22 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.059476Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T08:51:48.059916Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.059937Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:48.059946Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.059953Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:51:48.059985Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.060260Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.060320Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.060325Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:48.060332Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T08:51:48.060335Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:48.060338Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T08:51:48.060341Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T08:51:48.060345Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:48.060347Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:48.060364Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 5 2024-11-21T08:51:48.060367Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:48.060369Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:48.060372Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:48.060374Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:48.060376Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:48.060379Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:48.060381Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T08:51:48.060383Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T08:51:48.060388Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:48.060439Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.060443Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.060455Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:48.060459Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:51:48.060472Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:51:48.060476Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:51:48.060481Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:48.060958Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:48.061015Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:48.061022Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:48.061074Z node 22 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:48.061086Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:48.061089Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [22:752:2656] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:48.061133Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:48.061155Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 27us result status StatusPathDoesNotExist 2024-11-21T08:51:48.061179Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BackupRestore::RestoreTablePartitioningSettings >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] [GOOD] |87.3%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/test-results/unittest/{meta.json ... results_accumulator.log} >> BuildStatsHistogram::Many_Serial [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] |87.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |87.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:25.768975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:25.768998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.769003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:25.769008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:25.769022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:25.769026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:25.769036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.769113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:25.780410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:25.780431Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.782750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:25.782852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:25.782901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:25.787459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:25.787557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:25.787700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.787907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.788681Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.788994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.789006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.789021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:25.789028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.789034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:25.789076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:25.790959Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.810980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:25.811065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.811131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:25.811182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:25.811191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.811929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.811955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:25.811996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.812006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:25.812010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:25.812015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:25.812494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:51:25.812505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:25.812509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:25.812915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.812932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.812939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.812947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.813620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:25.814093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:25.814149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:25.814360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.814408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:25.814417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.814477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:25.814483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.814516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:25.814530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.814990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.815006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.815051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
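The trace above moves the schemeshard operation through numeric states ("Change state for txid 1:0 2 -> 3", "3 -> 128", then "128 -> 240" once the coordinator plan step arrives). A minimal sketch, assuming only the wording visible in these traces, for pulling such transitions out of an accumulated log; the regular expressions and function names below are illustrative and not part of ya or YDB tooling:

```python
import re
from typing import Iterator, Tuple

# Each schemeshard trace entry starts with "<ISO timestamp> node <N> :COMPONENT LEVEL: ...".
# The accumulated CI log keeps many entries on one physical line, so split on that prefix first.
ENTRY_START = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :)")
CHANGE = re.compile(
    r"^(?P<ts>\S+Z) node (?P<node>\d+) .*?"
    r"Change state for txid (?P<op>\S+) (?P<src>\d+) -> (?P<dst>\d+)"
)

def state_changes(log_text: str) -> Iterator[Tuple[str, str, str, str, str]]:
    """Yield (timestamp, node, operation id, from-state, to-state) per trace entry."""
    for entry in ENTRY_START.split(log_text):
        m = CHANGE.match(entry)
        if m:
            yield (m["ts"], m["node"], m["op"], m["src"], m["dst"])

if __name__ == "__main__":
    # Sample entries in the same shape as the trace above.
    sample = ("2024-11-21T08:51:25.812509Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
              "Change state for txid 1:0 3 -> 128 "
              "2024-11-21T08:51:25.814477Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
              "Change state for txid 1:0 128 -> 240")
    for row in state_changes(sample):
        print(row)  # e.g. ('2024-11-21T08:51:25.812509Z', '1', '1:0', '3', '128')
```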
2024-11-21T08:51:25.815056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:25.815132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.815138Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:25.815150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:25.815154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.815160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:25.815165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.815169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:25.815173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:25.815190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:25.815197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:25.815221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... rationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:48.511602Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 326417516810 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:48.511610Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.511618Z node 76 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.511621Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:48.511626Z node 76 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:51:48.511877Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.511888Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.511892Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.511897Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 
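The value 18446744073709551615 recurring in the TEvUpdateAck/AckPublish entries above (and returned earlier as the PathId of the dropped /MyRoot/Table/Stream) is 2^64 - 1, the maximum unsigned 64-bit integer; in these traces it appears to serve as a "dropped or unresolved" sentinel. A tiny check, with a constant name of our own choosing:

```python
# Hypothetical helper; MAX_U64_SENTINEL is our name, not a YDB identifier.
MAX_U64_SENTINEL = 2**64 - 1  # == 18446744073709551615, as printed in the trace

def is_sentinel(value: int) -> bool:
    """True when a Version/PathId field carries the max-uint64 'dropped' marker."""
    return value == MAX_U64_SENTINEL

assert MAX_U64_SENTINEL == 18446744073709551615
assert is_sentinel(18446744073709551615) and not is_sentinel(5)
```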
2024-11-21T08:51:48.511902Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:48.511951Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.511959Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.511962Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.511966Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:51:48.511969Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:48.512002Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:51:48.512078Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:51:48.512098Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.512105Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.512108Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.512111Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:48.512118Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:48.512125Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:48.512239Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.512250Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.512254Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.512257Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready 
parts: 2/3, is published: true 2024-11-21T08:51:48.513276Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.513335Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.513434Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.513440Z node 76 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:51:48.513450Z node 76 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 3/3 2024-11-21T08:51:48.513454Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:48.513459Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:48.513464Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:48.513469Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:48.513472Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:48.513491Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:48.513495Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:48.513498Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:48.513503Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:48.513507Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:48.513510Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:48.513518Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:48.513574Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.513591Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:48.513595Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:51:48.513610Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:51:48.513615Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:51:48.513620Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:48.513756Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.513786Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 
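Between the per-test traces, the excerpt interleaves suite-level markers such as ">> TCdcStreamWithRebootsTests::DropStream[TabletReboots] [GOOD]" and progress lines like "|87.3%| [TA] ...". A hedged sketch for tallying verdicts from this kind of accumulated output; only [GOOD] occurs in the excerpt, so the other verdict spellings are assumptions rather than anything ya guarantees:

```python
import re
from collections import Counter

# Matches markers of the form ">> Suite::TestName[Variant] [VERDICT]".
# [FAIL] and [TIMEOUT] are assumed spellings; only [GOOD] appears in this excerpt.
VERDICT = re.compile(r">> (?P<test>\S+) \[(?P<verdict>GOOD|FAIL|TIMEOUT)\]")

def tally_verdicts(log_text: str) -> Counter:
    """Count verdict markers wherever they occur in the concatenated log."""
    return Counter(m["verdict"] for m in VERDICT.finditer(log_text))

if __name__ == "__main__":
    sample = (">> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] "
              ">> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] [GOOD]")
    print(tally_verdicts(sample))  # Counter({'GOOD': 2})
```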
2024-11-21T08:51:48.513820Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:51:48.513880Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.513886Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.514258Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:48.514331Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:48.514335Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:48.514381Z node 76 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:48.514398Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:48.514402Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [76:747:2651] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:48.514457Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:48.514483Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 34us result status StatusPathDoesNotExist 2024-11-21T08:51:48.514516Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TOlap::StoreStats >> BackupRestoreS3::RestoreTablePartitioningSettings >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:25.451440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:25.451461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.451466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:25.451471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:25.451482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:25.451486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:25.451494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:25.451561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:25.460998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:25.461013Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.462810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:25.462892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:25.462915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:25.465294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:25.465368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:25.465517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.465691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.466403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.466654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.466666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.466678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:25.466685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.466692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:25.466726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:25.467958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:25.485957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:25.486030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.486081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:25.486122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:25.486128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.486799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.486818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:25.486854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.486861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:25.486865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:25.486869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:25.487202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.487211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:25.487214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:25.487477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.487484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:51:25.487488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.487494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.487927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:25.488250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:25.488304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:25.488496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:25.488523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:25.488531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.488591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:25.488598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:25.488629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:25.488641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:25.489082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:25.489092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:25.489133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:25.489138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:25.489222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:25.489228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:25.489239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
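Completion is also reported as fractions, e.g. "Part operation is done id#1:0 progress is 1/1" just above and "progress is 4/4" for the multi-part operation 1004:0 earlier. A companion sketch, under the same caveats as the earlier snippets, that keeps the last reported progress per operation so a partially applied one (say 3/4) stands out:

```python
import re
from typing import Dict, Tuple

# "Part operation is done id#<op> progress is <done>/<total>" as seen in the trace.
PROGRESS = re.compile(
    r"Part operation is done id#(?P<op>\S+) progress is (?P<done>\d+)/(?P<total>\d+)"
)

def latest_progress(log_text: str) -> Dict[str, Tuple[int, int]]:
    """Map operation id -> (done, total) from the last progress entry seen for it."""
    result: Dict[str, Tuple[int, int]] = {}
    for m in PROGRESS.finditer(log_text):
        result[m["op"]] = (int(m["done"]), int(m["total"]))
    return result

if __name__ == "__main__":
    sample = ("... Part operation is done id#1004:0 progress is 3/4 "
              "... Part operation is done id#1004:0 progress is 4/4")
    print(latest_progress(sample))  # {'1004:0': (4, 4)}
```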
2024-11-21T08:51:25.489244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.489250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:25.489257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:25.489262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:25.489266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:25.489277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:25.489283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:25.489287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... rationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:48.497840Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 326417516810 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:51:48.497850Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.497856Z node 76 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.497860Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:48.497864Z node 76 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:51:48.498592Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498610Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498615Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.498620Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:51:48.498624Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:48.498693Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498702Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498705Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.498709Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:51:48.498713Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:48.498801Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:51:48.498887Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:51:48.498910Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498917Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.498921Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.498925Z node 76 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:48.498931Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:48.498939Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:48.499042Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.499052Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.499056Z node 76 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:51:48.499059Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/3, is published: true 2024-11-21T08:51:48.500924Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.500991Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.501103Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.501111Z node 76 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 
ProgressState 2024-11-21T08:51:48.501124Z node 76 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 3/3 2024-11-21T08:51:48.501128Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:48.501134Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:51:48.501140Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:51:48.501146Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:51:48.501151Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:51:48.501176Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:48.501182Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:51:48.501186Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:51:48.501191Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:51:48.501195Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T08:51:48.501198Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:51:48.501207Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:51:48.501273Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.501293Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:48.501297Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:51:48.501309Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:51:48.501314Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:51:48.501320Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:48.501675Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.501735Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:51:48.501776Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:51:48.501863Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.501870Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:51:48.502349Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:51:48.502458Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:51:48.502464Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:51:48.502526Z node 76 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:51:48.502554Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:51:48.502559Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [76:747:2651] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:51:48.502638Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:48.502666Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 51us result status StatusPathDoesNotExist 2024-11-21T08:51:48.502706Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::InitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:44.315313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:44.315328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:44.315332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:44.315335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:44.315344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:44.315347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:44.315353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:44.315412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:44.327156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:44.327175Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:44.329332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:44.329462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:44.329487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:44.331704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:44.331766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:44.331880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.332026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:44.332713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.332947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:44.332957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.332968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:44.332974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:44.332979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:44.333015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:44.334174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:44.349022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:44.349077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.349112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:44.349142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:44.349147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.349639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.349656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:44.349681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.349686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:44.349688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:44.349691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:44.350003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.350011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:44.350013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:44.350254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.350262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.350266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.350272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.350658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:44.350943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:44.350974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:44.351103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.351119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:44.351124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.351157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:44.351161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.351181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:44.351189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:44.351468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:44.351476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:44.351504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.351509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:44.351570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.351576Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:44.351585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:44.351587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.351590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:44.351593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.351596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:44.351598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:44.351605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T08:51:44.351609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:44.351611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ublished: false 2024-11-21T08:51:48.875713Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.875720Z node 14 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:48.875781Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046678944 2024-11-21T08:51:48.875786Z node 14 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:48.875795Z node 14 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T08:51:48.875799Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T08:51:48.875802Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T08:51:48.875860Z node 14 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:48.875874Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:48.875878Z node 14 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T08:51:48.875883Z node 14 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:51:48.875887Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:48.875978Z node 14 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:48.875988Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:48.875991Z node 14 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T08:51:48.875995Z node 14 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:48.875999Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:48.876005Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true FAKE_COORDINATOR: Erasing txId 281474976715657 2024-11-21T08:51:48.876330Z node 14 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 258 } } 2024-11-21T08:51:48.876338Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:48.876357Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 258 } } 2024-11-21T08:51:48.876383Z node 14 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 258 } } 2024-11-21T08:51:48.876851Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 60129544460 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:48.876862Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:48.876882Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 60129544460 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:48.876889Z node 14 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:48.876897Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 60129544460 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:51:48.876908Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.876912Z node 14 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.876916Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:48.876922Z node 14 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T08:51:48.877184Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
281474976715657 2024-11-21T08:51:48.877585Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2024-11-21T08:51:48.877610Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.877629Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.877709Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T08:51:48.877715Z node 14 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2024-11-21T08:51:48.877726Z node 14 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T08:51:48.877730Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:51:48.877735Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T08:51:48.877740Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:51:48.877746Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:48.877750Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:51:48.877760Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:48.877764Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T08:51:48.877767Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T08:51:48.877784Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:48.877791Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T08:51:48.877794Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T08:51:48.877799Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:48.878539Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:48.878583Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 55us result status StatusSuccess 2024-11-21T08:51:48.878720Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable |87.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 
Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 
26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 
7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (109522, 36515) value = 1078779 (actual 1082732 - 0% error) 10% (actual 10%) key = (199786, 66603) value = 2157298 (actual 2161219 - 0% error) ... 
(307549, NULL) (307615, NULL) (307678, NULL) (307744, NULL) 100 rows, 100 pages, 4 levels: (307810, NULL) (307876, NULL) (307939, NULL) (308005, NULL) (308065, NULL) 100 rows, 100 pages, 4 levels: (308131, NULL) (308194, NULL) (308260, NULL) (308320, NULL) (308386, NULL) 100 rows, 100 pages, 4 levels: (308452, NULL) (308518, NULL) (308587, NULL) (308650, NULL) (308719, NULL) 100 rows, 100 pages, 4 levels: (308779, NULL) (308842, NULL) (308908, NULL) (308974, NULL) (309049, NULL) 100 rows, 100 pages, 4 levels: (309115, NULL) (309181, NULL) (309247, NULL) (309319, NULL) (309385, NULL) 100 rows, 100 pages, 4 levels: (309448, NULL) (309511, NULL) (309580, NULL) (309649, NULL) (309715, NULL) 100 rows, 100 pages, 4 levels: (309775, NULL) (309850, NULL) (309922, NULL) (309994, NULL) (310060, NULL) 100 rows, 100 pages, 4 levels: (310132, NULL) (310195, NULL) (310264, NULL) (310327, NULL) (310396, NULL) 100 rows, 100 pages, 4 levels: (310465, NULL) (310534, NULL) (310594, NULL) (310660, NULL) (310726, NULL) 100 rows, 100 pages, 4 levels: (310801, NULL) (310867, NULL) (310945, NULL) (311011, NULL) (311077, NULL) 100 rows, 100 pages, 4 levels: (311140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, 
NULL) 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) 100 rows, 100 pages, 4 levels: (329782, NULL) 
(329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 
10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) >> TOlap::CreateStoreWithDirs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:41.411576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:41.411597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.411603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:41.411608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:41.411622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:41.411627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:41.411636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.411710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:41.424386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:41.424405Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.426893Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:41.426970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:41.426991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:41.429530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:41.429594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:41.429710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.429846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.430404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.430626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.430633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.430643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:41.430649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.430655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:41.430690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:41.431609Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.443911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:41.443974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.444014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:41.444045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:41.444051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.444606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2024-11-21T08:51:41.444628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:41.444656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.444663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:41.444666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:41.444669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:41.445027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.445036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:41.445039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:41.445302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.445309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.445312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.445316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.445700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:41.446006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:41.446039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:41.446161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.446178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:41.446183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.446219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:41.446223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:51:41.446244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:41.446252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.446542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.446549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.446575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.446578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:41.446632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.446636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:41.446644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:41.446646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.446650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:41.446653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.446656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:41.446659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:41.446666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:41.446670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:41.446672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
4, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:49.324411Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:49.324462Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:49.324467Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 3 2024-11-21T08:51:49.324479Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:49.324484Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:49.324490Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:49.324500Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:3, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:49.324503Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:51:49.324507Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:3, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:49.324515Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 129 -> 240 2024-11-21T08:51:49.324580Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.324589Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.324592Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:49.324596Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:51:49.324599Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:51:49.324698Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.324707Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.324710Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:49.324713Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:51:49.324717Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 6 2024-11-21T08:51:49.325218Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.325234Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.325237Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:49.325241Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:51:49.325245Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:51:49.325258Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: true 2024-11-21T08:51:49.326642Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:51:49.326698Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.326843Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.326865Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:51:49.326966Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.327005Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.327020Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:51:49.327027Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2024-11-21T08:51:49.327043Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 5/5 2024-11-21T08:51:49.327047Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T08:51:49.327053Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: true 2024-11-21T08:51:49.327067Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:459:2424] message: TxId: 1003 2024-11-21T08:51:49.327072Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T08:51:49.327077Z node 26 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:49.327082Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:49.327093Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:49.327097Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:49.327100Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:49.327105Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:49.327108Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:49.327110Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:49.327115Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:49.327118Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:51:49.327121Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:51:49.327134Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:49.327137Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:4 2024-11-21T08:51:49.327141Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:4 2024-11-21T08:51:49.327150Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:51:49.327226Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:49.327640Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:49.327650Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:658:2580] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:49.327747Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:49.327808Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 68us result status StatusSuccess 2024-11-21T08:51:49.327899Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { 
Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10000Inflight100BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests1Inflight1BlobSize1000 >> TOlap::CustomDefaultPresets |87.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |87.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateDropTable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TOlap::CreateTable >> TOlap::CustomDefaultPresets [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 >> TOlap::CreateTableTtl >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] [GOOD] >> TOlap::AlterStore >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries |87.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.276763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2024-11-21T08:51:50.276792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.276797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.276802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.276808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.276812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.276820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.276906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.286303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.286326Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.288785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.289546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.289584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:50.290934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:50.291057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.291130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.291181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.291851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.292143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.292155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.292195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.292202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.292225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.292239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.293334Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.305901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2024-11-21T08:51:50.305992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.306054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.306098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.306106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.306838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.306862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.306901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.306908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.306911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:50.306914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:50.307270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.307281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.307287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.307602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.307610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.307616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.307622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.308080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.308359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.308405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.308547Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.308567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.308574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.308613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.308617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.308640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.308649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.308930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.308936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.308972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.308976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.309041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.309047Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.309058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.309062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.309067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.309071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.309074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.309077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.309085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.309088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.309091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.309368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.309383Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.309403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.309409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.309414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.309431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:50.410479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:51:50.410948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.410957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:50.411014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:50.411042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.411047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:51:50.411052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T08:51:50.411126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.411132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:50.411138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T08:51:50.411285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:50.411295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:50.411298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:50.411301Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:51:50.411305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:50.411411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:50.411419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:50.411422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:50.411426Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:50.411429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:50.411438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T08:51:50.411717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T08:51:50.411736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2024-11-21T08:51:50.411753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2024-11-21T08:51:50.411933Z node 1 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2024-11-21T08:51:50.411959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.411974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2024-11-21T08:51:50.412102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:50.412343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:50.412642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.423429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T08:51:50.423452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:50.423475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T08:51:50.423484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: 
TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T08:51:50.423564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2024-11-21T08:51:50.423568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:50.423577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:50.424121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.424237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.424262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.424271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:50.424287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:50.424291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:50.424298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:50.424311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 102 2024-11-21T08:51:50.424319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:50.424325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:50.424329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:50.424360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:50.424695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:50.424705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:418:2397] TestWaitNotification: OK eventTxId 102 2024-11-21T08:51:50.424835Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.424898Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 73us result status StatusSuccess 2024-11-21T08:51:50.425028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateDropTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.028920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:50.028953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.028959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.028964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.028972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.028976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.028986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T08:51:50.029077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.041731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.041761Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.049167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.050140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.050187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:50.051609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:50.051772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.051895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.051969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.052939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.053241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.053253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.053298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.053306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.053327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.053345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.054706Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.072663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.072762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.072857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.072909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.072918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.073877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2024-11-21T08:51:50.073913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.073968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.073978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.073982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:50.073987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:50.074984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.074999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.075005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.075597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.075617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.075625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.075636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.076372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.080886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.080979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.081250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.081310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.081325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.081427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.081438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.081495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.081513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.082333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.082346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.082403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.082426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.082517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.082527Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.082542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.082546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.082553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.082559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.082565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.082569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.082586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.082596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.082601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.083053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.083077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.083082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.083088Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.083094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.083112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: 2/3 2024-11-21T08:51:50.173577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: false 2024-11-21T08:51:50.173755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.173773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.173778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:50.173784Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:51:50.173796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.174721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.174745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.174749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:50.174755Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:51:50.174760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:50.174891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.174903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.174906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:50.174910Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:51:50.174914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:50.175159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.175175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:51:50.175178Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:51:50.175183Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:51:50.175188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:50.175201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T08:51:50.175867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:2 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T08:51:50.176093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:50.176755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:50.176782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:50.176794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:51:50.204773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T08:51:50.204809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:50.204851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T08:51:50.204879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T08:51:50.205035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2024-11-21T08:51:50.205044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:50.205056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:51:50.208727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:51:50.208885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:51:50.208924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:51:50.208937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T08:51:50.208959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2024-11-21T08:51:50.208964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:51:50.208971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T08:51:50.209000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:373:2353] message: TxId: 101 2024-11-21T08:51:50.209007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:51:50.209014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:51:50.209019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:51:50.209053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:50.209061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T08:51:50.209064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T08:51:50.209070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:50.209073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T08:51:50.209076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T08:51:50.209092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:50.209781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:51:50.209802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:374:2354] TestWaitNotification: OK eventTxId 101 2024-11-21T08:51:50.209961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/DirB/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.210030Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/DirB/OlapStore" took 80us result status StatusSuccess 2024-11-21T08:51:50.210210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/DirB/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] Test command err: 2024-11-21T08:51:49.045356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652626852696093:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:49.045413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f65/r3tmp/tmpxiGX7f/pdisk_1.dat 2024-11-21T08:51:49.114309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19571, node 1 2024-11-21T08:51:49.129426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:49.129441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:49.129443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:49.129487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28947 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:51:49.145609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:49.145641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:49.147196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.147696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:49.148267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.148288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.149172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:49.149294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:49.149305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:51:49.149930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:51:49.150180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:49.150192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:51:49.151484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.152492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109199, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.152507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:51:49.152581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:51:49.153139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.153201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.153214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:51:49.153227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:51:49.153238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:51:49.153257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:51:49.153747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:51:49.153763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:51:49.153768Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:49.153782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:51:49.369366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652626852697012:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.369418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.398803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.398946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T08:51:49.399112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.399123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.399126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSequence Propose, path: /Root/table/_serial_column_Key, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.399165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 2 2024-11-21T08:51:49.399222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.399763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/table 2024-11-21T08:51:49.399805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.399868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.399885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateSequence, at tablet72057594046644480 2024-11-21T08:51:49.399926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:51:49.400362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T08:51:49.400408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:51:49.400416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:51:49.400421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:51:49.400457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:51:49.400474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:51:49.400479Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:51:49.400495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:51:49.400501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:51:49.400502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T08:51:49.403312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:51:49.403346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 3 2024-11-21T08:51:49.403412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:51:49.403426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T08:51:49.403881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:51:49.456433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 3 -> 128 2024-11-21T08:51:49.456572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:51:49.456579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:51:49.456588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 2814749767 ... : 281474976710671 2024-11-21T08:51:49.739533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.739543Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T08:51:49.739588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T08:51:49.739597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.739599Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 2 2024-11-21T08:51:49.739615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T08:51:49.739617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.739619Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 1 2024-11-21T08:51:49.739726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710671, at schemeshard: 72057594046644480 2024-11-21T08:51:49.741108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710671:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:51:49.741131Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 281474976710671:0 2 -> 3 2024-11-21T08:51:49.741431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:51:49.741733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 3 -> 128 2024-11-21T08:51:49.742173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:49.745314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:51:49.745325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:51:49.745341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 3 -> 128 2024-11-21T08:51:49.745687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710671:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:51:49.746735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109794, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.746748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710671:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179109794 2024-11-21T08:51:49.746771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 128 -> 129 2024-11-21T08:51:49.746788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T08:51:49.746830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 128 -> 240 2024-11-21T08:51:49.747713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.747811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.747826Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:1 ProgressState 2024-11-21T08:51:49.747842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:1 progress is 1/2 2024-11-21T08:51:49.747873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:51:49.747945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976710671 Step: 1732179109794 OrderId: 281474976710671 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 360 } } 2024-11-21T08:51:49.748326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T08:51:49.748344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.748348Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 15 2024-11-21T08:51:49.748401Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T08:51:49.748405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.748406Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2024-11-21T08:51:49.748424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T08:51:49.748426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T08:51:49.748427Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2024-11-21T08:51:49.748451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:51:49.748456Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.748460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 129 -> 240 2024-11-21T08:51:49.750589Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:0 ProgressState 2024-11-21T08:51:49.750614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:0 progress is 2/2 2024-11-21T08:51:49.750624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:0 2024-11-21T08:51:49.750653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:1 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/jptk/001f65/r3tmp/tmpFgFdRD/table/data_00.csv"2024-11-21T08:51:49.768153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd6yq9vs5zw2ve47q4ax28b7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q4YjRjNWEtYWNlYmUxZTgtNDExM2RjYzctZTMyMWM3YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/jptk/001f65/r3tmp/tmpFgFdRD/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/jptk/001f65/r3tmp/tmpFgFdRD/table/permissions.pb"2024-11-21T08:51:49.776421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.776500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.776504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.776519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710673:0 progress is 1/1 2024-11-21T08:51:49.776544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710673:0 2024-11-21T08:51:49.776546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710673, publications: 3, subscribers: 0 2024-11-21T08:51:49.777325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710673, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2024-11-21T08:51:49.777369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.777471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.777966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T08:51:49.777977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T08:51:49.777981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 3 2024-11-21T08:51:49.778035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T08:51:49.778038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T08:51:49.778040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T08:51:49.778056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T08:51:49.778061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T08:51:49.778063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 16 2024-11-21T08:51:49.778068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
remove, at schemeshard: 72057594046644480, txId: 281474976710673, subscribers: 0 Restore completed successfully2024-11-21T08:51:49.800642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd6yq9wm519whr6mwa1sa4n2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQwMjVhNGYtYzNiNGFmZTMtZWZjNjYzZWEtYTA2ZTNmNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet >> TOlap::CreateTable [GOOD] >> TOlap::CreateStore >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TOlap::CreateTableTtl [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10Inflight1BlobSize1000 >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreTableSplitBoundaries >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.557691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:50.557715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.557721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.557726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.557732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.557735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.557744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.557837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.569882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.569906Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.572770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.573399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.573424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:50.575275Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:50.575517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.575636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.575726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.576955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.577327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.577333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.577348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.578827Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.597218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.597315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.597377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.597437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.597445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.598207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.598234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.598279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.598289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.598293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T08:51:50.598297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:50.598715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.598728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.598732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.599091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.599102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.599109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.599115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.599644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.600080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.600142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.600337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.600363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.600373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.600424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.600430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.600461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.600474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.600936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.600945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.600996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.601002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.601089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.601096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.601107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.601111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.601116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.601122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.601127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.601131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.601146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.601152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.601156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.601436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.601453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.601458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.601462Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.601467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.601483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nId: 111:2, at schemeshard: 72057594046678944 2024-11-21T08:51:50.836587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 111:2 ProgressState 2024-11-21T08:51:50.836607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#111:2 progress is 3/3 2024-11-21T08:51:50.836611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2024-11-21T08:51:50.836617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2024-11-21T08:51:50.836632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:367:2347] message: TxId: 111 2024-11-21T08:51:50.836652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2024-11-21T08:51:50.836658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:0 2024-11-21T08:51:50.836664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:0 2024-11-21T08:51:50.836685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:51:50.836691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:1 2024-11-21T08:51:50.836694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:1 2024-11-21T08:51:50.836699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T08:51:50.836702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:2 2024-11-21T08:51:50.836705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:2 2024-11-21T08:51:50.836726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T08:51:50.837262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2024-11-21T08:51:50.837278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:623:2601] TestWaitNotification: OK eventTxId 111 2024-11-21T08:51:50.837493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.837572Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/DirA/DirB/NestedTable" took 93us result status StatusSuccess 2024-11-21T08:51:50.837720Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" PathDescription { Self { Name: "NestedTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 111 CreateStep: 5000006 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "NestedTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 112 2024-11-21T08:51:50.838594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore/MyDir" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TableWithTiers" Schema { Columns { Name: "timestamp" Type: "Timestamp" } Columns { Name: "data" Type: "Utf8" } KeyColumnNames: "timestamp" } ColumnShardCount: 1 } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.838648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/MyDir/TableWithTiers, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.838715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: MyDir, child name: TableWithTiers, child id: [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2024-11-21T08:51:50.838729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2024-11-21T08:51:50.838765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2024-11-21T08:51:50.838807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 112:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.838812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.838823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2024-11-21T08:51:50.838830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T08:51:50.839331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 112, response: Status: StatusAccepted TxId: 112 SchemeshardId: 72057594046678944 PathId: 9, at schemeshard: 72057594046678944 2024-11-21T08:51:50.839360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 112, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/MyDir/ 2024-11-21T08:51:50.839391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.839395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:50.839418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2024-11-21T08:51:50.839429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.839432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3 2024-11-21T08:51:50.839436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 9 2024-11-21T08:51:50.839512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.839518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState at tabletId# 72057594046678944 2024-11-21T08:51:50.839545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2024-11-21T08:51:50.839626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:51:50.839633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:51:50.839636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T08:51:50.839639Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2024-11-21T08:51:50.839645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:50.839736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:51:50.839742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 
112 2024-11-21T08:51:50.839744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T08:51:50.839746Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 1 2024-11-21T08:51:50.839748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T08:51:50.839755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T08:51:50.840479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2024-11-21T08:51:50.840530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 0, tablet: 72075186233409546 2024-11-21T08:51:50.841038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T08:51:50.841061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 TestModificationResult got TxId: 112, wait until txId: 112 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.533048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:50.533080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.533086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.533091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.533099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.533104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.533114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.533208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.545116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.545143Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.549659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.550562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.550608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T08:51:50.554248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:50.554465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.554592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.554676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.555833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.556157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.556172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.556258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.556269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.556277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.556296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.557832Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.576106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.576198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.576284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.576336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.576345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.577248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.577262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T08:51:50.577267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:50.577770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.577793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.578239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.578248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.578255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.578261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.578877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.579272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.579319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.579519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.579542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.579549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.579603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.579610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.579640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.579653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.580067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.580075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.580116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.580121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.580192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.580199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.580227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.580232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.580237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.580243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.580247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.580252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.580263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.580269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.580274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.580580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.580593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.580597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.580602Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.580606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.580618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.760583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId#107:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000007 2024-11-21T08:51:50.760603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 129 2024-11-21T08:51:50.760619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.760626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: Erasing txId 107 2024-11-21T08:51:50.761036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.761044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.761065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:50.761082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.761086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 1 2024-11-21T08:51:50.761089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T08:51:50.761130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.761136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T08:51:50.761143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T08:51:50.761193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:51:50.761203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:51:50.761207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T08:51:50.761211Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:51:50.761214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:50.761253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:51:50.761260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:51:50.761263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T08:51:50.761266Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:51:50.761269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.761275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T08:51:50.761702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T08:51:50.761723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409546 2024-11-21T08:51:50.761964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2024-11-21T08:51:50.761972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:50.761982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2024-11-21T08:51:50.761990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 130 2024-11-21T08:51:50.762158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:51:50.762369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:51:50.762444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.762463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.762468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.762492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:50.762516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T08:51:50.762520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T08:51:50.762526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T08:51:50.762529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T08:51:50.762533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T08:51:50.762537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 107:0 2024-11-21T08:51:50.762556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:51:50.762942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:51:50.763031Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:51:50.763796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.763942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:51:50.764117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:51:50.764123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:51:50.764134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.764790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:51:50.764806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:51:50.764833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T08:51:50.764911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T08:51:50.764918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T08:51:50.765005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T08:51:50.765027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:51:50.765032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:652:2630] TestWaitNotification: OK eventTxId 107 2024-11-21T08:51:50.765122Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.765154Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 47us result status StatusPathDoesNotExist 2024-11-21T08:51:50.765189Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: 
ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:51:50.765278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.765287Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 9us result status StatusPathDoesNotExist 2024-11-21T08:51:50.765295Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "" PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex
>> TOlap::CreateStore [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:05.394993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:05.395016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:05.395021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:05.395026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:05.395038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type
TxMergeTablePartition, limit 10000 2024-11-21T08:51:05.395042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:05.395050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:05.395127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:05.405812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:05.405838Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:05.407972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:05.408067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:05.408096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:05.410378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:05.410449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:05.410566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:05.410719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:05.411266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:05.411529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:05.411539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:05.411553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:05.411559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:05.411564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:05.411604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:05.412836Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:05.429865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:05.429952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.430018Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:05.430064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:05.430072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.430896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:05.430924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:05.430974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.430984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:05.430989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:05.430995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:05.432554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.432573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:05.432578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:05.433066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.433083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.433091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:05.433099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:05.433720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:05.434609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:05.434669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:05.434871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:51:05.434896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:05.434903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:05.434957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:05.434965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:05.435000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:05.435014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:05.435799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:05.435810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:05.435851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:05.435856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:05.435941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:05.435948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:05.435960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:05.435964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:05.435970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:05.435975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:05.435979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:05.435983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:05.435993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:05.435999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:05.436002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
SHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.634931Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.634940Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:50.634946Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:50.634951Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:51:50.635077Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.635086Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.635090Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:50.635093Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:50.635097Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:50.635187Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T08:51:50.635194Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:50.635210Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T08:51:50.635223Z node 144 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T08:51:50.635405Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 
2024-11-21T08:51:50.635412Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:50.635425Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:50.635430Z node 144 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:50.635437Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:50.635449Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.635453Z node 144 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.635457Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:50.635462Z node 144 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:51:50.635552Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.635562Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.635566Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:50.635569Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:50.635573Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:50.635583Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:51:50.637136Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.637162Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:50.637431Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.637467Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.637557Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
2024-11-21T08:51:50.637591Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.637598Z node 144 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:51:50.637611Z node 144 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:51:50.637615Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:50.637621Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:51:50.637635Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [144:400:2375] message: TxId: 1003 2024-11-21T08:51:50.637641Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:50.637648Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:50.637653Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:50.637669Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:50.637674Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:50.637677Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:50.637693Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:50.637697Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:50.637701Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:50.637710Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:50.638323Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:50.638334Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:598:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:50.638461Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:50.638515Z node 144 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 61us result status StatusSuccess 2024-11-21T08:51:50.638614Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.863855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:50.863880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.863886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.863891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.863898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.863903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.863912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.864000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.874525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.874548Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.877892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.878748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.878792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:50.880680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:50.880911Z node 1 :FLAT_TX_SCHEMESHARD INFO:
Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.881034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.881110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.882114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.882420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.882430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.882472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.882480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.882486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.882501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.883727Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.901278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.901365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.901454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.901505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.901513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.903735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.903771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.903821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.903831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.903835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:50.903839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2024-11-21T08:51:50.904230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.904242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.904247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.904572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.904581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.904585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.904591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.905030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.905366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.905417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.905548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.905567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.905573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.905617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.905620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.905644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.905653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.906013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.906022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.906062Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.906069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.906136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.906142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.906151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.906153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.906157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.906160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.906163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.906166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.906174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.906179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.906181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.906447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.906463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.906467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.906473Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.906477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.906493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:51.134436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 104 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T08:51:51.134996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.135039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.135063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.135071Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:51:51.135089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T08:51:51.135093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:51:51.135100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T08:51:51.135112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 104 2024-11-21T08:51:51.135121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:51:51.135127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:51:51.135131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:51:51.135168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:51:51.135601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:51:51.135612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:570:2548] TestWaitNotification: OK eventTxId 104 2024-11-21T08:51:51.135748Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:51:51.135824Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 85us result status StatusSuccess 2024-11-21T08:51:51.135954Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 104 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Version: 1 UseTiering: "Tiering1" } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 105 2024-11-21T08:51:51.136740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { UseTiering: "Tiering1" } ColumnShardCount: 1 } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:51.136794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.136844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:51:51.136857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2024-11-21T08:51:51.136895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:51:51.136932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:51.136937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.136949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T08:51:51.136959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 
2024-11-21T08:51:51.137381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusAccepted TxId: 105 SchemeshardId: 72057594046678944 PathId: 6, at schemeshard: 72057594046678944 2024-11-21T08:51:51.137419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2024-11-21T08:51:51.137455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:51.137459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:51.137493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T08:51:51.137504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:51.137508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 2 2024-11-21T08:51:51.137511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 6 2024-11-21T08:51:51.137588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.137593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState at tabletId# 72057594046678944 2024-11-21T08:51:51.137619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2024-11-21T08:51:51.137701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.137709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.137711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:51:51.137715Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2024-11-21T08:51:51.137718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T08:51:51.137796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.137804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.137806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:51:51.137808Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T08:51:51.137810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:51.137819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:51:51.138325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2024-11-21T08:51:51.138357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 0, tablet: 72075186233409546 2024-11-21T08:51:51.138795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:51:51.138847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:51.201076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:51.201102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:51.201107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:51.201112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:51.201118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:51.201122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:51.201130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:51.201214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:51.212503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:51.212526Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:51.215294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:51.216078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:51.216107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:51.217408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:51.217586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2024-11-21T08:51:51.217675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:51.217733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:51.218570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:51.218847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:51.218856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:51.218901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:51.218908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:51.218914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:51.218925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.220111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:51.236271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:51.236364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.236431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:51.236479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:51.236488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.237395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:51.237416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:51.237460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.237469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:51.237473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:51.237478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:51.237870Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.237877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:51.237881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:51.238175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.238181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.238187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:51.238193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:51.238750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:51.239124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:51.239173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:51.239340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:51.239359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:51.239368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:51.239414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:51.239419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:51.239446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:51.239458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:51.239787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:51.239793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:51.239836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:51:51.239840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:51.239920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.239925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:51.239935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:51.239939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:51.239945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:51.239954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:51.239959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:51.239962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:51.239971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:51.239976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:51.239980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:51.240289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:51.240303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:51.240307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:51.240312Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:51.240317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:51.240330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
n: 72057594037968897 2024-11-21T08:51:51.325008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T08:51:51.325014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:51:51.325028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 2 -> 3 2024-11-21T08:51:51.325437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:51.325458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:51.325974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.326018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.326024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState at tabletId# 72057594046678944 2024-11-21T08:51:51.326052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409547 2024-11-21T08:51:51.327492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382272 2024-11-21T08:51:51.327527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409547 2024-11-21T08:51:51.330938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:51:51.330968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:51:51.331000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:51:51.331018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:51:51.331035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:51:51.331052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:51:51.331067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2024-11-21T08:51:51.331085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:51:51.331104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:51:51.331122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:51:51.331140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:51:51.331160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:51:51.332247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:51:51.332291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:51:51.332301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:51:51.332306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:51:51.332318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:51:51.332325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:51:51.332329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:51:51.332347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:51:51.332353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:51:51.332357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:51:51.332369Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:51:51.332376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:51:51.332380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:51:51.332390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:51:51.332398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:51:51.332402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:51:51.332410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:51:51.332415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:51:51.332420Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:51:51.332462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:51:51.332469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:51:51.332472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T08:51:51.332490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:51:51.332496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:51:51.332500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T08:51:51.332517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:51:51.332524Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:51:51.332528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T08:51:51.332544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:51:51.332551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:51:51.332555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T08:51:51.332567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:51:51.332574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; TestModificationResult got TxId: 102, wait until txId: 102 >> TOlap::AlterTtl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2024-11-21T08:51:51.172969Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2024-11-21T08:51:51.177330Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2024-11-21T08:51:51.178812Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2024-11-21T08:51:51.179548Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:126:2152] 2024-11-21T08:51:51.179559Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:126:2152] 2024-11-21T08:51:51.179608Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:126:2152] 2024-11-21T08:51:51.179615Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:126:2152] 2024-11-21T08:51:51.179623Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:126:2152] 2024-11-21T08:51:51.179628Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:126:2152] 2024-11-21T08:51:51.179641Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:126:2152] 2024-11-21T08:51:51.179664Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:126:2152] Type# 269877249 Reason# ActorUnknown 2024-11-21T08:51:51.179681Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:129:2154] 2024-11-21T08:51:51.179685Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:129:2154] 2024-11-21T08:51:51.179692Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:129:2154] 2024-11-21T08:51:51.179696Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:129:2154] 2024-11-21T08:51:51.179701Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:129:2154] 2024-11-21T08:51:51.179704Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:129:2154] 2024-11-21T08:51:51.179710Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:129:2154] 2024-11-21T08:51:51.179721Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# 
[1:129:2154] Type# 269877249 Reason# ActorUnknown 2024-11-21T08:51:51.179733Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2024-11-21T08:51:51.179736Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2024-11-21T08:51:51.179744Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2024-11-21T08:51:51.179748Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2024-11-21T08:51:51.179753Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2024-11-21T08:51:51.179756Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2024-11-21T08:51:51.179761Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2024-11-21T08:51:51.179773Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> TTabletPipeTest::TestOpen >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:50.892047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:50.892071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.892076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:50.892081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:50.892087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:50.892091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:50.892099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:50.892187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:50.902129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:50.902145Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:50.904544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:50.905175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:50.905208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:50.906463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T08:51:50.906640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:50.906757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.906825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:50.907828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.908033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.908043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.908075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:50.908082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.908086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:50.908096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.909438Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:50.921636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:50.921716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.921779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:50.921830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:50.921838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.922623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.922653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:50.922698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.922708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:50.922712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:50.922718Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:50.923145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.923158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:50.923162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:50.923513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.923525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.923531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.923538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.924137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:50.924569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:50.924622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:50.924817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:50.924844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:50.924854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.924912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:50.924920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:50.924950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.924964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:50.925421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:50.925432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:50.925475Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:50.925481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:50.925563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:50.925571Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:50.925585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:50.925589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.925596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:50.925601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:50.925606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:50.925611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:50.925623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:50.925631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:50.925635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:50.925938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.925957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:50.925962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:50.925967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:50.925973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:50.925990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rd: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T08:51:51.592060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:51.592091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T08:51:51.592098Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:51:51.592111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.592116Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2024-11-21T08:51:51.595438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.595532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.595542Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply ProgressState at tablet: 72057594046678944 2024-11-21T08:51:51.595562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T08:51:51.595621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:51.596441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T08:51:51.596483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 105 at step: 5000006 2024-11-21T08:51:51.596662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:51.596689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:51.596698Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2024-11-21T08:51:51.596833Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2024-11-21T08:51:51.596867Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:51.596880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:51.597058Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; 2024-11-21T08:51:51.597079Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T08:51:51.602878Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:51.602902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:51.602967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:51:51.603008Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:51.603015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2024-11-21T08:51:51.603024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:51:51.603118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.603127Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:51:51.603137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T08:51:51.603320Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.603332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.603336Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:51:51.603342Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T08:51:51.603348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:51.603438Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.603447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:51:51.603451Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:51:51.603454Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2024-11-21T08:51:51.603477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:51:51.603486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:51:51.609674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T08:51:51.615721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:51:51.615794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:51:51.641503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T08:51:51.641540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:51.641579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T08:51:51.641592Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T08:51:51.641765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 2024-11-21T08:51:51.641771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:51.641783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T08:51:51.643198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.643298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.643330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:51:51.643340Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:51:51.643365Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:51:51.643369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:51:51.643377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 
1/1, is published: true 2024-11-21T08:51:51.643401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:334:2314] message: TxId: 105 2024-11-21T08:51:51.643411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:51:51.643417Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:51:51.643422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:51:51.643467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:51:51.643910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:51:51.643922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:512:2490] TestWaitNotification: OK eventTxId 105 >> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> TTabletPipeTest::TestOpen [GOOD] >> KqpErrors::ResolveTableError >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests100Inflight1BlobSize1000 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> DataShardWrite::UpsertPrepared+Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:44.662312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:44.662332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:44.662338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:44.662343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:44.662356Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:44.662360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:44.662369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:44.662433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:44.674467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:44.674486Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:44.676883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:44.676983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:44.677004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:44.679538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:44.679613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:44.679726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.679886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:44.680554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.680771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:44.680780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.680789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:44.680794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:44.680799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:44.680826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:44.682018Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:44.698468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:44.698533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:51:44.698579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:44.698621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:44.698629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:44.699218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:44.699230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:44.699234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:44.699525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:44.699821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.699836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.699843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.700438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:44.700823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:44.700869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:44.701034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:44.701057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:44.701064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.701151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:44.701159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:44.701188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:44.701200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:44.701594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:44.701605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:44.701646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:44.701651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:44.701732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:44.701738Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:44.701750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:44.701754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.701760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:44.701766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:44.701770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:44.701774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:44.701785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:44.701791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:44.701795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
78944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:51:52.145435Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:51:52.145572Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 556 } } 2024-11-21T08:51:52.145582Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:52.145603Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 556 } } 2024-11-21T08:51:52.145617Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 556 } } 2024-11-21T08:51:52.146036Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.146055Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.146060Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:52.146065Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:51:52.146070Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:52.146220Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.146228Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:52.146244Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.146251Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:52.146259Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.146275Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:52.146279Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.146284Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:52.146291Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:51:52.146639Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.146798Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.146808Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:52.146813Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:51:52.146817Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:51:52.146835Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:51:52.148171Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.148228Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.149232Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.149269Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.149293Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.149311Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.149427Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.149468Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.149478Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:51:52.149500Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:51:52.149505Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1003 ready parts: 4/4 2024-11-21T08:51:52.149512Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:51:52.149536Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:459:2424] message: TxId: 1003 2024-11-21T08:51:52.149546Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:52.149554Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:52.149559Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:52.149577Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:52.149583Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:52.149586Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:52.149591Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:51:52.149595Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:52.149597Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:52.149616Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:52.149621Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:51:52.149624Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:51:52.149633Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:51:52.150198Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:52.150212Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:653:2575] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:52.150322Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:52.150398Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 86us result status StatusSuccess 2024-11-21T08:51:52.150517Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 
PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] [GOOD] >> ExternalBlobsMultipleChannels::Simple |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2024-11-21T08:51:07.337524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652447244344551:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:07.338740Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652444383746564:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fbf/r3tmp/tmpK31ei8/pdisk_1.dat 2024-11-21T08:51:07.368585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:07.369276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:51:07.398497Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16040, node 1 2024-11-21T08:51:07.413309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.413320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.413322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.413362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13743 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:51:07.436372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.436394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.438126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.472953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:07.475690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.475718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.480914Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:07.482386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:07.525323Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:07.651204Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:07.651236Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652444383746669:2275], Start check tables existence, number paths: 2 2024-11-21T08:51:07.651415Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.651423Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:07.652310Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652444383746669:2275], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:07.652323Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652444383746669:2275], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:07.652328Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652444383746669:2275], Successfully finished 2024-11-21T08:51:07.652350Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:07.652580Z node 2 
:KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2024-11-21T08:51:07.772268Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:07.773020Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTQ3ZDdjOTEtM2ZhOTM5N2ItMjQxOWQ4NzQtNjU3OTFhMTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTQ3ZDdjOTEtM2ZhOTM5N2ItMjQxOWQ4NzQtNjU3OTFhMTU= 2024-11-21T08:51:07.773225Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2024-11-21T08:51:07.773230Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:07.773233Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:07.773985Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTQ3ZDdjOTEtM2ZhOTM5N2ItMjQxOWQ4NzQtNjU3OTFhMTU=, ActorId: [1:7439652447244345166:2298], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.774074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652447244345165:2297], Start check tables existence, number paths: 2 2024-11-21T08:51:07.774546Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652447244345165:2297], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:07.774554Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652447244345165:2297], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:07.774558Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652447244345165:2297], Successfully finished 2024-11-21T08:51:07.774579Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:07.776492Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:07.777212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:07.781062Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976720658 2024-11-21T08:51:07.781866Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:07.787152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:51:07.842297Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:07.844224Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652447244345192:2493], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:07.844802Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzZjZhYzgtODExMDRiM2EtNjQyZWYyNzctNzE3MTg3NWQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTMzZjZhYzgtODExMDRiM2EtNjQyZWYyNzctNzE3MTg3NWQ= 2024-11-21T08:51:07.844877Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzZjZhYzgtODExMDRiM2EtNjQyZWYyNzctNzE3MTg3NWQ=, ActorId: [1:7439652447244345279:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:07.844912Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzZjZhYzgtODExMDRiM2EtNjQyZWYyNzctNzE3MTg3NWQ=, ActorId: [1:7439652447244345279:2300], ActorState: ReadyState, TraceId: 01jd6yp0y48fk7d75ee7y7w1z3, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652447244345278:2559] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:07.844931Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:07.844934Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:07.844943Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652447244345279:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MTMzZjZhYzgtODExMDRiM2EtNjQyZWYyNzctNzE3MTg3NWQ= 2024-11-21T08:51:07.844953Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652447244345281:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:07.844968Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652447244345282:2302], Database: /Root, Start database fetching 2024-11-21T08:51:07.845311Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652447244345282:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:07.845337Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652447244345281:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:07.845343Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T08:51:07.845349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T08:51:07.845352Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:07.845418Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652447244345293:2304], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
... iles were not loaded TServer::EnableGrpc on GrpcPort 28006, node 11 2024-11-21T08:51:52.108395Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:52.108411Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:52.108413Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:52.108471Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:51:52.144355Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:52.153451Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:52.153486Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:52.154832Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:52.156521Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:52.178382Z node 11 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:52.429886Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:52.429914Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439652640104993157:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:52.430319Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk= 2024-11-21T08:51:52.430474Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:52.430483Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:52.430485Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:52.430492Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439652640104993157:2298], Describe table 
/Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:52.430498Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439652640104993157:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:52.430501Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439652640104993157:2298], Successfully finished 2024-11-21T08:51:52.430795Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:52.430837Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:52.431067Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:52.431667Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:52.431939Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:52.431971Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:52.433251Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:52.521984Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:52.522652Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439652640104993175:2283], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:52.522765Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T08:51:52.522773Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2024-11-21T08:51:52.522800Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439652640104993233:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:52.523042Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439652640104993233:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:52.523060Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2024-11-21T08:51:52.523064Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:52.523119Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439652640104993242:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:52.523323Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439652640104993242:2301], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:52.524128Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:52.524152Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:52.524165Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: ReadyState, TraceId: 01jd6yqcjb5xye06thve9h32xg, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T08:51:52.524168Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439652640104993254:2303], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:52.524395Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439652640104993254:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:52.524409Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:52.526300Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:51:52.526776Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439652640104993242:2301], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:52.526779Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: ExecuteState, TraceId: 01jd6yqcjb5xye06thve9h32xg, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [11:7439652640104993263:2299] WorkloadServiceCleanup: 0 2024-11-21T08:51:52.527189Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: CleanupState, TraceId: 01jd6yqcjb5xye06thve9h32xg, EndCleanup, isFinal: 0 2024-11-21T08:51:52.527208Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: CleanupState, TraceId: 01jd6yqcjb5xye06thve9h32xg, Sent query response back to proxy, proxyRequestId: 3, proxyId: [11:7439652640104992594:2060] 2024-11-21T08:51:52.528224Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:52.528237Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:52.528240Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:52.528244Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:52.528258Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YjM3M2IwMDQtNmRhZDM2NjgtYmJlZDIzOGMtMWZkMGQ1Njk=, ActorId: [11:7439652640104993173:2299], ActorState: unknown state, Session actor destroyed >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:09.071395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:09.071420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:09.071426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:09.071431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:09.071445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:09.071449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:09.071459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:09.071534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:09.085061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:09.085096Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:09.087692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:09.087791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:09.087822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:09.090455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:09.090531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:09.090662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.090863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:09.091544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.091856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:09.091870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.091883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:09.091891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:09.091897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:09.091938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:09.093478Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:09.112635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:09.112753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.112827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:09.112895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:09.112907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.113768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.113799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:09.113847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.113860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:09.113866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:09.113873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:09.114465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.114487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:09.114495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:09.116239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.116255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:51:09.116261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.116269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.116989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:09.117533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:09.117581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:09.117768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:09.117797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:09.117805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.117888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:09.117896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:09.117930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:09.117944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:09.118439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:09.118451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:09.118495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:09.118500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:09.118576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:09.118583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:09.118595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T08:51:09.118599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.118605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:09.118610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:09.118615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:09.118620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:09.118632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:09.118639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:09.118657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 08:51:52.870346Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: false 2024-11-21T08:51:52.870509Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 627065227531 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.870515Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T08:51:52.870600Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 239 } } 2024-11-21T08:51:52.870614Z node 146 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 239 } } 2024-11-21T08:51:52.870851Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.870876Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 627065227531 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.870881Z node 146 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:52.870889Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 627065227531 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:52.870899Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, 
txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:52.870903Z node 146 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.870908Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:52.870917Z node 146 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:51:52.871014Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.871057Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.871062Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:52.871068Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:52.871074Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:52.871123Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.871127Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:52.871131Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:52.871134Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:51:52.871192Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.871201Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.871204Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:52.871208Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:52.871213Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:52.871221Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:51:52.872850Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.872900Z node 146 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.872989Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:51:52.872997Z node 146 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:51:52.873011Z node 146 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:51:52.873016Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:52.873021Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:51:52.873037Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [146:399:2374] message: TxId: 1003 2024-11-21T08:51:52.873043Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:51:52.873050Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:52.873055Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:52.873065Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:51:52.873070Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:52.873073Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:52.873078Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:52.873082Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:52.873085Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:52.873101Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:52.873106Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:51:52.873109Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:51:52.873138Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:52.873202Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.873217Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.873235Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:52.873738Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:52.873748Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [146:593:2525] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:52.873851Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:52.873903Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 60us result status StatusSuccess 2024-11-21T08:51:52.874018Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStream[PipeResets] [GOOD] >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] [GOOD] >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests100Inflight1BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10Inflight10BlobSize1000 >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:46.552079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:46.552103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.552108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:46.552113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:46.552126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:46.552130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:46.552140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.552240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:46.570251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:46.570271Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.574252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:46.574368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:46.574396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:46.585490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:46.585571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:46.585688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.585843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.586514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.586764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.586773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.586785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:46.586791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.586797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:46.586831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:46.587949Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.605884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:46.605947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.605994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:46.606035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:46.606043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.606823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.606846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:46.606879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.606889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:46.606893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:46.606898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:46.607253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.607263Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:46.607268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:46.607544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.607552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.607556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.607562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.608125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:46.608498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:46.608550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:46.608704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.608727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:46.608733Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.608782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:46.608789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.608811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:46.608821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.609320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.609335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.609363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.609370Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:46.609447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.609453Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:46.609463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:46.609467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.609473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:46.609478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.609483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:46.609486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:46.609497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:46.609502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:46.609506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 944 2024-11-21T08:51:53.751471Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751488Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751492Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:53.751497Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:53.751503Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:53.751616Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751626Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751629Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:53.751632Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:53.751636Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:51:53.751809Z node 26 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 242 } } 2024-11-21T08:51:53.751817Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:53.751834Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 242 } } 2024-11-21T08:51:53.751847Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 242 } } 2024-11-21T08:51:53.751986Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751996Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.751999Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:53.752003Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:53.752009Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:53.752019Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:51:53.752087Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:53.752093Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:53.752105Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:53.752111Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:53.752117Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 
2024-11-21T08:51:53.752126Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:53.752130Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:53.752134Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:53.752139Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:51:53.753853Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.753892Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.753921Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:53.753934Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:53.753953Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:53.754033Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:53.754040Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:51:53.754053Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:51:53.754057Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:53.754064Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:51:53.754078Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T08:51:53.754088Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:53.754095Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:53.754100Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:53.754112Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:53.754116Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:53.754119Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:53.754132Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:53.754136Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:53.754139Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:53.754147Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:53.755384Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:53.755398Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:596:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:53.755524Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:53.755586Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 71us result status StatusSuccess 2024-11-21T08:51:53.755687Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] 
sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:46.406009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:46.406029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.406034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:46.406039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:46.406050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:46.406054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:46.406063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.406131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:46.417763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:46.417781Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.422465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:46.422563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:46.422583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:46.430565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:46.430645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:46.430766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.430973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.431762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.432005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.432018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.432029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:46.432035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.432039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T08:51:46.432067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:46.433503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.452161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:46.452252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.452316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:46.452366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:46.452375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.456567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.456603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:46.456650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.456660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:46.456664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:46.456668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:46.457222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.457242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:46.457248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:46.457658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.457672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.457676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.457683Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.458385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:46.458824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:46.458871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:46.459028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.459052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:46.459060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.459123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:46.459130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.459159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:46.459172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.459632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.459646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.459676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.459681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:46.459745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.459752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:46.459765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:46.459770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.459775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:46.459781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.459786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:46.459790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:46.459801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:46.459806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:46.459809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 944 2024-11-21T08:51:54.023151Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023165Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023170Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:54.023176Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:51:54.023188Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:51:54.023333Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023343Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023346Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:54.023350Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:51:54.023354Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:51:54.023518Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 287 } } 2024-11-21T08:51:54.023526Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:54.023544Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: 
COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 287 } } 2024-11-21T08:51:54.023558Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 287 } } 2024-11-21T08:51:54.023690Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023701Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.023705Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:51:54.023709Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:51:54.023713Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:51:54.023724Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:51:54.023793Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:54.023802Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:51:54.023815Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:54.023821Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:51:54.023827Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:51:54.023838Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.023841Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.023845Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:54.023852Z node 26 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:51:54.025600Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.025638Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.025670Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.025687Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:51:54.025708Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.025797Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.025804Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:51:54.025819Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:51:54.025824Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:54.025830Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:51:54.025845Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T08:51:54.025852Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:51:54.025860Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:51:54.025865Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:51:54.025879Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:51:54.025888Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:51:54.025892Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:51:54.025907Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:51:54.025911Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:51:54.025914Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:51:54.025923Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:51:54.026534Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:51:54.026546Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:596:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:51:54.026643Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:54.026697Z node 
26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 61us result status StatusSuccess 2024-11-21T08:51:54.026804Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Viewer::Cluster10000Tablets >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize1000 >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> KqpErrors::ResolveTableError [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] Test command err: 2024-11-21T08:51:49.783043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652627119990446:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:49.783063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f4f/r3tmp/tmpEz5oeo/pdisk_1.dat 2024-11-21T08:51:49.847060Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12540, node 1 2024-11-21T08:51:49.868075Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:49.868087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:49.868088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:49.868122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:49.887634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:49.887675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:30561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:49.894843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:49.918058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.919440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.919464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:51:49.932660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:49.932749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:49.932755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:51:49.940522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:49.940649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:49.940664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:51:49.941273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.942316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109990, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.942330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:51:49.942413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:51:49.942923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.942977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.942987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:49.942999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:51:49.943007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:49.943018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:51:49.943989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:51:49.944003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:51:49.944007Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:49.944024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:51:50.157925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652631414958667:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:50.157969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:50.167523Z node 1 :TX_PROXY DEBUG: actor# [1:7439652627119990665:2137] Handle TEvProposeTransaction 2024-11-21T08:51:50.167539Z node 1 :TX_PROXY DEBUG: actor# [1:7439652627119990665:2137] TxId# 281474976715658 ProcessProposeTransaction 2024-11-21T08:51:50.167553Z node 1 :TX_PROXY DEBUG: actor# [1:7439652627119990665:2137] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7439652631414958688:2602] 2024-11-21T08:51:50.185173Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2024-11-21T08:51:50.185324Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:51:50.185337Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:51:50.185365Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:50.185415Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:51:50.185425Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2024-11-21T08:51:50.185469Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 HANDLE EvClientConnected 2024-11-21T08:51:50.185573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:50.185713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:51:50.185927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:50.185933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:50.187861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T08:51:50.187935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:50.188009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:50.188028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 
2024-11-21T08:51:50.188118Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2024-11-21T08:51:50.188129Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652631414958688:2602] txid# 281474976715658 SEND to# [1:7439652631414958687:2300] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2024-11-21T08:51:50.188559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:50.188578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:50.188583Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:51:50.188630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:50.188632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:50.188634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:51:50.189195Z node 1 :FLAT_TX_SCHEMESHAR ... : 1732179113749, at schemeshard: 72057594046644480 2024-11-21T08:51:53.705957Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 129 2024-11-21T08:51:53.706423Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:53.706513Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:53.706530Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:53.706783Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710761 2024-11-21T08:51:53.706790Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710761 2024-11-21T08:51:53.706796Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710761, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 REQUEST: HEAD /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:10039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 844F9658-C479-4268-88C4-1D0E06B2425F amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20241121/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=110b3707e62b0f3e94741b951cc53ac6842eaa5829b35ec91961977da45beb1b content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: 
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20241121T085153Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 REQUEST: GET /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:10039 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1E34F2CA-A2D8-4E30-B913-696DE2F91AF4 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20241121/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=b9865ecc21305bc81b4122043f08b1c187565ab20b3603549f0e011d2ed7f226 content-type: application/xml range: bytes=0-27 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20241121T085153Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 2024-11-21T08:51:53.726298Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 HandleReply TEvSchemaChanged at tablet# 72057594046644480 message# Source { RawX1: 7439652644166758416 RawX2: 4503629692143916 } Origin: 72075186224037891 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 56 RowsProcessed: 7 } 2024-11-21T08:51:53.726328Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710761:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.726337Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 129 -> 240 2024-11-21T08:51:53.726387Z node 7 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 281474976710761:0, reason# domain is not a serverless db, domain# /Root, domainPathId# [OwnerId: 72057594046644480, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046644480, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:51:53.728036Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710761:0 ProgressState 2024-11-21T08:51:53.728065Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T08:51:53.728079Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T08:51:53.728730Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T08:51:53.864617Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439652644166758570:2352] [0] Resolve database: name# /Root 2024-11-21T08:51:53.864832Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439652644166758570:2352] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:51:53.864840Z node 7 :TX_PROXY DEBUG: [GetImport] 
[7:7439652644166758570:2352] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T08:51:53.865260Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439652644166758570:2352] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715666 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:10039" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732179113 } EndTime { seconds: 1732179113 } } 2024-11-21T08:51:53.876763Z node 7 :TX_PROXY DEBUG: actor# [7:7439652639871789426:2111] Handle TEvNavigate describe path /Root/table 2024-11-21T08:51:53.876783Z node 7 :TX_PROXY DEBUG: Actor# [7:7439652644166758578:3353] HANDLE EvNavigateScheme /Root/table 2024-11-21T08:51:53.876930Z node 7 :TX_PROXY DEBUG: Actor# [7:7439652644166758578:3353] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:53.876990Z node 7 :TX_PROXY DEBUG: Actor# [7:7439652644166758578:3353] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false ReturnSetVal: true } 2024-11-21T08:51:53.877440Z node 7 :TX_PROXY DEBUG: Actor# [7:7439652644166758578:3353] Handle TEvDescribeSchemeResult Forward to# [7:7439652644166758574:2353] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1732179113735 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Int32" TypeId: 1 Id: 1 DefaultFromSequence: "_serial_column_Key" NotNull: true IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 1 IsBackup: false Sequences { Name: "_serial_column_Key" PathId { OwnerId: 72057594046644480 LocalId: 12 } Version: 1 SequenceShard: 72075186224037888 MinValue: 1 MaxValue: 2147483647 StartValue: 1 Cache: 1 Increment: 1 Cycle: false SetVal { NextValue: 8 NextUsed: false } DataType: "Int64" } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 2024-11-21T08:51:53.900894Z node 7 :TX_PROXY DEBUG: actor# [7:7439652639871789426:2111] Handle TEvExecuteKqpTransaction 2024-11-21T08:51:53.900916Z node 7 :TX_PROXY DEBUG: actor# [7:7439652639871789426:2111] TxId# 281474976715667 ProcessProposeKqpTransaction 2024-11-21T08:51:53.902199Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqdwvcy157kp2rrbhsaef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk3MjYxNzUtYWMyMWE3MGUtMTIxNGM0YWItMTQ0MTIzNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2024-11-21T08:51:53.358672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:53.358814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f84/r3tmp/tmpbFfXGB/pdisk_1.dat 2024-11-21T08:51:53.492491Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:53.590334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.672948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.672986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.691655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.691700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.704925Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:51:53.705093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:53.705206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:54.110836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:54.843654Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T08:51:54.843679Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Begin literal execution, txs: 1 2024-11-21T08:51:54.843689Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:51:54.843694Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T08:51:54.843709Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:51:54.845058Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T08:51:54.846584Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2024-11-21T08:51:54.846596Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T08:51:54.846602Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:51:54.846606Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T08:51:54.846612Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:51:54.846714Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T08:51:54.846808Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 0. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2024-11-21T08:51:54.846818Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Executing physical tx, type: 2, stages: 1 2024-11-21T08:51:54.846828Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2024-11-21T08:51:54.846844Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:51:54.846901Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2024-11-21T08:51:54.846942Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T08:51:54.846958Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T08:51:54.847005Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2024-11-21T08:51:54.847014Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T08:51:54.847168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2024-11-21T08:51:54.847174Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T08:51:54.848263Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1476 RawX2: 4294970201 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Rows: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=" } RequestContext { key: "TraceId" value: "01jd6yqespf4mkw6kazvn739n8" } EnableSpilling: false 2024-11-21T08:51:54.848334Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T08:51:54.848355Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T08:51:54.848370Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1476:2905] TxId: 281474976715658. 
Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:51:54.848374Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2024-11-21T08:51:54.848379Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T08:51:54.848386Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T08:51:54.848390Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T08:51:54.882450Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T08:51:54.882515Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2024-11-21T08:51:54.882522Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2024-11-21T08:51:54.882532Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd6yqespf4mkw6kazvn739n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiZGViZjAtMjFkY2MwNTgtNDc1YmMzMGYtNmU0NTYxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T08:51:54.886329Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1491:2924], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2024-11-21T08:51:54.886622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Q0NDUwMGYtYjYzMWI4ZWYtMjBhYWQ5NWYtODJhM2U2Mzk=, ActorId: [1:1489:2922], ActorState: ExecuteState, TraceId: 01jd6yqew328cz3305a4am52t6, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2024-11-21T08:51:49.012674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652625326773402:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:49.012760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f56/r3tmp/tmpjujvHc/pdisk_1.dat 2024-11-21T08:51:49.082962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29410, node 1 2024-11-21T08:51:49.103722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:49.103733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:49.103735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:49.103770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:49.112757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:49.112805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:49.114524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:51:49.146125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.147312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.147333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.148030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:49.148087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:49.148092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:51:49.148499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:49.148506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:51:49.148892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:51:49.149961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109199, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.149973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:51:49.150048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:51:49.150423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.150476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.150485Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:49.150496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:51:49.150504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:49.150516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:51:49.151255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:51:49.151269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:51:49.151273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:49.151320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T08:51:49.152477Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:49.297656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652625326774134:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.297695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.317277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.317458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:51:49.317630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.317636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.318462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T08:51:49.318533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.318592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.318622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:51:49.318685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:51:49.318810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:49.318828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:49.318839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:51:49.318889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:49.318899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:49.318901Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:51:49.320939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:51:49.320981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T08:51:49.321456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:51:49.374422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:51:49.374439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:51:49.374469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T08:51:49.375061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:51:49.375964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109423, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.375979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179109423 2024-11-21T08:51:49.376009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T08:51:49.376470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.376550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.376575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:51:49.376836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:49.376849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:49.376853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [Own ... FO: TAlterTable TPropose operationId#281474976710759:2 HandleReply TEvOperationPlan, operationId: 281474976710759:2, stepId: 1732179114645, at schemeshard: 72057594046644480 2024-11-21T08:51:54.598829Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 128 -> 129 2024-11-21T08:51:54.599455Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:54.599623Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:54.599643Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:1 ProgressState 2024-11-21T08:51:54.599660Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:1 progress is 1/3 2024-11-21T08:51:54.599695Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 ProgressState at tablet: 72057594046644480 2024-11-21T08:51:54.599720Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:51:54.600047Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T08:51:54.600058Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T08:51:54.600063Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 6 2024-11-21T08:51:54.600116Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 
Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T08:51:54.600120Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T08:51:54.600122Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T08:51:54.600140Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T08:51:54.600143Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T08:51:54.600145Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2024-11-21T08:51:54.601206Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037892 Status: COMPLETE TxId: 281474976710759 Step: 1732179114645 OrderId: 281474976710759 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 254 } } 2024-11-21T08:51:54.601295Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1732179114645 OrderId: 281474976710759 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 321 } } 2024-11-21T08:51:54.601323Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:51:54.601332Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:51:54.601338Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T08:51:54.601376Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T08:51:54.601398Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2024-11-21T08:51:54.601400Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2024-11-21T08:51:54.601837Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2024-11-21T08:51:54.601860Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2024-11-21T08:51:54.601900Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2024-11-21T08:51:54.601912Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2024-11-21T08:51:54.601922Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T08:51:54.601949Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2024-11-21T08:51:54.601960Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 281474976710759:2 2024-11-21T08:51:54.602214Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T08:51:54.603052Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715667, at schemeshard: 72057594046644480 2024-11-21T08:51:54.603438Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/table 2024-11-21T08:51:54.603487Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:54.603879Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T08:51:54.603904Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T08:51:54.603957Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T08:51:54.604379Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2024-11-21T08:51:54.612300Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179114659, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:54.612326Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1732179114659 2024-11-21T08:51:54.612332Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T08:51:54.613565Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2024-11-21T08:51:54.613588Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T08:51:54.613605Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T08:51:54.614049Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 Restore ACL "/home/runner/.ya/build/build_root/jptk/001f56/r3tmp/tmpCSIMRq/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/jptk/001f56/r3tmp/tmpCSIMRq/table/permissions.pb"2024-11-21T08:51:54.677735Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:51:54.677824Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715669:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:54.677838Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:51:54.677858Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715669:0 progress is 1/1 2024-11-21T08:51:54.677907Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715669:0 2024-11-21T08:51:54.677934Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715669, publications: 4, subscribers: 0 2024-11-21T08:51:54.678677Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 
281474976715669, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2024-11-21T08:51:54.678721Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:54.678854Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:54.679571Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T08:51:54.679580Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T08:51:54.679585Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2024-11-21T08:51:54.679639Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T08:51:54.679643Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T08:51:54.679644Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T08:51:54.679661Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T08:51:54.679664Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T08:51:54.679665Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 7 2024-11-21T08:51:54.679682Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T08:51:54.679685Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T08:51:54.679687Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T08:51:54.679693Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715669, subscribers: 0 Restore completed successfully >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests100Inflight10BlobSize1000 >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> ExternalBlobsMultipleChannels::Simple [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> 
DataShardWrite::UpsertLostPrepareArbiter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2024-11-21T08:51:49.521720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652628044749194:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:49.521849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f54/r3tmp/tmpToX68Q/pdisk_1.dat 2024-11-21T08:51:49.590641Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18262, node 1 2024-11-21T08:51:49.618505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:49.618520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:49.618522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:49.618562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:51:49.621893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:49.621919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:49.623624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:51:49.653485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.654292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.654309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.654774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:51:49.654825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:51:49.654830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:51:49.655149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:51:49.655156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:51:49.655490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.656281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179109703, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:49.656293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:51:49.656367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:51:49.656946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.657000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.657010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:51:49.657021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:51:49.657029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:51:49.657041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:51:49.657676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:51:49.657696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:51:49.657701Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:51:49.657715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T08:51:49.659630Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:49.833979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652628044749968:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.834016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:49.861933Z node 1 :TX_PROXY DEBUG: actor# [1:7439652628044749275:2137] Handle TEvProposeTransaction 2024-11-21T08:51:49.861950Z node 1 :TX_PROXY DEBUG: actor# [1:7439652628044749275:2137] TxId# 281474976715658 ProcessProposeTransaction 2024-11-21T08:51:49.861965Z node 1 :TX_PROXY DEBUG: actor# [1:7439652628044749275:2137] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7439652628044749989:2589] 2024-11-21T08:51:49.871481Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2024-11-21T08:51:49.871680Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:51:49.871697Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:51:49.871733Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:49.871762Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:51:49.871772Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2024-11-21T08:51:49.871812Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 HANDLE EvClientConnected 2024-11-21T08:51:49.871902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.872036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:51:49.872185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:49.872190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:51:49.876708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T08:51:49.876804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:51:49.876889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:51:49.876914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, 
operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:51:49.877028Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2024-11-21T08:51:49.877038Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652628044749989:2589] txid# 281474976715658 SEND to# [1:7439652628044749988:2300] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2024-11-21T08:51:49.877460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:49.877470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:49.877475Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:51:49.877525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:51:49.877528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:51:49.877530Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, Loca ... 281474976710765:2 ProgressState 2024-11-21T08:51:54.810621Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:2 progress is 3/3 2024-11-21T08:51:54.810629Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2024-11-21T08:51:54.810660Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:1 2024-11-21T08:51:54.810662Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:2 2024-11-21T08:51:54.811005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2024-11-21T08:51:54.811637Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976710762, at schemeshard: 72057594046644480 2024-11-21T08:51:54.812014Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710766:0, path# /Root/table 2024-11-21T08:51:54.812076Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:51:54.812559Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T08:51:54.812593Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710766, status# StatusAccepted 2024-11-21T08:51:54.812648Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 ProgressState 2024-11-21T08:51:54.812994Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046644480 2024-11-21T08:51:54.820917Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 
1732179114869, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:51:54.820944Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 1732179114869 2024-11-21T08:51:54.820950Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710766:0 128 -> 240 2024-11-21T08:51:54.821661Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710766:0 ProgressState 2024-11-21T08:51:54.821693Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710766:0 progress is 1/1 2024-11-21T08:51:54.821707Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710766:0 2024-11-21T08:51:54.822175Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2024-11-21T08:51:54.822921Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710762 2024-11-21T08:51:54.887230Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439652649206637954:2344] [0] Resolve database: name# /Root 2024-11-21T08:51:54.887583Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439652649206637954:2344] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:51:54.887600Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439652649206637954:2344] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T08:51:54.887814Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439652649206637954:2344] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:12683" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732179114 } EndTime { seconds: 1732179114 } } 2024-11-21T08:51:54.889279Z node 10 :TX_PROXY DEBUG: actor# [10:7439652644911668712:2111] Handle TEvNavigate describe path /Root/table 2024-11-21T08:51:54.889297Z node 10 :TX_PROXY DEBUG: Actor# [10:7439652649206637960:3613] HANDLE EvNavigateScheme /Root/table 2024-11-21T08:51:54.889346Z node 10 :TX_PROXY DEBUG: Actor# [10:7439652649206637960:3613] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:51:54.889381Z node 10 :TX_PROXY DEBUG: Actor# [10:7439652649206637960:3613] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2024-11-21T08:51:54.889858Z node 10 :TX_PROXY DEBUG: Actor# [10:7439652649206637960:3613] Handle TEvDescribeSchemeResult Forward to# [10:7439652649206637958:2345] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 
CreateStep: 1732179114806 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046644480 >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesMirror3of4 [GOOD] >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesBlock4Plus2 >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile |87.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[ansi_idents-string_escaping-default.txt-Analyze] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> 
ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2024-11-21T08:51:53.621685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:53.622393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:53.622439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004516/r3tmp/tmpdN3Ab0/pdisk_1.dat 2024-11-21T08:51:53.743496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.765762Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:53.811068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.811113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.824997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:53.939307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:54.192529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:705:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:54.192565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:714:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:54.192576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:54.193550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:54.408287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:719:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:54.490277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqe6g3r91g2x6syc6dy2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRhYWQ3NWItY2QzZmIxYy02ZTc3ZTAyMi0xMDkzMTQ3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.503689Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yqefx0mwhrhtymz14qqm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRjMDlmZjktYmM3Mzg1YWEtZjY3YzkzODMtZGUzZjczYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.514478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yqeg9et6gdwaj4sbbj9ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFjN2RlNDgtMWY3MWQ1YzYtMjgzNDFlODEtMzZlOWE3OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.526366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yqegmacnp1ma7r9whsmkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAyNTUxMy1iYjQ2YWNkMi01YmRkNDI4ZS1lZGJjYTUyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.537910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yqegz4t46b841vethzcnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ5NjQ3NWYtM2JkOGM3NmQtZjUwZmIwN2UtNjE5ZjE1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.548245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yqehbbv7mcfjzawp0pfka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQxOTViN2YtYWU1Yjk0ZTAtNTk4ODBiOTItNDNmYTM0ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.559736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yqehn76kz54sa82rb44ym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI0OThjYWYtYzA3NmRjMzEtYTE0YmEyZDgtZjJjYzg1MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.569978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqej11de9rh9zwzcxhd00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA2Y2Q4MzctYTM0OWIxZWQtYzU3N2MxNDgtNzViODUzN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.581678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yqejb95qcezkhg4gnx600, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE4NzIxMTMtZGEzZThkYjgtMTdhNmFhMGEtNjhhNmNiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.593198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yqejqat87y70xc325096e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUwOWEwODQtNzBlZjdiNDQtMjAwNDE1YmEtNmIxNjM4ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.605282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jd6yqek2f752bvmchgxp1pkp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE2OTA3NjgtZmY4ODU0Y2ItODY1OWI1YzctOWRkN2JmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.617421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yqeke31a39fdphx7zk88z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYyYTExMjMtODNmMWEyOTItY2QzNjgyYzQtNTI3MWYwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.628824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yqekt0f4e5eqapk6k967v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY2NmMxNjEtMTk2NWU2NTEtNThmZDU2N2QtMjQ2ZTc3ZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.639652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd6yqem66c5b7cwgdtjxn4n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjAyYzUzMGMtNzQyOGEzNWMtYjBjOWFmYzktZGVhOWZlODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.650873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd6yqemh4q7skcqk9qwvfjvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBjMTRiYTUtYmQ4YzVhYjktNDgzMDhlYmItNmY4NTRkMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.661951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd6yqemw2fjtkrtghyen4q3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhlYTNhZjAtOTM1NWExYWEtMjdiZDg0YmQtMzUzMmZhZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.672635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yqen79e185ygbjd2qemk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEyYzkxNzQtYWJiNjhmM2YtNmU3ODkyNWMtMTBkYTVmZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.686696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yqenj0tgasap9qbg6yggt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwNGIxYzktODY5MmQ4NGItODZjZjkxMGMtYjVkOTcyY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.698509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd6yqenz2zm42n6mb6bdk0qt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNkMjIxOGEtNTI1MjVlNWEtOTEyZDU1MGUtYTk1MjU5OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.709171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd6yqepb2se24z41abfatcpx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc5NTMxMTUtMTc3OTVhNTEtNTcwZTg3NTItZWUwNjczOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.718520Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jd6yqepp10xsv9n5zw1djf52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg4MThlN2MtOTA3ZTVlZWMtMzI4OThhN2MtYTZiNThjZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.727008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yqepz0sh4tck8y6jr659p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMwMDYxMzktNTgwM2I2OTUtNjZjZjg3Y2ItODViM2M2ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.735177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yqeq8ehj72qaxavndwxr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U3MTJmMC05ZGJhMjMxNi1jYzhjZjYxNy1iOWZjNTA0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.743530Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd6yqeqg913tn1km5n2d67q2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRkMWIzOTEtMTIzOTEyYTktYzkzZGQ1NTctYTM3MTQ1Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.754251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yqeqr9v4fpbrfm72bmwys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmExMmZjY2UtNTE3YmM3OWUtZmZhNjQzMmQtNzZiMTJiODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.765080Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd6yqer30jsehptqjnzcwcn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY5MjA3NGYtYjYzOWQzZjAtNTc4ZjU4YjUtYmM5OTA3NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:54.773299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd6yqerecagq6aztz5fz4qaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I0YzI1YTMtNDljZjE0MjgtZmJlN2M0ODItM2VhNmI4Ng==, CurrentE ... 87Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jd6yqf4m89zgp44k84n9zv3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE5NzVkYTItZmQxMWJkZTMtNTNiYmMzYWItNjg1ZjMzZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.176002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jd6yqf4zb4c7nyv81cmn3vqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzZDMwMC1hYzJjNzEwMS0xNTYwYjMxZi01MjgzZGI1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.188322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jd6yqf5945bj9mftn6ww6gj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E1YWMxMDktZWY2YzIwMGMtZTlhNTAwOTAtOGU5OGUyODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.200533Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd6yqf5n16qz1cv56w00y3gf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmMmJmYjctOWY3ZjNhY2UtZTJhNjYzMjUtYWEyMWJmZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:51:55.211822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jd6yqf628dqabcbcw3dr6k1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE0NmU2ZjAtYjFiNmIxNy0zNWU3ZTM4YS1jODkzNWFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.220023Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jd6yqf6d64abvjms2e4ztj5j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjhhOGE4NWItZjExODRiMTQtNjJlMGJhZmEtMTE0MDEzMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.228008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jd6yqf6n2sqrkrj4v1m645gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJlYWU0MjQtMjhmYTg5MjQtZTdmYTVhN2ItYzhjZThhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.236244Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jd6yqf6x8k1c9cjzpjr23d7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZmMmMzYzktM2EyYTBkZC04ZDkzY2Y4Ni04NDVhMWQ0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.245456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jd6yqf75b9geat5vm80m4kyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE3NGMyOTMtZmIwZDcyZTgtMzQyMTQzNDgtZGEyZDllMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.255711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jd6yqf7e355bz1bh4n0qqhpc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNiNDJlNzMtMmE1ZDY2YTktNmFkNzYxMDEtNTYwYjU4ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.265925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jd6yqf7se9gzm2hnw87z55fg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjczZTQyZjgtN2NjYzg4ZDAtNDMxYjRjZGMtNDhjNTY3ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.276124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jd6yqf831cfetn4jb7k44jpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiZjg0MmEtODM0ZjkyNWItMTYzNTNhYmYtYWU0MzY5ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.285807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jd6yqf8d5g60v5g6tq43q3wq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk5OTBmMGQtNmE2MTVhMDMtZDhhMjJmYWEtNzVmZmU3NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.296332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jd6yqf8qfwcrjxq0s6edzrec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZlNDVkNWMtNjFhZjg1YWItNzczN2E0My1lMWUyY2YzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.308736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jd6yqf919hgjd69rrb9vv4tc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBjNGUzMWEtMzU3YmQwM2ItNzg3YmNhNmUtNTkwODk1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.319492Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jd6yqf9e62mcd2z73dymw3sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEwYmEzOTUtOTNiM2JkZjktN2JjZmI1M2MtMjgxYTgyZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.331092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jd6yqf9s800nb0epmwajqm54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc0YTc5MTQtYjkxYTVhOTAtM2FiZGQ5OS0yYTM2YWQ0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.343343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jd6yqfa46xbmv8rhpv0afks0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI3YjMzMzgtMTA2NzQ1NS05YjZkZTQ2MS0xODg2MjVkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.353183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jd6yqfagah5apvmmskr760z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmUwNWIxZDUtYjdjYzNiMzgtNzY5YjBhOGYtMmQ1OGFlMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.363669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jd6yqfat3tg40prp11nya6na, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY2ZTg4MWUtOWE3YzdhYmMtMTZmM2MzYmUtODczZTY2NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.372622Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jd6yqfb40m8aag6r5tntspjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhY2U2Zi03ZTUxMjk2NS03NTM1ZjcyZC01NzU1Njg2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.384138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jd6yqfbe1etb951v7xmjnxcq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGYyNzU5NzItNDRkMjA4MWEtZjQ3MTliYjItYjZjN2UyNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.396033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd6yqfbsdhgba2wg4ex43ptg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmYyYjE0ZWEtMWEyNDYxMDQtNzg5YTBjNWQtZTJlNDU4MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.408479Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd6yqfc5b007fg5cvrgt6e6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4ZDg1NjctNmFlZGZkYzctYzQ5ZmEyNDktYmQ0NTc5Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.420954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jd6yqfcjawamk77kkcpya8pr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ5ZjNiZTMtZjBjYWFmZDYtZWMwMzJmM2UtNTY1MzY0NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.433214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jd6yqfcyatgm8xcp6c4spvvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU3YmI2MDEtM2U5NjY4ZTUtZmE1NDdkMDItZjRmZWY5NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.445431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jd6yqfda90ph4ma51cvk4tm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVmYzRlY2YtODk0ZGUwZWQtMzlmMTQ5NjAtYTI2ZTAyNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.457707Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jd6yqfdp5e3zsste7rtr8135, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBlNjMzMjktYzk4NjM1YzQtOTFmYTY2NGUtYTYwZDFhNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.469856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jd6yqfe363v8ge4c7hpk9tzd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUwOWU1Y2EtYTlhMTFiMjUtOGJiMjU4MmQtNGFlNWE3OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.481731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd6yqfefaa9fbtzd3m79tdcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQxOWI1YjEtNDgyZmEyNDQtYzY0NTUzOWUtY2YxZGI1MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.494320Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jd6yqfev1ebnm7p675vkcb4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM0MzJiYmQtYWFkMjRhODMtYTcyNmEwOTktNGMyZTcxNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.507107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jd6yqff7cr18mt0ptwptkcaf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYyZDQ1NWMtNzdiNTFhNTItYTU5OWNiMmEtYTY1NDAzN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.519485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jd6yqffm6exgedp97gtxaba7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJmMzI4M2QtYTZkNjliZjgtNGY5NWExZDctMjZhZWViY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.532383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jd6yqfg196aevx2n14f9r65t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlkM2Q3ODMtOGM0ZTk5ZmItNGY4M2VjOGMtNjA1ZjAzOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.545297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jd6yqfgecn9n6eecfr3vzf47, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1NTYwZDktNjRhNWIxYy03YWJjZjI5Mi0zZWZlMTBkOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:51:55.670742Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jd6yqfk982x2rq1f5mmhs900, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWFlOTM5NjUtNDlmOWU3ODEtZDliNjVlYmItZTFmNWMxNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T08:51:52.466377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:52.466748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:52.466765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042ea/r3tmp/tmp7heP9e/pdisk_1.dat 2024-11-21T08:51:52.569532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:52.585263Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:52.627574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:52.627606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:52.640642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:52.769099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:52.787435Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:52.787627Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:52.787696Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:52.787737Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:52.796226Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:52.796396Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:52.796420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:52.796558Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:52.796575Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:52.796581Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:52.796622Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:52.800442Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:52.800498Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:52.800521Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:52.800526Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:52.800530Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:52.800535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:52.800650Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:52.800656Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:52.800769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:52.800785Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:52.800797Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:52.800802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:52.800809Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:52.800816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:52.800822Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:52.800829Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:52.800834Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:52.800838Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:52.800844Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:52.800850Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:52.800869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:52.800874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:52.800893Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:52.800934Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:52.800943Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:52.800957Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:52.800964Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:52.800969Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:52.800974Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:52.800979Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:52.801019Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:52.801023Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:52.801027Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:52.801031Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:52.801041Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:52.801045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:52.801049Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:52.801053Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:52.801058Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:52.801295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:52.801303Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:52.811574Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:52.811603Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:52.811610Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:52.811622Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:52.811635Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:53.000813Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.000843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.000851Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:53.000869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:53.000872Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:53.000927Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.000939Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:53.000945Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:53.000949Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:51:53.001608Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:53.001620Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.001739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.001744Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.001751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.001757Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:53.001761Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.001768Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2841], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 72 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 13 FinishTimeMs: 1732179115582 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 5 BuildCpuTimeUs: 8 WaitInputTimeUs: 1016 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.582995Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2841] 2024-11-21T08:51:55.583004Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T08:51:55.583010Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T08:51:55.583055Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 149 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 106 FinishTimeMs: 1732179115582 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 94 BuildCpuTimeUs: 12 WaitInputTimeUs: 966 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.583061Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2024-11-21T08:51:55.583068Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T08:51:55.583074Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T08:51:55.583122Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 179 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 146 FinishTimeMs: 1732179115582 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 138 BuildCpuTimeUs: 8 WaitInputTimeUs: 848 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.583127Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2024-11-21T08:51:55.583132Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T08:51:55.583139Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T08:51:55.583194Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 145 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 105 FinishTimeMs: 1732179115583 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 90 BuildCpuTimeUs: 15 WaitInputTimeUs: 1012 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.583199Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2024-11-21T08:51:55.583206Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T08:51:55.583211Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T08:51:55.583246Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 68 DurationUs: 2000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 34 FinishTimeMs: 1732179115583 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 17 WaitInputTimeUs: 1361 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.583251Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2024-11-21T08:51:55.583255Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1051:2846], 2024-11-21T08:51:55.583260Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2846], 2024-11-21T08:51:55.583276Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 55 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 26 FinishTimeMs: 1732179115583 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 11 WaitInputTimeUs: 1448 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179115581 } MaxMemoryUsage: 1048576 } 2024-11-21T08:51:55.583281Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2024-11-21T08:51:55.583320Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:51:55.583331Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd6yqfg4cr8535e0jcac1vry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTkzYTc1MDctZmIxYjY3YTYtMjYyOWU4M2YtOWIzZTYyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000903s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> TBackupTests::BackupUuidColumn[Zstd] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] |87.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> VDiskBalancing::TestStopOneNode_Block42 |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:55.203622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:55.203650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:55.203654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:55.203660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:55.203665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:55.203669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:55.203693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:55.203759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:55.214901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:55.214927Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:55.217684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:55.218497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:55.218535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:55.219846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T08:51:55.220033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:55.220129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.220247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:55.221265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.221541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:55.221553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.221591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:55.221599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:55.221605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:55.221618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.222904Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:55.240057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:55.240143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.240220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:55.240260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:55.240269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.241056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.241081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:55.241133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.241143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:55.241149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:55.241155Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:55.241634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.241645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:55.241650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:55.242036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.242050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.242055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.242062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.242562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:55.242925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:55.242973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:55.243224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.243261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:55.243270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.243339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:55.243350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.243389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:55.243406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:55.243932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:55.243940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:55.243980Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.243985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:55.244069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.244076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:55.244089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:55.244093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.244099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:55.244104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.244112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:55.244116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:55.244127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:55.244133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:55.244136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:55.244460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:55.244478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:55.244483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:55.244488Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:55.244492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:55.244506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:55.335921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:51:55.335950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T08:51:55.336083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.336103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:55.336113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:51:55.336131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T08:51:55.336161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:55.489720Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T08:51:55.506287Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] 2024-11-21T08:51:55.507418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:55.507435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:55.507520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.507527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:51:55.507656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.507665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:55.507808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:55.507820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:55.507825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:55.507832Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:55.507838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:55.507856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E43F1ED8-2061-42D0-878A-469B8D6FD096 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T08:51:55.517844Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T08:51:55.519682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:15120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 59334BB2-5D52-4F4C-A5E0-9E085B9131A9 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:55.524719Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B26D0B54-1D76-45B5-8D80-D122F2B79308 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T08:51:55.536036Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T08:51:55.536091Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T08:51:55.536121Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D0879F05-0ADE-4BF6-9080-C3924099E30D amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T08:51:55.541497Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T08:51:55.541523Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:55.541576Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:55.547025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:55.547052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:55.547080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:55.547097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:55.547112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.547116Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.547122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:55.547130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:55.547184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:55.550111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.550177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.550188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:55.550206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2024-11-21T08:51:55.550210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:55.550217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:55.550241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T08:51:55.550249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:55.550254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:55.550258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:55.550294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:55.551031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:55.551044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:55.755687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:55.755720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:55.755725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:55.755731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:55.755738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:55.755742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:55.755752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:55.755835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:55.767967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:55.767999Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:55.771638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:55.772597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:55.772646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:55.774125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:51:55.774334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:55.774460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.774570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:55.775513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.775831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:55.775841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.775889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:55.775897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:55.775903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:55.775920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.777349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:55.796191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:55.796340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.796424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:55.796476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:55.796485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.797504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.797537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:55.797612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.797625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:55.797631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:55.797638Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:55.798123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.798136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:55.798141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:55.798474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.798484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.798491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.798499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.799122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:55.799507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:55.799569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:55.799784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:55.799810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:55.799818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.799879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:55.799886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:55.799923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:55.799935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:55.800341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:55.800350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T08:51:55.800402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:55.800408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:55.800502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:55.800509Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:55.800523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:55.800527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.800534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:55.800539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:55.800544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:55.800549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:55.800562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:55.800569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:55.800573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:55.800878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:55.800892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:55.800897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:55.800903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:55.800907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:55.800920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
UP INFO: [Export] [s3] Finish: self# [1:475:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:56.184335Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:14121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2FE612F8-3D07-48C3-B9D6-F2C2A1E4B888 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:56.187443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:56.187459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:56.187561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:56.187568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:51:56.187646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.187657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:56.187885Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T08:51:56.188335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:56.188372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:56.188377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:56.188384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:56.188392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:51:56.188414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:14121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 837DED61-80A3-417A-81C2-FE55525B0761 amz-sdk-request: attempt=1 content-length: 638 content-md5: 
Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T08:51:56.189214Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2024-11-21T08:51:56.189240Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2432] 2024-11-21T08:51:56.189294Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2434], sender# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T08:51:56.189983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:14121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 47D9D9B8-9B35-4E4E-BA8E-FC3E7B118943 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T08:51:56.190270Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T08:51:56.190282Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:56.190329Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:56.208741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.208776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:56.208812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.208828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.208843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:56.208895Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:56.209055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.209063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T08:51:56.209080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.209090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:56.209097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:56.209102Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.209107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:56.209113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:51:56.209119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:56.209136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:56.212351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.217466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.217644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.217656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:56.217678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:56.217684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:56.217692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:56.217719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2342] message: TxId: 102 2024-11-21T08:51:56.217730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:56.217736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:56.217741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:56.217773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:56.218342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:56.218356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:449:2411] TestWaitNotification: OK eventTxId 102 >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::DeleteImmediate >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10000Inflight100BlobSize1000 >> TBackupTests::BackupUuidColumn[Raw] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 11082407706386312083 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T08:51:57.306804Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T08:51:57.306868Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:289:60] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2024-11-21T08:51:57.306902Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T08:51:57.306927Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:286:57] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2024-11-21T08:51:57.306949Z 4 00h01m00.010512s :BS_NODE ERROR: 
{NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T08:51:57.306976Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T08:51:57.307002Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> TOlap::StoreStats [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> VDiskBalancing::TestStopOneNode_Mirror3dc >> VDiskBalancing::TestRandom_Block42 >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:57.091503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:57.091535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.091541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:57.091546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:57.091553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:57.091556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:57.091566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.091652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:57.100351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:57.100377Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:57.102978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:57.103634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:57.103673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:57.104951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T08:51:57.105116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:57.105232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.105334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:57.106261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.106532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.106541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.106574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:57.106579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.106583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:57.106596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.107763Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:57.121070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:57.121162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.121226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:57.121261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:57.121267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.122112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.122148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:57.122214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.122224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:57.122228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:57.122232Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:57.122691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.122704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.122710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:57.123034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.123043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.123048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.123054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.123515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.124137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:57.124188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:57.124386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.124408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.124417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.124462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:57.124466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.124493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.124502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:57.124845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.124852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.124902Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.124909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:57.124995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.125003Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:57.125015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:57.125019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.125023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:57.125027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.125030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:57.125033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:57.125042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:57.125047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:57.125050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:57.125312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.125323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.125326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:57.125330Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:57.125333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.125344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.220087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:51:57.220117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T08:51:57.220256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.220272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.220278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:51:57.220295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T08:51:57.220324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.375823Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T08:51:57.378735Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:65069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 72269986-7173-47C7-B7A7-AEEC34070325 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T08:51:57.380784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.380808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:57.380916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.380924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:51:57.381066Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T08:51:57.381782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.381801Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:57.382054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.382069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.382074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:57.382081Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:57.382089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:57.382108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:65069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ACC2F987-4EA5-45C2-A207-8AF5BEB654EB amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:57.382910Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T08:51:57.383550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:65069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F5182649-F4CB-40A4-8B65-6D917D5877A2 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T08:51:57.384358Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T08:51:57.384383Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T08:51:57.384443Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:65069 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 38DF82DC-21EB-4990-AB21-AEFB835F82CE amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T08:51:57.386054Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T08:51:57.386070Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:57.386119Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:57.388376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:57.388397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:57.388424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:57.388438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:57.388453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.388457Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.388461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:57.388470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:57.388515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.389185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.389234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.389243Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:57.389257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T08:51:57.389261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.389267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:57.389283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T08:51:57.389289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.389294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:57.389298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:57.389329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.389864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:57.389878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:49.457071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:49.457099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:49.457105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:49.457110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:49.457117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:49.457121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:49.457131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:49.457213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:49.468946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:49.468972Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:49.471979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:49.472842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:49.472879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:49.474604Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:49.474796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:49.474909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:49.474983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:49.476081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:49.476417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:49.476428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:49.476472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:49.476479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:49.476485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:49.476501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.478039Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:49.495118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:49.495222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.495300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:49.495350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:49.495359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.496326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:49.496357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:49.496410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.496421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:49.496426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T08:51:49.496431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:49.497155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.497168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:49.497173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:49.497559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.497569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.497575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:49.497582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:49.498111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:49.498632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:49.498690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:49.498892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:49.498919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:49.498927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:49.498982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:49.498989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:49.499022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:49.499035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:49.499494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:49.499501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T08:51:49.499543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:49.499549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:49.499622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:49.499629Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:49.499640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:49.499645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:49.499650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:49.499655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:49.499660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:49.499665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:49.499676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:49.499682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:49.499686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:49.499994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:49.500006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:49.500010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:49.500015Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:49.500020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:49.500032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
drenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732179112152 LastUpdateTime: 1732179112152 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 15 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 34 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732179112152 LastUpdateTime: 1732179112152 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 15 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 34 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944 2024-11-21T08:51:57.832121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:51:57.832153Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 34us result status StatusSuccess 2024-11-21T08:51:57.832244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732179112152 LastUpdateTime: 1732179112152 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 15 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 34 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { 
Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732179112152 LastUpdateTime: 1732179112152 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 15 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 34 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> 
TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:57.096362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:57.096399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.096404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:57.096409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:57.096414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:57.096418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:57.096427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.096518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:57.124110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:57.124137Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:57.130772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:57.131599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:57.131639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:57.132929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:57.133138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:57.133239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.133322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:57.134222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.134482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.134492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.134527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:57.134534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T08:51:57.134540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:57.134555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.135726Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:57.154441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:57.154542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.154609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:57.154654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:57.154664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.155683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.155713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:57.155772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.155781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:57.155786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:57.155791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:57.156259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.156270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.156275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:57.159685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.159709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.159718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.159729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.160556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.164027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:57.164094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:57.164327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.164381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.164393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.164474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:57.164484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.164515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.164528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:57.164999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.165008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.165039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.165044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:57.165104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.165111Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:57.165124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:57.165129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.165134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:57.165139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.165144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:51:57.165148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:57.165159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:57.165164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:57.165168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:57.165548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.165566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.165572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:57.165578Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:57.165583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.165599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.289366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:51:57.289419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T08:51:57.289590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.289616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.289623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:51:57.289645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T08:51:57.289672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.447122Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T08:51:57.457418Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# 
[1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:31459 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3BDF639C-59FB-4B86-B9F3-750FD9EF5A6D amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T08:51:57.461467Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:51:57.464064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.464088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:57.464171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.464182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:51:57.464244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.464256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:57.464548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.464592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.464599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:57.464606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:57.464613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:57.464634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:31459 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2083AA96-79A5-4254-BF41-FF649860AE13 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:57.467391Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# 
PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:31459 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0EA8597-B13B-4E99-8F01-CD9FF7041359 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T08:51:57.469938Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T08:51:57.470035Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T08:51:57.470112Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T08:51:57.470381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:31459 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AB32BFF2-104C-4CDE-9461-C646BD3227EC amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2024-11-21T08:51:57.473012Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2024-11-21T08:51:57.473035Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:57.473084Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:57.476091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.476115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:57.476143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.476157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.476191Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.476197Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.476218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:57.476227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:57.476273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.476915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.477009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.477018Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:57.477031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:57.477035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.477040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:57.477056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T08:51:57.477063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.477068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:57.477072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:57.477097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.477584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:57.477597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> BlobPatching::PatchBlock42 [GOOD] |87.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 10259654600340854436 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2024-11-21T08:51:58.149908Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2024-11-21T08:51:58.150100Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2024-11-21T08:51:58.175385Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 15643668947055077810 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2024-11-21T08:51:58.030209Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:57.583323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:57.583349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.583355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:57.583364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:57.583370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:57.583373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:57.583382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.583461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:57.596076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:57.596096Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:57.598197Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Complete 2024-11-21T08:51:57.598759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:57.598790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:57.600538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:57.600711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:57.600801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.600886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:57.601728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.601967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.601979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.602011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:57.602018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.602023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:57.602037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.603179Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:57.620043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:57.620134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.620195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:57.620258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:57.620267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.621458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.621488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:57.621538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:51:57.621548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:57.621552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:57.621557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:57.622043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.622056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.622061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:57.622404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.622414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.622420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.622429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.623179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.624431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:57.624510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:57.624777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.624814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.624825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.624888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:57.624899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.624937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.624955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T08:51:57.625645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.625659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.625716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.625722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:57.625828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.625838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:57.625856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:57.625862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.625870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:57.625878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.625884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:57.625891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:57.625908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:57.625915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:57.625920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:57.626391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.626423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.626432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:57.626439Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:57.626444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.626471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
xId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.745491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:51:57.745522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T08:51:57.745651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.745671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.745678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:51:57.745695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T08:51:57.745719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.887089Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:409:2382], attempt# 0 2024-11-21T08:51:57.891396Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:409:2382], sender# [1:408:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:1632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E83DAB5C-6140-4524-BAC6-FDF0E072ABBD amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:51:57.893928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.893949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:51:57.894031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.894052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:51:57.894070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.894077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.894226Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata 
TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:57.894981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.895001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:57.895006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:57.895012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:57.895020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:57.895044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:1632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 37FD63AB-E6CD-440B-8C23-7A68912D213A amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:57.896421Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T08:51:57.896861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:1632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DCD7F1C-81AF-4E54-B658-4C4AC8196CD5 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T08:51:57.897665Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T08:51:57.897690Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:408:2381] 2024-11-21T08:51:57.897714Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:409:2382], sender# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:1632 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3E5D013F-F734-4FE0-AC2A-EE7A958781A4 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2024-11-21T08:51:57.898460Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2024-11-21T08:51:57.898477Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:409:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:57.898524Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:57.910795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.910834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:57.910868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.910886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T08:51:57.910903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.910908Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.910913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:57.910922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:57.910978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.911686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.911806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.911816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:57.911831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2024-11-21T08:51:57.911836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.911842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:57.911859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T08:51:57.911867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:57.911873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:57.911877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:57.911910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:51:57.912762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:57.912773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2368] TestWaitNotification: OK eventTxId 102 >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 17211144352468917824 SEND TEvPut with key [1:1:1:0:0:100:0] 2024-11-21T08:51:58.338233Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2024-11-21T08:51:58.338376Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2024-11-21T08:51:58.360867Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:57.822134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:57.822165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.822170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:57.822176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:57.822182Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:57.822187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:57.822196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.822273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:57.834084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:57.834110Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:57.836699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:57.837569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:57.837616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:57.838986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:57.839121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:57.839204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.839280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:57.839908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.840160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.840167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.840202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:57.840229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.840236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:57.840253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.841728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:57.861017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:57.861135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.861217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:57.861270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:57.861280Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.863202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.863250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:57.863337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.863353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:57.863357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:57.863363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:57.864096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.864115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.864122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:57.864591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.864602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.864607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.864614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.865145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.865460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:57.865510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:57.865675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.865696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.865702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet 72057594046678944 2024-11-21T08:51:57.865747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:57.865752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.865780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.865789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:57.866142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.866150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.866206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.866213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:57.866314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.866322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:57.866334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:57.866339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.866345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:57.866350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.866357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:57.866362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:57.866374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:57.866380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:57.866384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:57.866695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.866708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.866713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:57.866718Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:57.866722Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.866735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... SchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:58.141018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:51:58.141078Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T08:51:58.141717Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2435], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2024-11-21T08:51:58.141730Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:475:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:58.141758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:58.141767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:58.141923Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:15567 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 05F2DB83-87B5-474E-B393-468F3571D396 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T08:51:58.142789Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T08:51:58.143093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:58.143146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:51:58.143150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:51:58.143155Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:51:58.143159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:51:58.143174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15567 Accept: */* Connection: 
Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5863D554-6B93-4F20-96B9-6FA43EAF234A amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T08:51:58.143689Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:51:58.143712Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2432] 2024-11-21T08:51:58.143727Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2434], sender# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15567 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A3C244A0-6E10-4012-B1B5-2FDA990CF4E8 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T08:51:58.144221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:51:58.144329Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T08:51:58.144337Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:51:58.144374Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:51:58.156599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:51:58.156649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, 
shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:58.156719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:58.156810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T08:51:58.156820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T08:51:58.156829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:58.156832Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:58.156835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:51:58.156838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:51:58.156842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:51:58.156850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:58.157292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:58.157529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:58.157608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:51:58.157615Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:51:58.157625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:51:58.157628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:58.157633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:51:58.157645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2342] message: TxId: 102 2024-11-21T08:51:58.157649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:51:58.157653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:51:58.157656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:51:58.157678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:51:58.158066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:51:58.158075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:449:2411] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 10473368254805134020 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T08:51:58.376617Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 12099353749935659088 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T08:51:58.715509Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:6309:827] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 14022512699310953358 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 
0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2024-11-21T08:51:58.675043Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T08:51:58.675106Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:289:60] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2024-11-21T08:51:58.675130Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T08:51:58.675151Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:286:57] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2024-11-21T08:51:58.675168Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T08:51:58.675192Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T08:51:58.675213Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> BlobPatching::PatchBlock42 [GOOD] Test command err: RandomSeed# 13381944900110417204 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed 
ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:01.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:01.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:01.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:01.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:02.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:02.410512Z Unwrap 
{EvVPutResult Status# OK ID# [1:1:1:1:6:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:02.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:6:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:03.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:6:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524288} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog 
IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:04.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 5 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 6 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 6 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 7 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:04.910512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:13:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} 1970-01-01T00:01:04.910512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:13:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 
ExtQueueId# PutTabletLog IntQueueId# IntPutLog Windo ... 84:0] writtenParts# 6 *** checking blob [1000000000:1:1595:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1596:5:29:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1597:5:45:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1601:5:16:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1602:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1603:5:25:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1604:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1605:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1607:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1609:5:28:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1613:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1614:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1615:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1616:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1617:5:50:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1619:5:12:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1624:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1625:5:32:16384:0] writtenParts# 7 *** checking blob [1000000000:1:1626:5:18:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1627:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1629:5:13:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1630:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1631:5:42:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1632:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1633:5:4:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1634:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1636:5:39:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1637:5:15:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1638:5:31:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1639:5:7:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1640:5:40:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1644:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1645:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1646:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1649:5:8:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1650:5:34:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1651:5:40:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1658:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1659:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1660:5:45:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1662:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1663:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1664:5:29:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1665:5:12:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1667:5:34:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1668:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1669:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1673:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1674:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1675:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1676:5:42:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1677:5:8:16384:0] writtenParts# 6 *** checking blob 
[1000000000:1:1680:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1681:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1682:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1685:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1686:5:43:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1689:5:11:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1691:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1692:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1693:5:32:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1696:5:17:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1699:5:42:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1702:5:37:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1705:5:35:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1711:5:25:16384:0] writtenParts# 7 *** checking blob [1000000000:1:1713:5:27:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1720:5:3:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1721:5:39:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1722:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1723:5:8:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1726:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1727:5:19:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1728:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1729:5:31:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1730:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1731:5:30:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1732:5:46:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1733:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1734:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1735:5:41:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1736:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1737:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1738:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1740:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1741:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1742:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1745:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1744:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1746:5:11:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1747:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1749:5:6:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1751:5:18:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1752:5:51:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1753:5:47:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1757:5:28:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1758:5:24:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1760:5:13:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1764:5:14:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1766:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1773:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1777:5:33:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1778:5:49:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1779:5:45:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1782:5:0:16384:0] writtenParts# 7 *** checking blob 
[1000000000:1:1781:5:0:16384:0] writtenParts# 7 *** checking blob [1000000000:1:1784:5:22:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1787:5:7:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1790:5:32:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1795:5:35:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1799:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1800:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1801:5:48:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1802:5:44:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1806:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1807:5:31:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1809:5:30:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1811:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1812:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1813:5:41:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1814:5:7:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1816:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1817:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1815:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1818:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1819:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1820:5:47:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1824:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1826:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1828:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1829:5:25:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1831:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1832:5:40:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1835:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1837:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1840:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1841:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1839:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1842:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1843:5:10:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1844:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1845:5:22:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1846:5:45:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1848:5:47:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1850:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1852:5:8:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1854:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1857:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1863:5:1:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1867:5:36:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1869:5:18:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1870:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1871:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1872:5:43:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1873:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1874:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1877:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1875:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1876:5:0:16384:0] 
writtenParts# 6 *** checking blob [1000000000:1:1878:5:33:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1879:5:2:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1881:5:31:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1882:5:17:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1883:5:0:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1884:5:16:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1885:5:12:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1886:5:18:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1887:5:41:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1888:5:27:16384:0] writtenParts# 6 *** checking blob [1000000000:1:1889:5:3:16384:0] writtenParts# 6 |87.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart |87.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExecutorDb::RandomOps [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> TExecutorDb::FullScan >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> Mirror3of4::Compaction [GOOD] >> MultiGet::SequentialGet >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> TStorageTenantTest::CreateTableInsideSubDomain >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction >> TStorageTenantTest::Boot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] Test command err: RandomSeed# 18438767409437719636 2024-11-21T08:51:21.946440Z 1 00h00m00.010512s :BS_LOCALRECOVERY CRIT: VDISK[82000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# false EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 
HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {[SyncerState 12][HugeBlobEntryPoint 1]} ReadLogReplies# {}} reason# Entry point for Syncer check failed, ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } } status# ERROR;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T08:51:21.949083Z 1 00h00m30.000512s :BS_PROXY_PUT ERROR: [c84758556cab3f0a] Result# TEvPutResult {Id# [1:1:1:1:3:4:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> TStorageTenantTest::GenericCases >> TStorageTenantTest::CreateTableInsideSubDomain2 >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] >> TStorageTenantTest::DeclareAndDefine >> TContinuousBackupTests::Basic >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2024-11-21T08:51:56.462261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:639:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:56.462465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:56.462549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:51:56.462661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:637:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:56.462674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:56.462680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:51:56.680048Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:56.773865Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:51:56.804344Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:51:57.004628Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 23520, node 1 TClient is connected to server localhost:11563 2024-11-21T08:51:57.075801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:57.075820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:57.075824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:57.075909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Request timer = 0.002597694144 BASE_PERF = 0.8700261 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:51:57.748313Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:57.754120Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6514, node 3 2024-11-21T08:51:57.766844Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:57.766861Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:57.766863Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:57.766925Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:57.840770Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:57.840806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:57.841225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:57.842760Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:51:57.844608Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:57.846330Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:51:58.067562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439652666163770535:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.067582Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439652666163770553:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.067588Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.068456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:58.070009Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439652666163770556:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:58.173583Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzA4ZjFlNWYtZDI4ZWIwYTUtOTcyNjVlZjQtZDBlMTg5YTk=, ActorId: [3:7439652666163770525:2333], ActorState: ExecuteState, TraceId: 01jd6yqhzk3ntktmkd4h8h0a57, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2024-11-21T08:51:58.566333Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439652663681342797:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:58.570616Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:51:58.584441Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10237, node 4 2024-11-21T08:51:58.595558Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:58.595575Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:58.595578Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:58.595635Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8229 2024-11-21T08:51:58.618461Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999981s 2024-11-21T08:51:58.618528Z node 4 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:51:58.668461Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:58.668491Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:58.672687Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:58.986706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652663681343261:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.986754Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.990980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652663681343279:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.992027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:51:58.992341Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652663681343274:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.992361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:58.994729Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:58.994850Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439652663681343281:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:51:58.994859Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T08:51:58.994884Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T08:51:58.994886Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T08:51:58.994890Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T08:51:58.994892Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T08:51:58.994918Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T08:51:58.994920Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T08:51:58.994922Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T08:51:59.0 ... # peer# 2024-11-21T08:52:00.172525Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ba800] received request Name# TopicService/DropTopic ok# false data# peer# 2024-11-21T08:52:00.172555Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a0400] received request Name# Coordination/CreateNode ok# false data# peer# 2024-11-21T08:52:00.172567Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b3600] received request Name# Coordination/AlterNode ok# false data# peer# 2024-11-21T08:52:00.172594Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b0600] received request Name# Coordination/DropNode ok# false data# peer# 2024-11-21T08:52:00.172608Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9be400] received request Name# Coordination/DescribeNode ok# false data# peer# 2024-11-21T08:52:00.172631Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a7600] received request Name# CreateDatabase ok# false data# peer# 2024-11-21T08:52:00.172651Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a7000] received request Name# GetDatabaseStatus ok# false data# peer# 2024-11-21T08:52:00.172670Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9bcc00] received request Name# AlterDatabase ok# false data# peer# 2024-11-21T08:52:00.172691Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b8400] received request Name# ListDatabases ok# false data# peer# 2024-11-21T08:52:00.172709Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9adc00] received request Name# RemoveDatabase ok# false data# peer# 2024-11-21T08:52:00.172734Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9aca00] received request Name# DescribeDatabaseOptions ok# false data# peer# 2024-11-21T08:52:00.172748Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98d200] received request Name# GetScaleRecommendation ok# false data# peer# 2024-11-21T08:52:00.172774Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a4000] received request Name# ListEndpoints ok# false data# peer# 2024-11-21T08:52:00.172785Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98c600] received request Name# WhoAmI ok# false data# peer# 2024-11-21T08:52:00.172815Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98f000] received request Name# NodeRegistration ok# false data# peer# 2024-11-21T08:52:00.172819Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a2800] received request Name# Scan ok# false data# peer# 2024-11-21T08:52:00.172854Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a8200] received request Name# GetShardLocations ok# false data# peer# 2024-11-21T08:52:00.172856Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9af400] received request Name# DescribeTable ok# false data# peer# 2024-11-21T08:52:00.172889Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b4800] received request Name# CreateSnapshot ok# false data# peer# 2024-11-21T08:52:00.172893Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a1600] received request Name# RefreshSnapshot ok# false data# peer# 
2024-11-21T08:52:00.172924Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a1000] received request Name# DiscardSnapshot ok# false data# peer# 2024-11-21T08:52:00.172928Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a0a00] received request Name# List ok# false data# peer# 2024-11-21T08:52:00.172961Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99f800] received request Name# RateLimiter/CreateResource ok# false data# peer# 2024-11-21T08:52:00.172961Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a6a00] received request Name# RateLimiter/AlterResource ok# false data# peer# 2024-11-21T08:52:00.172995Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a5800] received request Name# RateLimiter/DropResource ok# false data# peer# 2024-11-21T08:52:00.172996Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a7c00] received request Name# RateLimiter/ListResources ok# false data# peer# 2024-11-21T08:52:00.173029Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a8e00] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2024-11-21T08:52:00.173036Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b9c00] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2024-11-21T08:52:00.173067Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99e600] received request Name# CreateStream ok# false data# peer# 2024-11-21T08:52:00.173072Z node 5 :GRPC_SERVER DEBUG: [0x4686bf994a00] received request Name# ListStreams ok# false data# peer# 2024-11-21T08:52:00.173102Z node 5 :GRPC_SERVER DEBUG: [0x4686bf995600] received request Name# DeleteStream ok# false data# peer# 2024-11-21T08:52:00.173107Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a9400] received request Name# DescribeStream ok# false data# peer# 2024-11-21T08:52:00.173137Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99f200] received request Name# ListShards ok# false data# peer# 2024-11-21T08:52:00.173145Z node 5 :GRPC_SERVER DEBUG: [0x4686bf995c00] received request Name# SetWriteQuota ok# false data# peer# 2024-11-21T08:52:00.173175Z node 5 :GRPC_SERVER DEBUG: [0x4686bf989c00] received request Name# UpdateStream ok# false data# peer# 2024-11-21T08:52:00.173187Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a6400] received request Name# PutRecord ok# false data# peer# 2024-11-21T08:52:00.173218Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a5200] received request Name# PutRecords ok# false data# peer# 2024-11-21T08:52:00.173232Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a5e00] received request Name# GetRecords ok# false data# peer# 2024-11-21T08:52:00.173255Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99d400] received request Name# GetShardIterator ok# false data# peer# 2024-11-21T08:52:00.173271Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ba200] received request Name# SubscribeToShard ok# false data# peer# 2024-11-21T08:52:00.173292Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b5a00] received request Name# DescribeLimits ok# false data# peer# 2024-11-21T08:52:00.173316Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a4c00] received request Name# DescribeStreamSummary ok# false data# peer# 2024-11-21T08:52:00.173326Z node 5 :GRPC_SERVER DEBUG: [0x46869e641e00] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T08:52:00.173356Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9bf000] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T08:52:00.173360Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ae200] received request Name# UpdateShardCount ok# false data# peer# 2024-11-21T08:52:00.173408Z node 5 :GRPC_SERVER DEBUG: [0x4686bf996800] received request Name# UpdateStreamMode ok# false data# peer# 2024-11-21T08:52:00.173411Z node 5 
:GRPC_SERVER DEBUG: [0x4686bf992600] received request Name# RegisterStreamConsumer ok# false data# peer# 2024-11-21T08:52:00.173444Z node 5 :GRPC_SERVER DEBUG: [0x4686bf999200] received request Name# DeregisterStreamConsumer ok# false data# peer# 2024-11-21T08:52:00.173448Z node 5 :GRPC_SERVER DEBUG: [0x4686bf998c00] received request Name# DescribeStreamConsumer ok# false data# peer# 2024-11-21T08:52:00.173481Z node 5 :GRPC_SERVER DEBUG: [0x4686bf998000] received request Name# ListStreamConsumers ok# false data# peer# 2024-11-21T08:52:00.173483Z node 5 :GRPC_SERVER DEBUG: [0x4686bf997400] received request Name# AddTagsToStream ok# false data# peer# 2024-11-21T08:52:00.173517Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99da00] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2024-11-21T08:52:00.173526Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99b000] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2024-11-21T08:52:00.173557Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99ce00] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T08:52:00.173561Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99aa00] received request Name# MergeShards ok# false data# peer# 2024-11-21T08:52:00.173598Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a2200] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T08:52:00.173600Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99c200] received request Name# SplitShard ok# false data# peer# 2024-11-21T08:52:00.173636Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9a9a00] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T08:52:00.173637Z node 5 :GRPC_SERVER DEBUG: [0x4686bf995000] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T08:52:00.173669Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98de00] received request Name# SelfCheck ok# false data# peer# 2024-11-21T08:52:00.173678Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98fc00] received request Name# NodeCheck ok# false data# peer# 2024-11-21T08:52:00.173707Z node 5 :GRPC_SERVER DEBUG: [0x4686bf999e00] received request Name# CreateSession ok# false data# peer# 2024-11-21T08:52:00.173712Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99a400] received request Name# DeleteSession ok# false data# peer# 2024-11-21T08:52:00.173748Z node 5 :GRPC_SERVER DEBUG: [0x4686bf990800] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T08:52:00.173750Z node 5 :GRPC_SERVER DEBUG: [0x4686bf997a00] received request Name# AttachSession ok# false data# peer# 2024-11-21T08:52:00.173786Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99ec00] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T08:52:00.173787Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ad600] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T08:52:00.173819Z node 5 :GRPC_SERVER DEBUG: [0x4686bf99bc00] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T08:52:00.173829Z node 5 :GRPC_SERVER DEBUG: [0x4686bf998600] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T08:52:00.173858Z node 5 :GRPC_SERVER DEBUG: [0x4686bf999800] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T08:52:00.173872Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ad000] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T08:52:00.173896Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b0c00] received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T08:52:00.173914Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b0000] received 
request Name# RestartTablet ok# false data# peer# 2024-11-21T08:52:00.173931Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9ae800] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T08:52:00.173954Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9afa00] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T08:52:00.173968Z node 5 :GRPC_SERVER DEBUG: [0x46869e646000] received request Name# DropLogStore ok# false data# peer# 2024-11-21T08:52:00.173992Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b7800] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T08:52:00.174002Z node 5 :GRPC_SERVER DEBUG: [0x4686bf9b4e00] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T08:52:00.174029Z node 5 :GRPC_SERVER DEBUG: [0x46869e649000] received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T08:52:00.174037Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98d800] received request Name# DropLogTable ok# false data# peer# 2024-11-21T08:52:00.174068Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98cc00] received request Name# Login ok# false data# peer# 2024-11-21T08:52:00.174069Z node 5 :GRPC_SERVER DEBUG: [0x4686bf990e00] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T08:52:00.174108Z node 5 :GRPC_SERVER DEBUG: [0x4686bf98c000] received request Name# DescribeReplication ok# false data# peer# |87.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] Test command err: RandomSeed# 9109254831152184027 2024-11-21T08:51:22.372569Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [8905fc2c56e67a84] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:51:22.372614Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [037997a09e8c9878] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:2:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:51:22.372638Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:1:1000:0] PatchedBlobId# [1:1:2:10:1:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:51:22.372663Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:2:1000:0] PatchedBlobId# [1:1:2:10:4098:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 >> TStorageTenantTest::LsLs >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> TContinuousBackupTests::TakeIncrementalBackup |87.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut 
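The GRPC_SERVER DEBUG entries above all follow one fixed shape, `received request Name# <Method> ok# <bool> data# peer#`, so they can be tallied mechanically when triaging a run like this. Below is a minimal C++ sketch of such a tally; it is not part of the YDB toolchain, and the field layout it parses is assumed only from the lines shown above.

// Count "received request" log entries per gRPC method name, split by the ok# flag.
// Reads log text from stdin; assumes the "Name# <method> ok# <true|false>" layout
// seen in the GRPC_SERVER DEBUG lines above.
#include <iostream>
#include <map>
#include <string>
#include <utility>

int main() {
    std::map<std::string, std::pair<int, int>> counts;  // method -> {ok, not ok}
    const std::string nameTag = "received request Name# ";
    std::string line;
    while (std::getline(std::cin, line)) {
        std::size_t pos = 0;
        while ((pos = line.find(nameTag, pos)) != std::string::npos) {
            pos += nameTag.size();
            const std::size_t nameEnd = line.find(' ', pos);
            if (nameEnd == std::string::npos) break;
            const std::string method = line.substr(pos, nameEnd - pos);
            const std::size_t okPos = line.find("ok# ", nameEnd);
            const bool ok = okPos != std::string::npos &&
                            line.compare(okPos + 4, 4, "true") == 0;
            if (ok) ++counts[method].first; else ++counts[method].second;
            pos = nameEnd;
        }
    }
    for (const auto& [method, c] : counts)
        std::cout << method << " ok=" << c.first << " not_ok=" << c.second << '\n';
    return 0;
}

Fed this section on stdin it would report, for example, RestartTablet ok=0 not_ok=1, matching the single ok# false entry for that method above.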
|87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> TContinuousBackupTests::Basic [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain |87.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:50:42.653958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:42.653984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:42.653990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:42.653995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:42.654013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:42.654017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:42.654026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:42.654127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:42.665037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:42.665062Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for 
TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:50:42.667323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:42.667428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:42.667460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:42.669661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:42.669733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:42.669835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:42.669980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:42.670520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:42.670786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:42.670796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:42.670809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:42.670816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:42.670821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:42.670864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:50:42.672021Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:50:42.687634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:42.687708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.687775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:42.687820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:42.687828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.688627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 
1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:42.688654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:42.688705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.688725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:42.688730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:42.688735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:42.689063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.689072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:42.689077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:42.689330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.689338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.689343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:42.689350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:42.689866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:42.690182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:42.690229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:42.690421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:42.690442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:42.690448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:42.690498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:42.690503Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:42.690536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:42.690548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:42.690854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:42.690862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:42.690903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:42.690909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:42.690988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:42.690994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:42.691004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:42.691008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:42.691014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:42.691018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:42.691023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:42.691027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:42.691037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:42.691042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:42.691046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
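The publish/ack exchange in this trace follows a simple counting discipline: the transaction records how many path publications it still waits for ("Publication still in progress, tx: 1, publications: 1"), each TEvUpdateAck carrying a matching or newer version retires one of them ("AckPublish ... version: 3"), and completion is reported once none remain. The sketch below reconstructs that bookkeeping from the log lines alone; the type and member names are assumptions for illustration, not YDB's actual schemeshard code.

// Illustrative ack counter for scheme-board publications, modeled on the
// "Publication still in progress" / "AckPublish" / "Publication complete" lines above.
#include <cstdint>
#include <iostream>
#include <map>

struct TxPublications {
    std::map<uint64_t, uint64_t> waiting;   // pathId -> version the ack must reach

    // Returns true once every expected publication has been acknowledged,
    // i.e. the point where the log prints "Publication complete, notify & reply".
    bool Ack(uint64_t pathId, uint64_t version) {
        auto it = waiting.find(pathId);
        if (it == waiting.end() || version < it->second)
            return false;                   // stale or unknown ack, keep waiting
        waiting.erase(it);
        return waiting.empty();
    }
};

int main() {
    TxPublications tx;                      // models txId 1 from the trace above
    tx.waiting[1] = 3;                      // pathId 1 is published at version 3
    std::cout << std::boolalpha
              << tx.Ack(1, 2) << '\n'       // older version: still in progress
              << tx.Ack(1, 3) << '\n';      // matching ack: publication complete
    return 0;
}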
594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:52:00.070130Z node 225 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:00.070144Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 127 RawX2: 966367643751 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:00.070148Z node 225 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T08:52:00.070203Z node 225 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:52:00.070209Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T08:52:00.070229Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T08:52:00.070246Z node 225 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[225:402:2372], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:52:00.070566Z node 225 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:00.070572Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:00.070606Z node 225 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:00.070610Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [225:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:52:00.070647Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:00.070652Z node 225 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T08:52:00.070655Z node 225 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T08:52:00.070746Z node 225 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:00.070753Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:00.070756Z node 225 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046678944, txId: 1003 2024-11-21T08:52:00.070759Z node 225 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:00.070762Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T08:52:00.070771Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:52:00.071056Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:00.071064Z node 225 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:52:00.071073Z node 225 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:00.071076Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:00.071080Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:52:00.071087Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [225:324:2316] message: TxId: 1003 2024-11-21T08:52:00.071092Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:00.071096Z node 225 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:00.071099Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:00.071126Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T08:52:00.071455Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:00.071532Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:00.071537Z node 225 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [225:612:2524] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:00.071627Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:00.071653Z node 225 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusSuccess 2024-11-21T08:52:00.071711Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 
Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:00.071759Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T08:52:00.071772Z node 225 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 12us result status StatusSuccess 2024-11-21T08:52:00.071793Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T08:52:00.071822Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:00.071831Z node 225 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 2024-11-21T08:52:00.071865Z node 225 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:00.972388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:00.972418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:00.972423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:00.972428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:00.972435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:00.972438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:00.972447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:00.972551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:00.983923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:00.983949Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:00.991629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:00.992570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:00.992612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:00.996364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:00.997019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:00.997144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:00.997236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:01.000881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.001265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:01.001282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.001337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:01.001347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:01.001355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:01.001379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.003014Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:01.020726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:01.020806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.020882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:01.020937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:01.020946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.022793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.022835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:01.022897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.022911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:01.022916Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:01.022922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:01.023462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.023476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:01.023481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:01.023955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.023966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.023974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.023981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.024703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:01.025074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:01.025134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:01.025310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.025336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:01.025343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.025418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:01.025426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.025465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:01.025478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:01.026555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:01.026566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:01.026617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.026622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:01.026731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.026740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:01.026754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:01.026759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.026765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:01.026771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.026777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:01.026781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:01.026794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:01.026801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:01.026804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:01.027102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:01.027116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:01.027120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:01.027125Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:01.027130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:01.027145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
594046678944, LocalPathId: 2], version: 6 2024-11-21T08:52:01.364122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:52:01.364131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T08:52:01.364172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:52:01.364181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:52:01.364184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:52:01.364187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T08:52:01.364701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:52:01.365210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:52:01.365235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:01.365282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:52:01.365294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:52:01.365299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:52:01.376503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 251 } } 2024-11-21T08:52:01.376532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:01.376560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 251 } } 2024-11-21T08:52:01.376574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 251 } } FAKE_COORDINATOR: Erasing txId 104 2024-11-21T08:52:01.376992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 
2024-11-21T08:52:01.377001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:01.377015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T08:52:01.377023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:01.377029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T08:52:01.377045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.377050Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.377055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:01.377063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T08:52:01.384137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.384327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.384438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.384450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:52:01.384475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T08:52:01.384480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:52:01.384491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T08:52:01.384514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 104 2024-11-21T08:52:01.384524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:52:01.384532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:52:01.384537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:52:01.384568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:52:01.384573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T08:52:01.384577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T08:52:01.384581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:01.384585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T08:52:01.384588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T08:52:01.384598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:01.384696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:01.384702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:01.384713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:01.384719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:01.384725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:01.385328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:52:01.385340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:736:2639] 2024-11-21T08:52:01.385357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2024-11-21T08:52:01.385498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:01.385541Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 52us result status StatusPathDoesNotExist 2024-11-21T08:52:01.385577Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:01.385637Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, 
at schemeshard: 72057594046678944 2024-11-21T08:52:01.385649Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 15us result status StatusPathDoesNotExist 2024-11-21T08:52:01.385667Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TStorageTenantTest::LsLs [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:01.505521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:01.505550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:01.505555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:01.505560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:01.505567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:01.505571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:01.505581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:01.505676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:01.518025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:01.518054Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2024-11-21T08:52:01.521856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:01.522744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:01.522783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:01.526994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:01.527619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:01.527749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.527849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:01.529060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.529406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:01.529419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.529463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:01.529472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:01.529479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:01.529497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.530892Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:01.549385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:01.549486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.549562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:01.549613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:01.549621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.551071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.551117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:01.551167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.551179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:01.551184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:01.551189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:01.552570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.552587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:01.552594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:01.552979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.552988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.552995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.553001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.553645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:01.554001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:01.554069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:01.554277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.554302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:01.554312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.554368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:01.554375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:01.554407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:01.554419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:01.554836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:01.554844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:01.554894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:01.554900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:01.554988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:01.554996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:01.555009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:01.555014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.555020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:01.555025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:01.555030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:01.555034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:01.555046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:01.555052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:01.555056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:01.555377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:01.555390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:01.555395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:01.555401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:01.555405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:01.555443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822011Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2024-11-21T08:52:01.822190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T08:52:01.822652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2024-11-21T08:52:01.822667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2024-11-21T08:52:01.822672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T08:52:01.822679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2024-11-21T08:52:01.822694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 103 2024-11-21T08:52:01.822700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T08:52:01.822708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:52:01.822713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:52:01.822726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:01.822731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T08:52:01.822735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T08:52:01.822753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:52:01.822757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T08:52:01.822761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T08:52:01.822768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:52:01.822772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2024-11-21T08:52:01.822775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T08:52:01.822783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:52:01.823228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:52:01.823242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:746:2625] TestWaitNotification: OK eventTxId 103 2024-11-21T08:52:01.823366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:01.823427Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 73us result status StatusSuccess 2024-11-21T08:52:01.823594Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:01.823691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:01.823726Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 39us result status StatusSuccess 2024-11-21T08:52:01.823844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:01.824002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:01.824028Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 28us result status StatusSuccess 2024-11-21T08:52:01.824102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK 
} } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2024-11-21T08:52:00.905369Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652673285983048:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.905440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004349/r3tmp/tmpZXv5iP/pdisk_1.dat 2024-11-21T08:52:01.003151Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:01.025138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:01.025163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:01.032830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8657 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:52:01.081703Z node 1 :TX_PROXY DEBUG: actor# [1:7439652673285983116:2134] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:01.081731Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652677580950835:2419] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:01.081783Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652673285983214:2182], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.081809Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652673285983214:2182], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:01.081872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:01.082366Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652677580950840:2420] 2024-11-21T08:52:01.082386Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652673285982799:2050] Subscribe: subscriber# [1:7439652677580950840:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.082399Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652677580950842:2420] 2024-11-21T08:52:01.082403Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652673285982805:2056] Subscribe: subscriber# [1:7439652677580950842:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.082416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950840:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652673285982799:2050] 2024-11-21T08:52:01.082421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950842:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652673285982805:2056] 2024-11-21T08:52:01.082426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677580950837:2420] 2024-11-21T08:52:01.082433Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677580950839:2420] 2024-11-21T08:52:01.082445Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652677580950836:2420][/dc-1] Set up state: owner# [1:7439652673285983214:2182], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.082483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950840:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7439652677580950837:2420], cookie# 1 2024-11-21T08:52:01.082486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950841:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652677580950838:2420], cookie# 1 2024-11-21T08:52:01.082489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950842:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652677580950839:2420], cookie# 1 2024-11-21T08:52:01.082495Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652677580950840:2420] 2024-11-21T08:52:01.082498Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652677580950840:2420], cookie# 1 2024-11-21T08:52:01.082502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652677580950842:2420] 2024-11-21T08:52:01.082504Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652677580950842:2420], cookie# 1 2024-11-21T08:52:01.086960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652677580950841:2420] 2024-11-21T08:52:01.086997Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652673285982802:2053] Subscribe: subscriber# [1:7439652677580950841:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.087023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652677580950841:2420], cookie# 1 2024-11-21T08:52:01.087040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950840:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652673285982799:2050], cookie# 1 2024-11-21T08:52:01.087045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950842:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652673285982805:2056], cookie# 1 2024-11-21T08:52:01.087069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950841:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652673285982802:2053] 2024-11-21T08:52:01.087075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652677580950841:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652673285982802:2053], cookie# 1 2024-11-21T08:52:01.087082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677580950837:2420], cookie# 1 2024-11-21T08:52:01.087089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:01.087093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677580950839:2420], cookie# 1 
2024-11-21T08:52:01.087098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:01.087106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677580950838:2420] 2024-11-21T08:52:01.087125Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652677580950836:2420][/dc-1] Path was already updated: owner# [1:7439652673285983214:2182], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.087129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677580950838:2420], cookie# 1 2024-11-21T08:52:01.087131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652677580950836:2420][/dc-1] Unexpected sync response: sender# [1:7439652677580950838:2420], cookie# 1 2024-11-21T08:52:01.087137Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652677580950841:2420] 2024-11-21T08:52:01.093167Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652673285983214:2182], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:01.093272Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652673285983214:2182], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652673285982805:2056] 2024-11-21T08:52:01.579643Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439652679073329577:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439652673285982805:2056] 2024-11-21T08:52:01.579645Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329559:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679073329572:2487] 2024-11-21T08:52:01.579648Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329559:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679073329573:2487] 2024-11-21T08:52:01.579648Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329558:2486][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7439652679073329566:2486] 2024-11-21T08:52:01.579650Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7439652679073329559:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [2:7439652679073328928:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.579651Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329559:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679073329574:2487] 2024-11-21T08:52:01.579654Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439652679073329559:2487][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7439652679073328928:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.579655Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329558:2486][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7439652679073329567:2486] 2024-11-21T08:52:01.579660Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T08:52:01.579661Z node 2 
:SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7439652679073329558:2486][/dc-1/USER_0/.metadata/workload_manager/running_requests] Set up state: owner# [2:7439652679073328928:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.579667Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439652679073329557:2485] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.579668Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652679073329558:2486][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7439652679073329568:2486] 2024-11-21T08:52:01.579673Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439652679073329558:2486][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7439652679073328928:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.579680Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652679073328928:2098], cacheItem# { Subscriber: { Subscriber: [2:7439652679073329557:2485] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.579684Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T08:52:01.579688Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439652679073329559:2487] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.579692Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652679073328928:2098], cacheItem# { Subscriber: { Subscriber: [2:7439652679073329559:2487] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.579698Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T08:52:01.579702Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439652679073328928:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439652679073329558:2486] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.579706Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652679073328928:2098], cacheItem# { Subscriber: { Subscriber: [2:7439652679073329558:2486] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.579709Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652679073329578:2488], recipient# [2:7439652679073329556:2317], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.579713Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652679073329579:2489], recipient# [2:7439652679073329555:2316], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.579747Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329563:2485] 2024-11-21T08:52:01.579751Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[2:7439652679073329564:2485] 2024-11-21T08:52:01.579753Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329570:2486] 2024-11-21T08:52:01.579756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982802:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329576:2487] 2024-11-21T08:52:01.579756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329569:2486] 2024-11-21T08:52:01.579758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329565:2485] 2024-11-21T08:52:01.579760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982799:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329575:2487] 2024-11-21T08:52:01.579761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329571:2486] 2024-11-21T08:52:01.579762Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652673285982805:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652679073329577:2487] >> ExternalBlobsMultipleChannels::SingleChannel >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2024-11-21T08:51:53.095907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:53.096341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:53.096376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040a4/r3tmp/tmpR24WcM/pdisk_1.dat 2024-11-21T08:51:53.197919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.217512Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:53.264870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.264904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.275948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:53.393823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.407815Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:53.408023Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:53.408107Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:53.408142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:53.414103Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:53.414274Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:53.414297Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:53.414461Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:53.414468Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:53.414473Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:53.414516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:53.416973Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:53.417061Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:53.417088Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:53.417093Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:53.417097Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
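The TX_DATASHARD TRACE entries in this block record each operation's path through the datashard execution-unit pipeline (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, and so on), one "Trying to execute [...] on unit ..." line per step. Below is a minimal sketch for reconstructing that order from a saved copy of this log; the file name datashard_ut.log is an assumed placeholder, and the pattern relies only on the message format visible in these lines.

    import re
    from collections import defaultdict

    # Matches entries such as:
    #   :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
    UNIT_RE = re.compile(
        r"TX_DATASHARD TRACE: Trying to execute \[(?P<op>[^\]]+)\]"
        r" at (?P<tablet>\d+) on unit (?P<unit>\S+)"
    )

    pipeline = defaultdict(list)  # (tablet, operation id) -> ordered list of execution units
    with open("datashard_ut.log") as log:   # assumed file name for a saved copy of this output
        for line in log:
            # finditer, because the captured log may hold several entries per physical line
            for m in UNIT_RE.finditer(line):
                pipeline[(m["tablet"], m["op"])].append(m["unit"])

    for (tablet, op), units in sorted(pipeline.items()):
        print(f"tablet {tablet}, op {op}: {' -> '.join(units)}")
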
2024-11-21T08:51:53.417102Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.417270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.417280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.417459Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:53.417483Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:53.417495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.417499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.417505Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:53.417511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.417516Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:53.417523Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.417528Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:53.417531Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:53.417535Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:53.417540Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:53.417555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:53.417558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:53.417581Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:53.417628Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:53.417637Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:53.417653Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:53.417662Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:53.417664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:53.417668Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:53.417670Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.417709Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:53.417711Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:53.417714Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:53.417716Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.417725Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:53.417727Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:53.417729Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:53.417731Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.417736Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:53.417956Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:53.417961Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:53.428380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:53.428419Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.428427Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.428442Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:53.428459Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:53.608868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.608894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.608902Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:53.608919Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:53.608923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:53.608950Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.608959Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:53.608964Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:53.608969Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
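The FinishPropose unit also reports per-transaction timings, as in the "Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED" entry above. A companion sketch, under the same assumed file name, that pulls those timings and statuses out of the log:

    import re

    # Matches the FinishPropose summary lines quoted above.
    PROPOSE_RE = re.compile(
        r"Propose transaction complete txid (?P<txid>\d+) at tablet (?P<tablet>\d+)"
        r".*?exec latency: (?P<exec_ms>\d+) ms, propose latency: (?P<prop_ms>\d+) ms,"
        r" status: (?P<status>\w+)"
    )

    with open("datashard_ut.log") as log:   # assumed file name, see the sketch above
        for line in log:
            for m in PROPOSE_RE.finditer(line):
                print(f"txid {m['txid']} @ tablet {m['tablet']}: "
                      f"exec {m['exec_ms']} ms, propose {m['prop_ms']} ms, {m['status']}")
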
2024-11-21T08:51:53.609765Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:53.609783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.609918Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.609924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.609931Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.609939Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:53.609943Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.609952Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:01.950495Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:969:2792], 1001} after executionsCount# 1 2024-11-21T08:52:01.950499Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:969:2792], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:01.950506Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:969:2792], 1001} finished in read 2024-11-21T08:52:01.950511Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2024-11-21T08:52:01.950515Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T08:52:01.950518Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:52:01.950521Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:52:01.950527Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2024-11-21T08:52:01.950530Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:52:01.950533Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2024-11-21T08:52:01.950536Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T08:52:01.950546Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T08:52:01.950623Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:974:2797], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950627Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950631Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:973:2796], serverId# [6:974:2797], sessionId# [0:0:0] 2024-11-21T08:52:01.950640Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:972:2795], Recipient [6:683:2563]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:01.950719Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:977:2800], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950723Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950727Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:976:2799], serverId# [6:977:2800], sessionId# [0:0:0] 2024-11-21T08:52:01.950748Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:975:2798], Recipient [6:683:2563]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:01.950759Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2024-11-21T08:52:01.950764Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:01.950768Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T08:52:01.950775Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2024-11-21T08:52:01.950784Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T08:52:01.950787Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2024-11-21T08:52:01.950790Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:01.950793Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2024-11-21T08:52:01.950799Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2024-11-21T08:52:01.950803Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T08:52:01.950807Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:01.950810Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2024-11-21T08:52:01.950813Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2024-11-21T08:52:01.950821Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:01.950840Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:975:2798], 1002} after executionsCount# 1 2024-11-21T08:52:01.950845Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:975:2798], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:01.950852Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:975:2798], 1002} finished in read 2024-11-21T08:52:01.950857Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T08:52:01.950860Z node 6 :TX_DATASHARD TRACE: 
Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2024-11-21T08:52:01.950863Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:52:01.950867Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:52:01.950871Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T08:52:01.950874Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:52:01.950877Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2024-11-21T08:52:01.950880Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2024-11-21T08:52:01.950889Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2024-11-21T08:52:01.950958Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:980:2803], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950962Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.950967Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:979:2802], serverId# [6:980:2803], sessionId# [0:0:0] 2024-11-21T08:52:01.950980Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:978:2801], Recipient [6:680:2561]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:01.951055Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:983:2806], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.951061Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:01.951065Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:982:2805], serverId# [6:983:2806], sessionId# [0:0:0] 2024-11-21T08:52:01.951084Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:981:2804], Recipient [6:680:2561]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:01.951094Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2024-11-21T08:52:01.951099Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:01.951102Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T08:52:01.951107Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2024-11-21T08:52:01.951115Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T08:52:01.951119Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2024-11-21T08:52:01.951122Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:01.951125Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2024-11-21T08:52:01.951131Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2024-11-21T08:52:01.951135Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T08:52:01.951138Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:01.951141Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2024-11-21T08:52:01.951144Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2024-11-21T08:52:01.951153Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:01.951166Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:981:2804], 1003} after executionsCount# 1 2024-11-21T08:52:01.951170Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:981:2804], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:01.951177Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:981:2804], 1003} finished in read 2024-11-21T08:52:01.951182Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T08:52:01.951185Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2024-11-21T08:52:01.951189Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2024-11-21T08:52:01.951192Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2024-11-21T08:52:01.951197Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T08:52:01.951200Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2024-11-21T08:52:01.951204Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2024-11-21T08:52:01.951216Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2024-11-21T08:52:01.951224Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |87.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> TStorageTenantTest::GenericCases [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2024-11-21T08:52:00.635540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652672206312978:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.635628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004340/r3tmp/tmpFMtG6v/pdisk_1.dat 2024-11-21T08:52:00.712248Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:00.738474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:00.738513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:00.740038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24040 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:52:00.780418Z node 1 :TX_PROXY DEBUG: actor# [1:7439652672206313076:2111] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:00.780462Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652672206313524:2416] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:00.780518Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652672206313102:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:00.780529Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652672206313102:2124], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:00.780673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:00.781110Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312786:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652672206313529:2417] 2024-11-21T08:52:00.781127Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672206312786:2049] Subscribe: subscriber# [1:7439652672206313529:2417], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.781141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312789:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652672206313530:2417] 2024-11-21T08:52:00.781145Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672206312789:2052] Subscribe: subscriber# [1:7439652672206313530:2417], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.781151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312792:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# 
[1:7439652672206313531:2417] 2024-11-21T08:52:00.781156Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672206312792:2055] Subscribe: subscriber# [1:7439652672206313531:2417], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.781210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313529:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206312786:2049] 2024-11-21T08:52:00.781215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313530:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206312789:2052] 2024-11-21T08:52:00.781220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313531:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206312792:2055] 2024-11-21T08:52:00.781232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206313526:2417] 2024-11-21T08:52:00.781240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206313527:2417] 2024-11-21T08:52:00.781251Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652672206313525:2417][/dc-1] Set up state: owner# [1:7439652672206313102:2124], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.781285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672206313528:2417] 2024-11-21T08:52:00.781291Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652672206313525:2417][/dc-1] Path was already updated: owner# [1:7439652672206313102:2124], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.781299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313529:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313526:2417], cookie# 1 2024-11-21T08:52:00.781302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313530:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313527:2417], cookie# 1 2024-11-21T08:52:00.781306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313531:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313528:2417], cookie# 1 2024-11-21T08:52:00.781313Z node 1 :TX_PROXY DEBUG: Handle 
TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:52:00.781322Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312786:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652672206313529:2417] 2024-11-21T08:52:00.781326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312786:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313529:2417], cookie# 1 2024-11-21T08:52:00.781331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312789:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652672206313530:2417] 2024-11-21T08:52:00.781333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312789:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313530:2417], cookie# 1 2024-11-21T08:52:00.781336Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312792:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652672206313531:2417] 2024-11-21T08:52:00.781339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672206312792:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652672206313531:2417], cookie# 1 2024-11-21T08:52:00.784268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313529:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206312786:2049], cookie# 1 2024-11-21T08:52:00.784292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313530:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206312789:2052], cookie# 1 2024-11-21T08:52:00.784296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652672206313531:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206312792:2055], cookie# 1 2024-11-21T08:52:00.784304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206313526:2417], cookie# 1 2024-11-21T08:52:00.784311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:00.784315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206313527:2417], cookie# 1 2024-11-21T08:52:00.784320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:00.784325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672206313528:2417], cookie# 1 2024-11-21T08:52:00.784327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652672206313525:2417][/dc-1] Unexpected sync response: sender# [1:7439652672206313528:2417], cookie# 1 TClient::Ls response: 2024-11-21T08:52:00.791121Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652672206313102:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess 
Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:00.791221Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652672206313102:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Ver ... ns: true 2024-11-21T08:52:01.783190Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838422:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [4:7439652676087691487:2710] 2024-11-21T08:52:01.783191Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7439652679012838422:2054] Upsert description: path# /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T08:52:01.783196Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7439652679012838422:2054] Subscribe: subscriber# [4:7439652676087691487:2710], path# /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.783200Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838425:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [4:7439652676087691482:2709] 2024-11-21T08:52:01.783201Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7439652679012838425:2057] Upsert description: path# /dc-1/USER_0/.metadata/workload_manager/running_requests 2024-11-21T08:52:01.783204Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7439652679012838425:2057] Subscribe: subscriber# [4:7439652676087691482:2709], path# /dc-1/USER_0/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.783208Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838425:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [4:7439652676087691488:2710] 2024-11-21T08:52:01.783209Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:7439652679012838425:2057] Upsert description: path# /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T08:52:01.783211Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7439652679012838425:2057] Subscribe: subscriber# [4:7439652676087691488:2710], path# /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.783222Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838419:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691474:2708] 2024-11-21T08:52:01.783224Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838422:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691475:2708] 2024-11-21T08:52:01.783227Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838425:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691476:2708] 2024-11-21T08:52:01.783367Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838419:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691480:2709] 2024-11-21T08:52:01.783372Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838419:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691486:2710] 2024-11-21T08:52:01.783374Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838422:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691481:2709] 2024-11-21T08:52:01.783377Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838422:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691487:2710] 2024-11-21T08:52:01.783379Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838425:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691482:2709] 2024-11-21T08:52:01.783381Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439652679012838425:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439652676087691488:2710] 2024-11-21T08:52:01.783265Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439652676087691469:2709][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7439652676087690425:2102], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.783269Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439652676087691486:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679012838419:2051] 2024-11-21T08:52:01.783273Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439652676087691487:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679012838422:2054] 2024-11-21T08:52:01.783276Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439652676087691488:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439652679012838425:2057] 2024-11-21T08:52:01.783280Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439652676087691470:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7439652676087691483:2710] 2024-11-21T08:52:01.783282Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439652676087691470:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7439652676087691484:2710] 2024-11-21T08:52:01.783285Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439652676087691470:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [4:7439652676087690425:2102], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.783287Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439652676087691470:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7439652676087691485:2710] 2024-11-21T08:52:01.783290Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439652676087691470:2710][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7439652676087690425:2102], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.783300Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439652676087690425:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T08:52:01.783304Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439652676087690425:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439652676087691469:2709] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.783310Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439652676087690425:2102], cacheItem# { Subscriber: { Subscriber: [4:7439652676087691469:2709] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.783314Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: 
HandleNotify: self# [4:7439652676087690425:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T08:52:01.783318Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439652676087690425:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439652676087691470:2710] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.783325Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439652676087690425:2102], cacheItem# { Subscriber: { Subscriber: [4:7439652676087691470:2710] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.783331Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439652676087691489:2711], recipient# [4:7439652676087691464:2341], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.783341Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439652676087691490:2712], recipient# [4:7439652676087691448:2335], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2024-11-21T08:52:01.355905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652679113459961:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:01.355954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:01.372918Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652677972066894:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00432a/r3tmp/tmpGHnCZ2/pdisk_1.dat 2024-11-21T08:52:01.397116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:01.426290Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2822 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:52:01.447803Z node 1 :TX_PROXY DEBUG: actor# [1:7439652679113460045:2138] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:01.447826Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652679113460420:2395] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:01.447867Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652679113460067:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.447896Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652679113460067:2151], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:01.447947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:01.448370Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459713:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652679113460425:2396] 2024-11-21T08:52:01.448374Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459716:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652679113460426:2396] 2024-11-21T08:52:01.448398Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652679113459716:2055] Subscribe: subscriber# [1:7439652679113460426:2396], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.448398Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652679113459713:2052] Subscribe: subscriber# [1:7439652679113460425:2396], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.448413Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459719:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652679113460427:2396] 2024-11-21T08:52:01.448417Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652679113459719:2058] Subscribe: subscriber# [1:7439652679113460427:2396], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.448419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460425:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113459713:2052] 2024-11-21T08:52:01.448424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460426:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113459716:2055] 
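The SCHEME_BOARD_SUBSCRIBER entries here, and the sync sequence that follows ("Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1", then "Sync is done ... successes# 2", then "Unexpected sync response" for the late third reply), show a simple majority rule over the three replica subscribers: the sync completes once more than half of the replicas have answered. The sketch below only models the arithmetic visible in those lines; it is an illustration, not YDB's implementation.

    def sync_quorum_reached(size: int, successes: int) -> bool:
        # "half# 1" for size# 3; the trace reports "Sync is done" once successes exceed half
        half = size // 2
        return successes > half

    assert not sync_quorum_reached(size=3, successes=1)  # logged as "Sync is in progress"
    assert sync_quorum_reached(size=3, successes=2)      # logged as "Sync is done"
    # The third reply arrives after completion and is logged as "Unexpected sync response".
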
2024-11-21T08:52:01.448425Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459713:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652679113460425:2396] 2024-11-21T08:52:01.448428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460427:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113459719:2058] 2024-11-21T08:52:01.448429Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459716:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652679113460426:2396] 2024-11-21T08:52:01.448432Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459719:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652679113460427:2396] 2024-11-21T08:52:01.448434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113460422:2396] 2024-11-21T08:52:01.448441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113460423:2396] 2024-11-21T08:52:01.448452Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652679113460421:2396][/dc-1] Set up state: owner# [1:7439652679113460067:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.448491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652679113460424:2396] 2024-11-21T08:52:01.448504Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652679113460421:2396][/dc-1] Path was already updated: owner# [1:7439652679113460067:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.448512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460425:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460422:2396], cookie# 1 2024-11-21T08:52:01.448516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460426:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460423:2396], cookie# 1 2024-11-21T08:52:01.448519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460427:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460424:2396], cookie# 1 2024-11-21T08:52:01.448524Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459713:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460425:2396], cookie# 1 2024-11-21T08:52:01.448529Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459716:2055] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460426:2396], cookie# 1 2024-11-21T08:52:01.448533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652679113459719:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652679113460427:2396], cookie# 1 2024-11-21T08:52:01.448539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460425:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113459713:2052], cookie# 1 2024-11-21T08:52:01.448542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460426:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113459716:2055], cookie# 1 2024-11-21T08:52:01.448545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652679113460427:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113459719:2058], cookie# 1 2024-11-21T08:52:01.448552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113460422:2396], cookie# 1 2024-11-21T08:52:01.448562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:01.448567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113460423:2396], cookie# 1 2024-11-21T08:52:01.448577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:01.448581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652679113460424:2396], cookie# 1 2024-11-21T08:52:01.448583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652679113460421:2396][/dc-1] Unexpected sync response: sender# [1:7439652679113460424:2396], cookie# 1 2024-11-21T08:52:01.456403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:01.456432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:01.457255Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652679113460067:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } 
PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:01.457340Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652679113460067:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNo ... known Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.753165Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652677972067006:2109], recipient# [2:7439652677972066997:2106], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.753172Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.753182Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652677972067007:2110], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.753187Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652677972067008:2111], recipient# [2:7439652677972066995:2276], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.753503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:01.753537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:01.831097Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.831159Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652677972067009:2112], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.831283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:01.908786Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.908867Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652677972067010:2113], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.908939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:02.007813Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.007871Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652682267034307:2114], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.007947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:02.100060Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.100135Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652682267034309:2116], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.100202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:02.175609Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652677972066981:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.175670Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652682267034310:2117], recipient# [2:7439652677972066994:2275], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.175744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439652677972066994:2275], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DeletePrepared-Volatile [GOOD] Test command err: 2024-11-21T08:51:52.959326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:52.959702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:52.959718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b0/r3tmp/tmpv9ZYFg/pdisk_1.dat 2024-11-21T08:51:53.062330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.079518Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:53.121962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.121997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.132557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:53.237744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.252007Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:53.252609Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:53.252720Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:53.252765Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:53.260967Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:53.261142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:53.261168Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:53.261309Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:53.261317Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:53.261323Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:53.261366Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:53.264555Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:53.264640Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:53.264667Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:53.264672Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:53.264676Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:53.264681Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.264825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.264832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.264964Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:53.264984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:53.264997Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.265003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.265009Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:53.265016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.265022Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:53.265029Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.265034Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:53.265038Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:53.265043Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:53.265048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:53.265069Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:53.265073Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:53.265093Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:53.265137Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:53.265147Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:53.265164Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:53.265172Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:53.265176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:53.265181Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:53.265184Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.265226Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:53.265231Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:53.265234Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:53.265237Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.265247Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:53.265250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:53.265253Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:53.265256Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.265261Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:53.265526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:53.265535Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:53.276668Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:53.276696Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.276704Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.276715Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:53.276730Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:53.480524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.480556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.480566Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:53.480587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:53.480592Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:53.480625Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.480637Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:53.480643Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:53.480649Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
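Editor's note: the TX_DATASHARD traces above walk one proposed transaction through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), with per-unit statuses such as Executed, DelayComplete, and "not ready". The sketch below is a hypothetical C++ model of that loop, not the datashard implementation: the unit names and status values are copied from the log, but the control flow and types are assumptions made for illustration.

// Hypothetical execution-unit pipeline: Executed advances immediately,
// DelayComplete defers its "Complete execution" step until after commit,
// and a not-ready unit (WaitForPlan) parks the operation until TEvPlanStep.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct Unit {
    std::string name;
    std::function<EStatus()> execute;
};

int main() {
    std::vector<Unit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},   // completed later, on commit
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},        // parked until the plan step arrives
    };
    std::vector<std::string> delayed;

    for (const Unit& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.name << "\n";
        EStatus status = unit.execute();
        if (status == EStatus::NotReady) {
            std::cout << "operation is not ready to execute on unit " << unit.name << "\n";
            break;                                  // wait for the plan step, as in the log
        }
        if (status == EStatus::DelayComplete) {
            delayed.push_back(unit.name);           // "Complete execution" is logged after commit
        }
        std::cout << "Advance execution plan, executing on unit " << unit.name << "\n";
    }
    for (const std::string& name : delayed) {
        std::cout << "Complete execution on unit " << name << "\n";
    }
}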
2024-11-21T08:51:53.481511Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:53.481537Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.481717Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.481724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.481732Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.481742Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:53.481747Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.481757Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... to execute [1500:101] at 72075186224037888 on unit LoadTxDetails 2024-11-21T08:52:02.101805Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 got data writeOp from cache 1500:101 2024-11-21T08:52:02.101809Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101813Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T08:52:02.101816Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:02.101821Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:52:02.101845Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically complete end at 72075186224037888 2024-11-21T08:52:02.101849Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically incomplete end at 72075186224037888 2024-11-21T08:52:02.101853Z node 7 :TX_DATASHARD TRACE: Activated operation [1500:101] at 72075186224037888 2024-11-21T08:52:02.101860Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101863Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:02.101868Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T08:52:02.101874Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T08:52:02.101888Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101891Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T08:52:02.101895Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T08:52:02.101898Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T08:52:02.101903Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101907Z node 7 :TX_DATASHARD 
TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T08:52:02.101911Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T08:52:02.101914Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T08:52:02.101921Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101924Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T08:52:02.101927Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T08:52:02.101932Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T08:52:02.101936Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.101939Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T08:52:02.101942Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T08:52:02.101946Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit ExecuteWrite 2024-11-21T08:52:02.101952Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [1500:101] at 72075186224037888 2024-11-21T08:52:02.101991Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [1500:101] at 72075186224037888, row count=1 2024-11-21T08:52:02.102004Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:52:02.102017Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:02.102021Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T08:52:02.102024Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T08:52:02.102029Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:02.102097Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is DelayComplete 2024-11-21T08:52:02.102101Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T08:52:02.102105Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:02.102109Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:52:02.102114Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T08:52:02.102118Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:02.102122Z node 7 :TX_DATASHARD TRACE: Execution plan for [1500:101] at 72075186224037888 has finished 2024-11-21T08:52:02.102128Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:02.102131Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:02.102135Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 
72075186224037888 has no attached operations 2024-11-21T08:52:02.102138Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:02.114697Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 101} 2024-11-21T08:52:02.114731Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:52:02.114753Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:02.114764Z node 7 :TX_DATASHARD TRACE: Complete execution for [1500:101] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:02.114792Z node 7 :TX_DATASHARD DEBUG: Complete write [1500 : 101] from 72075186224037888 at tablet 72075186224037888 send result to client [7:557:2484] 2024-11-21T08:52:02.114806Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:02.115257Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:738:2610], Recipient [7:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:02.115270Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:02.115279Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:737:2609], serverId# [7:738:2610], sessionId# [0:0:0] 2024-11-21T08:52:02.115322Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:736:2608], Recipient [7:631:2536]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:02.116230Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:741:2613], Recipient [7:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:02.116245Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:02.116253Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:740:2612], serverId# [7:741:2613], sessionId# [0:0:0] 2024-11-21T08:52:02.116313Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:739:2611], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:02.116343Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:52:02.116357Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/101 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2024-11-21T08:52:02.116365Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2024-11-21T08:52:02.116379Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2024-11-21T08:52:02.116404Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:52:02.116411Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:52:02.116417Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:02.116423Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit 
BuildAndWaitDependencies 2024-11-21T08:52:02.116441Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2024-11-21T08:52:02.116451Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:52:02.116456Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:02.116460Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:52:02.116464Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:52:02.116479Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:02.116549Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:739:2611], 1000} after executionsCount# 1 2024-11-21T08:52:02.116558Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:739:2611], 1000} sends rowCount# 2, bytes# 48, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551567, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:02.116577Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:739:2611], 1000} finished in read 2024-11-21T08:52:02.116589Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:52:02.116593Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:52:02.116597Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:02.116601Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:52:02.116613Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T08:52:02.116616Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:02.116620Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T08:52:02.116626Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:52:02.116648Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2024-11-21T08:52:00.563469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652674159835977:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.563520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00435c/r3tmp/tmp5Xvqxs/pdisk_1.dat 2024-11-21T08:52:00.629366Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28856 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:52:00.654138Z node 1 :TX_PROXY DEBUG: actor# [1:7439652674159836045:2136] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:00.654160Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652674159836420:2386] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:00.654194Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652674159836147:2187], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:00.654202Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652674159836147:2187], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:00.654248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:00.654668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835727:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674159836425:2387] 2024-11-21T08:52:00.654691Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835727:2051] Subscribe: subscriber# [1:7439652674159836425:2387], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.654705Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835730:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674159836426:2387] 2024-11-21T08:52:00.654709Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835730:2054] Subscribe: subscriber# [1:7439652674159836426:2387], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.654722Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835733:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674159836427:2387] 2024-11-21T08:52:00.654731Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835733:2057] Subscribe: subscriber# [1:7439652674159836427:2387], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.654741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836425:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159835727:2051] 2024-11-21T08:52:00.654752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836426:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159835730:2054] 2024-11-21T08:52:00.654756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836427:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159835733:2057] 2024-11-21T08:52:00.654763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159836422:2387] 2024-11-21T08:52:00.654773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159836423:2387] 2024-11-21T08:52:00.654783Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652674159836421:2387][/dc-1] Set up state: owner# [1:7439652674159836147:2187], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.654814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674159836424:2387] 2024-11-21T08:52:00.654826Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652674159836421:2387][/dc-1] Path was already updated: owner# [1:7439652674159836147:2187], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.654832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836425:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836422:2387], cookie# 1 2024-11-21T08:52:00.654840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836426:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836423:2387], cookie# 1 2024-11-21T08:52:00.654843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836427:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836424:2387], cookie# 1 2024-11-21T08:52:00.654847Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835727:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674159836425:2387] 2024-11-21T08:52:00.654855Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835727:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836425:2387], cookie# 1 2024-11-21T08:52:00.654859Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835730:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674159836426:2387] 2024-11-21T08:52:00.654862Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835730:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836426:2387], cookie# 1 2024-11-21T08:52:00.654870Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835733:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674159836427:2387] 2024-11-21T08:52:00.654873Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835733:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674159836427:2387], cookie# 1 2024-11-21T08:52:00.659828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836425:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159835727:2051], cookie# 1 2024-11-21T08:52:00.659849Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836426:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159835730:2054], cookie# 1 2024-11-21T08:52:00.659854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674159836427:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159835733:2057], cookie# 1 2024-11-21T08:52:00.659864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159836422:2387], cookie# 1 2024-11-21T08:52:00.659872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:00.659877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159836423:2387], cookie# 1 2024-11-21T08:52:00.659881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:00.659887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674159836424:2387], cookie# 1 2024-11-21T08:52:00.659890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674159836421:2387][/dc-1] Unexpected sync response: sender# [1:7439652674159836424:2387], cookie# 1 2024-11-21T08:52:00.671808Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652674159836147:2187], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:00.671893Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652674159836147:2187], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7439652678454804321:2835] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:01.784286Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652674159836147:2187], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2024-11-21T08:52:01.784295Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652674159836147:2187], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439652678454804321:2835] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179121800 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T08:52:01.784312Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439652674159836147:2187], cacheItem# { Subscriber: { Subscriber: [1:7439652678454804321:2835] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179121800 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T08:52:01.784510Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439652678454804328:2836], recipient# [1:7439652678454804320:2834], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:52:01.784520Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7439652678454804320:2834] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:52:01.784536Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652678454804320:2834] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2024-11-21T08:52:01.784819Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652678454804320:2834] Handle TEvDescribeSchemeResult Forward to# [1:7439652678454804319:2833] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1732179121800 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1732179121800 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... 
(TRUNCATED) 2024-11-21T08:52:01.795033Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835727:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652677169221621:2107] 2024-11-21T08:52:01.795033Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:52:01.795060Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835727:2051] Unsubscribe: subscriber# [3:7439652677169221621:2107], path# /dc-1/USER_0 2024-11-21T08:52:01.795070Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835730:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652677169221622:2107] 2024-11-21T08:52:01.795074Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835730:2054] Unsubscribe: subscriber# [3:7439652677169221622:2107], path# /dc-1/USER_0 2024-11-21T08:52:01.795080Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674159835733:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652677169221623:2107] 2024-11-21T08:52:01.795084Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674159835733:2057] Unsubscribe: subscriber# [3:7439652677169221623:2107], path# /dc-1/USER_0 2024-11-21T08:52:01.795274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:52:01.911253Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652674159836147:2187], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.911331Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439652674159836147:2187], cacheItem# { Subscriber: { Subscriber: [1:7439652674159836767:2641] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:01.911349Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439652678454804333:2840], recipient# [1:7439652678454804332:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> MultiGet::SequentialGet [GOOD] >> ProxyEncryption::CorrectlyFailOnNoKeys >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> ProxyEncryption::CorrectlyFailOnNoKeys [GOOD] >> ScrubFast::SingleBlob >> DataShardWrite::WriteImmediateBadRequest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2024-11-21T08:52:00.772458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652674998398245:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00433a/r3tmp/tmpzoqqH5/pdisk_1.dat 2024-11-21T08:52:00.846500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:00.872112Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:00.877516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:00.877543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:00.880645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4422 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:52:00.918953Z node 1 :TX_PROXY DEBUG: actor# [1:7439652674998398109:2128] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:00.918975Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652674998398724:2412] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:00.919007Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652674998398329:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:00.919013Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652674998398329:2142], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:00.919054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:00.919424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674998398729:2413] 2024-11-21T08:52:00.919437Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397993:2050] Subscribe: subscriber# [1:7439652674998398729:2413], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.919448Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674998398730:2413] 2024-11-21T08:52:00.919451Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397996:2053] Subscribe: subscriber# [1:7439652674998398730:2413], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.919457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652674998398731:2413] 2024-11-21T08:52:00.919460Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397999:2056] Subscribe: subscriber# [1:7439652674998398731:2413], path# /dc-1, domainOwnerId# 
72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.919467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398729:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998397993:2050] 2024-11-21T08:52:00.919471Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398730:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998397996:2053] 2024-11-21T08:52:00.919474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398731:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998397999:2056] 2024-11-21T08:52:00.919479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998398726:2413] 2024-11-21T08:52:00.919483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998398727:2413] 2024-11-21T08:52:00.919490Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652674998398725:2413][/dc-1] Set up state: owner# [1:7439652674998398329:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.919520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652674998398728:2413] 2024-11-21T08:52:00.919526Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652674998398725:2413][/dc-1] Path was already updated: owner# [1:7439652674998398329:2142], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.919532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398729:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398726:2413], cookie# 1 2024-11-21T08:52:00.919534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398730:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398727:2413], cookie# 1 2024-11-21T08:52:00.919537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398731:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398728:2413], cookie# 1 2024-11-21T08:52:00.924254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674998398729:2413] 2024-11-21T08:52:00.924279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398729:2413], cookie# 1 2024-11-21T08:52:00.924288Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674998398730:2413] 2024-11-21T08:52:00.924293Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398730:2413], cookie# 1 2024-11-21T08:52:00.924297Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652674998398731:2413] 2024-11-21T08:52:00.924301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652674998398731:2413], cookie# 1 2024-11-21T08:52:00.924309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398729:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998397993:2050], cookie# 1 2024-11-21T08:52:00.924312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398730:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998397996:2053], cookie# 1 2024-11-21T08:52:00.924315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652674998398731:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998397999:2056], cookie# 1 2024-11-21T08:52:00.924322Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998398726:2413], cookie# 1 2024-11-21T08:52:00.924328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:00.924331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998398727:2413], cookie# 1 2024-11-21T08:52:00.924336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:00.924341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652674998398728:2413], cookie# 1 2024-11-21T08:52:00.924343Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652674998398725:2413][/dc-1] Unexpected sync response: sender# [1:7439652674998398728:2413], cookie# 1 2024-11-21T08:52:00.929254Z node 1 :TX_PROXY DEBUG: actor# [1:7439652674998398109:2128] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:00.931323Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:52:00.939742Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652674998398329:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" 
PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:00.939838Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652674998398329:2142], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 Paren ... 715668 SEND EvProposeTransaction to# 72075186224037888 Coordinator marker# P7 2024-11-21T08:52:02.344049Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [2:7439652676788211468:2271], Recipient [1:7439652683588334200:3071] 2024-11-21T08:52:02.344056Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T08:52:02.344064Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652683588334200:3071] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2024-11-21T08:52:02.353853Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [2:7439652676788211468:2271], Recipient [1:7439652683588334200:3071] 2024-11-21T08:52:02.353867Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T08:52:02.353875Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652683588334200:3071] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2024-11-21T08:52:02.356691Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [2:7439652681083179147:2306], Recipient [1:7439652683588334200:3071] 2024-11-21T08:52:02.356702Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:52:02.356720Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652683588334200:3071] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2024-11-21T08:52:02.356734Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [2:7439652681083178965:2292], Recipient [1:7439652683588334200:3071] 2024-11-21T08:52:02.356735Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:52:02.356738Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652683588334200:3071] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 
marker# P12 2024-11-21T08:52:02.356847Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652683588334200:3071] txid# 281474976715668 MergeResult ExecComplete TDataReq marker# P17 2024-11-21T08:52:02.356880Z node 1 :TX_PROXY INFO: Actor# [1:7439652683588334200:3071] txid# 281474976715668 RESPONSE Status# ExecComplete prepare time: 0.003784s execute time: 0.013191s total time: 0.016975s marker# P13 2024-11-21T08:52:02.369450Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439652676788211420:2099] 2024-11-21T08:52:02.369472Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397993:2050] Unsubscribe: subscriber# [2:7439652676788211420:2099], path# /dc-1/USER_0 2024-11-21T08:52:02.369480Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439652676788211421:2099] 2024-11-21T08:52:02.369485Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397996:2053] Unsubscribe: subscriber# [2:7439652676788211421:2099], path# /dc-1/USER_0 2024-11-21T08:52:02.369490Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439652676788211422:2099] 2024-11-21T08:52:02.369494Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397999:2056] Unsubscribe: subscriber# [2:7439652676788211422:2099], path# /dc-1/USER_0 2024-11-21T08:52:02.369545Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2024-11-21T08:52:02.369813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:52:02.531368Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652676788211423:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.531402Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [2:7439652676788211423:2100], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2024-11-21T08:52:02.531470Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:02.531605Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7439652681083179378:2511] 2024-11-21T08:52:02.531608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7439652681083179379:2511] 2024-11-21T08:52:02.531613Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397996:2053] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:52:02.531619Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397993:2050] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:52:02.531630Z node 1 
:SCHEME_BOARD_REPLICA INFO: [1:7439652674998397993:2050] Subscribe: subscriber# [2:7439652681083179378:2511], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.531630Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397996:2053] Subscribe: subscriber# [2:7439652681083179379:2511], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.531643Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7439652681083179380:2511] 2024-11-21T08:52:02.531644Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397999:2056] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:52:02.531647Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652674998397999:2056] Subscribe: subscriber# [2:7439652681083179380:2511], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.531717Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439652681083179378:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7439652674998397993:2050] 2024-11-21T08:52:02.531738Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439652681083179379:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7439652674998397996:2053] 2024-11-21T08:52:02.531749Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439652681083179380:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7439652674998397999:2056] 2024-11-21T08:52:02.531757Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [2:7439652681083179375:2511] 2024-11-21T08:52:02.531771Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397993:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652681083179378:2511] 2024-11-21T08:52:02.531782Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [2:7439652681083179376:2511] 2024-11-21T08:52:02.531789Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [2:7439652676788211423:2100], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.531772Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397996:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652681083179379:2511] 2024-11-21T08:52:02.531794Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [2:7439652681083179377:2511] 2024-11-21T08:52:02.531779Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652674998397999:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439652681083179380:2511] 2024-11-21T08:52:02.531800Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439652681083179374:2511][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [2:7439652676788211423:2100], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.531816Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439652676788211423:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 } 2024-11-21T08:52:02.531830Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439652676788211423:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439652681083179374:2511] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:02.531851Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652676788211423:2100], cacheItem# { Subscriber: { Subscriber: [2:7439652681083179374:2511] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:02.531873Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652681083179381:2512], recipient# [2:7439652681083179373:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2024-11-21T08:52:00.582037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:00.582588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:00.582616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00450e/r3tmp/tmpxtE5Cm/pdisk_1.dat 2024-11-21T08:52:00.683213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:00.703461Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:00.748618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:00.748660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:00.759280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:00.868828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:01.118276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:705:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.118314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:714:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.118326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.119278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:01.324068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:719:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:01.428952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqmyxcy1jps3fp3cdqsv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTliYWUzMy0xMzk3NWFjYS0yM2FkNzg2Mi1mMDkyZTkyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.441321Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yqn8q45mxpv3n9bvsvd2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc0MzUwZTMtNGViZjYxZTEtMzI2ZTkxNy03YWIwOTdiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.455439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yqn929y8h63rhhjzappaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJkYjM2Y2QtOGQ3NWMzYzEtMTQ1ZDdiMzctNWY5MTE3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.469610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yqn9hcd6zad7qn4qdvr0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM2NzYyZTctNzFmY2UyN2YtYmIyNzY2YTgtNWNmNjA4NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.483116Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yqn9z2fa1wed74d37k47c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QzZGEyZWMtMWViOTRiNDEtZjMwMWYyNzUtYjQ0NjBmMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.496431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yqnac2jya1aaze8ay482g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA1Njg1N2YtZjZiMDFiNjAtNmQ2YTAzNC1hYmZiYTI3ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.509550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yqnatc7p0q9yg70r6qkns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUzMDM4NzctZjY4OTY2YjktMTY3OTM1ZjItMzU4YWFmZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.523302Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqnb773ysv1p2fpj24kpa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4ZjUwNDEtMzBhMzFkMmMtYTQ0ZWEzZjMtY2FlM2E3MjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.536757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yqnbm00m5s3c7bvhsax4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmVmYjk0ODMtYmRlZjY0YjMtMzNiM2Q2YjAtMTI2NDdjOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.549745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yqnc2dfq9khab7yh2eat0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMxMTVlZmYtMThiZGVjNjUtMTFiNDkwYzQtYWViNzQwOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.562979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jd6yqncfffakccnb3397a45h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgzZDA2NjYtNDYzNjkxZTEtYjBjYzRlMjktOTRhOWUwMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.576270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yqncw7s2hddhqh2tp22bm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1MjBiZmYtN2UyMDk2Mi1hNDNkZjEzMy1jNWJiZWNk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.589294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yqnd92d5snas2ab6mxsad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRlNTk5YWYtMzQ5MzE0MmItZjg0ODAxMzYtYWEzNjRlMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.602406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd6yqndp716q3sn39q6phgsx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmMyZTE3Y2QtNTVjZDZmNGMtNjdiZjFiMzgtNDNlODcyNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.616536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd6yqne45b017nt9qhh4jvak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU4OTI0NzctODZlYWI2YWMtYWFiMGVkY2MtODU2NmFkY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.630116Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd6yqnej970b11b8cv8bwmgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTczZjcwNjktMjY5MjM3YWQtOTNkOTY1ZmUtN2U3MDY4NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.643349Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yqnez2rs5hqc9vm1vfnw8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk3ZWViZTYtYzI1OGU2YzQtODgxNzA0ODEtNGNmN2NhMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.656903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yqnfdag3k16f5d39cwm5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1YTMwYTgtZjlmYjBlYzYtZTFjNzE0YmEtYzMzNmYwOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.670471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd6yqnfted93tqh03q9xva4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNkNjQ0YWEtZWQ0NWM4OWEtZjJjY2VkZWItNTU1Njg3YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.686626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd6yqng8a0yg58apcxps3y6j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ1MTVmODAtMWYyZTIzNzItMWYxNjA0YzYtZmUzYWFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.700393Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jd6yqngrfk82y4m88r1spvpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRkMDdiMjQtZGM2OTNmNTAtNTg2YTlkM2QtNDViNjMwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.714315Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yqnh672wk0e5wsfejcb7r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYxZDI3ZWYtOWJmN2I5YWEtMjVhMGFlMTItOGFhODk0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.727993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yqnhkd33pwgq107a4hw3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxZTAyMmUtYTI3ZjZkYmQtYmNmOWQ5ZWEtZDY4ZThjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.744815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd6yqnj2c6j3z2nxhc4nkvc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc4OGYxYTgtOGFkOWJjOGItNDFiZWZkOGEtYjk2ZDYwZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.760677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yqnjjbv2n224a3dy8tx4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmZiMWE2MjMtMzRlZGEzNTEtZDA5OGZjODYtZjljNzQyMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.776815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd6yqnk2d69rxcserbnmcmg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU1NTg5YWUtN2RlZGIxOGUtZjE4MGEzN2MtZDhlYzBlMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.791314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd6yqnkjcq4jnwb01qrkx08v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRkN2U0NmYtMTQ4OGY4ZjMtYmY5YmU3ZjYtNjc3ODk5OTI=, CurrentExecu ... 40Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jd6yqp3zbzmpw3y5pn8z9sw1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWEyMmY2NDAtNDA5ZGY4OTAtZGY2MDc3MWQtNjEwYTZkZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.323193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jd6yqp490qp6j02nf3gx3e4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZmMTM3NjQtYzFkNjlhYzQtZDIzZWZhZDYtYTVmNTUwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.333319Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jd6yqp4m37s6bdmaqbbcrtmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQxYjM3YzItZGY5ZTFkOWMtYjI3NDBmMzktZTM2NTVhMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.343471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd6yqp4y8647ct44p6aefgw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQyMjZlOTgtN2U3ZGM4M2UtYjUxMWJkOWMtZmViMzJjNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:52:02.353561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jd6yqp58b2ty5ncx4kv5hzch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5ZWU2YjItYWIyMzQwNmYtOTE2OGE0NWMtMjNjMzVmZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.365466Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jd6yqp5j6rk807m39dga21rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5ZWUzOTEtNzIyNGI4MjMtYTlmNGIyYzUtZTg5MjM2ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.377461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jd6yqp5z5bd4p3eytaj3t1dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThjZjkyOWUtZGI4N2JkNjUtZTI5ZjgwNmYtZmU1MmVjNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.388371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jd6yqp6af03cf9hfk0x8snrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ3YjdkYzMtM2FlZGFhYTMtODQwODc4NjctY2QwZWY4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.398108Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jd6yqp6ndxqceq16sybsggjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA4ZWRhNDMtZWRkMDhkZmEtNjg4NGJiZWUtOTU5OGM4ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.410043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jd6yqp6z4pv43q8ghcgts3yd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThiY2Q2MDgtMmM0MTk2NjgtNGYxYjdkZDgtYmVjNzU5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.420400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jd6yqp7b5278ccwp2k29jarj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRlYmNlMjEtZDc4OWY4YzktNzA4NTE0OTYtNjNlNGFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.429999Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jd6yqp7n3jv7t97gc5fzmzf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRmZGQ4MzAtNWE5YTVkOGMtOTgwZGIwMGItYWMxZThhNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.439940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jd6yqp7zevgj1ngpsfgk04d4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc2OWNmMTAtZDcxYmE0Yy1hZTM5NDYzMC01MWM4OWNkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.449683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jd6yqp896mcw1nswxsqsn35s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlhNjAyMDMtNDM3MzliYjQtYmFjMWM4ZTQtMjcyNDA4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.459993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jd6yqp8jax28e76zwrxgyrrr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YxMWYwMDMtM2FmYWM4ZmQtMzI5YjI2YTAtZjdlOTBlMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.470113Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jd6yqp8x08mmmgp9j04nb1w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRkY2I2MzEtOTE4OTE2ZDctZjAzNGQ3ZWMtMzYxOGM2YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.480916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jd6yqp979ae64gpy573qek32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZkM2NhNDItODU2NzhiZS1jZjEwZGQ4NS1lZGIzNWI0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.491296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jd6yqp9j2kx45f895chj8wm6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViMzI2ZmItMTljOTBiMjgtNmM3NDE0ZWYtZDdhZWU3YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.502259Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jd6yqp9w7w4bhyy60ey1595e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ4N2RjMzgtZjVlOWQxMy1hODdlZjg1NS0xOTQ3OTUzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.512625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jd6yqpa7cepfkygqspj7ngcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwZDlkYTQtZTMyMTRlZDYtYmQyOTI2OS0zMWExOTU1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.523404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jd6yqpajc6420mqfgz93d8p6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRmOGUwNGItZWZhYjI2My1kNDNmYjExMC05NDc5ODJhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.534040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jd6yqpaw77pefs9pd3hy4c62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYwZTM3M2MtNDkzYTM1ODUtZmYyZWFkOWEtNThjYjFiNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.546391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd6yqpb710jsmz37afh5ntaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRjM2U3YWYtZWM5YTcwZDItYjUyZWY4OTQtMjgxMmNmMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.557726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd6yqpbm2vsm4qfq3ckvc8mp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVkMGIwZmItZWU5NjIzNTYtMzc1NzQwODQtZWFmNWY2N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.570439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jd6yqpbz7458jq4adz3nkkgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ0NzJlNTYtNWJlMWY0OGMtZDE5NjQwOTctOGIxMjE0MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.582436Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jd6yqpcc9z2n0z4msd9cjq50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjU0Y2QyM2YtZjEyMjc3NDMtNTc4ZDcyZC0xYTY3Y2E4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.594311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jd6yqpcq7egshncz9spm2qw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM0MTIwZGMtZDM2YTNkNWMtYzA0Mzg3MTMtZmI0NWJjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.606333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jd6yqpd30gp5x4hc7301sjpx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQyMGRmODEtZmJjNzEzMWEtNTQyZjMzMWYtY2U2ZGYzNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.617917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jd6yqpdf1n4fe2m2aq98kc22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEwNjMzMjYtMmY4ZDJhZGUtNjgxNWRiNTctZWRmMDdhNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.630307Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd6yqpdv11z6x49rkeag7bpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNhYmZkODItYTExOTEyYWEtYzBlZjhiMTMtYzQ2Y2I0ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.641865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jd6yqpe71djwy936hq05vrnv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMzNzExOGItYThjY2VmMjktNDFlNWU3OTgtOWE3ZjkwNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.654292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jd6yqpek22qsdt6yyyd3m99e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY5MDg3YTEtZGYxZGFmNDQtYTY2MmE0YTQtYTg5ZjYzNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.666955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jd6yqpezac7b04sjnd6nk72q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0NmJkMDMtYmFlNTNjNzItYmRiN2YzY2EtMTc4NTliMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.680045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jd6yqpfc66sq0c34cqtqgm30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1YTg2MWYtODlhMjNjYjUtODU3MTljOTMtZWUxMmJmYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.692291Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jd6yqpfsc8nhzkj8z35m0twr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE4MmFkYmQtNDRlMDBhOGItMWRlYTAxMTQtYmYwZjQ0MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.712147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jd6yqpg88bxr2znkmq42kkv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMzMzk5ZWEtMTI3MjUwZTEtMmQ3NWM5NzgtYWMwMTY0MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2024-11-21T08:52:02.281678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652681908874605:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:02.281761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042b6/r3tmp/tmpMrPR00/pdisk_1.dat 2024-11-21T08:52:02.356593Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:02.380766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:02.380793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:02.382205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15142 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:52:02.392510Z node 1 :TX_PROXY DEBUG: actor# [1:7439652681908874681:2137] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:02.392545Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652681908875048:2382] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:02.392587Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652681908874705:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.392598Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652681908874705:2150], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:02.392658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:02.393100Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874352:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681908875053:2383] 2024-11-21T08:52:02.393126Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874355:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681908875054:2383] 2024-11-21T08:52:02.393139Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652681908874352:2051] Subscribe: subscriber# [1:7439652681908875053:2383], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.393150Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652681908874355:2054] Subscribe: subscriber# [1:7439652681908875054:2383], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.393156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874358:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681908875055:2383] 2024-11-21T08:52:02.393160Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652681908874358:2057] Subscribe: subscriber# [1:7439652681908875055:2383], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.393175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875053:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908874352:2051] 2024-11-21T08:52:02.393188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875054:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908874355:2054] 2024-11-21T08:52:02.393190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874352:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652681908875053:2383] 2024-11-21T08:52:02.393193Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875055:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908874358:2057] 2024-11-21T08:52:02.393195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874355:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439652681908875054:2383] 2024-11-21T08:52:02.393199Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874358:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652681908875055:2383] 2024-11-21T08:52:02.393200Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908875050:2383] 2024-11-21T08:52:02.393206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908875051:2383] 2024-11-21T08:52:02.393218Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652681908875049:2383][/dc-1] Set up state: owner# [1:7439652681908874705:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.393260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681908875052:2383] 2024-11-21T08:52:02.393275Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652681908875049:2383][/dc-1] Path was already updated: owner# [1:7439652681908874705:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.393283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875053:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875050:2383], cookie# 1 2024-11-21T08:52:02.393289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875054:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875051:2383], cookie# 1 2024-11-21T08:52:02.393293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875055:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875052:2383], cookie# 1 2024-11-21T08:52:02.393305Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874352:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875053:2383], cookie# 1 2024-11-21T08:52:02.393310Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874355:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875054:2383], cookie# 1 2024-11-21T08:52:02.393315Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874358:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681908875055:2383], cookie# 1 2024-11-21T08:52:02.393321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875053:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908874352:2051], cookie# 1 2024-11-21T08:52:02.393323Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439652681908875054:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908874355:2054], cookie# 1 2024-11-21T08:52:02.393332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875055:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908874358:2057], cookie# 1 2024-11-21T08:52:02.393353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908875050:2383], cookie# 1 2024-11-21T08:52:02.393367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:02.393372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908875051:2383], cookie# 1 2024-11-21T08:52:02.393379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:02.393401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681908875052:2383], cookie# 1 2024-11-21T08:52:02.393403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875049:2383][/dc-1] Unexpected sync response: sender# [1:7439652681908875052:2383], cookie# 1 2024-11-21T08:52:02.401384Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:02.401465Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.636992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875652:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908874352:2051] 2024-11-21T08:52:02.636995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875653:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908874355:2054] 2024-11-21T08:52:02.636998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681908875654:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908874358:2057] 2024-11-21T08:52:02.637001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875642:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908875649:2827] 2024-11-21T08:52:02.637004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875642:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908875650:2827] 2024-11-21T08:52:02.637009Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652681908875642:2827][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7439652681908874705:2150], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.637011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681908875642:2827][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439652681908875651:2827] 2024-11-21T08:52:02.637016Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652681908875642:2827][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7439652681908874705:2150], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.637020Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874352:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439652681908875646:2826] 2024-11-21T08:52:02.637023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874352:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439652681908875652:2827] 2024-11-21T08:52:02.637026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874355:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439652681908875647:2826] 2024-11-21T08:52:02.637028Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874355:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 
}: sender# [1:7439652681908875653:2827] 2024-11-21T08:52:02.637030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874358:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439652681908875648:2826] 2024-11-21T08:52:02.637031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652681908874358:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439652681908875654:2827] 2024-11-21T08:52:02.637038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T08:52:02.637049Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439652681908875641:2826] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:02.637065Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439652681908874705:2150], cacheItem# { Subscriber: { Subscriber: [1:7439652681908875641:2826] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:02.637071Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T08:52:02.637075Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652681908874705:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439652681908875642:2827] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:02.637081Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439652681908874705:2150], cacheItem# { Subscriber: { Subscriber: [1:7439652681908875642:2827] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:02.637097Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439652681908875655:2828], recipient# 
[1:7439652681908875640:2290], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.804146Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652680789493372:2211], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.804190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652680789493372:2211], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.804200Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652680789493578:2222], recipient# [3:7439652680789493576:2510], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.804226Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652680789493577:2221], recipient# [3:7439652680789493572:2508], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.882429Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652680789493372:2211], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.882494Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652680789493594:2223], recipient# [3:7439652680789493591:2517], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> KqpScanSpilling::SelfJoinQueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2024-11-21T08:52:01.681652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652678681387657:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004322/r3tmp/tmpsSW6RO/pdisk_1.dat 2024-11-21T08:52:01.725643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:01.742728Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32116 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:52:01.771625Z node 1 :TX_PROXY DEBUG: actor# [1:7439652678681387504:2131] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:01.771651Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652678681388131:2421] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:01.771681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652678681387735:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.771690Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652678681387735:2148], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:01.771727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:01.772163Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387392:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652678681388136:2422] 2024-11-21T08:52:01.772179Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652678681387392:2051] Subscribe: subscriber# [1:7439652678681388136:2422], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.772193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387395:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652678681388137:2422] 2024-11-21T08:52:01.772197Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652678681387395:2054] Subscribe: subscriber# [1:7439652678681388137:2422], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.772202Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387398:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652678681388138:2422] 2024-11-21T08:52:01.772224Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652678681387398:2057] Subscribe: subscriber# [1:7439652678681388138:2422], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.772234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388136:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681387392:2051] 2024-11-21T08:52:01.772239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388137:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681387395:2054] 2024-11-21T08:52:01.772243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388138:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681387398:2057] 2024-11-21T08:52:01.772250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681388133:2422] 2024-11-21T08:52:01.772256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681388134:2422] 2024-11-21T08:52:01.772265Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652678681388132:2422][/dc-1] Set up state: owner# [1:7439652678681387735:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.772265Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387392:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652678681388136:2422] 2024-11-21T08:52:01.772273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387398:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652678681388138:2422] 2024-11-21T08:52:01.772277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387395:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652678681388137:2422] 2024-11-21T08:52:01.772301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652678681388135:2422] 2024-11-21T08:52:01.772307Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652678681388132:2422][/dc-1] Path was already updated: owner# [1:7439652678681387735:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.772314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388136:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388133:2422], cookie# 1 2024-11-21T08:52:01.772317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388137:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388134:2422], cookie# 1 2024-11-21T08:52:01.772320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388138:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388135:2422], cookie# 1 2024-11-21T08:52:01.772328Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387392:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388136:2422], cookie# 1 2024-11-21T08:52:01.772339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387395:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388137:2422], cookie# 1 2024-11-21T08:52:01.772343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387398:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652678681388138:2422], cookie# 1 2024-11-21T08:52:01.772349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388136:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681387392:2051], cookie# 1 2024-11-21T08:52:01.772353Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388137:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681387395:2054], cookie# 1 2024-11-21T08:52:01.772355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652678681388138:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681387398:2057], cookie# 1 2024-11-21T08:52:01.772360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681388133:2422], cookie# 1 2024-11-21T08:52:01.772370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:01.772377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681388134:2422], cookie# 1 2024-11-21T08:52:01.772385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:01.772389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652678681388135:2422], cookie# 1 2024-11-21T08:52:01.772392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652678681388132:2422][/dc-1] Unexpected sync response: sender# [1:7439652678681388135:2422], cookie# 1 2024-11-21T08:52:01.781670Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652678681387735:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:01.781746Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652678681387735:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... athId: 3] Strong: 1 } 2024-11-21T08:52:02.860102Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652678681387735:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/Solomon PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439652682976355937:2799] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 7 TableKind: 0 Created: 1 CreateStep: 1732179122900 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7439652682976355937:2799] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 7 TableKind: 0 Created: 1 CreateStep: 1732179122900 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2024-11-21T08:52:02.860105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:52:02.860110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:52:02.860111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T08:52:02.860112Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:02.860113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2024-11-21T08:52:02.860119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2024-11-21T08:52:02.860122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7439652682976355957:2286] 2024-11-21T08:52:02.860155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387392:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7439652680092716328:2192] 2024-11-21T08:52:02.860169Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387395:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { 
Version: 18446744073709551615 }: sender# [3:7439652680092716329:2192] 2024-11-21T08:52:02.860174Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652678681387398:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7439652680092716330:2192] 2024-11-21T08:52:02.860553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T08:52:02.860593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2024-11-21T08:52:02.860600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2024-11-21T08:52:02.860613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2024-11-21T08:52:02.861779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T08:52:02.861843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T08:52:02.861896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2024-11-21T08:52:02.861920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2024-11-21T08:52:02.861939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T08:52:02.861958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T08:52:02.861977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:02.861996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T08:52:02.862015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free 
tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T08:52:02.862064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T08:52:02.862100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T08:52:02.862135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2024-11-21T08:52:02.862170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2024-11-21T08:52:02.862187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T08:52:02.862206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T08:52:02.862213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T08:52:02.862235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T08:52:02.863037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T08:52:02.863052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2024-11-21T08:52:02.863066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2024-11-21T08:52:02.863075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2024-11-21T08:52:02.863080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T08:52:02.863081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T08:52:02.863088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T08:52:02.863096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 
2024-11-21T08:52:02.863102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2024-11-21T08:52:02.863111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2024-11-21T08:52:02.863117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T08:52:02.863119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T08:52:02.863125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2024-11-21T08:52:02.863132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2024-11-21T08:52:02.863140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T08:52:02.863143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T08:52:02.863159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2024-11-21T08:52:02.863176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:52:02.863188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T08:52:02.863193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T08:52:02.863214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:52:02.864048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpRm::NodesMembershipByExchanger >> KqpRm::SingleTask >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> KqpScanSpilling::SelfJoin >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> IndexBuildTestReboots::BaseCaseWithDataColumns >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2024-11-21T08:51:53.334915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:51:53.335534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:51:53.335578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00405d/r3tmp/tmp5yYGuO/pdisk_1.dat 2024-11-21T08:51:53.457418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.484468Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:53.529809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:53.529851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:53.540520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:51:53.647059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:51:53.662988Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:51:53.663218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:51:53.663312Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:51:53.663366Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:53.673236Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:51:53.673442Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:53.673470Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:51:53.673634Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:51:53.673645Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:51:53.673652Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:51:53.673702Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:51:53.677273Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:51:53.677365Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:51:53.677415Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:51:53.677421Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:51:53.677425Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:51:53.677430Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.677598Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.677608Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.677756Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:51:53.677777Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:51:53.677792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.677797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.677803Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:51:53.677809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.677816Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:51:53.677823Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.677828Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:51:53.677831Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:51:53.677836Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:51:53.677841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:51:53.677865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:51:53.677869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:51:53.677893Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:51:53.677942Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:51:53.677951Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:51:53.677970Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:51:53.677977Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:51:53.677981Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:51:53.677985Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:51:53.677989Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.678038Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:51:53.678042Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:51:53.678045Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:51:53.678048Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.678060Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:51:53.678063Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:51:53.678066Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:51:53.678069Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.678074Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:51:53.678331Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:51:53.678341Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:51:53.689414Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:51:53.689447Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:51:53.689455Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:51:53.689470Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:51:53.689487Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:51:53.892931Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.892959Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:51:53.892968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:51:53.892989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:51:53.892994Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:51:53.893020Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:51:53.893030Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:51:53.893035Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:51:53.893041Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:51:53.893854Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:51:53.893878Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:51:53.894018Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.894025Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:51:53.894032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:51:53.894040Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:51:53.894044Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:51:53.894053Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... xecuting on unit WaitForPlan 2024-11-21T08:52:03.646617Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2024-11-21T08:52:03.646670Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 515 RawX2: 30064773525 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:52:03.646678Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:03.646727Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:770:2631], Recipient [7:770:2631]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:03.646732Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:03.646739Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:03.646747Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:03.646752Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:03.646760Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2024-11-21T08:52:03.646765Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2024-11-21T08:52:03.646772Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.646776Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2024-11-21T08:52:03.646780Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2024-11-21T08:52:03.646784Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2024-11-21T08:52:03.646881Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 
PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2024-11-21T08:52:03.646905Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2024-11-21T08:52:03.646911Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2024-11-21T08:52:03.646927Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2024-11-21T08:52:03.646931Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.646938Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T08:52:03.646942Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:03.646945Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:52:03.646966Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2024-11-21T08:52:03.646970Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2024-11-21T08:52:03.646974Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2024-11-21T08:52:03.646980Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.646983Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:03.646987Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T08:52:03.646992Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T08:52:03.647004Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.647007Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T08:52:03.647012Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T08:52:03.647018Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T08:52:03.647024Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.647028Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T08:52:03.647032Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T08:52:03.647036Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T08:52:03.647041Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.647045Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T08:52:03.647049Z node 7 :TX_DATASHARD 
TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T08:52:03.647052Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T08:52:03.647056Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.647060Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T08:52:03.647063Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T08:52:03.647066Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2024-11-21T08:52:03.647072Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2024-11-21T08:52:03.647200Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2024-11-21T08:52:03.647212Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2024-11-21T08:52:03.647219Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2024-11-21T08:52:03.647223Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:52:03.647226Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:03.647231Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:03.647234Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:03.647328Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:03.647332Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2024-11-21T08:52:03.647336Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2024-11-21T08:52:03.647370Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2024-11-21T08:52:03.647378Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2024-11-21T08:52:03.647382Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2024-11-21T08:52:03.647391Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2024-11-21T08:52:03.647421Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2024-11-21T08:52:03.647432Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2024-11-21T08:52:03.647446Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:52:03.647458Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:03.647462Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T08:52:03.647466Z node 7 :TX_DATASHARD TRACE: 
Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T08:52:03.647471Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:03.647519Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2024-11-21T08:52:03.647523Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T08:52:03.647527Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:03.647531Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:52:03.647537Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T08:52:03.647541Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:03.647545Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2024-11-21T08:52:03.647549Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:03.647552Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:03.647556Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:03.647559Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:03.647658Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 1234567890011} 2024-11-21T08:52:03.647665Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T08:52:03.647768Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:03.647774Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:03.647787Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:763:2625] 2024-11-21T08:52:03.647796Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2024-11-21T08:52:00.519782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652675171384927:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.519870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:00.523465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652675501154553:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.523674Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004351/r3tmp/tmpzcQk4P/pdisk_1.dat 2024-11-21T08:52:00.585960Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21740 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:52:00.601649Z node 1 :TX_PROXY DEBUG: actor# [1:7439652675171385003:2138] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:00.601669Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652675171385375:2390] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:00.601710Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652675171385100:2191], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:00.601723Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652675171385100:2191], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:00.601781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:00.602179Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384673:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652675171385380:2391] 2024-11-21T08:52:00.602201Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652675171384673:2052] Subscribe: subscriber# [1:7439652675171385380:2391], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.602216Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384679:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652675171385382:2391] 2024-11-21T08:52:00.602219Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652675171384679:2058] Subscribe: subscriber# [1:7439652675171385382:2391], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.602229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385380:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171384673:2052] 2024-11-21T08:52:00.602234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385382:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171384679:2058] 2024-11-21T08:52:00.602239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171385377:2391] 2024-11-21T08:52:00.602246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171385379:2391] 2024-11-21T08:52:00.602257Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652675171385376:2391][/dc-1] Set up state: owner# 
[1:7439652675171385100:2191], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.602295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385380:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385377:2391], cookie# 1 2024-11-21T08:52:00.602302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385381:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385378:2391], cookie# 1 2024-11-21T08:52:00.602304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385382:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385379:2391], cookie# 1 2024-11-21T08:52:00.602309Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384673:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652675171385380:2391] 2024-11-21T08:52:00.602312Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384673:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385380:2391], cookie# 1 2024-11-21T08:52:00.602316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384679:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652675171385382:2391] 2024-11-21T08:52:00.602319Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384679:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385382:2391], cookie# 1 2024-11-21T08:52:00.602344Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384676:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652675171385381:2391] 2024-11-21T08:52:00.602365Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652675171384676:2055] Subscribe: subscriber# [1:7439652675171385381:2391], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:00.602377Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384676:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652675171385381:2391], cookie# 1 2024-11-21T08:52:00.602388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385380:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171384673:2052], cookie# 1 2024-11-21T08:52:00.602390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385382:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171384679:2058], cookie# 1 2024-11-21T08:52:00.602395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385381:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171384676:2055] 2024-11-21T08:52:00.602399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652675171385381:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171384676:2055], cookie# 1 2024-11-21T08:52:00.602404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171385377:2391], cookie# 1 
2024-11-21T08:52:00.602409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:00.602412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171385379:2391], cookie# 1 2024-11-21T08:52:00.602415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:00.602421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652675171385378:2391] 2024-11-21T08:52:00.602442Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652675171385376:2391][/dc-1] Path was already updated: owner# [1:7439652675171385100:2191], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:00.602445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652675171385378:2391], cookie# 1 2024-11-21T08:52:00.602450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652675171385376:2391][/dc-1] Unexpected sync response: sender# [1:7439652675171385378:2391], cookie# 1 2024-11-21T08:52:00.602455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652675171384676:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652675171385381:2391] 2024-11-21T08:52:00.610368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652675171385100:2191], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:00.610448Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# 
[1:7439652675171385100:2191], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Domai ... N: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439652684458378756:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:03.470179Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652686137798975:2180], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.470240Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799042:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.470249Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799043:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.470288Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652686137799057:2215], recipient# [3:7439652686137799041:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.470395Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652686137799041:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:03.525823Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652675501154789:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.525874Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652675501154789:2103], cacheItem# { Subscriber: { Subscriber: [2:7439652679796122137:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.525898Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652688386056744:2119], recipient# [2:7439652688386056743:2281], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.564237Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652686137798975:2180], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.564268Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799042:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.564275Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799043:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.564300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652686137799058:2216], recipient# [3:7439652686137799041:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.564574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652686137799041:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:03.616949Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652686137798975:2180], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.616988Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799042:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.616995Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652686137798975:2180], cacheItem# { Subscriber: { Subscriber: [3:7439652686137799043:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.617027Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652686137799059:2217], recipient# [3:7439652686137799041:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.617090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439652686137799041:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> KqpRm::NotEnoughExecutionUnits ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2024-11-21T08:52:00.890130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652672316612980:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:00.890202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004331/r3tmp/tmpjHaFyF/pdisk_1.dat 2024-11-21T08:52:00.984263Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:01.040608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:01.040639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server 2024-11-21T08:52:01.046486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected localhost:20503 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:52:01.060446Z node 1 :TX_PROXY DEBUG: actor# [1:7439652672316613049:2132] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:01.060472Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652676611580779:2425] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:01.060512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652672316613153:2183], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:01.060523Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652672316613153:2183], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:01.060583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:01.061023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612737:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652676611580784:2426] 2024-11-21T08:52:01.061045Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672316612737:2051] Subscribe: subscriber# [1:7439652676611580784:2426], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.061061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612740:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652676611580785:2426] 2024-11-21T08:52:01.061065Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672316612740:2054] Subscribe: subscriber# [1:7439652676611580785:2426], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.061071Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612743:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652676611580786:2426] 2024-11-21T08:52:01.061074Z node 1 
:SCHEME_BOARD_REPLICA INFO: [1:7439652672316612743:2057] Subscribe: subscriber# [1:7439652676611580786:2426], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:01.061089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580784:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672316612737:2051] 2024-11-21T08:52:01.061094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580785:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672316612740:2054] 2024-11-21T08:52:01.061098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580786:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652672316612743:2057] 2024-11-21T08:52:01.061108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652676611580781:2426] 2024-11-21T08:52:01.061115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652676611580782:2426] 2024-11-21T08:52:01.061127Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652676611580780:2426][/dc-1] Set up state: owner# [1:7439652672316613153:2183], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.061161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652676611580783:2426] 2024-11-21T08:52:01.061175Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652676611580780:2426][/dc-1] Path was already updated: owner# [1:7439652672316613153:2183], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:01.061183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580784:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580781:2426], cookie# 1 2024-11-21T08:52:01.061185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580785:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580782:2426], cookie# 1 2024-11-21T08:52:01.061188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580786:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580783:2426], cookie# 1 2024-11-21T08:52:01.061192Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612737:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { 
Version: 2 }: sender# [1:7439652676611580784:2426] 2024-11-21T08:52:01.061195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612737:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580784:2426], cookie# 1 2024-11-21T08:52:01.061198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612740:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652676611580785:2426] 2024-11-21T08:52:01.061201Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612740:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580785:2426], cookie# 1 2024-11-21T08:52:01.061204Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612743:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652676611580786:2426] 2024-11-21T08:52:01.061206Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612743:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652676611580786:2426], cookie# 1 2024-11-21T08:52:01.061586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580784:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672316612737:2051], cookie# 1 2024-11-21T08:52:01.061592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580785:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672316612740:2054], cookie# 1 2024-11-21T08:52:01.061595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652676611580786:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652672316612743:2057], cookie# 1 2024-11-21T08:52:01.061601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652676611580781:2426], cookie# 1 2024-11-21T08:52:01.061608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:01.061613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652676611580782:2426], cookie# 1 2024-11-21T08:52:01.061617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:01.061622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652676611580783:2426], cookie# 1 2024-11-21T08:52:01.061625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652676611580780:2426][/dc-1] Unexpected sync response: sender# [1:7439652676611580783:2426], cookie# 1 2024-11-21T08:52:01.073654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652672316613153:2183], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:01.073757Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652672316613153:2183], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... ], cookie# 2 2024-11-21T08:52:03.445829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7439652685201515952:2835], cookie# 2 2024-11-21T08:52:03.445833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:03.445837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7439652685201515953:2835], cookie# 2 2024-11-21T08:52:03.445840Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Sync is done: cookie# 2, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:03.445843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7439652685201515954:2835], cookie# 2 2024-11-21T08:52:03.445845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652685201515951:2835][/dc-1/USER_0/SimpleTable] Unexpected sync response: sender# [1:7439652685201515954:2835], cookie# 2 2024-11-21T08:52:03.445851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652672316613153:2183], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2024-11-21T08:52:03.445860Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652672316613153:2183], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: 
[1:7439652685201515951:2835] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179123450 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T08:52:03.445870Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439652672316613153:2183], cacheItem# { Subscriber: { Subscriber: [1:7439652685201515951:2835] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179123450 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2024-11-21T08:52:03.445894Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439652685201515962:2840], recipient# [1:7439652685201515961:2839], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:52:03.445897Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652685201515961:2839] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:52:03.445906Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652685201515961:2839] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2024-11-21T08:52:03.446025Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652685201515961:2839] Handle TEvDescribeSchemeResult Forward to# [1:7439652685201515960:2838] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732179123450 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T08:52:03.454187Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612743:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652680595169782:2102] 2024-11-21T08:52:03.454187Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612740:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652680595169781:2102] 2024-11-21T08:52:03.454197Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672316612740:2054] Unsubscribe: subscriber# [3:7439652680595169781:2102], path# /dc-1/USER_0 2024-11-21T08:52:03.454202Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672316612743:2057] Unsubscribe: subscriber# [3:7439652680595169782:2102], path# /dc-1/USER_0 2024-11-21T08:52:03.454215Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652672316612737:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652680595169780:2102] 2024-11-21T08:52:03.454219Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652672316612737:2051] Unsubscribe: subscriber# [3:7439652680595169780:2102], path# /dc-1/USER_0 2024-11-21T08:52:03.454258Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:52:03.454467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:52:03.655293Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652680595169766:2099], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:03.655343Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652680595169766:2099], cacheItem# { Subscriber: { Subscriber: [3:7439652680595169935:2190] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:03.655374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652684890137557:2360], recipient# [3:7439652684890137556:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> KqpRm::SingleTask [GOOD] >> KqpRm::SingleSnapshotByExchanger >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> KqpRm::SnapshotSharingByExchanger >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpRm::Reduce ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 
2024-11-21T08:52:04.152465Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:04.183766Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:04.183942Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:04.185224Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:04.197598Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:04.199760Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:04.200320Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:04.200652Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:04.200799Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.200805Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.200832Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:04.202966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:04.203018Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:04.203081Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:04.203143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.203160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.203265Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.227200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.227258Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.248715Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.248776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.248796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.248809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.248839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.248846Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.248852Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.248860Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.260518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.260579Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:04.260793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:04.260803Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:04.262386Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:04.262665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:04.263024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:04.263112Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat 2024-11-21T08:52:04.263119Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat 2024-11-21T08:52:04.263316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:04.263408Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:04.263431Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.263448Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:04.263486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId 
{ GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.263529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.268578Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:04.268712Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.286906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:04.287233Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:04.289597Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:04.289760Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:04.289934Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001588/r3tmp/tmp19xmwR/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1686167871162646420 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:04.290158Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.290227Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.290232Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.290258Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.290265Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.290300Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.290320Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.290326Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.290331Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.290340Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.290407Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.290425Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.290429Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.290446Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:04.290500Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.290505Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.290508Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.290517Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:04.291229Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:04.291237Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:04.291264Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:04.291268Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:04.291479Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:04.291501Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.291509Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.291513Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.291621Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:04.291662Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.291667Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.291670Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.310149Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.310173Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:04.310177Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.310182Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> KqpScanSpilling::SelfJoinQueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2024-11-21T08:52:00.944935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:00.945625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:00.945670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044f8/r3tmp/tmph2vYKt/pdisk_1.dat 2024-11-21T08:52:01.082696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:01.104502Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:01.149792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:01.149828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:01.161896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:01.278775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:01.534056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:705:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.534089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:714:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.534100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:01.535008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:01.738974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:719:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:01.808790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqnbx92gmfmver7q8qm48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzlmMjJlMWQtMjhmMTQwZTUtNGRjYzgzNGEtNzQ5ZTIyNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.831422Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yqnmx64sznpswqe9rsk31, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU3ODQyMTUtMzZjNzI2ZTAtYmZlZDllZWYtYmMxODkwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.852942Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yqnnkdmp0xvgmeskbs7gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3YTkyNzgtZDgyMDFjMmMtODBkOWU3N2UtZmQ1MGVmNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.886863Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yqnpnfc37jjegnvtyneq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFiNmZiMzktYzRiNmE4ZDktOGRjZDc2MGUtMTU0Yzg3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.909265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yqnqdcb344s9c15hx6g7k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVlMWY1MDUtNWNlMWNjY2QtYjVjNjQ4YTctNmQxOGQ5MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.932684Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yqnr35av330wnx73jpe6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ViMTYxNDgtNDY4N2EzYjItZmYyZmQxZTMtMjM4YjVkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.956914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yqnrr6ee0dmcdxar2q6ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkNmMxMmQtYjlkM2YxMDQtNjQ3ZDg5ODMtODA3NTE1YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:01.979932Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqnskc3ynv46tey916jfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRkZTgzNWYtYjk4NGI4OGItZjdkZmI3NjYtMmZiNTU0NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.006318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yqnt79tfkdrjk3cqd3ryd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ2MTU2MDMtNDk2ZDhmNjEtNGQ5M2U5NGEtMmI5OGM2ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.035655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yqnv23jzx89rh0aqnbpw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlZDViNjUtZDg3YjllYjEtNGM3N2Y1MmMtMjNjNDcxMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.069121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jd6yqnw5bxw3m2tpzy58jywc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc1MjI3OTgtZTY4MDZlMjgtNGNhOTM2ZjYtYzZjNzJlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.091772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yqnx1c6zw5n4rw18n2gfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTkyYjc5MmItYWFiZDhiNTMtMTBmOTBlYy05NDI4MzE3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.116416Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yqnxs7a9jpdsnwshc66ks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNmMDkxODYtM2RjYTNjNjItNjhlOTdjOGYtZjJkOTI1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.138745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd6yqnyga4mbzrbhmnmb75yf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk1NTdlYjMtYWJjMjQyNmMtOTUxMTdlM2ItNGUxNjgzMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.172347Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd6yqnz978b4t45cbc3fvzwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkxODU5Y2MtNzg4MDY2OTgtYWI0OTE4MjctM2M4NWRiZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.194890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd6yqp08bm6e0q6ae4smcz7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJlYzRjYzUtZjA1ZjNjNDUtMzgwM2ZkNjgtYzdjMDYzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.234898Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yqp157r1xtdkc9en7jht4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk2MjhmNjctOTg0YjJmMzUtZTQ5NjE0ZGYtNzQ2YzU0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.256772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yqp27dy01a34amw1c18g1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY0NjRkZGEtYjQzMDE1NWQtNTIyYjgyYmMtYjEwZDNjYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.279390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd6yqp2w2khx5drhqztsjqpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM0YmQ1OGQtODQ3M2M0Y2QtNTZmYWUxNjMtYmJmMjg4ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.302281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd6yqp3ke23m718wy2yrhmr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdmYjc5M2ItZjgwOWM5NzQtMWRlNGRmMi0zZWRjNjI5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.324149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jd6yqp4a9jazmadms6e7mky8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE5NGJlNmUtNmQzZTJkZTUtOWVlMGJkNWEtNDQwN2I3M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.347677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yqp514sk2jf8ct61szx6y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYzY2ZhM2QtYTNiZTA3M2EtZGEyODQxZjItYzdkN2NlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.374175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yqp5r14hgy7yy1pfsxpee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQxOWJkNzMtNzk3MmY4NzQtNWUzODg3ZTYtODRlNjE1ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.396825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd6yqp6j8c35xd86m55rr6wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdmNDE4YjgtZTBjNmE0MGMtODRjODRiNTAtNjdlMmU2YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.427195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yqp7b3v0v0mq82661bwns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM2ZDU4YS0yMjQ5ZjMwYy0yMmNlMWU5Ni03NTU5MDRmNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.449869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd6yqp89ddk6y92v7caz08n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JlZjE4MjgtNWU4YzVjM2QtOWE1MmZhZGUtNWY4ZDQzMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:02.472492Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd6yqp8x2jbc66phtetgz5pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM1ZTJmN2QtYzE3Njk3OTItZWUwZjNjZjMtNTIyZDRlZGE=, CurrentE ... 3MzU2NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.335278Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jd6yqq3v3vqx6xks21bfn3ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk4N2IyZWYtZTMzNjFjMDItYWVlZDdkMDctNTI2ZTI2NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.359476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jd6yqq4kakhx9nk2vrvyhrta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA4MWEzZWEtOWNmOGUzMDgtYTNhY2Q2ZjUtOTczYzI5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.383793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd6yqq5bcdp3r4p4y9krcndc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNmNDRlNzYtZDA5ZjUxZTctY2M3Zjk4M2UtMzgxOWZlODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.408365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jd6yqq64c5f6m1b2x2gqyp6n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkZGJlMDMtZGMxMTZmY2MtMTNhODFkMGYtMjhjODg5NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.433816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jd6yqq6w5rd5fczdh3rvrsca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgyN2NiMWQtNGNhZGEwMmQtZTI3Y2NlOGYtYmQzODIxYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.462116Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jd6yqq7s85ppyzqjqeh24w06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTNiMTI2NjItZWNlNWM3YmItYTU3MjUzYmMtYjNhODQ5ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.486198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jd6yqq8j6cv32kwpy1vr4v7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWIzNzFkZjMtYTFmODc5N2QtOTBmODFiYzYtZDQ2NGUyZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.510015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jd6yqq9a6pj96dwhx8wwag6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM1MGM4YzktYWVlYWJhN2EtNTRhZmY3MDktYzdlMTNlYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.532233Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jd6yqqa2cxdnsh87d4g7kha7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIyOGEyYTktZDJkOTVjNTQtZTJkYTgxYWYtZmYyYjI5OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.553122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jd6yqqar2v42j7t1s6r0s00y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjAyZGViM2QtYjJmYmYwYzktNGIzMWY5YmItOTcyMWJlZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.574288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jd6yqqbd8n5h24fxpyt2gett, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBlM2YyNzUtNzdmMzhjMDUtYTk0YmQ1NjQtM2VjMzk5Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.595925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jd6yqqc21xptx305xr4e7dcx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQ4MmQxODItNGQ1ODFjMmYtYWJiZjY0NGEtNDAxMjJkZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.616482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jd6yqqcq4rgptd46vcwg2xea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJlYWJkMjYtYWNiMjljNmItNTJiMzcwY2ItMzczYTc2Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.637064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jd6yqqdc6jtrt7yydvg4bby5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk0MTgwYjgtNTAzZTA5MTktYjhlYzBiNDYtMmNkMGYxMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.656115Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jd6yqqe0azh21184s12h5t9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY4MzZlNWUtZmEyYzhkOTgtYTc4MTkxOTctYzU2YzYwZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.678832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jd6yqqem1dx2z5bvqntgh069, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU4NTA1OTQtMTQwYzJiZTAtOGMyOGVmMjItODkyNGNiNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.700871Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jd6yqqfaet3jxk8jba8xfv9w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZjYzg2ZWYtMzE4MTQ2OTItZWQxNGViMmYtY2JjNGVhOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.721192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jd6yqqg01p37gpksdnzy2ps4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzllZTY2MDUtY2VkZWY0NDktZmVmNWMyMWItNDAwN2YzOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.742686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jd6yqqgn16ntrw41stkwjdwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlmMjJmMS03YzVmZDQzLTNlMDM4ZjI3LTY2ZjE2Y2I1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.765136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jd6yqqhad59bg8wn8bp9tj0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzczZGU5MzItODBhNGVkYzEtMTE0ODY2MjgtZTFkNjJjZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.787893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jd6yqqj1cmr07rnzvjpxmfrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA5OTI4N2ItZDU3N2Q5YjAtYzVhNjdlMDMtZjZkYTFkOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.810162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd6yqqjqc44jqna82vejb7yg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThiNTlkNWUtOWZmOGFkM2MtYTc2NmQ2YTAtMzA3Yjc0Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.833051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd6yqqkea1xkx2tqxw6c47wx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZjYWUxYTgtNTE3NWQ1NTktZmU4MzNlNzYtMmIzNTM1ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.854755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jd6yqqm49226a7wz5wfefdaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlMDMyOTktMzQ2NDA4NmQtODBjMWQ5Ni01ZjJiNjIxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.877258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jd6yqqmtcbq66g9eg6fm9yp6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA2YjVhYzgtOTJhNmY2NzgtNzBlZGNmNjQtNTMzODZmNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.901094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jd6yqqnha6d713f29x48qqam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYzZTM4OTctMzNjMTEwZGEtZjBlNjQyMGMtNTRmYmQwYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.926793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jd6yqqpa2rm0jrvmdzy43v4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2UxZGNiMDgtNjU2ZmE1NGYtZWU3NmQyZTctMTFkMTI2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.951140Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jd6yqqq333mamjzq9vb0nmg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0ZmJlOS04ODdkMDI1Ni0zYjQyOTI4Ny05M2QwYjU1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.976090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd6yqqqv7m9z3f3xx21z7smf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMzYTBhMGQtNjkzZjdhNWYtYmI2YWMxN2MtY2QzNTZiNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.000977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jd6yqqrm5beekrbp301bkygc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY3Y2JlNzktMzc0ZjdjMDEtMjRlNGUwOTUtYTgxYmMxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.031905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jd6yqqsg0g9whhjdhmrdjgvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2UxMzJkMzgtMWM1OTFjMGUtYmZiMWU1YTYtOWZlMjAyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.059232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jd6yqqtcap67wwe48gdefqrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY0Y2ZjNjAtZTY3ODgyMGMtNGM0Y2ZhMWItMzRjMTkxNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.083662Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jd6yqqv984jfgd8bckmpss1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBiY2VjYTAtYjY0YzljN2EtNzEwZWQzMDQtNjI4NjFhODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.110225Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jd6yqqw02kt7aacnytvwx87h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkMjBmZTYtODIzYTBkMTYtNmU1NDFmZWMtMzMzMzE4YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.125577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.361892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jd6yqr3m6cab0zwbhd3xmq56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg3MGY3ZGEtZTg5YWNiN2YtMzQyNGFhYjQtMzIzZWQzZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2024-11-21T08:52:02.024010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652681571941702:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:02.024035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042ba/r3tmp/tmpIg9csh/pdisk_1.dat 2024-11-21T08:52:02.092135Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:02.124474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:02.124506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:2801 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:52:02.132314Z node 1 :TX_PROXY DEBUG: actor# [1:7439652681571941915:2135] Handle TEvNavigate describe path dc-1 2024-11-21T08:52:02.132333Z node 1 :TX_PROXY DEBUG: Actor# [1:7439652681571942334:2420] HANDLE EvNavigateScheme dc-1 2024-11-21T08:52:02.132379Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652681571941939:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:02.132390Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439652681571941939:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:52:02.132458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:52:02.132793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974299:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681571942340:2421] 2024-11-21T08:52:02.132809Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974299:2054] Subscribe: subscriber# [1:7439652681571942340:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.132820Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974302:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681571942341:2421] 2024-11-21T08:52:02.132824Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974302:2057] Subscribe: subscriber# [1:7439652681571942341:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.132868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942340:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677276974299:2054] 2024-11-21T08:52:02.132872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942341:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677276974302:2057] 2024-11-21T08:52:02.132877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681571942337:2421] 2024-11-21T08:52:02.132883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681571942338:2421] 2024-11-21T08:52:02.132893Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439652681571942335:2421][/dc-1] Set up state: owner# [1:7439652681571941939:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.132932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942339:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7439652681571942336:2421], cookie# 1 2024-11-21T08:52:02.132935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942340:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681571942337:2421], cookie# 1 2024-11-21T08:52:02.132937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942341:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681571942338:2421], cookie# 1 2024-11-21T08:52:02.132953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974299:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652681571942340:2421] 2024-11-21T08:52:02.132956Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974299:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681571942340:2421], cookie# 1 2024-11-21T08:52:02.132959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974302:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652681571942341:2421] 2024-11-21T08:52:02.132961Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974302:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681571942341:2421], cookie# 1 2024-11-21T08:52:02.133835Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974296:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439652681571942339:2421] 2024-11-21T08:52:02.133848Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974296:2051] Subscribe: subscriber# [1:7439652681571942339:2421], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:52:02.133861Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974296:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439652681571942339:2421], cookie# 1 2024-11-21T08:52:02.133890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942340:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677276974299:2054], cookie# 1 2024-11-21T08:52:02.133893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942341:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677276974302:2057], cookie# 1 2024-11-21T08:52:02.133899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942339:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652677276974296:2051] 2024-11-21T08:52:02.133902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439652681571942339:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652677276974296:2051], cookie# 1 2024-11-21T08:52:02.133907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681571942337:2421], cookie# 1 2024-11-21T08:52:02.133914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:52:02.133918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681571942338:2421], cookie# 1 
2024-11-21T08:52:02.133921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:52:02.133927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439652681571942336:2421] 2024-11-21T08:52:02.133935Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439652681571942335:2421][/dc-1] Path was already updated: owner# [1:7439652681571941939:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:02.133938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439652681571942336:2421], cookie# 1 2024-11-21T08:52:02.133940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439652681571942335:2421][/dc-1] Unexpected sync response: sender# [1:7439652681571942336:2421], cookie# 1 2024-11-21T08:52:02.133985Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974296:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439652681571942339:2421] 2024-11-21T08:52:02.134208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:02.140861Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439652681571941939:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:52:02.140944Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439652681571941939:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... quests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7439652692576098858:2290] 2024-11-21T08:52:04.199680Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439652692576098847:2290][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7439652688281131419:2222], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:52:04.199688Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652692576098874:2293], recipient# [2:7439652688281131410:2501], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.199710Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652692576098875:2294], recipient# [2:7439652692576098825:2513], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.199715Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439652688281131419:2222], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T08:52:04.199722Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439652688281131419:2222], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439652692576098847:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:52:04.199729Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652688281131419:2222], cacheItem# { Subscriber: { Subscriber: [2:7439652692576098847:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false 
Partial: 0 } 2024-11-21T08:52:04.199739Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652692576098876:2295], recipient# [2:7439652692576098824:2512], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.220604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974299:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439652688281131430:2223] 2024-11-21T08:52:04.220604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974296:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439652688281131429:2223] 2024-11-21T08:52:04.220614Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974296:2051] Unsubscribe: subscriber# [2:7439652688281131429:2223], path# /dc-1/USER_1 2024-11-21T08:52:04.220619Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974302:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439652688281131431:2223] 2024-11-21T08:52:04.220620Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974299:2054] Unsubscribe: subscriber# [2:7439652688281131430:2223], path# /dc-1/USER_1 2024-11-21T08:52:04.220623Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974302:2057] Unsubscribe: subscriber# [2:7439652688281131431:2223], path# /dc-1/USER_1 2024-11-21T08:52:04.220658Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2024-11-21T08:52:04.220702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974296:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652683111779637:2225] 2024-11-21T08:52:04.220704Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974296:2051] Unsubscribe: subscriber# [3:7439652683111779637:2225], path# /dc-1/USER_0 2024-11-21T08:52:04.220707Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974299:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652683111779638:2225] 2024-11-21T08:52:04.220709Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974299:2054] Unsubscribe: subscriber# [3:7439652683111779638:2225], path# /dc-1/USER_0 2024-11-21T08:52:04.220712Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439652677276974302:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439652683111779639:2225] 2024-11-21T08:52:04.220714Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439652677276974302:2057] Unsubscribe: subscriber# [3:7439652683111779639:2225], path# /dc-1/USER_0 2024-11-21T08:52:04.220774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:52:04.220833Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:52:04.220880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:52:04.221374Z node 1 :HIVE WARN: 
HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7439652683111779627:2224], Type=268959746 2024-11-21T08:52:04.221382Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7439652683111779627:2224], Type=268959746 2024-11-21T08:52:04.252559Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439652683111779612:2220], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.252600Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439652683111779612:2220], cacheItem# { Subscriber: { Subscriber: [3:7439652683111779756:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:04.252634Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439652691701716179:2983], recipient# [3:7439652691701716178:2749], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.279036Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439652688281131419:2222], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:04.279084Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652688281131419:2222], cacheItem# { Subscriber: { Subscriber: [2:7439652692576098847:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:04.279092Z node 2 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439652688281131419:2222], cacheItem# { Subscriber: { Subscriber: [2:7439652692576098848:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:52:04.279115Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439652692576098959:2334], recipient# [2:7439652692576098957:2531], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpScanSpilling::SelfJoin [GOOD] >> KqpRm::NotEnoughMemory >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> KqpRm::ManyTasks ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] Test command err: 2024-11-21T08:52:04.713894Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:04.748329Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:04.748520Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:04.749852Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:04.761763Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:04.763685Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:04.764254Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:04.764608Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:04.764768Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.764775Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.764806Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 
2024-11-21T08:52:04.767209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:04.767266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:04.767282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:04.767346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.767365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.767527Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.795041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.795095Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.808478Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.808537Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.808556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.808568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.808597Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.808605Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.808611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.808619Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.821025Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.821079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:04.821287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:04.821294Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:04.822850Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:04.823099Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:04.823426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:04.823502Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat 2024-11-21T08:52:04.823509Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat 2024-11-21T08:52:04.823671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:04.823762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:04.823785Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.823800Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:04.823834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.823869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.824324Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:04.824368Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.837215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:04.837499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:04.839645Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:04.839806Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} 
BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:04.839961Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00157f/r3tmp/tmp3NqcL8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14479872646760533105 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:04.840176Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.840276Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.840282Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.840312Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.840320Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.840353Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.840374Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.840380Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.840385Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.840394Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.840467Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.840486Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.840491Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.840509Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:04.840562Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.840567Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.840571Z node 1 :LOCAL DEBUG: 
TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.840577Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:04.841220Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:04.841229Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:04.841258Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:04.841262Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:04.841500Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:04.841523Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.841530Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.841534Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.841644Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:04.841688Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.841693Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.841696Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.868525Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.868548Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:04.868553Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.868572Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> DataShardWrite::UpsertPreparedManyTables+Volatile |87.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |87.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2024-11-21T08:52:02.882781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:02.883153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:02.883171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ee/r3tmp/tmp5kFJsE/pdisk_1.dat 2024-11-21T08:52:02.988381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:03.005590Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:03.048158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:03.048188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:03.058673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:03.162624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:03.389148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:705:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:03.389185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:714:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:03.389195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:03.390186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:03.579385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:719:2596], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:03.650766Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqq5wczhm1szz2m930596, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAwMzI4NzEtODNjMjllNS1hZjVhYzIzNi03Mjk0NTBhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.661595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yqqe4dcwk3ynh2rjz4xr6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQyYWY3ZDEtMjFiYmQzYjMtZDhiOTI3MjItYWY5MjM1Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.673749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yqqefb7paz7exsxpp7d9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTIwMjg5NGMtZjZlNWFkOWItZDJhODJkMzgtZDAxYzc5Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.684700Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yqqev7ykn5ekwkge7qd4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMyMzA1ZGMtOWNjM2Y1MzUtZGE4NjRiOTYtMzVkMzU2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.694629Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yqqf6da0sasnwcs3wm63c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U2YWQ2NTYtNDlhMjhhMDUtNDQ1MTI4ZGEtZjUzMmNhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.702035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yqqff3hvkzbwbpkxt0bz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNkNTcwMjctZjQ5Y2FlNGQtZDdmMTQyZmItYjVlYTdjMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.709355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yqqfpahve763a0t8exbc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxMDEwZDgtYjE2N2M2Y2ItNGZkYTgzMzAtYzVhNzYwZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.718453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqqfyf4ahw2jfhwdr5axd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRiNTdkODMtZmJlYzM2MDYtNjVjNzE3ZTYtY2U0Y2Q1MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.726154Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yqqg7d5wqp5kj6e0a6pzd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY0YzAwNTctNzc5NzBkM2QtZjFlMzM0ZDYtY2MzYTg3NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.734482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yqqgf4rcp09eheq0wsbyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc2NTU1NGYtODQ2M2UxMDgtNjAxMTE4NmUtYTJkMzMyNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.741696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jd6yqqgq0bnedydbb1vy2p7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJkY2Y2YjAtYzdiMDM0YjctZTU1NjA2LWU3ZjlkMDQz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.750674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yqqgy1sp20drjqz8spe2p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRkMGY1NzctMjM1MzdkN2MtYWFjNDRhNjgtNWYzZjZkZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.759446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yqqh74h6xe0x7049zjwxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjcyOTVjMjUtN2ExMGU2N2QtOGMwOWE0MDUtN2UyZTVmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.770457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd6yqqhg8r5z3zg6dxgged49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFhNTllMTctZTA4NjMyOTEtMjU2NmRlZGQtYmRkY2Q4MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.780816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd6yqqhv1c7yr8vy742zj24a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUwMWFjOGItZDZhNzYzOWYtNTVhOGEzODctNjk3MGNkMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.788363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd6yqqj564sj8rq7antq29sg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiM2IyZjAtOGI5YjZiZDktMjI4MTBjMjQtZWI2MGY1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.797134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yqqjda9chn7ga5h040trh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM4NDk3YWYtYjVkOGE2OGYtZDJlOTQyNTgtOWNjN2U1Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.805409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yqqjp3jcxb5kdaey8hcms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE3ODUyMzYtYWEyMThkMGItMTFjYzgwYS1iZGI3Y2ZhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.813379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd6yqqjyd27fvxedhbcspmrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVjN2JiNDgtOTg0ZDE5NDMtODkzZDU2OTEtZmZmNWVhODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.823192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd6yqqk65d9wswym81mmztdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmYTg2MzgtMzY4OGUxMWEtNTYyNjI4MTgtZWE1N2ZlZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.833052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jd6yqqkg1c0cbmbqgbw9fdqs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM3MGU2Zi00YTQxNGM4My1kYzgyM2YwNy0xMjEwMDhh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.843386Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yqqkta6npn4bf9kvnv7ss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFkODk2MGUtNzk5ZTExOWEtYTY2ZDNhMWItNjg1NDAzNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.854332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yqqm4djcj8mxcm2fmdr22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRjYjI0OTItZGE5ZjUzZWQtZmI4YmU2MjctZjFkZDc2M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.866114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd6yqqmfemkxcesv4p67yx7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkMzRmYjEtODY1NDM0OTUtZGZkNzJjNTQtZDFlOGI3OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.877258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yqqmv1mgchxkt9gmwntfe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFiOGMxZjUtZWFkMmE0ODEtZjcxMmZiZTEtZDdjZWZkNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.891181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd6yqqn676t69k9rj2t64c9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2MTNmYzYtNTE4Mzc2NzgtZjk2NTExYzQtYWZiMzcxZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:03.905438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd6yqqnn1y22wk4sapkznwyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ1MmFjMy05MTM0Mzg5YS0zYzVlNTRmOS0zNDdiZTA1ZQ==, CurrentExecution ... 70Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jd6yqr7bfam8xc2kmz0bbq56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUzODg0NGMtNmFmNDlmMzEtZjE1NjMzOGMtZmZmYjVkYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.485802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jd6yqr7s18hj046n2cjq2wev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2MzkxOWEtZjVhYTMzOWYtODgxNGVkNy0xMGEwMmRlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.499217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jd6yqr878h2neprybnbzf4wb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VmMDY0YjItZTc5MmFkNmMtZjc3ZGEyMDYtZWFhN2QwODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.512798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd6yqr8ma6j80320xy8qr692, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcyYWI4NzEtZjJlOTMzNDUtNzAxMTM2OTAtMmI4ZjY3ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:52:04.526055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jd6yqr92bzqrxfrkg8cthjnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjExMjg2NmQtM2QyZTk1NWQtNDAxNWY1NmMtNWRjOTFhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.546244Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jd6yqr9ff2d3rh4eqwh58we5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkxYzZlMDAtYTU2YzA1N2MtNGM1MmNjM2EtNzI5OWZjOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.559458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jd6yqra3d8z0z9kmw0es2p25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE2YTRiMmQtYjc1NTZkNzQtYWQyYzExYTgtZDcwYTQzMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.574140Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jd6yqrah3mw6hz733h21vqjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjY1NzYwNzQtYjE4NWQyZjMtZWIwMzAxYzAtYjQ5YWY4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.587438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jd6yqraz92yq8c4nne9tvex7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1ZjU4OWQtM2ViN2EyNWEtOTE3MjVjY2UtZjQyMzY4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.599995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jd6yqrbdd4qdww7a0cyajwwe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVmNzEyZmQtMjRlYzdmM2UtYzc0NTlmMGEtNzYzMWIwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.613610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jd6yqrbs10p6my9yd2pvjd3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhYzc1NWEtMjkyNDJiZTItNmU1YmU4N2MtNGVmMjZlN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.625226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jd6yqrc7bvs9waapcj5zg2jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhmYjdhYjItOWEyMzUzNTgtNTBjNjY4NTctYWZlYzhmNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.636620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jd6yqrcj98y8sxgf5xzhqha5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc1MGYzMDktNWUwYjBlMGItNDM0ZDEwOWItNGUxZGJiMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.647649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jd6yqrcx1c8gz2770xk89egy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNjMWU5OGQtZjk1ZjQwYzAtNGIxOTIyZGUtODA3ZjdhMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.658726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jd6yqrd95sqmccjyvrgeq69p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxMjljYmUtYjE0ZjJiZjMtMzhiYzYzNy1iMjhlM2MzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.670007Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jd6yqrdmamjv0r2zg29f2gg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFhYmI2NTUtZmYyMGI2OTUtYzRiZGE5NDItM2RmMzEyZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.684983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jd6yqrdz446r6da6pzng3ctb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFlNjhiOC1lNTdlOWIwMy04ZGFjZmQ5Yy0zY2E4N2FjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.699392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jd6yqree603yab6mhz0xqaw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjMxMGQyZDktMTgwNTAzMDgtNTIyNDgxZmEtMmVlODBkYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.711621Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jd6yqrew638q432j3ty89jfp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY0Njg0NDgtODU2MWEwNGItMTQ0ODc1YS1lZTFhYTI1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.723581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jd6yqrf8118kbeq2bengqhea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFhNTkwNGItYjc3NTgyOTktYmFjNDAyY2ItZDg0ZDNlNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.733641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jd6yqrfm8j6swpq7gr978spr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhkYjY2MDMtOGFjMzQxNjctYzU2NWFhNDItMWRlMWYzMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.746086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jd6yqrfze4mspr5zrkyf6th2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJiZWI4ZjEtYWYwN2FiZGYtMTNiNDIxYTctYjE4Yjk4NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.760534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd6yqrgb0q4az0mwt5ezzwhv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY4MDRkNzYtMTIwZWY4NTktMjk5MDIwNDItNGM4M2Y2ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.771649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd6yqrgt60zyvd7b04bxmyej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY4ODg3YzMtODI3NGM5MzMtMTg2ZDE4ZjMtYzE0OWY4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.782624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jd6yqrh46fb651xbfjme1gfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc3MGY3M2YtYzYwNzUwNDgtNTQxZDdmYmEtZTI4MmNjZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.795625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jd6yqrhh71zpjq14h8m8x0ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E1OGNjYmUtN2YyMmZkYjktNjc3ZjRlMTUtYzQ0ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.806955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jd6yqrhx3z5qhex4fa3eda6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMyZWYwODktMzEwYTBmYWUtNWZkM2MyMWMtNzIxYmJhZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.821041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jd6yqrj84qn062edfcy2tqq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQwZWUwZjktNmMyZDMxY2UtMzlkOGIzMTMtYWQ1Mjc0MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.838452Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jd6yqrjpbw0ed9t6tajveyr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZhZTI5YjUtYTYwMDQzNGUtYjY4YWU5MTEtNmVhODQyMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.850444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd6yqrk7dmw9sd5qeqg7w0s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTczNzdjYzEtMWY2NTMzOGYtMTY5MmQ4OGQtNzkwNzkyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.861890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jd6yqrkkd2htttxkmd0w7pbz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjY2JmMWYtYjE5ODM3Mi05NGU3MzZiMi1iNmM1YzBjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.875704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jd6yqrkzbjaw8gys3epvdjxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc4Yjc0NTAtYTVmOTA2NWUtMjM1ZjQwNWEtZjY1ZDg2ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.889270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jd6yqrmd5kfdsvf5n2gcy1ph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFlZWI4MGItYTM2NWM0MDMtODdmNTk5YjktN2U5YTg1MmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.901594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jd6yqrmv98wn1ddakre78xm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjAyNDVjYTQtYTVhNmJhYzgtNDYyNGJmZDgtNjVjYjdiNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.913100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jd6yqrn750ysdnhjm3tqk8xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJlNjQxNjctMTdkZWI0YjEtNWI1NTg2MmYtYTdhN2FmMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.932069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jd6yqrnn169drvqygve910sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRhNjZhNGMtMTU2ODA2NTktMTk4YTBlZjktNTc0MzFmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |87.5%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> KqpRm::Reduce [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> KqpRm::NotEnoughMemory [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/0016de/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 21873, MsgBus: 8122 2024-11-21T08:52:03.908591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652684132520275:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:03.908677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016de/r3tmp/tmpP6ialA/pdisk_1.dat 2024-11-21T08:52:03.978457Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21873, node 1 2024-11-21T08:52:03.992472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:03.992491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:03.992493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:03.992533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:04.008441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:04.008476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:8122 2024-11-21T08:52:04.010221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8122 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:04.107513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.124517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:04.137017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.196433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.233337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.266907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.361093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652688427488960:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.361197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.367808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.387962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.408820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.425357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.445829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.463587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.481593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652688427489473:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.481627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.481704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652688427489478:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.482578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:04.494840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652688427489480:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 (StructType '('"Key" $3) '('"Value" $4))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($20) (block '( (let $21 (lambda '($22) (block '( (let $23 (VariantType (TupleType $5 $5))) (let $24 (Variant $22 '0 $23)) (let $25 (Variant $22 '1 $23)) (return $24 $25) )))) (return (FromFlow (MultiMap (ToFlow $20) $21))) ))) '('('"_logical_id" '688) '('"_id" '"8dff3eff-ae7996f2-28849bdd-324a860e")))) (let $7 (DqCnUnionAll (TDqOutput $6 '1))) (let $8 '('('"_logical_id" '531) '('"_id" '"ea811b01-b3c347c4-b7704f12-880ae553") '('"_wide_channels" $5))) (let $9 (DqPhyStage '($7) (lambda '($26) (block '( (let $27 (lambda '($28) (Member $28 '"Key") (Member $28 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $26) $27))) ))) $8)) (let $10 (DqCnMap (TDqOutput $6 '0))) (let $11 (DqCnBroadcast (TDqOutput $9 '0))) (let $12 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $13 '('('"_logical_id" '603) '('"_id" '"1cbddedb-da210d06-c1476c71-fc65e7de") '('"_wide_channels" $12))) (let $14 (DqPhyStage '($10 $11) (lambda '($29 $30) (block '( (let $31 (lambda '($38) (block '( (let $39 (Member $38 '"Value")) (return (Member $38 '"Key") $39 $39 (Exists $39)) )))) (let $32 (lambda '($44 $45 $46 $47) $44 $45 $46)) (let $33 (lambda '($50 $51) $50 $51 $51)) (let $34 '('"2")) (let $35 '('0 '0 '1 '1)) (let $36 '('0 '"2" '1 '"3")) (let $37 (GraceJoinCore (WideMap (WideFilter (ExpandMap (ToFlow $29) $31) (lambda '($40 $41 $42 $43) $43)) $32) (WideMap (WideFilter (ToFlow $30) (lambda '($48 $49) (Exists $49))) $33) 'Inner $34 $34 $35 $36 '('"t1.Value") '('"t2.Value") '('"Broadcast"))) (return (FromFlow (WideSort $37 '('('0 (Bool 'true)))))) ))) $13)) (let $15 (DqCnMerge (TDqOutput $14 '0) '('('0 '"Asc")))) (let $16 (DqPhyStage '($15) (lambda '($52) (FromFlow (NarrowMap (ToFlow $52) (lambda '($53 $54 $55 $56) (AsStruct '('"t1.Key" $53) '('"t1.Value" $54) '('"t2.Key" $55) '('"t2.Value" $56)))))) '('('"_logical_id" '615) '('"_id" '"b7eac8d3-1d72d35a-9ee5b72-82544eac")))) (let $17 '($6 $9 $14 $16)) (let $18 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $19 (DqCnResult (TDqOutput $16 '0) $18)) (return (KqpPhysicalQuery '((KqpPhysicalTx $17 '($19) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $12) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/001709/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 28529, MsgBus: 19972 2024-11-21T08:52:04.076309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652689887182917:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:04.113933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001709/r3tmp/tmpkIIDkq/pdisk_1.dat 2024-11-21T08:52:04.159046Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T08:52:04.163557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:04.163584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:04.164602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28529, node 1 2024-11-21T08:52:04.193930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:04.193943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:04.193946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:04.193981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19972 TClient is connected to server localhost:19972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:04.298671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.304790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:04.322117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.362016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.390161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.417665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:04.600190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652689887184285:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.604118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.609238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.629339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.649951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.670208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.685946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.700403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.725547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652689887184800:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.725570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.725710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652689887184805:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.726554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:04.729669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:52:04.729758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652689887184807:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:05.157376Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652694182152608:2509] TxId: 281474976715682. Ctx: { TraceId: 01jd6yqrvgb5mxaj93xt1gsaed, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:52:05.157889Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2024-11-21T08:52:05.157956Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T08:52:05.157980Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2024-11-21T08:52:05.157997Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T08:52:05.158219Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:45 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2024-11-21T08:52:05.158241Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7439652694182152615:2517], task: 4 2024-11-21T08:52:05.158247Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2024-11-21T08:52:05.158480Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.159420Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
Send stats to executor actor [1:7439652694182152608:2509] TaskId: 4 Stats: CpuTimeUs: 244 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 203 ComputeCpuTimeUs: 5 BuildCpuTimeUs: 198 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732179125158 } MaxMemoryUsage: 104857600 2024-11-21T08:52:05.159447Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:81;event=bootstrap;compute=1;shards=1; 2024-11-21T08:52:05.159450Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152616:2518]. BEFORE: 1 + 0 + 0 2024-11-21T08:52:05.159460Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:324;event=start_scanner;state=Initial;tablet_id=72075186224037911;generation=0; 2024-11-21T08:52:05.159496Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:66;event=start_scanner;tablet_id=72075186224037911;generation=1;info=TShardState{ TabletId: 72075186224037911, State: Starting, Gen: 1, Last Key , Ranges: [#0: [(Uint64 : NULL) ; ())], , Retr ... /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214166Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2024-11-21T08:52:05.214168Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:05.214175Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:05.214178Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:05.214186Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T08:52:05.214189Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2024-11-21T08:52:05.214192Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, waiting for chunk delivery in output channelId: 4, seqNo: [11] 2024-11-21T08:52:05.214242Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
CA StateFunc 271646923 2024-11-21T08:52:05.214247Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Finish input channelId: 4, from: [1:7439652694182152614:2516] 2024-11-21T08:52:05.214250Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214255Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2024-11-21T08:52:05.214257Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:05.214258Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:05.214263Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T08:52:05.214265Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2024-11-21T08:52:05.214267Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. Tasks execution finished 2024-11-21T08:52:05.214269Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152614:2516], TxId: 281474976715682, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:05.214292Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 3. pass away 2024-11-21T08:52:05.214324Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:05.214458Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214646Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214659Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214737Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214856Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214865Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.214976Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.215116Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.215125Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
CA StateFunc 271646922 2024-11-21T08:52:05.215128Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T08:52:05.215132Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T08:52:05.215134Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Tasks execution finished, waiting for chunk delivery in output channelId: 5, seqNo: [11] 2024-11-21T08:52:05.215189Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T08:52:05.215191Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T08:52:05.215193Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T08:52:05.215195Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. Tasks execution finished 2024-11-21T08:52:05.215196Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439652694182152615:2517], TxId: 281474976715682, task: 4. Ctx: { TraceId : 01jd6yqrvgb5mxaj93xt1gsaed. SessionId : ydb://session/3?node_id=1&id=YmU5M2QzNmUtNzA0NjEzY2EtZjMwYmU1ZjQtOGVhNGQzOGQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:05.215209Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715682, task: 4. pass away 2024-11-21T08:52:05.215224Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:05.215489Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179125194, txId: 281474976715681] shutting down 2024-11-21T08:52:05.216248Z node 1 :KQP_COMPUTE DEBUG: [CloseFile] from: [1:7439652694182152636:3619], error: (empty maybe) |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> TFlatExecutorLeases::BasicsInitialLease ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2024-11-21T08:52:05.456266Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:05.492014Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:05.492186Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:05.493646Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:05.518604Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:05.520771Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.521312Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.521670Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.521816Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.521822Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.521858Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.524054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.524107Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.524123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:05.524175Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.524189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.524309Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.553204Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.553270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.564326Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.564379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.564396Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.564407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.564436Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.564444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.564450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.564457Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.575156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.575201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.575397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.575404Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.577098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.577360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:05.577761Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.577839Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat 2024-11-21T08:52:05.577846Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat 2024-11-21T08:52:05.578033Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.578117Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.578141Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.578154Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.578188Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.578226Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.579239Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.579312Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.593298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.593598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.595780Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.595923Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:05.596078Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001565/r3tmp/tmpLOqyR2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7674281871929001245 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.596326Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.596423Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.596430Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.596458Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.596465Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.596488Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.596507Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.596513Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.596519Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.596528Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.596598Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.596616Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.596621Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.596637Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.596689Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.596694Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.596696Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.596704Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.597488Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.597498Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.597526Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:05.597530Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.597703Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.597723Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.597731Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.597734Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.597835Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.597871Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.597875Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.597878Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.622716Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.622733Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.622738Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.622744Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> KqpRm::ManyTasks [GOOD] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10000Inflight100BlobSize1000 [GOOD] >> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2024-11-21T08:52:05.622563Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:05.653840Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:05.654016Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:05.655288Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:05.672180Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:05.674568Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.675129Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.675445Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.675609Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.675617Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.675651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.677929Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.677988Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.678005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:05.678062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.678078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.678197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.700733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.700780Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.711874Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.711928Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.711946Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.711957Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.711984Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.711992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.711997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.712006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.723170Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.723254Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.723598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.723616Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.725241Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.725528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 
Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:05.725906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.725990Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat 2024-11-21T08:52:05.725998Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat 2024-11-21T08:52:05.726236Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.726355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.726387Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.726406Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.726460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.726504Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.727632Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.727690Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.739504Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.739750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.741580Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.741698Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2024-11-21T08:52:05.741822Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001523/r3tmp/tmpxlO7Q6/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10293841778944622203 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.741972Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.742010Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.742013Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.742030Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.742036Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.742062Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.742079Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.742085Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.742091Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.742100Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.742161Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.742180Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.742184Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.742198Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.742243Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.742248Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.742251Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.742257Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.742842Z node 
2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.742852Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.742876Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.742881Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.743053Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.743071Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.743078Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.743081Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.743190Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.743231Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.743236Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.743238Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.763093Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.763119Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.763123Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.763129Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2024-11-21T08:52:05.445739Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:05.476435Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:05.476639Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:05.477803Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:05.491578Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:05.493822Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.494381Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.494696Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.494823Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.494830Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.494859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.497273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.497330Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.497350Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx 
IncompatibleData# false 2024-11-21T08:52:05.497421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.497440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.497567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.524588Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.524635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.540633Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.540699Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.540722Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.540736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.540769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.540779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.540786Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.540795Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.557164Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.557234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.557507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.557516Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.559228Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.559525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } 
DeclarativePDiskManagement: true } 2024-11-21T08:52:05.559895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.559978Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat 2024-11-21T08:52:05.559984Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat 2024-11-21T08:52:05.560186Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.560315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.560341Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.560358Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.560397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.560441Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.560901Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.560942Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.575162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.575395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.577528Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.577687Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2024-11-21T08:52:05.577822Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001573/r3tmp/tmpyE6RY0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16731775966656024427 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.578064Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.578138Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.578143Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.578174Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.578182Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.578205Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.578225Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.578230Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.578235Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.578244Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.578321Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.578339Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.578344Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.578362Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.578418Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.578424Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.578427Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.578435Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.579132Z node 
2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.579142Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.579169Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.579173Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.579387Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.579410Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.579418Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.579421Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.579527Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.579568Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.579572Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.579575Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.599552Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.599590Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.599594Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.599600Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. |87.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2024-11-21T08:52:03.645975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:03.646577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:03.646613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044eb/r3tmp/tmpchmh49/pdisk_1.dat 2024-11-21T08:52:03.759406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:03.778147Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:03.820848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:03.820890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:03.831550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:03.938330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.203704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2024-11-21T08:52:04.483089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2639], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.483128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:787:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.483140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:04.484310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:04.690453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2647], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:04.799160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqr808044n1yg7dadyznc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM3MmNlNjYtYjI3MjM3MWUtM2E3ZWYyZTEtZTVkN2UzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.813361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yqrj20qpspvhvt8wxqgy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFhYTY3YzAtNWYyZDBkMTUtZTMwNWZhNzYtNWQzZDY3OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.832793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yqrjebstm5se3bex9dyhc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE1OTNiMi1hNTNiMTY5ZC1kOWJmY2VhNy1iZTNlODMxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.844293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yqrk2bayenabxma81fntz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFiZjE4MjQtMTgwMjY4ZTMtZmQ2NzA1OTEtYmJlYWJkMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.858600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yqrkdb427wvbwd95hb98b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjg5ZTYzYjAtZjNmZTg3ZjEtNzhkYzIyYTQtZGFiNzVlZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.872742Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yqrkx7xtp226yc0709gzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNGM2ZmEtMzFiNDhiNTYtZTNkYjMyZTYtYmJlNWNiMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.883702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yqrmabjr7xcgbk851t79k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUxYzJlMGMtYzMwNTQ4Yi0zYjNlNTI1YS00MmU3YjFmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.894490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yqrmn6yakqkfn7yqpfadr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ0ZjY1NGUtZGY0NjY0Yi04NGE1ZDdhMy0xNjBmNzBmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.904793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yqrmz5y7psnhnr0f1wbp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ4OGNiZTctMmI5ZjRkNWMtOThjNzc5N2QtNDY0MTU4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.915299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yqrna4abv6p6jcf4374bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJkMmQ4NzgtNzIwZGJlNGMtY2E5OThjNzYtNzhmNWI1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.925902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jd6yqrnmch0jzhppx4xn2b87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2MTEyOWYtYTU0N2Q0MDktYzhiZGZkZjQtOWI4MDhkMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.936887Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yqrnzb4kp40tfxsfbvpfh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ5OGU0YjQtN2VhOTI3ZDUtZDkxMmJlNmQtMjMwMTI5Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.948543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yqrpa3tp3xz0wcn6q9sej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhlODY5NWQtNGIxMzA4NWUtODEwNjMxYjktNTE3Yzk0Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.960071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd6yqrpp3ss3knj03kq2ajxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDUwZjAxMDgtYjk5MzUwOTMtYTI3ZDYwZDgtNDg1Y2RiM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.972816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd6yqrq1c0jchz25m868esr5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJkZDZiMC04ZjJjZDYzMC1jMTcyNjUyYS03MWE5MDg4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.984199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd6yqrqe01rgjdxcvdktqgka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4M2MwMzUtNDA4NmUyMzMtYmEwODdkMGQtZDk5YWIwZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:04.995359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yqrqs08gxs3h6sfz3f1ye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE3OWU3OGYtYjUyM2JkM2ItYTlhNGFjMy05NTY0OTQwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.005968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yqrr4dybksetms4tz50yn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU0YzAwNi1hZjdhZjRiYy0yZmM3ZTRmYS0xNzAyNGE2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.015907Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd6yqrrf156k9z7p9vcrzsxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzlmNjY4NmQtYzk2YWE0MDQtOTQ5NDAzODYtYjIzMWRiYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.026496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd6yqrrsezvsz4bpkkmbyamc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYwNzdlNC1lZjIyODk4YS1lMzNhMTJhOS1lYjU0YzBhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.036833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jd6yqrs322fpv0bc7vk0qsgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDljYzdlYzAtNTUxYWVlOTEtODI4Y2NlYzYtOTZjYmJlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.047586Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yqrsec21yjkmf8r6fwwq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc5OGZhNjgtN2EzY2VlNWMtN2MxY2VkNWUtNTg0NDdkNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.058037Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yqrsrctaj05t85r6jswvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWViYzRmM2QtZDc0NjAzOTAtMjU3N2I3NWEtNGIwMGQ5OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.069934Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd6yqrt3f7evvyfy5syc6hbz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTAyOTFjZDQtY2E3YTk3YjMtNzRkMGYxZGUtNjNiOGI0MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.081912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yqrtf7sy1q2awg4cbehy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYyMzVlYWYtZDRlYzFmZGEtOTAyYmJlZWEtMzU3ODc2N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.093875Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd6yqrtv7k2aqp96knndvq4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGMxMDQ0MzctMmUwMzA0NjgtOWYwNWNmYzItMzUyOTdhN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.104682Z node 1 :KQP ... :05.601333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jd6yqsaq6b3ygeq4rb7kgwgj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E3YWJlNmUtYzM5NDJhM2MtYWY2YzY4ZDUtNGQ1M2MwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.611587Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jd6yqsb24sbt3881dwbk8wkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE0ZGMzOGEtNTA2MDllNy05OTk1MGU3OS1kMGEyMThkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.623630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jd6yqsbd9hnkmt3tv3ymr2qq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZjZTA3NTgtN2FjNWJlYWYtM2FiYWJjZGEtYTJjMDllMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.635194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd6yqsbs1z9ev63szsea1td3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg0ZGU1MTYtZWNiODMzMzctNGM5NjAyNjQtOTM3N2IxMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.646390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jd6yqsc46485a7yzs658ssfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGYxMGU2YmItNzk4NmQ5MTktYWYzNWJkYmUtMWU5ZDEzNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.657927Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jd6yqscfcbhbyxxhyd6efsw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VjOWM2Zi0yYzViNzIzMS01ZTExYzA1NS00OTQ3Y2Rh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.674611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jd6yqscv1nz3vqfpq0an385s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmNzhjZjItOTYzOGI4MjAtZTI1YjI4ZWQtMTM5ZTIzNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.686310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jd6yqsdb53nahz3htpjd5hpa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM5ZjFlMzctOTQ1NzQyOGItYzMwYTFiZTktNGVjMjI4NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.696750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jd6yqsdq9dj5tfn080w551br, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjgxYzRjMWYtNDcyYTVjOGMtNDg1MzdkM2YtZjBkMTViOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.706158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jd6yqse2dhazj0kmm4ddpat8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQzOWY0MTItMzhlZDdiZjMtZGQzZDY3ZjMtMmM4ZDU0NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.716679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jd6yqsebf89fkte7pea0k900, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgwMmI0MjktYWU2NzFhZTUtNjY3MmRmZTQtNzVhMTA0ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.727098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jd6yqsen5k1c0rm8zr5zqdsa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIzYWQ3MjQtNTljMTQ2NWEtNjMwYzgxNy04MzIzZTE1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.737736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jd6yqsf0bggzzr8j072z5q5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThhYWU1OTItYzc0OGQ0ODctOTVlODI2MzAtNGJlY2QxODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.748075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jd6yqsfb6fa8tv045sm3sdph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjcyMjBjNzctNWEzMjQ0MDQtZWMyMGQ5ZGMtM2JiNTFiODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.756235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jd6yqsfnd0e8j9m7jjkpryfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc2MTJhODAtYjBhMjAyMWQtYzFmZjgyZWYtN2Y3ZDc4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.767226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jd6yqsfx9w4vjyjsx87c1fwd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ4MjU1OWQtNGIzNjJlZi1lMTQ3ODdmLWU5ZTFiNTdk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.778828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jd6yqsg82e5nykzk0sv1qhd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNmNzVhNTEtYzMyM2RlYzYtYzhmZmJiMjItNzIxNDEyN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.790146Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jd6yqsgmc66f560mgdgqx9pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQyNWUzYjItYjNkNjg3ZGUtYmQ5MDEwMTgtMTI4OGQ2ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.802822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jd6yqsgze7gg9jvwb1wq7h1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk2MDE0ZTAtMjdkMjZhMDYtZDJlZDkwODMtYjcwODllYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.814977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jd6yqshc2gevw13dmhxyrzv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQwMTg2NDMtMjI5N2JhMzAtZTIwMmY4YTUtZDhjZjVmMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.826866Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jd6yqshrbf9xrkrj5mx3fdn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdlNDExY2UtNjk5YmRhOGYtNzZhMjE5Yy1iZjMzNjI1Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.839053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jd6yqsj45qscszaenk2245wp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM5ZGUxNGItOTU1NTM0NmQtYWYzYWMzMWEtMzJlMDc5YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.851169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd6yqsjg7bfr4pb9pq5sywq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYxZTJiM2EtZjhiYjVjYTAtZjgwOWM1NTQtYjYxMzRjYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.863333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd6yqsjw67ja6b002cg0hdng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE2ZjFiNDktODY4YTI4ODYtZGUxOTJmNjctZTUzYjFlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.875840Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jd6yqsk86jz8vs5sr232j94e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRkMWRhZWItZmRiMjBmYzAtZjJhYjg4NTAtYWI4OTA1ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.888094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jd6yqsknd811pmhaznvxatjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMzOGM0NDMtMzlhMjhlNTYtYTNlM2ZlMTYtNjU0YzlhMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.900581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jd6yqsm1c16k9q653gnp5ca5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNkMzA5ZTMtMzdkZWMyZjYtMzE2OWVkMjAtNzlhMGQxNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.911883Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jd6yqsme6xh6e1tqzxnv4ckt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlZmQ3NWMtYTFkZjZiNWEtMjRlOWFkOGEtYzZmNDdjN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.924521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jd6yqsmseb6z19svgged2s6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU0MDA3NTEtMmIwMWFkMTQtNzUzN2JlYzItNzA4MTA4NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.936908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd6yqsn6c138gtzwr5qbevc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAzNjVkMTMtYjM2MzQ0M2YtN2ZlMjMzMmYtMjk1OWQ1Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.948102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jd6yqsnjfg3qpq8jve75v4ex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRjNjM0OTctY2Q3MTQ2YzEtNWQzZGFjYjItYzIzM2EwNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.959471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jd6yqsnxea4wm99z24wkgayf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYxZjY4M2QtZDFmNzA5ZmUtNjJmOGYxOTEtYzY1OTIxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.971078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jd6yqsp95ah5xmyvdvm1twe2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhMzc3ZjMtZWQ2Yzk5ZDMtYTMwZWQ2MjgtZjBkMDczMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.982652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jd6yqspmfrman7zs07ne3exb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVmNGRmMDktM2NkMTZkZjAtYzQxYWU2MjQtZTFhOWI1MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:05.994933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jd6yqsq079r04t0men698hng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmOGIyNmMtZjQzMDIwMGMtMTQyZDM4NDMtZjQyYTVjMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:06.075373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jd6yqssb9hdykg3mpzpsdw45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU1ODNmMDQtYzJlNmJjNTItN2NmYTRlZTgtYjFlNDU2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> KqpRm::SingleSnapshotByExchanger [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2024-11-21T08:52:05.761703Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:05.791330Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:05.791562Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:05.792726Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:05.808509Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:05.811051Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.811823Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.812368Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.812588Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.812598Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.812630Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.814902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.814947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.814959Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:05.814996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.815006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.815266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.838776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.838820Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.851981Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.852036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.852053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.852065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.852090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.852098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.852103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.852111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.863426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.863477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.863695Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.863702Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.865190Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.865438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:05.865754Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.865826Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat 2024-11-21T08:52:05.865833Z node 1 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat 2024-11-21T08:52:05.865997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.866189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.866213Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.866226Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.866266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.866303Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.867964Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.868022Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.879354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.879614Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.881802Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.881970Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:05.882116Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001552/r3tmp/tmpktuiBH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10733596727901365807 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.882306Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.882369Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.882374Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.882404Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.882412Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.882436Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.882455Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.882460Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.882465Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.882475Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.882542Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.882559Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.882562Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.882580Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.882633Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.882638Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.882642Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.882649Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.883272Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.883279Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.883307Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:05.883311Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.883481Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.883502Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.883510Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.883513Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.883616Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.883655Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.883659Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.883662Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.915369Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.915393Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.915399Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.915405Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2024-11-21T08:52:03.990101Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:04.014759Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:04.014899Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:04.015997Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:04.026422Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:04.027999Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:04.028461Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:04.028744Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:04.028866Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.028871Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:04.028897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:04.030444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:04.030494Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:04.030507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:04.030558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.030571Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:04.030667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.052444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:04.052487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.063308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:04.063346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.063356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:04.063365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.063383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:04.063389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.063393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:04.063399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.074367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:04.074413Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:04.074618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:04.074626Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:04.076374Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:04.076588Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:04.076973Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: 
"/home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:04.077045Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat 2024-11-21T08:52:04.077053Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat 2024-11-21T08:52:04.077217Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:04.077296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:04.077318Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.077332Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:04.077368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.077422Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:04.078089Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:04.078141Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:04.089738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:04.089982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:04.092355Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:04.092517Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:04.092672Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/001605/r3tmp/tmp5dGrOJ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12729641128683624784 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:04.092884Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.092950Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.092956Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:04.092985Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:04.092994Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.093018Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.093037Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.093043Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:04.093050Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:04.093062Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:04.093152Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.093171Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.093177Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.093194Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:04.093272Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:04.093278Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:04.093282Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:04.093292Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:04.093983Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:04.093990Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:04.094016Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:04.094021Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:04.094203Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:04.094224Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.094233Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.094236Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.094348Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:04.094392Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:04.094398Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:04.094402Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:04.117312Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.117333Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:04.117354Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:04.117359Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:04.146025Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:04.147023Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T08:52:04.147111Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:04.147589Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:52:04.188525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T08:52:04.235365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T08:52:04.284568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T08:52:04.605330Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:04.616415Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T08:52:04.616544Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests10000Inflight100BlobSize1000 [GOOD] Test command err: RandomSeed# 11418612371498728923 2024-11-21T08:51:32.487646Z 2 00h00m30.010000s :BS_PROXY_GET ERROR: [c03cd058a1ae6d65] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:10124:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:32.487697Z 2 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:10124:1000:0] PatchedBlobId# [1:1:2:10:14220:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.583664Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [1505a572555dc54f] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:10125:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.583711Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:10125:1000:0] PatchedBlobId# [1:1:2:10:133005:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.585078Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [cf754d922adc1f65] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:2:10:133005:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.585105Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:2:10:133005:1000:0] PatchedBlobId# [1:1:3:10:14221:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.586055Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [8856ec80dee9ead1] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:3:10:14221:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.586077Z 7 00h00m30.010000s 
:BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:3:10:14221:1000:0] PatchedBlobId# [1:1:4:10:18317:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.586928Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [54cd98cebe68efe2] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:4:10:18317:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.586950Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:4:10:18317:1000:0] PatchedBlobId# [1:1:5:10:42893:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.587799Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [50fe3268624bec13] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:5:10:42893:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.587820Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:5:10:42893:1000:0] PatchedBlobId# [1:1:6:10:22413:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.588711Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [c07758c398c04828] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:6:10:22413:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.588732Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:6:10:22413:1000:0] PatchedBlobId# [1:1:7:10:1933:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.589619Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [bad6690c16f2e708] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:7:10:1933:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.589643Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:7:10:1933:1000:0] PatchedBlobId# [1:1:8:10:1933:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.590491Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [3a487669d035e2a7] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:8:10:1933:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.590511Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:8:10:1933:1000:0] PatchedBlobId# [1:1:9:10:6029:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] 
Marker# BSVSP01 2024-11-21T08:51:33.591341Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [3ecd1afd549477ee] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:9:10:6029:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.591361Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:9:10:6029:1000:0] PatchedBlobId# [1:1:10:10:104333:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:33.592223Z 7 00h00m30.010000s :BS_PROXY_GET ERROR: [f8c75967802da099] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:10:10:104333:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:33.592246Z 7 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:104333:1000:0] PatchedBlobId# [1:1:11:10:10125:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:51:34.624279Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [2bd648e9672dfa8f] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:10126:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.624334Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:10126:1000:0] PatchedBlobId# [1:1:2:10:10126:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.625661Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [a38d2746a97c9ad6] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:2:10:10126:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.625686Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:2:10:10126:1000:0] PatchedBlobId# [1:1:3:10:14222:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.626619Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [b7b96e6cf4f53b1a] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:3:10:14222:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.626642Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:3:10:14222:1000:0] PatchedBlobId# [1:1:4:10:137102:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.627530Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [e2726e45689e2bfc] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:4:10:137102:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 
2024-11-21T08:51:34.627551Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:4:10:137102:1000:0] PatchedBlobId# [1:1:5:10:18318:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.628471Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [2fc801b4e3acdc87] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:5:10:18318:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.628494Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:5:10:18318:1000:0] PatchedBlobId# [1:1:6:10:22414:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.629411Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [02e01c87d9e44cb6] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:6:10:22414:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.629432Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:6:10:22414:1000:0] PatchedBlobId# [1:1:7:10:46990:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.630335Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [7c8edf65902d93ab] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:7:10:46990:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:34.630360Z 4 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:7:10:46990:1000:0] PatchedBlobId# [1:1:8:10:1934:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:51:34.631248Z 4 00h00m30.010000s :BS_PROXY_GET ERROR: [dd23d582d7b61bb9] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:8:10:1934:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Ma ... 
6186:1000:0] PatchedBlobId# [1:1:107:10:79914:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.959197Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [ea85e5e4fe4ebb80] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:101:10:18561:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.959349Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [9f65b89146de8d8e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:99:10:14464:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.959502Z 3 00h00m30.010000s :BS_PROXY_GET ERROR: [2e3f37e6553ed81b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:99:10:133172:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:52:03.959767Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:101:10:18561:1000:0] PatchedBlobId# [1:1:102:10:43137:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.959841Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [e83ad8093a94d8f3] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:102:10:18539:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.959867Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:99:10:14464:1000:0] PatchedBlobId# [1:1:100:10:14464:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.959992Z 3 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:2:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:99:10:133172:1000:0] PatchedBlobId# [1:1:100:10:14388:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:52:03.960318Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:102:10:18539:1000:0] PatchedBlobId# [1:1:103:10:22635:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.961153Z 3 00h00m30.010000s :BS_PROXY_GET ERROR: [85ca8e251f63c521] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:102:10:18521:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:2:0]"} Marker# BPG29 2024-11-21T08:52:03.961790Z 3 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:2:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:102:10:18521:1000:0] PatchedBlobId# [1:1:103:10:22617:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:52:03.963516Z 6 00h00m30.010000s 
:BS_PROXY_GET ERROR: [e8a4f36e8bb74a08] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:104:10:22578:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:2:0]"} Marker# BPG29 2024-11-21T08:52:03.963711Z 2 00h00m30.010000s :BS_PROXY_GET ERROR: [54cb7c1b59925380] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:98:10:10345:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:52:03.964107Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [83257120426e57d3] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:103:10:22628:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.964200Z 6 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:2:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:104:10:22578:1000:0] PatchedBlobId# [1:1:105:10:2098:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:2:0] Marker# BSVSP01 2024-11-21T08:52:03.964332Z 2 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:98:10:10345:1000:0] PatchedBlobId# [1:1:99:10:14441:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:52:03.964605Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:103:10:22628:1000:0] PatchedBlobId# [1:1:104:10:22628:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.966307Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [928b2007a5a8dc60] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:103:10:22579:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.966462Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [43f1cee28927807b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:98:10:10358:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.966805Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:103:10:22579:1000:0] PatchedBlobId# [1:1:104:10:2099:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.966889Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [58a65fe762792107] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:106:10:125045:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.966916Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:98:10:10358:1000:0] PatchedBlobId# [1:1:99:10:14454:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.967113Z 8 00h00m30.010000s 
:BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:106:10:125045:1000:0] PatchedBlobId# [1:1:107:10:6261:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.969329Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [812f40351ecb7e38] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:99:10:10360:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.969637Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:99:10:10360:1000:0] PatchedBlobId# [1:1:100:10:14456:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.970459Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [a805ee82f89ee15a] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:99:10:10273:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.970576Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [aa2d53963a4b96e9] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:97:10:6247:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.970723Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:99:10:10273:1000:0] PatchedBlobId# [1:1:100:10:14369:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.970786Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [f4dc4c8b19a031fc] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:97:10:6200:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.970809Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:97:10:6247:1000:0] PatchedBlobId# [1:1:98:10:10343:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.970898Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:97:10:6200:1000:0] PatchedBlobId# [1:1:98:10:10296:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.973194Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [2ab253ec220ce40c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:100:10:14422:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.973439Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:100:10:14422:1000:0] PatchedBlobId# [1:1:101:10:18518:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE 
from# [82000000:1:1:1:0] Marker# BSVSP01 2024-11-21T08:52:03.974427Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [077f4db287f46bbe] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:106:10:6254:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T08:52:03.974622Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [7698d3c340440074] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:104:10:2157:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T08:52:03.974815Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:106:10:6254:1000:0] PatchedBlobId# [1:1:107:10:6254:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T08:52:03.974978Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:104:10:2157:1000:0] PatchedBlobId# [1:1:105:10:2157:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout |87.5%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2024-11-21T08:52:04.956389Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:04.983018Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:04.983223Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:04.984332Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:04.997608Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:04.999589Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.000115Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.000444Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.000591Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.000599Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.000626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.002709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.002758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.002772Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:05.002824Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.002837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.002942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.025741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.025789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.037922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.037990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.038010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2024-11-21T08:52:05.038023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.038049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.038059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.038064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.038073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.056245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.056311Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.056571Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.056582Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.058399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.058683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:05.059054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.059135Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat 2024-11-21T08:52:05.059144Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat 2024-11-21T08:52:05.059339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.059432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.059457Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.059472Z node 1 :BS_CONTROLLER DEBUG: 
{BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.059513Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.059551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.064496Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.064627Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.077689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.078000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.081936Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.082103Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:05.082276Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00157e/r3tmp/tmpPxJ8lB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14753459124887022949 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.082497Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.082574Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.082580Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.082614Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.082623Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.082651Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.082675Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.082685Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.082693Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.082709Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.082827Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.082849Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.082854Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.082872Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.082951Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.082959Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.082963Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.082971Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.083771Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.083785Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.083818Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:05.083824Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.084040Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.084062Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.084070Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.084074Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.084198Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.084277Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.084284Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.084287Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.122475Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.122499Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.122504Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.122510Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.148725Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:05.153094Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T08:52:05.153251Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:05.159720Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:52:05.204867Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T08:52:05.268843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T08:52:05.314222Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T08:52:05.596389Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.604593Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T08:52:05.604738Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> IndexBuildTestReboots::CancelBuild >> IndexBuildTestReboots::DropIndexWithDataColumns |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |87.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> KqpRm::SnapshotSharingByExchanger [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> IndexBuildTestReboots::IndexPartitioning >> TableCreator::CreateTables |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen2 |87.5%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2024-11-21T08:52:05.294187Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T08:52:05.323833Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T08:52:05.324031Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T08:52:05.325198Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T08:52:05.341749Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:05.343931Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:05.344499Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:05.344835Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:52:05.344992Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.345002Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:52:05.345031Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:52:05.347391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:52:05.347453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:52:05.347470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:52:05.347530Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.347547Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:52:05.347664Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.374760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:52:05.374822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.387963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:52:05.388032Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.388052Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:52:05.388065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.388092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:52:05.388098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.388102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:52:05.388108Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.406636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:52:05.406698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:52:05.406905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:52:05.406912Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:52:05.408415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:52:05.408670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T08:52:05.408996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T08:52:05.409073Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat 2024-11-21T08:52:05.409080Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat 2024-11-21T08:52:05.409241Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T08:52:05.409313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 
State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T08:52:05.409334Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.409349Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.409383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.409439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T08:52:05.412527Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T08:52:05.412631Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T08:52:05.425656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T08:52:05.425941Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T08:52:05.428160Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T08:52:05.428332Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T08:52:05.428498Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00156b/r3tmp/tmprPQypk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2939492577276998247 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T08:52:05.428729Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.428801Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.428806Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:52:05.428839Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:52:05.428847Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.428881Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.428903Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.428909Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:52:05.428915Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:52:05.428925Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:52:05.428997Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.429015Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.429020Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.429036Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T08:52:05.429093Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:52:05.429098Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:52:05.429102Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:52:05.429110Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T08:52:05.429831Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:52:05.429843Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T08:52:05.429872Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T08:52:05.429877Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T08:52:05.430080Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T08:52:05.430103Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.430113Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.430116Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.439210Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T08:52:05.439361Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:52:05.439373Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:52:05.439377Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:52:05.463695Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.463724Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.463729Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T08:52:05.463736Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T08:52:05.492607Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:05.496574Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T08:52:05.496688Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:52:05.497236Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:52:05.545054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T08:52:05.595300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T08:52:05.640557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T08:52:05.916968Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T08:52:05.918706Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T08:52:05.918850Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] >> IndexBuildTestReboots::BaseCase >> IndexBuildTestReboots::DropIndex |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest |87.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b (0, 1) | 0 39 2050b (5, 7) + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 
: 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b (0, 1) | 3 39 620b (5, 7) + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 
Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 
7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b (0, 1) | 7 39 1036b (5, 7) + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > ... 
urces {1, 0}) 00000.041 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen1 from 12.000000 to 0.000000 (remove task gen1-table-101-tablet-1 (50 by [20:29:2061])) 00000.041 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.041 DD| RESOURCE_BROKER: Submitted new background_compaction_gen2 task gen2-table-101-tablet-1 (57 by [20:29:2061]) priority=400 resources={1, 0} 00000.041 DD| RESOURCE_BROKER: Assigning waiting task gen2-table-101-tablet-1 (57 by [20:29:2061]) to queue queue_background_compaction 00000.041 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.041 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (56 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.041 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (56 by [20:29:2061]) to queue queue_compaction_gen0 00000.041 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (56 by [20:29:2061]) from queue queue_compaction_gen0 00000.041 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (56 by [20:29:2061]) to queue queue_compaction_gen0 00000.041 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.042 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (56 by [20:29:2061]) (release resources {1, 0}) 00000.042 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.042 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (58 by [20:29:2061]) priority=200 resources={1, 0} 00000.042 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_background_compaction 00000.042 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (58 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.043 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (58 by [20:29:2061]) from queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (58 by [20:29:2061]) (release resources {1, 0}) 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (59 by [20:29:2061]) priority=200 resources={1, 0} 00000.043 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_background_compaction 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (59 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.043 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (59 by [20:29:2061]) from queue 
queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (59 by [20:29:2061]) (release resources {1, 0}) 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (60 by [20:29:2061]) priority=200 resources={1, 0} 00000.043 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_background_compaction 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.043 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (60 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.043 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (60 by [20:29:2061]) from queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_compaction_gen0 00000.043 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.044 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (60 by [20:29:2061]) (release resources {1, 0}) 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.044 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (61 by [20:29:2061]) priority=200 resources={1, 0} 00000.044 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_background_compaction 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.044 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (61 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.044 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_compaction_gen0 00000.044 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (61 by [20:29:2061]) from queue queue_compaction_gen0 00000.044 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_compaction_gen0 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.044 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (61 by [20:29:2061]) (release resources {1, 0}) 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.044 DD| RESOURCE_BROKER: Submitted new background_compaction_gen1 task gen1-table-101-tablet-1 (62 by [20:29:2061]) priority=200 resources={1, 0} 00000.044 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [20:29:2061]) to queue queue_background_compaction 00000.044 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.086 DD| RESOURCE_BROKER: Update task gen2-table-101-tablet-1 (57 by [20:29:2061]) (priority=97 type=background_compaction_gen2 resources={1, 0} resubmit=0) 00000.086 DD| RESOURCE_BROKER: Assigning waiting task gen2-table-101-tablet-1 (57 by 
[20:29:2061]) to queue queue_background_compaction 00000.086 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (in-fly consumption {1, 0}) 00000.086 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.086 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987988 by [20:7:2054]) priority=150 resources={1, 0} 00000.086 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987988 by [20:7:2054]) to queue queue_background_compaction 00000.086 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.086 DD| RESOURCE_BROKER: Finish task bckg-block (987987987987 by [20:7:2054]) (release resources {1, 0}) 00000.086 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen2-table-101-tablet-1 (57 by [20:29:2061]) from queue queue_background_compaction 00000.086 DD| RESOURCE_BROKER: Assigning in-fly task gen2-table-101-tablet-1 (57 by [20:29:2061]) to queue queue_background_compaction 00000.086 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (57 by [20:29:2061])) 00000.086 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.087 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (57 by [20:29:2061]) (release resources {1, 0}) 00000.087 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (57 by [20:29:2061])) 00000.087 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [20:7:2054]) from queue queue_background_compaction 00000.087 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [20:7:2054]) to queue queue_background_compaction 00000.087 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [20:7:2054])) 00000.087 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.088 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [20:29:2061]) priority=200 resources={1, 0} 00000.088 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_background_compaction 00000.088 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.088 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.088 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_compaction_gen0 00000.088 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.088 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [20:29:2061]) from queue queue_compaction_gen0 00000.088 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_compaction_gen0 00000.088 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.088 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by [20:29:2061]) (release resources {1, 0}) 00000.088 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded 
limits 00000.135 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [20:29:2061]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.135 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [20:29:2061]) to queue queue_background_compaction 00000.135 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.135 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.135 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [20:29:2061]) 00000.136 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.136 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.136 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.136 II| FAKE_ENV: DS.0 gone, left {9702b, 90}, put {69314b, 689} 00000.136 II| FAKE_ENV: DS.1 gone, left {49679b, 125}, put {120827b, 750} 00000.136 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.136 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.136 II| FAKE_ENV: All BS storage groups are stopped 00000.136 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.136 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 652}, stopped >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanSpilling::HandleErrorsCorrectly >> KqpScanSpilling::SpillingPragmaParseError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:40.242666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:40.242687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.242692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:40.242697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:40.242710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:40.242714Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:40.242722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.242797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:40.253817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:40.253838Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.255928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:40.256020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:40.256047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:40.258545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:40.258609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:40.258721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.258904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.259563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.259820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.259830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.259842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:40.259848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.259854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:40.259893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:40.261242Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.278097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:40.278176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.278233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:40.278274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:40.278282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.278948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.278990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:40.279038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.279048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:40.279053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:40.279058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:40.279558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.279571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:40.279576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:40.279978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.279990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.279997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.280004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.280579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:40.281033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:40.281087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:40.281279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.281302Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:40.281309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.281362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:40.281370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.281417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:40.281429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.281870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.281879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.281916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.281921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:40.282009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.282015Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:40.282027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:40.282031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.282037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:40.282042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.282046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:40.282050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:40.282060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:40.282065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:40.282069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
e CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:08.207352Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:08.207444Z node 38 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 102us result status StatusSuccess 2024-11-21T08:52:08.207673Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:08.207756Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:08.207785Z node 38 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 28us result status StatusSuccess 2024-11-21T08:52:08.207872Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2024-11-21T08:52:08.271997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652708575407679:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034c7/r3tmp/tmp56hQjM/pdisk_1.dat 2024-11-21T08:52:08.346949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:08.404743Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:08.428698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:08.428723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:08.430687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6095 TServer::EnableGrpc on GrpcPort 1631, node 1 2024-11-21T08:52:08.465256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:08.465271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:08.465272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:08.465315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T08:52:08.479047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:08.481006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:08.482031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:52:08.482519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> KqpErrors::ProposeError >> DataShardWrite::UpsertImmediate |87.6%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |87.6%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScanSpilling::SpillingPragmaParseError [GOOD] |87.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |87.6%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/00164f/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 30806, MsgBus: 28956 2024-11-21T08:52:09.641899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652713648323515:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:09.641964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00164f/r3tmp/tmpRzcGqv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30806, node 1 2024-11-21T08:52:09.704390Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:09.724424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:09.724441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:09.724444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:09.724481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28956 2024-11-21T08:52:09.741970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:09.742008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:09.743388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:09.789662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:09.801512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:09.810243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.886187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.954926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.965737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.033408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652717943292211:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.033453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.058939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.071346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.081105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.096385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.109641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.125130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.141360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652717943292715:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.141380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.141506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652717943292720:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.142127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:10.150977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652717943292722:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:10.341022Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652717943293023:2458], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2024-11-21T08:52:10.341132Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA3NzRmNzktYWNmNjVhNTYtYmZiMDcwZDEtYmVhMTllNzM=, ActorId: [1:7439652717943293016:2454], ActorState: ExecuteState, TraceId: 01jd6yqxz0b3xsc5q83tcg7vj2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::UpsertImmediate [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> TTabletPipeTest::TestSendAfterReboot >> TTabletPipeTest::TestSendAfterReboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 97 nonWorkingDomain# 0 194400 diskMask# 97 nonWorkingDomain# 1 64800 diskMask# 98 nonWorkingDomain# 0 102474 diskMask# 98 nonWorkingDomain# 1 64800 diskMask# 99 nonWorkingDomain# 0 74004 diskMask# 99 nonWorkingDomain# 1 8640 diskMask# 100 nonWorkingDomain# 0 194400 diskMask# 100 nonWorkingDomain# 1 1088640 diskMask# 101 nonWorkingDomain# 0 87624 diskMask# 101 nonWorkingDomain# 1 64800 diskMask# 102 nonWorkingDomain# 0 74004 diskMask# 102 nonWorkingDomain# 1 64800 diskMask# 103 nonWorkingDomain# 0 51744 diskMask# 103 nonWorkingDomain# 1 8640 diskMask# 104 nonWorkingDomain# 0 383040 diskMask# 104 nonWorkingDomain# 1 64800 diskMask# 105 nonWorkingDomain# 0 191520 diskMask# 105 nonWorkingDomain# 1 8640 diskMask# 106 nonWorkingDomain# 0 78444 diskMask# 106 nonWorkingDomain# 1 8640 diskMask# 107 nonWorkingDomain# 0 69270 diskMask# 107 nonWorkingDomain# 1 4320 diskMask# 108 nonWorkingDomain# 0 128424 diskMask# 108 nonWorkingDomain# 1 64800 diskMask# 109 nonWorkingDomain# 0 91512 diskMask# 109 nonWorkingDomain# 1 8640 diskMask# 110 nonWorkingDomain# 0 63264 diskMask# 110 nonWorkingDomain# 1 8640 diskMask# 111 nonWorkingDomain# 0 43620 diskMask# 111 nonWorkingDomain# 1 4320 diskMask# 112 nonWorkingDomain# 0 148794 diskMask# 112 nonWorkingDomain# 1 64800 diskMask# 113 nonWorkingDomain# 0 124764 diskMask# 113 nonWorkingDomain# 1 8640 diskMask# 114 nonWorkingDomain# 0 124764 diskMask# 114 nonWorkingDomain# 1 8640 diskMask# 115 nonWorkingDomain# 0 73344 diskMask# 115 nonWorkingDomain# 1 47544 diskMask# 116 nonWorkingDomain# 0 124764 diskMask# 116 nonWorkingDomain# 1 64800 diskMask# 117 nonWorkingDomain# 0 73344 diskMask# 117 nonWorkingDomain# 1 8640 diskMask# 118 nonWorkingDomain# 0 73344 diskMask# 118 nonWorkingDomain# 1 8640 diskMask# 119 nonWorkingDomain# 0 31656 diskMask# 119 nonWorkingDomain# 1 47544 diskMask# 120 nonWorkingDomain# 0 129324 diskMask# 120 nonWorkingDomain# 1 8640 diskMask# 121 nonWorkingDomain# 0 120750 diskMask# 121 nonWorkingDomain# 1 4320 diskMask# 122 nonWorkingDomain# 0 83184 diskMask# 122 nonWorkingDomain# 1 62544 diskMask# 123 nonWorkingDomain# 0 56460 diskMask# 123 nonWorkingDomain# 1 48192 diskMask# 124 nonWorkingDomain# 0 83184 diskMask# 124 nonWorkingDomain# 1 8640 diskMask# 125 nonWorkingDomain# 0 56460 diskMask# 125 nonWorkingDomain# 1 4320 diskMask# 126 
nonWorkingDomain# 0 35496 diskMask# 126 nonWorkingDomain# 1 62544 diskMask# 127 nonWorkingDomain# 0 20640 diskMask# 127 nonWorkingDomain# 1 48192 diskMask# 128 nonWorkingDomain# 0 781920 diskMask# 128 nonWorkingDomain# 1 1088640 diskMask# 129 nonWorkingDomain# 0 210240 diskMask# 129 nonWorkingDomain# 1 64800 diskMask# 130 nonWorkingDomain# 0 220320 diskMask# 130 nonWorkingDomain# 1 64800 diskMask# 131 nonWorkingDomain# 0 95040 diskMask# 131 nonWorkingDomain# 1 8640 diskMask# 132 nonWorkingDomain# 0 210240 diskMask# 132 nonWorkingDomain# 1 1088640 diskMask# 133 nonWorkingDomain# 0 58074 diskMask# 133 nonWorkingDomain# 1 64800 diskMask# 134 nonWorkingDomain# 0 95040 diskMask# 134 nonWorkingDomain# 1 64800 diskMask# 135 nonWorkingDomain# 0 25164 diskMask# 135 nonWorkingDomain# 1 8640 diskMask# 136 nonWorkingDomain# 0 496800 diskMask# 136 nonWorkingDomain# 1 64800 diskMask# 137 nonWorkingDomain# 0 194400 diskMask# 137 nonWorkingDomain# 1 8640 diskMask# 138 nonWorkingDomain# 0 194400 diskMask# 138 nonWorkingDomain# 1 8640 diskMask# 139 nonWorkingDomain# 0 87624 diskMask# 139 nonWorkingDomain# 1 47544 diskMask# 140 nonWorkingDomain# 0 102474 diskMask# 140 nonWorkingDomain# 1 64800 diskMask# 141 nonWorkingDomain# 0 74004 diskMask# 141 nonWorkingDomain# 1 8640 diskMask# 142 nonWorkingDomain# 0 74004 diskMask# 142 nonWorkingDomain# 1 8640 diskMask# 143 nonWorkingDomain# 0 51744 diskMask# 143 nonWorkingDomain# 1 47544 diskMask# 144 nonWorkingDomain# 0 613440 diskMask# 144 nonWorkingDomain# 1 64800 diskMask# 145 nonWorkingDomain# 0 192960 diskMask# 145 nonWorkingDomain# 1 8640 diskMask# 146 nonWorkingDomain# 0 198720 diskMask# 146 nonWorkingDomain# 1 8640 diskMask# 147 nonWorkingDomain# 0 95040 diskMask# 147 nonWorkingDomain# 1 4320 diskMask# 148 nonWorkingDomain# 0 192960 diskMask# 148 nonWorkingDomain# 1 64800 diskMask# 149 nonWorkingDomain# 0 29484 diskMask# 149 nonWorkingDomain# 1 8640 diskMask# 150 nonWorkingDomain# 0 95040 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2024-11-21T08:52:03.551181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:03.551694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:03.551728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004050/r3tmp/tmpgfrjyZ/pdisk_1.dat 2024-11-21T08:52:03.659840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:03.675842Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:03.718267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:03.718301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:03.728856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:03.833287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:03.847604Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:03.847797Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:03.847870Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:52:03.847917Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:03.854953Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:03.855161Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:03.855187Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:03.855333Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:52:03.855340Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:52:03.855346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:52:03.855388Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:03.858571Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:52:03.858651Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:03.858675Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:52:03.858679Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:52:03.858684Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:52:03.858689Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:03.858816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:03.858822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:03.858951Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:52:03.858970Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:52:03.858981Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:03.858984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:03.858990Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:52:03.858997Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:03.859003Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:03.859010Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:52:03.859014Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:03.859017Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:03.859022Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:52:03.859027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:03.859045Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:52:03.859049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:03.859066Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:52:03.859110Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:52:03.859118Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:52:03.859134Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:52:03.859142Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:03.859145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:52:03.859150Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:52:03.859154Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:03.859197Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:03.859200Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:52:03.859203Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:52:03.859206Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:03.859215Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:52:03.859218Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:52:03.859221Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:52:03.859224Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:03.859228Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:03.859444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:52:03.859451Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:52:03.869847Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:52:03.869882Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:03.869890Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:03.869901Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:52:03.869917Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:04.059618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:04.059643Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:04.059652Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:52:04.059671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:52:04.059676Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:04.059701Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:04.059710Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:52:04.059714Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:52:04.059719Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:52:04.060505Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:52:04.060543Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:04.060691Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:04.060697Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:04.060703Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:04.060711Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:04.060716Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:04.060724Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 4Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T08:52:10.931527Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2024-11-21T08:52:10.931532Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931535Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T08:52:10.931542Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:10.931546Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:52:10.931567Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2024-11-21T08:52:10.931571Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2024-11-21T08:52:10.931574Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2024-11-21T08:52:10.931579Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931582Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:10.931585Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T08:52:10.931589Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T08:52:10.931601Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931604Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T08:52:10.931607Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T08:52:10.931611Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T08:52:10.931616Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 
2024-11-21T08:52:10.931619Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T08:52:10.931622Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T08:52:10.931625Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T08:52:10.931630Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931634Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T08:52:10.931637Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T08:52:10.931640Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T08:52:10.931644Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931647Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T08:52:10.931651Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T08:52:10.931654Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2024-11-21T08:52:10.931659Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2024-11-21T08:52:10.931700Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2024-11-21T08:52:10.931713Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:52:10.931726Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:10.931730Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T08:52:10.931733Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T08:52:10.931739Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:10.931796Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2024-11-21T08:52:10.931801Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T08:52:10.931805Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:10.931808Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:52:10.931814Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T08:52:10.931818Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:10.931821Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2024-11-21T08:52:10.931826Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:10.931830Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:10.931834Z node 6 
:TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:10.931837Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:10.944614Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 100} 2024-11-21T08:52:10.944650Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:52:10.944675Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:10.944688Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2024-11-21T08:52:10.944717Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:557:2484] 2024-11-21T08:52:10.944734Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:10.945241Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:729:2601], Recipient [6:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.945261Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.945269Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:728:2600], serverId# [6:729:2601], sessionId# [0:0:0] 2024-11-21T08:52:10.945308Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:727:2599], Recipient [6:631:2536]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:10.945504Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:732:2604], Recipient [6:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.945512Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.945517Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:731:2603], serverId# [6:732:2604], sessionId# [0:0:0] 2024-11-21T08:52:10.945568Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:730:2602], Recipient [6:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:10.945593Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:52:10.945604Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:10.945615Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2024-11-21T08:52:10.945627Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T08:52:10.945653Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:10.945658Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:52:10.945663Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:10.945667Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2024-11-21T08:52:10.945684Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T08:52:10.945688Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:10.945691Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:10.945695Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:52:10.945698Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:52:10.945713Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:10.945767Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:730:2602], 1000} after executionsCount# 1 2024-11-21T08:52:10.945775Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:730:2602], 1000} sends rowCount# 3, bytes# 72, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551543, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:10.945793Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:730:2602], 1000} finished in read 2024-11-21T08:52:10.945805Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:10.945813Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:52:10.945817Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:10.945820Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:52:10.945832Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:10.945835Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:10.945838Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T08:52:10.945844Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:52:10.945864Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |87.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:54.644846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:54.644881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:54.644886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:54.644892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:54.644900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:54.644905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:54.644915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:54.645000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:54.658482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:54.658511Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:54.663646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:54.664484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:54.664527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:54.667933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:54.669243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:54.669371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.669494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:54.670661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:54.670912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:54.670919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:54.670953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:54.670959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:54.670963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:54.670975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.673448Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:54.686988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:54.687092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.687182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:54.687230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:54.687237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.688110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.688143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:54.688228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.688240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:54.688244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:54.688250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:54.692818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.692874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:54.692885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:54.695007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.695039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.695049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:54.695061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:54.696020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:54.700902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:54.701005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:54.701271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:54.701346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:54.701357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:54.701473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:54.701483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:54.701526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:54.701541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:54.702316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:54.702331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:54.702388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:54.702394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:54.702488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:54.702498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:54.702512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:54.702520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:54.702526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:54.702531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:54.702535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:54.702539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:54.702554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:54.702562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:54.702566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:54.702995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:54.703015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:54.703022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:54.703028Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:54.703034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:54.703048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 3709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:11.053480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:52:11.053517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T08:52:11.053798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:11.053859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:11.053868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:52:11.053930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T08:52:11.053959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:11.195990Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3451:5417], attempt# 0 2024-11-21T08:52:11.199879Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3451:5417], sender# [1:3450:5416] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:22824 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41C9400B-EE20-454B-B89D-E6AD984CCB28 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T08:52:11.204368Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 
e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:22824 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B190701B-8DEC-4CF9-B08A-90371CB6C790 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:52:11.207006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:11.207024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:11.207101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:11.207107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:52:11.207203Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T08:52:11.207548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:11.207559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:11.207825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:52:11.207837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:52:11.207842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:52:11.207848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:52:11.207854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:52:11.207876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22824 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BE10C38F-D85F-40EB-BF4A-0A0538338309 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T08:52:11.209014Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], 
result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T08:52:11.209126Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T08:52:11.209316Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T08:52:11.209767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22824 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9F9863E3-D173-4DDF-BBB0-64C6943E2C80 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2024-11-21T08:52:11.210116Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2024-11-21T08:52:11.210123Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3451:5417], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T08:52:11.210180Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:52:11.223775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:11.223806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:11.223846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:11.223859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:11.223871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:11.223875Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:11.223879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:11.223886Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:52:11.224352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:11.225298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:11.225365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:11.225374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:52:11.225387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:52:11.225407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:11.225413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:52:11.225443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T08:52:11.225450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:11.225455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:52:11.225459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:52:11.225528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:11.226377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:52:11.226390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3436:5403] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:119:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:158:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:117:2145] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: 
[1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> Viewer::JsonAutocompleteStartOfDatabaseName >> Viewer::TabletMerging >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::Write >> Viewer::JsonAutocompleteEmpty |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::PDiskMerging [GOOD] >> Viewer::LevenshteinDistance [GOOD] >> Viewer::QueryExecuteScript >> TTxDataShardMiniKQL::ReadConstant >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::TableStats >> TTxDataShardMiniKQL::WriteValueTooLarge >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> Viewer::SelectStringWithBase64Encoding ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:56.215979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:56.216025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:56.216031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:56.216042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:56.216050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:56.216054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:56.216064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:56.216145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:56.230355Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:56.230383Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:56.239659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:56.240770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:56.240818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:56.252852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:56.254297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:56.254434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:56.254598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:56.265921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:56.266342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:56.266354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:56.266408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:56.266418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:56.266425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:56.266449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.268007Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:56.287791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:56.287910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.287991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:56.288040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:56.288048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.289466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:56.289498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:56.289570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.289584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:56.289589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:56.289595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:56.290045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.290056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:56.290062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:56.290392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.290401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.290408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:56.290416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:56.291126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:56.291582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:56.291642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:56.291859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:56.291887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:56.291897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:56.291957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:56.291964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:56.291999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:56.292012Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:56.292513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:56.292523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:56.292577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:56.292583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:56.292679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:56.292687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:56.292701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:56.292706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:56.292713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:56.292719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:56.292724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:56.292729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:56.292741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:56.292748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:56.292752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:56.293091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:56.293104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:56.293110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:56.293116Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:56.293121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:56.293136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
vFeed: self# [1:3453:5419] 2024-11-21T08:52:12.469054Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64479 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 81B75F68-8979-4D2F-A664-044D28B331E3 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2024-11-21T08:52:12.469564Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2024-11-21T08:52:12.469600Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T08:52:12.469626Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64479 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 40BC1622-71EC-4459-80A7-554EE8F3E489 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2024-11-21T08:52:12.470188Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2024-11-21T08:52:12.470234Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T08:52:12.470242Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64479 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FD00EC40-0CE7-49F0-AEE5-FA2ECD42AF2E amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2024-11-21T08:52:12.470768Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T08:52:12.470780Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3454:5420], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T08:52:12.472188Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3454:5420], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64479 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
A8389B09-9883-4BA9-9CA9-757691150E37 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2024-11-21T08:52:12.474055Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3454:5420], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2024-11-21T08:52:12.474147Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:52:12.476362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:12.476379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:12.476401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:12.476413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:12.476427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:12.476431Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:12.476435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:12.476442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:52:12.476494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:12.477220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:12.477308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:12.477315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:52:12.477326Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:52:12.477330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:12.477336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:52:12.477352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T08:52:12.477358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:12.477363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:52:12.477367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:52:12.477431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:12.478130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:52:12.478140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3439:5406] TestWaitNotification: OK eventTxId 102 >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> Viewer::JsonStorageListingV2 >> Viewer::JsonAutocompleteSimilarDatabaseName >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::WriteEraseRead >> KqpErrors::ProposeError [GOOD] >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> IndexBuildTestReboots::CancelBuild [GOOD] >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> GroupReconfiguration::ReassignsDoNotCauseErrorMessagesBlock4Plus2 [GOOD] >> IncorrectQueries::InvalidPartID >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeError [GOOD] Test command err: 2024-11-21T08:52:10.908002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:52:10.910434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:10.910520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:10.910548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:10.910826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:10.910836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f7a/r3tmp/tmpBBMMa0/pdisk_1.dat 2024-11-21T08:52:10.992511Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:11.090433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:11.180633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:11.180667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:11.181761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:11.181794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:11.193152Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:11.193318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:11.193442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:11.529778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:12.091227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1515:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:12.091257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:12.091334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:12.092529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:12.682426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1529:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:12.849000Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T08:52:12.849038Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T08:52:12.849051Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:52:12.849060Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T08:52:12.849086Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:52:12.849996Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T08:52:12.851558Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.445544s, cancelAfter: (empty maybe) 2024-11-21T08:52:12.851574Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T08:52:12.851585Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:52:12.851591Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T08:52:12.851601Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:52:12.851709Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T08:52:12.851832Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 0. 
Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T08:52:12.851848Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T08:52:12.851860Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T08:52:12.851882Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T08:52:12.851946Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T08:52:12.851995Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T08:52:12.852018Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T08:52:12.852082Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T08:52:12.852096Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T08:52:12.852260Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:12.852270Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd6yqznteb224w0a7qqt0s5g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZiNDA1YTItNWQ4NmVlMWUtMmFlNDIxLWE1ODY4MzY2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T08:52:12.853851Z node 1 :KQP_EXECUTER DEBUG: ActorId: [ ... YzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:13.168012Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T08:52:13.168197Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1949 RawX2: 4294970478 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\t\000\002\n\n" Rows: 1 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\005\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "default" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=" } RequestContext { key: "TraceId" value: "01jd6yr0q778m9meq2k801yd1k" } EnableSpilling: false 2024-11-21T08:52:13.168249Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T08:52:13.168268Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T08:52:13.168299Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:52:13.168304Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. 
Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T08:52:13.168310Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T08:52:13.168319Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T08:52:13.168324Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T08:52:13.180085Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: RESPONSE_DATA, error: 2024-11-21T08:52:13.180127Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T08:52:13.180178Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: GENERIC_ERROR Issues { message: "Error executing transaction: transaction failed." severity: 1 } Result { Stats { CpuTimeUs: 101 } } , to ActorId: [1:1939:3182] 2024-11-21T08:52:13.180196Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T08:52:13.180244Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
terminate execution. 2024-11-21T08:52:13.180251Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T08:52:13.180295Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, ActorId: [1:1939:3182], ActorState: ExecuteState, TraceId: 01jd6yr0q778m9meq2k801yd1k, Create QueryResponse for error on request, msg: 2024-11-21T08:52:13.180450Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 0. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T08:52:13.180472Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T08:52:13.180537Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Resolved key sets: 0 2024-11-21T08:52:13.180577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:13.180584Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T08:52:13.180593Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:52:13.180596Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T08:52:13.180608Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:52:13.180612Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. 
Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T08:52:13.180620Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd6yr0q778m9meq2k801yd1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE5OWFlZTctMTExZTczNDYtYzFkMjc1Y2YtN2IzYTQ5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:51:57.143893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:57.143919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.143924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:57.143933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:57.143939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:57.143943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:57.143951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:57.144026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:57.155739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:57.155761Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:57.160182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:57.161146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:57.161187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:57.162658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:57.162817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:57.162931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.163019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:57.164049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.164363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.164378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.164437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:57.164446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.164453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:57.164469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.165884Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:51:57.184461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:57.184543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.184601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:57.184647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:57.184655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.185335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.185361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:57.185424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.185434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:57.185438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:57.185444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:57.185834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.185851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:57.185856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:57.186225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.186236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.186241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.186249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.186838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:57.187212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:57.187261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:57.187438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:57.187464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:57.187471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.187524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:57.187531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:57.187559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.187569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:57.187940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:57.187947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:57.187991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:57.187997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:51:57.188071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:57.188078Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TDone opId# 1:0 ProgressState 2024-11-21T08:51:57.188089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:57.188097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.188103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:57.188109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:57.188113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:57.188117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:57.188128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:57.188134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:57.188137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:51:57.188456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.188473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:51:57.188477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:51:57.188482Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:51:57.188487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:57.188501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T08:52:13.220218Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2E8E2C5C-5D9A-4035-9D28-16631656E204 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2024-11-21T08:52:13.220991Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2024-11-21T08:52:13.221038Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T08:52:13.221070Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 92DC17F5-5BE8-487D-B8B4-9F276D543ABF amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2024-11-21T08:52:13.221718Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2024-11-21T08:52:13.221766Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T08:52:13.221775Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B919F4BC-25B5-41A3-BEEA-E87EB62A457B amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2024-11-21T08:52:13.222326Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T08:52:13.222334Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3454:5420], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T08:52:13.231539Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3454:5420], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
CCFF8AA8-B099-4B05-8412-BC995415A7D5 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T08:52:13.236188Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3454:5420], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2024-11-21T08:52:13.236364Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T08:52:13.242819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:13.242854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:13.242889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:13.242903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T08:52:13.242922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:13.242926Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:13.242932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:13.242941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T08:52:13.243063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:13.244942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:13.245153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:13.245165Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:52:13.245183Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2024-11-21T08:52:13.245188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:13.245196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:52:13.245224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T08:52:13.245233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:13.245239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:52:13.245244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:52:13.245284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:13.247052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:52:13.247070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3439:5406] TestWaitNotification: OK eventTxId 102 >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> HullReplWriteSst::Basic [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonStorageListingV1 >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::CancelBuild [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:07.829066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:07.829085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:07.829091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:07.829095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:07.829110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:07.829114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:07.829121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:07.829222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:07.840515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:07.840534Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:07.842643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:07.842741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:07.842765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:07.844895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:07.844965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:07.845073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.845212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:07.845806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:07.846110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:07.846116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:07.846157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:07.847649Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:07.864365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:07.864460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:52:07.864527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:07.864568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:07.864577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.865567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.865595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:07.865646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.865656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:07.865660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:07.865665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:07.866085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.866097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:07.866101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:07.866417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.866427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.866432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.866439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.867020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:07.867366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:07.867421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:07.867623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.867651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:07.867659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.867714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:07.867719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.867747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:07.867759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:07.868141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:07.868155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:07.868200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.868229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:07.868314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.868321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:07.868334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:07.868338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.868344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:07.868349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.868353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:07.868357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:07.868368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:07.868375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:07.868379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ed: { upload rows: 10, upload bytes: 180, read rows: 10, read bytes: 180 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:52:13.767041Z node 20 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 1003, subscribers count# 0 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 1003 Status: SUCCESS IndexBuild { Id: 1003 State: STATE_PREPARING Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2024-11-21T08:52:13.767159Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 1004 DatabaseName: "/MyRoot" IndexBuildId: 1003 2024-11-21T08:52:13.767178Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 1004 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <1003> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 1004 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <1003> has been finished already" severity: 1 } TestWaitNotification wait txId: 1004 2024-11-21T08:52:13.767228Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:52:13.767234Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:52:13.767293Z node 20 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767305Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:52:13.767312Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [20:827:2771] TestWaitNotification: OK eventTxId 1004 TestWaitNotification wait txId: 1003 2024-11-21T08:52:13.767351Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:13.767354Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:13.767387Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion index build in-flight, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767391Z node 20 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion, index build is ready to notify, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767399Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:13.767403Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [20:830:2774] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:13.767444Z node 20 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 1003 2024-11-21T08:52:13.767477Z node 20 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 1003 State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX 
RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 1003 State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2024-11-21T08:52:13.767542Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767589Z node 20 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table" took 52us result status StatusSuccess 2024-11-21T08:52:13.767687Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table" PathDescription { Self { Name: "Table" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767760Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767790Z node 20 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/dir/Table/index1" took 32us result status StatusSuccess 2024-11-21T08:52:13.767916Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:13.767970Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 1005 DatabaseName: "/MyRoot" IndexBuildId: 1003 2024-11-21T08:52:13.768022Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 1005 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 1005 Status: SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2024-11-21T08:52:13.176866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:13.176883Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:13.176897Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:13.179119Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:13.179228Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:13.179282Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:13.179954Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:13.187562Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:13.187732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:13.187867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:13.187880Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:13.187885Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:13.187920Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:13.191088Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:13.191158Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:13.191198Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:13.191203Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:13.191207Z node 1 :TX_DATASHARD INFO: Cannot activate change 
sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:13.191222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.191315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.191333Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.191358Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:13.191380Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:13.191425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.191431Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:13.191438Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:13.191443Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:13.191446Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:13.191449Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:13.191454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:13.198513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.198541Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.198550Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:13.198959Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:13.198969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:13.198991Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:13.199019Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:13.199028Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:13.199037Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:13.199046Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.199050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:13.199055Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:13.199059Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.199121Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
DelayCompleteNoMoreRestarts 2024-11-21T08:52:13.199124Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:13.199128Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:13.199131Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.199139Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:13.199142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:13.199145Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:13.199148Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.199152Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:13.221720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:13.221753Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.221760Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.221772Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:13.221788Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:13.221920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.221926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.221933Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:13.221950Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:13.221955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:13.221998Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.222008Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.222011Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:13.222015Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:13.222704Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:13.222723Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.222807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.222815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2024-11-21T08:52:13.222823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.222832Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:13.222837Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:13.222845Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:13.222850Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:13.222858Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.222862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:13.222866Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:13.222870Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:13.222926Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:13.222930Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.222933Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:13.222937Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:13.222941Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:13.222954Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.222957Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:13.222961Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:13.222964Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:13.222979Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:13.222982Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:13.222985Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:13.222991Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.222993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:13.222997Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
ARD TRACE: StateInit, received event# 268828672, Sender [3:224:2221], Recipient [3:227:2222]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:13.941038Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [3:224:2221], Recipient [3:227:2222]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:13.941092Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:224:2221], Recipient [3:227:2222]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:13.942098Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:227:2222] 2024-11-21T08:52:13.942154Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:13.942570Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2024-11-21T08:52:13.965437Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:13.965489Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:13.965839Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:13.965853Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:13.965862Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:13.965938Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:13.965957Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2024-11-21T08:52:13.965980Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2024-11-21T08:52:13.966003Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2024-11-21T08:52:13.966046Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:271:2259] 2024-11-21T08:52:13.966052Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:13.966057Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2024-11-21T08:52:13.966063Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.966135Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2024-11-21T08:52:13.966149Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2024-11-21T08:52:13.966200Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:227:2222], Recipient [3:227:2222]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.966211Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.966252Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:13.966266Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:13.966300Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:227:2222]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2024-11-21T08:52:13.966305Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T08:52:13.966309Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2024-11-21T08:52:13.966314Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.966353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged 
for unknown txId 1 message# Source { RawX1: 227 RawX2: 12884904110 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2024-11-21T08:52:13.966362Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.966369Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:13.966376Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:13.966382Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:13.966386Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:13.966390Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:13.966396Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:13.966411Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:227:2222]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2024-11-21T08:52:13.966416Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T08:52:13.966421Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2024-11-21T08:52:13.966427Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 9437184: waitStep# 0 readStep# 0 observedStep# 1000001 2024-11-21T08:52:13.966435Z node 3 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 9437184 promoting UnprotectedReadEdge to v0/18446744073709551615 2024-11-21T08:52:13.966446Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:269:2257], Recipient [3:227:2222]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:52:13.966450Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T08:52:13.966462Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:120:2146], Recipient [3:227:2222]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2024-11-21T08:52:13.966466Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T08:52:13.966472Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2024-11-21T08:52:13.966478Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2024-11-21T08:52:13.977288Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:269:2257], Recipient [3:227:2222]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:52:13.977307Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:52:14.040073Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T08:52:14.040097Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:14.040164Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:279:2265], Recipient [3:227:2222]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.040169Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.040178Z node 3 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 9437184, clientId# [3:277:2264], serverId# [3:279:2265], sessionId# [0:0:0] 2024-11-21T08:52:14.040517Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2024-11-21T08:52:14.040526Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:14.040552Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.040692Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2024-11-21T08:52:14.040716Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.040720Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:14.040725Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:14.040729Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:14.040738Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:14.040749Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2024-11-21T08:52:14.040754Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.040757Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:14.040760Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:14.040764Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.040836Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:14.040845Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:14.040856Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.040862Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:14.040865Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:14.040868Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2024-11-21T08:52:14.040874Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:52:14.040890Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2024-11-21T08:52:14.040894Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:14.040896Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:14.040900Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T08:52:14.040908Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.040911Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:14.040914Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T08:52:14.040924Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:14.040927Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.040934Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101228544 Size: 32986104} 749682 commit chunk# 2 {ChunkIdx: 2 Offset: 101199872 Size: 33017828} 750403 commit chunk# 3 {ChunkIdx: 3 Offset: 101216256 Size: 33000008} 749998 commit chunk# 4 {ChunkIdx: 4 Offset: 101199872 Size: 33014440} 750326 commit chunk# 5 {ChunkIdx: 5 Offset: 101208064 Size: 33006432} 750144 commit chunk# 6 {ChunkIdx: 6 Offset: 101224448 Size: 32992880} 749836 commit chunk# 7 {ChunkIdx: 7 Offset: 101212160 Size: 33005552} 750124 commit chunk# 8 {ChunkIdx: 8 Offset: 101240832 Size: 32976864} 749472 commit chunk# 9 {ChunkIdx: 9 Offset: 101232640 Size: 32982980} 749611 commit chunk# 10 {ChunkIdx: 10 Offset: 101220352 Size: 32996092} 749909 commit chunk# 11 {ChunkIdx: 11 Offset: 101257216 Size: 32959704} 749082 commit chunk# 12 {ChunkIdx: 12 Offset: 101232640 Size: 32985048} 749658 commit chunk# 13 {ChunkIdx: 13 Offset: 101216256 Size: 33000404} 750007 commit chunk# 14 {ChunkIdx: 14 Offset: 101240832 Size: 32976292} 749459 commit chunk# 15 {ChunkIdx: 15 Offset: 101212160 Size: 33003792} 750084 commit chunk# 16 {ChunkIdx: 16 Offset: 101228544 Size: 32986940} 749701 commit chunk# 17 {ChunkIdx: 17 Offset: 101212160 Size: 33001944} 750042 commit chunk# 18 {ChunkIdx: 18 Offset: 101228544 Size: 32986016} 749680 commit chunk# 19 {ChunkIdx: 19 Offset: 101228544 Size: 32988216} 749730 commit chunk# 20 {ChunkIdx: 20 Offset: 101220352 Size: 32995168} 749888 commit chunk# 21 {ChunkIdx: 21 Offset: 101212160 Size: 33001988} 750043 commit chunk# 22 {ChunkIdx: 22 Offset: 101224448 Size: 32989448} 749758 commit chunk# 23 {ChunkIdx: 23 Offset: 101265408 Size: 32950024} 748862 commit chunk# 24 {ChunkIdx: 24 Offset: 101236736 Size: 32980296} 749550 commit chunk# 25 {ChunkIdx: 25 Offset: 101273600 Size: 32942632} 748694 commit chunk# 26 {ChunkIdx: 26 Offset: 101203968 Size: 33013736} 750310 commit chunk# 27 {ChunkIdx: 27 Offset: 101199872 Size: 33015716} 750355 commit chunk# 28 {ChunkIdx: 28 Offset: 101220352 Size: 32993628} 749853 commit chunk# 29 {ChunkIdx: 29 Offset: 101220352 Size: 
32993540} 749851 commit chunk# 30 {ChunkIdx: 30 Offset: 101208064 Size: 33006960} 750156 commit chunk# 31 {ChunkIdx: 31 Offset: 101240832 Size: 32973740} 749401 commit chunk# 32 {ChunkIdx: 32 Offset: 101216256 Size: 32999040} 749976 commit chunk# 33 {ChunkIdx: 33 Offset: 101208064 Size: 33006520} 750146 commit chunk# 34 {ChunkIdx: 34 Offset: 101244928 Size: 32971320} 749346 commit chunk# 35 {ChunkIdx: 35 Offset: 101179392 Size: 33034328} 750778 commit chunk# 36 {ChunkIdx: 36 Offset: 101208064 Size: 33006036} 750135 commit chunk# 37 {ChunkIdx: 37 Offset: 101220352 Size: 32996576} 749920 commit chunk# 38 {ChunkIdx: 38 Offset: 101224448 Size: 32989800} 749766 commit chunk# 39 {ChunkIdx: 39 Offset: 101195776 Size: 33021920} 750496 commit chunk# 40 {ChunkIdx: 40 Offset: 101220352 Size: 32993408} 749848 commit chunk# 41 {ChunkIdx: 41 Offset: 101232640 Size: 32984168} 749638 commit chunk# 42 {ChunkIdx: 42 Offset: 101191680 Size: 33026012} 750589 commit chunk# 43 {ChunkIdx: 43 Offset: 101244928 Size: 32972640} 749376 commit chunk# 44 {ChunkIdx: 44 Offset: 101224448 Size: 32993276} 749845 commit chunk# 45 {ChunkIdx: 45 Offset: 101216256 Size: 32999832} 749994 commit chunk# 46 {ChunkIdx: 46 Offset: 101224448 Size: 32990328} 749778 commit chunk# 47 {ChunkIdx: 47 Offset: 101249024 Size: 32968680} 749286 commit chunk# 48 {ChunkIdx: 48 Offset: 101203968 Size: 33012196} 750275 commit chunk# 49 {ChunkIdx: 49 Offset: 101212160 Size: 33003044} 750067 commit chunk# 50 {ChunkIdx: 50 Offset: 101224448 Size: 32993276} 749845 commit chunk# 51 {ChunkIdx: 51 Offset: 101187584 Size: 33026716} 750605 commit chunk# 52 {ChunkIdx: 52 Offset: 101232640 Size: 32985048} 749658 commit chunk# 53 {ChunkIdx: 53 Offset: 101236736 Size: 32980648} 749558 commit chunk# 54 {ChunkIdx: 54 Offset: 101244928 Size: 32972772} 749379 commit chunk# 55 {ChunkIdx: 55 Offset: 101216256 Size: 33000712} 750014 commit chunk# 56 {ChunkIdx: 56 Offset: 101236736 Size: 32979240} 749526 commit chunk# 57 {ChunkIdx: 57 Offset: 101228544 Size: 32987292} 749709 commit chunk# 58 {ChunkIdx: 58 Offset: 101240832 Size: 32975500} 749441 commit chunk# 59 {ChunkIdx: 59 Offset: 101249024 Size: 32968680} 749286 commit chunk# 60 {ChunkIdx: 60 Offset: 101249024 Size: 32968680} 749286 commit chunk# 61 {ChunkIdx: 61 Offset: 101203968 Size: 33011140} 750251 commit chunk# 62 {ChunkIdx: 62 Offset: 101208064 Size: 33008808} 750198 commit chunk# 63 {ChunkIdx: 63 Offset: 101249024 Size: 32966876} 749245 commit chunk# 64 {ChunkIdx: 64 Offset: 101236736 Size: 32979680} 749536 commit chunk# 65 {ChunkIdx: 65 Offset: 101224448 Size: 32993276} 749845 commit chunk# 66 {ChunkIdx: 66 Offset: 101228544 Size: 32988436} 749735 commit chunk# 67 {ChunkIdx: 67 Offset: 101212160 Size: 33005068} 750113 commit chunk# 68 {ChunkIdx: 68 Offset: 101249024 Size: 32966964} 749247 commit chunk# 69 {ChunkIdx: 69 Offset: 101220352 Size: 32995960} 749906 commit chunk# 70 {ChunkIdx: 70 Offset: 101212160 Size: 33005244} 750117 commit chunk# 71 {ChunkIdx: 71 Offset: 101212160 Size: 33005552} 750124 commit chunk# 72 {ChunkIdx: 72 Offset: 101224448 Size: 32993276} 749845 commit chunk# 73 {ChunkIdx: 73 Offset: 101253120 Size: 32962652} 749149 commit chunk# 74 {ChunkIdx: 74 Offset: 101240832 Size: 32972948} 749383 commit chunk# 75 {ChunkIdx: 75 Offset: 101220352 Size: 32993936} 749860 commit chunk# 76 {ChunkIdx: 76 Offset: 101216256 Size: 33000844} 750017 commit chunk# 77 {ChunkIdx: 77 Offset: 101220352 Size: 32995256} 749890 commit chunk# 78 {ChunkIdx: 78 Offset: 101240832 Size: 32974840} 749426 commit 
chunk# 79 {ChunkIdx: 79 Offset: 101228544 Size: 32985356} 749665 commit chunk# 80 {ChunkIdx: 80 Offset: 101244928 Size: 32968768} 749288 commit chunk# 81 {ChunkIdx: 81 Offset: 101220352 Size: 32994860} 749881 commit chunk# 82 {ChunkIdx: 82 Offset: 101236736 Size: 32980956} 749565 commit chunk# 83 {ChunkIdx: 83 Offset: 101216256 Size: 32998468} 749963 commit chunk# 84 {ChunkIdx: 84 Offset: 101253120 Size: 32961948} 749133 commit chunk# 85 {ChunkIdx: 85 Offset: 101244928 Size: 32972420} 749371 commit chunk# 86 {ChunkIdx: 86 Offset: 101249024 Size: 32967712} 749264 commit chunk# 87 {ChunkIdx: 87 Offset: 101203968 Size: 33013736} 750310 commit chunk# 88 {ChunkIdx: 88 Offset: 101249024 Size: 32967228} 749253 commit chunk# 89 {ChunkIdx: 89 Offset: 101232640 Size: 32981264} 749572 commit chunk# 90 {ChunkIdx: 90 Offset: 101240832 Size: 32974444} 749417 commit chunk# 91 {ChunkIdx: 91 Offset: 101224448 Size: 32989316} 749755 commit chunk# 92 {ChunkIdx: 92 Offset: 101224448 Size: 32991824} 749812 commit chunk# 93 {ChunkIdx: 93 Offset: 101236736 Size: 32978536} 749510 commit chunk# 94 {ChunkIdx: 94 Offset: 101216256 Size: 33001460} 750031 commit chunk# 95 {ChunkIdx: 95 Offset: 101216256 Size: 33000800} 750016 commit chunk# 96 {ChunkIdx: 96 Offset: 101236736 Size: 32980296} 749550 commit chunk# 97 {ChunkIdx: 97 Offset: 101228544 Size: 32985796} 749675 commit chunk# 98 {ChunkIdx: 98 Offset: 101244928 Size: 32971936} 749360 commit chunk# 99 {ChunkIdx: 99 Offset: 101228544 Size: 32989184} 749752 commit chunk# 100 {ChunkIdx: 100 Offset: 101212160 Size: 33003792} 750084 commit chunk# 101 {ChunkIdx: 101 Offset: 101220352 Size: 32995124} 749887 commit chunk# 102 {ChunkIdx: 102 Offset: 101228544 Size: 32989184} 749752 commit chunk# 103 {ChunkIdx: 103 Offset: 101203968 Size: 33010612} 750239 commit chunk# 104 {ChunkIdx: 104 Offset: 101253120 Size: 32964588} 749193 commit chunk# 105 {ChunkIdx: 105 Offset: 101240832 Size: 32976864} 749472 commit chunk# 106 {ChunkIdx: 106 Offset: 101240832 Size: 32976600} 749466 commit chunk# 107 {ChunkIdx: 107 Offset: 101228544 Size: 32987072} 749704 commit chunk# 108 {ChunkIdx: 108 Offset: 101236736 Size: 32980956} 749565 commit chunk# 109 {ChunkIdx: 109 Offset: 101191680 Size: 33022888} 750518 commit chunk# 110 {ChunkIdx: 110 Offset: 101240832 Size: 32976864} 749472 commit chunk# 111 {ChunkIdx: 111 Offset: 101249024 Size: 32968680} 749286 commit chunk# 112 {ChunkIdx: 112 Offset: 101179392 Size: 33035516} 750805 commit chunk# 113 {ChunkIdx: 113 Offset: 101224448 Size: 32992748} 749833 >> Yq_1::Basic >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> IncorrectQueries::InvalidPartID [GOOD] >> IncorrectQueries::Incompatible >> IncorrectQueries::Incompatible [GOOD] >> IncorrectQueries::Proto >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> 
TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:26.507724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:26.507752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:26.507759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:26.507765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:26.507781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:26.507785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:26.507796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:26.507890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:26.521975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:26.521998Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:26.524479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:26.524598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:26.524634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:26.527819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:51:26.527913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:26.528070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:26.528323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:26.532096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:26.532450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:26.532467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:26.532482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:26.532490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:26.532497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:26.532537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:26.534166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:26.555157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:26.555247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.555313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:26.555373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:26.555383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.556090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:26.556123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:26.556166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.556178Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:26.556183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:26.556187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:26.556660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.556675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:26.556680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:26.557033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.557041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.557045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:26.557051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:26.557628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:26.557993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:26.558033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:26.558212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:26.558235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:26.558242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:26.558294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:26.558300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:26.558327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:26.558338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T08:51:26.558762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:26.558777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:26.558815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:26.558822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:26.558901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:26.558910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:26.558923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:26.558928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:26.558934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:26.558940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:26.558945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:26.558950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:26.558963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:26.558970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:26.558974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ablet: 72075186233409546, partId: 2 2024-11-21T08:52:14.009958Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:14.009965Z node 148 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:14.009971Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:14.009982Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:14.009986Z node 148 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:14.009990Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:14.009996Z node 148 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:52:14.010079Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010088Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010092Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:14.010096Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:14.010100Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:14.010177Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010185Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010188Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:14.010191Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:52:14.010197Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:52:14.010317Z node 148 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010326Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010330Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:14.010333Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:52:14.010337Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:52:14.010861Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010877Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.010881Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:14.010885Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:52:14.010890Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:52:14.010904Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:52:14.011563Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:14.012072Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.012093Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:14.012146Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:14.012151Z node 148 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:52:14.012163Z node 148 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:52:14.012165Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:14.012169Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:52:14.012178Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [148:465:2430] message: TxId: 1003 2024-11-21T08:52:14.012183Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:14.012189Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
1003:0 2024-11-21T08:52:14.012192Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:14.012202Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:14.012229Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:14.012233Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:14.012237Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:52:14.012240Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:14.012242Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:14.012254Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:14.012257Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:52:14.012258Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:52:14.012264Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:52:14.012322Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.012351Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.012378Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.012693Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:14.013024Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:14.013031Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [148:665:2587] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:14.013154Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:14.013209Z node 148 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 62us result status StatusSuccess 2024-11-21T08:52:14.013297Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: 
"streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Yq_1::CreateQuery_With_Idempotency >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> Yq_1::ModifyConnections >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter |87.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2024-11-21T08:52:12.826197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:12.826220Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:12.826243Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:12.829236Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:12.829387Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:12.829454Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:12.830177Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:12.836749Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:12.836892Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:12.837030Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:12.837041Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:12.837046Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:12.837072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:12.839373Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:12.839414Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:12.839438Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:12.839442Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:12.839445Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:12.839448Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:12.839513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.839532Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.839553Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:12.839569Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:12.839600Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:12.839604Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:12.839609Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:12.839613Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:12.839615Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:12.839618Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 
2024-11-21T08:52:12.839622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:12.844917Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.844935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.844943Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:12.845209Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:12.845217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:12.845231Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:12.845253Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:12.845261Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:12.845267Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:12.845274Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:12.845277Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:12.845280Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:12.845283Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:12.845342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:12.845345Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:12.845348Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:12.845350Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:12.845357Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:12.845359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:12.845361Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:12.845363Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:12.845366Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:12.866267Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:12.866289Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:12.866296Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit 
FinishPropose 2024-11-21T08:52:12.866308Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:12.866322Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:12.866439Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.866445Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.866452Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:12.866469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:12.866474Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:12.866513Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:12.866523Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.866527Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:12.866531Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:12.867108Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:12.867117Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:12.867160Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.867165Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.867171Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:12.867176Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:12.867197Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:12.867203Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:12.867207Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:12.867213Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.867216Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:12.867219Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:12.867223Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:12.867260Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:12.867264Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
2024-11-21T08:52:12.867266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:12.867270Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:12.867273Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:12.867283Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:12.867286Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:12.867289Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:12.867299Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:12.867311Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:12.867315Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:12.867318Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:12.867323Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.867326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:12.867330Z node 1 :TX_DATASHARD TRACE: ... ated operation [0:2] at 9437184 2024-11-21T08:52:14.462396Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.462398Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:14.462401Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:14.462403Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.462409Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:14.462416Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2024-11-21T08:52:14.462419Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T08:52:14.462473Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.462479Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.462482Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:14.475659Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:14.475697Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:14.475716Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is 
ExecutedNoMoreRestarts 2024-11-21T08:52:14.475723Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:14.475729Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:14.475734Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.475762Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:14.475766Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:14.475770Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:14.475774Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T08:52:14.475786Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:14.475790Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:14.475794Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T08:52:14.483762Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:14.483786Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.483795Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2024-11-21T08:52:14.483820Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:14.715825Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T08:52:14.715857Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:14.715957Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:287:2272], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.715963Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.715971Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2271], serverId# [3:287:2272], sessionId# [0:0:0] 2024-11-21T08:52:14.786412Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2024-11-21T08:52:14.786453Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:14.786497Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.808053Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2024-11-21T08:52:14.808094Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T08:52:14.808100Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:14.808106Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:14.808111Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:14.808122Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:14.808137Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2024-11-21T08:52:14.808143Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T08:52:14.808147Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:14.808151Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:14.808155Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.808164Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:14.808175Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2024-11-21T08:52:14.808180Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2024-11-21T08:52:14.808255Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.808261Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.808266Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:14.824694Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2024-11-21T08:52:14.824759Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2024-11-21T08:52:14.824770Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is 
Restart 2024-11-21T08:52:14.824875Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.824882Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:14.845081Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2024-11-21T08:52:14.845123Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:14.903507Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:14.903542Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:14.903561Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:14.903568Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:14.903574Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:14.903579Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.903591Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2024-11-21T08:52:14.903594Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:14.903598Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:14.903601Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 2024-11-21T08:52:14.903615Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T08:52:14.903618Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:14.903622Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2024-11-21T08:52:14.939225Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:14.939248Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.939257Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 7 ms, status: COMPLETE 2024-11-21T08:52:14.939281Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:14.939972Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T08:52:14.939989Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2024-11-21T08:52:14.940587Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvFollowerGcApplied >> SubDomainWithReboots::Delete >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase >> 
ForceDropWithReboots::ForceDeleteCreateTableInFly >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b (0, 1) | 3 39 620b (5, 7) + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : 
xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > (2, NULL) | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > (2, 4) | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > (2, 10) | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > (3, 3) | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > (3, 6) | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > (3, 8) | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > (4, NULL) | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > (4, 4) | 
PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > (4, 7) | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > (4, 10) | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > (5, 3) | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > (5, 6) | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 
122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 ... xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 
: 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 7 12 122b (1, 8) | 8 14 122b (2, NULL) | 9 16 122b (2, 4) | 11 18 122b (2, 7) | 12 20 122b (2, 10) | 13 22 122b (3, 3) | 15 24 122b (3, 6) | 16 26 122b (3, 8) | 17 28 122b (4, NULL) | 19 30 122b (4, 4) | 20 32 122b (4, 7) | 21 34 122b (4, 10) | 24 36 122b (5, 3) | 25 38 122b (5, 6) | 25 39 122b (5, 7) + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 12 RowCount: 22 DataSize: 1332 
ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 
1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> IncorrectQueries::Proto [GOOD] >> IncorrectQueries::BaseReadingTest >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK >> IncorrectQueries::BaseReadingTest [GOOD] >> IncorrectQueries::EmptyGetTest [GOOD] >> IncorrectQueries::ProtoBlobGet >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> IncorrectQueries::ProtoBlobGet [GOOD] >> IncorrectQueries::EmptyTest >> IncorrectQueries::EmptyTest [GOOD] >> IncorrectQueries::BasePutTest >> IncorrectQueries::BasePutTest [GOOD] >> IncorrectQueries::MultiPutBaseTest >> ForceDropWithReboots::ForceDropDeleteInFly >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteScheme ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2024-11-21T08:52:14.267679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:14.267695Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:14.267706Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:14.271214Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:14.271313Z node 1 
:TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:14.271358Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:14.271997Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:14.278236Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:14.278368Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:14.278475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:14.278485Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:14.278490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:14.278517Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:14.281385Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:14.281446Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:14.281478Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:14.281483Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:14.281487Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:14.281492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:14.281551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:14.281567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:14.281594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:14.281615Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:14.281656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:14.281661Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:14.281666Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:14.281671Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:14.281674Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:14.281678Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:14.281683Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:14.287569Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.287589Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.287596Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:14.287984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction 
TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:14.287993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:14.288008Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:14.288028Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:14.288036Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:14.288042Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:14.288049Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:14.288052Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:14.288055Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:14.288057Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:14.288102Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:14.288105Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:14.288107Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:14.288109Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.288117Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:14.288119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:14.288121Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:14.288123Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:14.288126Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:14.309119Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:14.309142Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:14.309148Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:14.309160Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:14.309176Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:14.309301Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.309309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:14.309318Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 
2024-11-21T08:52:14.309339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:14.309343Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:14.309405Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:14.309417Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:14.309421Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:14.309426Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:14.309929Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:14.309937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:14.309989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:14.309993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:14.309999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:14.310004Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:14.310006Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:14.310011Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T08:52:14.310015Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:14.310019Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:14.310021Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:14.310024Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:14.310026Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:14.310050Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2024-11-21T08:52:14.310053Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:14.310055Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:14.310057Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:14.310060Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:14.310103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:14.310105Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:14.310128Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T08:52:14.310131Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:14.310133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 
2024-11-21T08:52:14.310135Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:14.310137Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:14.310145Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:14.310147Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:14.310149Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:14.310151Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:14.310160Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is ... 4-11-21T08:52:15.818621Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 out-of-order limits exceeded 2024-11-21T08:52:15.818624Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:15.818632Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{14, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:52:15.818638Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} touch new 0b, 0b lo load (0b in total), 86213808b requested for data (96990534b in total) 2024-11-21T08:52:15.818642Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2024-11-21T08:52:15.818645Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} released on update Res{3 10776726b}, Memory{0 dyn 0} 2024-11-21T08:52:15.818649Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 96990534b} type transaction 2024-11-21T08:52:15.818660Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2024-11-21T08:52:15.818664Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223]) to queue queue_transaction 2024-11-21T08:52:15.818669Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223]) from queue queue_transaction 2024-11-21T08:52:15.818673Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223]) to queue queue_transaction 2024-11-21T08:52:15.818677Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223])) 2024-11-21T08:52:15.818685Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2024-11-21T08:52:15.818689Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2024-11-21T08:52:15.818772Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2024-11-21T08:52:15.872340Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2024-11-21T08:52:15.872384Z 
node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:15.872403Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2024-11-21T08:52:15.872410Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2024-11-21T08:52:15.872417Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompleteOperation 2024-11-21T08:52:15.872421Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2024-11-21T08:52:15.872509Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2024-11-21T08:52:15.872513Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2024-11-21T08:52:15.872517Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2024-11-21T08:52:15.872523Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2024-11-21T08:52:15.872529Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2024-11-21T08:52:15.872531Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2024-11-21T08:52:15.872535Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2024-11-21T08:52:15.872542Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:15.872546Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:52:15.872551Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T08:52:15.872554Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T08:52:15.872589Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2024-11-21T08:52:15.872603Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2024-11-21T08:52:15.872689Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2024-11-21T08:52:15.872716Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:354:2305]) (release resources {0, 96990534}) 2024-11-21T08:52:15.872740Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:354:2305])) 2024-11-21T08:52:15.872764Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:15.872769Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:15.873020Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2024-11-21T08:52:15.956554Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] 
at tablet 9437184 with status COMPLETE 2024-11-21T08:52:15.956600Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:15.956624Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:15.956632Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:15.956639Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2024-11-21T08:52:15.956645Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2024-11-21T08:52:15.956751Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2024-11-21T08:52:15.956755Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2024-11-21T08:52:15.956758Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:15.956763Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2024-11-21T08:52:15.956769Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2024-11-21T08:52:15.956772Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:15.956776Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2024-11-21T08:52:15.956784Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:15.956789Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:15.956793Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:15.956797Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:15.956838Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2024-11-21T08:52:15.956853Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2024-11-21T08:52:15.956931Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223]) (release resources {0, 96990534}) 2024-11-21T08:52:15.956951Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:250:2223])) 2024-11-21T08:52:15.969076Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} commited cookie 1 for step 8 2024-11-21T08:52:15.969108Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:52:15.969117Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2024-11-21T08:52:15.969138Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:97:2132], exec latency: 2 ms, propose latency: 4 ms 
2024-11-21T08:52:15.969152Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2024-11-21T08:52:15.969161Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2024-11-21T08:52:15.969240Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} commited cookie 1 for step 8 2024-11-21T08:52:15.969246Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:15.969254Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2024-11-21T08:52:15.969263Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:97:2132], exec latency: 2 ms, propose latency: 4 ms 2024-11-21T08:52:15.969270Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T08:52:15.969274Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:15.969324Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:332:2305], Recipient [3:437:2387]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2024-11-21T08:52:15.969331Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:52:15.969338Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2024-11-21T08:52:15.969353Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:228:2223], Recipient [3:437:2387]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T08:52:15.969356Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:52:15.969359Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 >> IncorrectQueries::MultiPutBaseTest [GOOD] >> IncorrectQueries::MultiPutCrcTest >> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> IncorrectQueries::MultiPutCrcTest [GOOD] >> IncorrectQueries::MultiPutWithoutBlobs [GOOD] >> IncorrectQueries::ProtoHasOnlyVDiskId [GOOD] >> IncorrectQueries::EmptyProtoMultiPut >> Viewer::JsonAutocompleteColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> IncorrectQueries::EmptyProtoMultiPut [GOOD] >> IncorrectQueries::ManyQueriesThroughOneBSQueue [GOOD] >> SubDomainWithReboots::CreateTabletInsideWithStoragePools |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2024-11-21T08:52:10.635182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:10.635541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:10.635556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00404a/r3tmp/tmp2HDMlS/pdisk_1.dat 2024-11-21T08:52:10.741260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.759944Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:10.802584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:10.802614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:10.813104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:10.917103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.931246Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:10.931485Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:10.931566Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:52:10.931610Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:10.939830Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:10.939996Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:10.940021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:10.940165Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:52:10.940173Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:52:10.940178Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:52:10.940237Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:10.943859Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:52:10.943936Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:10.943961Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:52:10.943966Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:52:10.943971Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:52:10.943976Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:10.944105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:10.944112Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:10.944251Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:52:10.944268Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:52:10.944280Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.944284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:10.944291Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:52:10.944298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:10.944304Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:10.944311Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:52:10.944316Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:10.944320Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:10.944326Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:52:10.944331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:10.944351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:52:10.944355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:10.944376Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:52:10.944422Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:52:10.944431Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:52:10.944448Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:52:10.944455Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:10.944459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:52:10.944465Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:52:10.944468Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:10.944508Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:10.944513Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:52:10.944516Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:52:10.944520Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:10.944529Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:52:10.944532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:52:10.944536Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:52:10.944539Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:10.944545Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:10.944790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:52:10.944800Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:52:10.955130Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:52:10.955163Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:10.955171Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:10.955183Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:52:10.955197Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:11.135289Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:11.135316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:11.135325Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:52:11.135346Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:52:11.135351Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:11.135378Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:11.135387Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:52:11.135392Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:52:11.135396Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:52:11.136086Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:52:11.136102Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:11.136257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:11.136264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:11.136271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:11.136278Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:11.136283Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:11.136291Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:16.501884Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:891:2736], 1001} after executionsCount# 1 2024-11-21T08:52:16.501889Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:891:2736], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:16.501896Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:891:2736], 1001} finished in read 2024-11-21T08:52:16.501901Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T08:52:16.501905Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T08:52:16.501908Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:52:16.501912Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:52:16.501917Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T08:52:16.501921Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:52:16.501924Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2024-11-21T08:52:16.501927Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T08:52:16.501935Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T08:52:16.501999Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:896:2741], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502004Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502008Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:895:2740], serverId# [6:896:2741], sessionId# [0:0:0] 2024-11-21T08:52:16.502021Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, 
Sender [6:894:2739], Recipient [6:683:2563]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:16.502122Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:899:2744], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502127Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502131Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:898:2743], serverId# [6:899:2744], sessionId# [0:0:0] 2024-11-21T08:52:16.502150Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:897:2742], Recipient [6:683:2563]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:16.502159Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2024-11-21T08:52:16.502163Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:16.502167Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2024-11-21T08:52:16.502172Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2024-11-21T08:52:16.502179Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T08:52:16.502183Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2024-11-21T08:52:16.502186Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:16.502190Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2024-11-21T08:52:16.502195Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2024-11-21T08:52:16.502200Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T08:52:16.502203Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:16.502206Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2024-11-21T08:52:16.502211Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2024-11-21T08:52:16.502219Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:16.502233Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:897:2742], 1002} after executionsCount# 1 2024-11-21T08:52:16.502238Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:897:2742], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:16.502244Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:897:2742], 1002} finished in read 2024-11-21T08:52:16.502249Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T08:52:16.502253Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2024-11-21T08:52:16.502256Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:52:16.502260Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:52:16.502265Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T08:52:16.502271Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:52:16.502274Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2024-11-21T08:52:16.502278Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2024-11-21T08:52:16.502286Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2024-11-21T08:52:16.502349Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:902:2747], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502353Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502358Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:901:2746], serverId# [6:902:2747], sessionId# [0:0:0] 2024-11-21T08:52:16.502370Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:900:2745], Recipient [6:680:2561]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T08:52:16.502453Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:905:2750], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502457Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:16.502461Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:904:2749], serverId# [6:905:2750], sessionId# [0:0:0] 2024-11-21T08:52:16.502479Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:903:2748], Recipient [6:680:2561]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T08:52:16.502488Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2024-11-21T08:52:16.502516Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:16.502520Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2024-11-21T08:52:16.502525Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2024-11-21T08:52:16.502532Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T08:52:16.502535Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2024-11-21T08:52:16.502539Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:16.502543Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2024-11-21T08:52:16.502548Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2024-11-21T08:52:16.502552Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T08:52:16.502556Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:16.502559Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2024-11-21T08:52:16.502563Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2024-11-21T08:52:16.502571Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T08:52:16.502613Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:903:2748], 1003} after executionsCount# 1 2024-11-21T08:52:16.502619Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:903:2748], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:16.502626Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:903:2748], 1003} finished in read 2024-11-21T08:52:16.502631Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T08:52:16.502634Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2024-11-21T08:52:16.502638Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2024-11-21T08:52:16.502642Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2024-11-21T08:52:16.502647Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T08:52:16.502650Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2024-11-21T08:52:16.502654Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2024-11-21T08:52:16.502657Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2024-11-21T08:52:16.502665Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |87.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::Plan2SvgBad [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> IncorrectQueries::ManyQueriesThroughOneBSQueue [GOOD] Test command err: RandomSeed# 2814439219489940724 Reassign disk fromNodeId# 1 toNodeId# 4 2024-11-21T08:51:22.515202Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:22.515646Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7056394745434424970] 2024-11-21T08:51:22.516923Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:6:1024:1] 2024-11-21T08:51:22.516938Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:7:1024:2] 2024-11-21T08:51:22.516943Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:8:1024:1] 2024-11-21T08:51:22.516954Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:15:1024:1] 2024-11-21T08:51:22.516959Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:16:1024:2] 2024-11-21T08:51:22.516965Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:17:1024:1] 2024-11-21T08:51:22.516970Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:18:1024:2] 2024-11-21T08:51:22.516981Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:26:1024:1] 2024-11-21T08:51:22.516987Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:27:1024:2] 2024-11-21T08:51:22.516992Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:28:1024:1] 2024-11-21T08:51:22.517002Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:35:1024:1] 2024-11-21T08:51:22.517007Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:36:1024:2] 2024-11-21T08:51:22.517012Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:37:1024:1] 2024-11-21T08:51:22.517018Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:38:1024:2] 2024-11-21T08:51:22.517028Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:45:1024:2] 2024-11-21T08:51:22.517033Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:46:1024:1] 2024-11-21T08:51:22.517038Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:47:1024:2] 2024-11-21T08:51:22.517049Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:55:1024:1] 2024-11-21T08:51:22.517055Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:56:1024:2] 2024-11-21T08:51:22.517060Z 4 00h00m31.010513s :BS_SYNCER ERROR: 
VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:57:1024:1] 2024-11-21T08:51:22.517065Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:58:1024:2] 2024-11-21T08:51:22.517075Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:65:1024:2] 2024-11-21T08:51:22.517080Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:66:1024:1] 2024-11-21T08:51:22.517086Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:67:1024:2] 2024-11-21T08:51:22.517097Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:74:1024:3] 2024-11-21T08:51:22.517102Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:75:1024:1] 2024-11-21T08:51:22.517111Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:76:1024:2] 2024-11-21T08:51:22.517118Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:78:1024:2] 2024-11-21T08:51:22.517128Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:85:1024:2] 2024-11-21T08:51:22.517134Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:86:1024:1] 2024-11-21T08:51:22.517139Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:87:1024:2] 2024-11-21T08:51:22.517150Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:94:1024:3] 2024-11-21T08:51:22.517156Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:95:1024:1] 2024-11-21T08:51:22.517161Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:96:1024:2] 2024-11-21T08:51:22.517167Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:98:1024:2] 2024-11-21T08:51:22.517175Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:103:1024:3] 2024-11-21T08:51:22.517182Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:105:1024:2] 2024-11-21T08:51:22.517189Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:107:1024:2] 2024-11-21T08:51:22.517459Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 38 PartsResurrected# 38 Reassign disk fromNodeId# 1 toNodeId# 4 2024-11-21T08:51:22.942898Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:22.943352Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15045459805962682863] 2024-11-21T08:51:22.944380Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:662:1024:3] 2024-11-21T08:51:22.944395Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:664:1024:2] 2024-11-21T08:51:22.944402Z 4 00h00m31.010513s 
:BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:666:1024:2] 2024-11-21T08:51:22.944410Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:671:1024:3] 2024-11-21T08:51:22.944416Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:673:1024:3] 2024-11-21T08:51:22.944422Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:675:1024:2] 2024-11-21T08:51:22.944432Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:682:1024:3] 2024-11-21T08:51:22.944438Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:684:1024:2] 2024-11-21T08:51:22.944444Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:686:1024:2] 2024-11-21T08:51:22.944452Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:691:1024:3] 2024-11-21T08:51:22.944458Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:693:1024:3] 2024-11-21T08:51:22.944464Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:695:1024:2] 2024-11-21T08:51:22.944472Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:700:1024:3] 2024-11-21T08:51:22.944478Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:702:1024:3] 2024-11-21T08:51:22.944484Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:704:1024:2] 2024-11-21T08:51:22.944494Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:711:1024:3] 2024-11-21T08:51:22.944500Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:713:1024:3] 2024-11-21T08:51:22.944506Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:715:1024:2] 2024-11-21T08:51:22.944519Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:720:1024:1] 2024-11-21T08:51:22.944527Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:722:1024:3] 2024-11-21T08:51:22.944533Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:724:1024:2] 2024-11-21T08:51:22.944542Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:729:1024:1] 2024-11-21T08:51:22.944547Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:731:1024:3] 2024-11-21T08:51:22.944554Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:733:1024:3] 2024-11-21T08:51:22.944565Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:10:740:1024:1] 2024-11-21T08:51:22.944738Z 4 00h00m31.010513s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 25 PartsResurrected# 25 2024-11-21T08:51:30.255328Z 1 00h00m30.011000s :BS_PROXY_GET ERROR: [33e4fe5b030036b8] Response# 
TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1162:4194304:0] DEADLINE Size# 0 RequestedSize# 4194304} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:51:30.256396Z 5 00h00m30.011000s :BS_PROXY_GET ERROR: [628ea9f3b419bf7d] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1161:10:0] DEADLINE Size# 0 RequestedSize# 10} ErrorReason# "status# DEADLINE from# [82000000:1:1:0:0]"} Marker# BPG29 2024-11-21T08:51:30.256440Z 9 00h00m30.011000s :BS_PROXY_GET ERROR: [58f8ee755f2309ef] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1183:10:0] DEADLINE Size# 0 RequestedSize# 10} ErrorReason# "status# DEADLINE from# [82000000:1:2:0:0]"} Marker# BPG29 2024-11-21T08:51:30.256470Z 2 00h00m30.011000s :BS_PROXY_GET ERROR: [1487dc71c0781638] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1205:10:0] DEADLINE Size# 0 RequestedSize# 10} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2024-11-21T08:51:30.256504Z 7 00h00m30.011000s :BS_PROXY_GET ERROR: [5005c9c760588fa7] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1227:10:0] DEADLINE Size# 0 RequestedSize# 10} ErrorReason# "status# DEADLINE from# [82000000:1:1:2:0]"} Marker# BPG29 2024-11-21T08:51:30.256530Z 11 00h00m30.011000s :BS_PROXY_GET ERROR: [d7a2074ae4c21b0b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1249:10:0] DEADLINE Size# 0 RequestedSize# 10} ErrorReason# "status# DEADLINE from# [82000000:1:2:2:0]"} Marker# BPG29 2024-11-21T08:51:30.256557Z 2 00h00m30.011000s :BS_PROXY_GET ERROR: [3dc36b3a21d9c21f] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1271 ... tatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:52:13.723268Z 4 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2345:10:0] PatchedBlobId# [1:1:11:10:51497:10:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:52:13.723276Z 8 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:7:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:22847:10:0] PatchedBlobId# [1:1:11:10:2367:10:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:52:13.723284Z 5 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:4:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2389:10:0] PatchedBlobId# [1:1:11:10:2389:10:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:52:13.723291Z 2 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:72022:4194304:0] PatchedBlobId# [1:1:11:10:2390:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.723521Z 1 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2192:4194304:0] PatchedBlobId# [1:1:11:10:2192:4194304:0] ErrorReason# Couldn't get 
the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.723990Z 7 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2258:4194304:0] PatchedBlobId# [1:1:11:10:75986:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.724670Z 6 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:5:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2368:4194304:0] PatchedBlobId# [1:1:11:10:100672:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.725159Z 8 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:7:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2302:4194304:0] PatchedBlobId# [1:1:11:10:2302:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.725686Z 3 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:2:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:47336:4194304:0] PatchedBlobId# [1:1:11:10:2280:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.726160Z 7 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:6:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:2412:4194304:0] PatchedBlobId# [1:1:11:10:51564:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.726485Z 5 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:4:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:22694:4194304:0] PatchedBlobId# [1:1:11:10:2214:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:2:0] Marker# BSVSP01 2024-11-21T08:52:13.726767Z 4 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:22650:4194304:0] PatchedBlobId# [1:1:11:10:2170:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:52:13.727211Z 1 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:145706:4194304:0] PatchedBlobId# [1:1:11:10:2346:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2024-11-21T08:52:13.727641Z 2 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:96444:4194304:0] PatchedBlobId# [1:1:11:10:2236:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# 
BSVSP01 2024-11-21T08:52:13.728140Z 4 00h00m30.020000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:0:3:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:22804:4194304:0] PatchedBlobId# [1:1:11:10:2324:4194304:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2024-11-21T08:52:14.128337Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: ingress mismatch; id# [1:1:0:0:0:100:13] Marker# BSVS11 2024-11-21T08:52:14.331911Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: buffer size does not match with part size; buffer size# 100 PartSize# 32 id# [1:1:0:0:0:100:13] Marker# BSVS01 2024-11-21T08:52:14.469585Z 3 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: TEvVPut: ingress mismatch; id# [1:1:0:0:0:100:13] Marker# BSVS11 2024-11-21T08:52:14.660913Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: ingress mismatch; id# [1:1:0:0:0:100:13] Marker# BSVS11 2024-11-21T08:52:14.906259Z 4 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:1:0:0]: TEvVPut: ingress mismatch; id# [1:1:0:0:0:100:13] Marker# BSVS11 2024-11-21T08:52:15.012600Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 58 PartSize# 100 id# [1:1:0:0:0:100:1] Marker# BSVS01 2024-11-21T08:52:15.012845Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 142 PartSize# 100 id# [1:1:0:0:0:100:1] Marker# BSVS01 2024-11-21T08:52:15.013027Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 0 PartSize# 100 id# [1:1:0:0:0:100:1] Marker# BSVS01 2024-11-21T08:52:15.150151Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:15.150424Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 0 id# [0:0:0:0:0:0:0] Marker# BSVS01 2024-11-21T08:52:15.150600Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [2748:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.150776Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [0:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.383164Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:15.383495Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 0 id# [0:0:0:0:0:0:0] Marker# BSVS01 2024-11-21T08:52:15.383754Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 32 id# [2748:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.384000Z 4 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 32 id# [0:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.533307Z 2 00h00m21.210512s 
:BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:15.533609Z 2 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 0 id# [0:0:0:0:0:0:0] Marker# BSVS01 2024-11-21T08:52:15.533825Z 2 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [2748:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.534031Z 4 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [0:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.780611Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:15.780918Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 0 id# [0:0:0:0:0:0:0] Marker# BSVS01 2024-11-21T08:52:15.781156Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [2748:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:15.781380Z 4 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [0:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:16.022640Z 1 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:16.022860Z 1 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 0 id# [0:0:0:0:0:0:0] Marker# BSVS01 2024-11-21T08:52:16.023080Z 7 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:2:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [2748:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:16.023311Z 8 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:2:1:0]: TEvVPut: buffer size does not match with part size; buffer size# 42 PartSize# 15 id# [0:0:4294967040:0:0:15:15] Marker# BSVS01 2024-11-21T08:52:16.773105Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVMultiPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:16.773259Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVMultiPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:52:16.773458Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS39 2024-11-21T08:52:16.773506Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 2 Marker# BSVS39 >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: Data has built Merge = 0.03752707665 Data has merged 2024-11-21T08:52:13.115813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652729067824697:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:13.115833Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:13.175218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8105, node 1 2024-11-21T08:52:13.183467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:13.183479Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:13.183480Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:13.183527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:13.218378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:13.218410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:13.219487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:13.247525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:13.249902Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:13.251155Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:52:13.427023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652729067825322:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:13.427066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:13.427178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652729067825349:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:13.427910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:13.430418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:52:13.430507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652729067825351:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:13.519991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:14.601929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T08:52:14.602422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T08:52:14.602653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:0, at schemeshard: 72057594046644480 2024-11-21T08:52:15.890547Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:52:15.890901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179135932, txId: 281474976715719] shutting down 2024-11-21T08:52:16.086436Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652741214210929:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:16.086641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 32462, node 2 2024-11-21T08:52:16.108973Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:16.110155Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:16.110159Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:16.110161Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:16.110206Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10891 2024-11-21T08:52:16.130753Z node 2 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:52:16.191953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:16.191987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:16.193693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:16.692513Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439652743302151346:2070];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:16.692595Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:16.706264Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3444, node 3 2024-11-21T08:52:16.717494Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T08:52:16.717515Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:16.717517Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:16.717570Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3511 2024-11-21T08:52:16.730510Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:52:16.793679Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:16.793717Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:16.794805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::SelectStringWithNoBase64Encoding ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2024-11-21T08:52:13.372250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:13.372356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:13.372366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2611, node 1 TClient is connected to server localhost:3289 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:14.231792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:14.231868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:14.231888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 14454, node 2 TClient is connected to server localhost:17762 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:15.319061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:15.319099Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:15.319107Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28833, node 3 TClient is connected to server localhost:1494 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"id","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"orders"}]},"Version":2} 2024-11-21T08:52:16.331474Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:16.331558Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:16.331588Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 5996, node 4 TClient is connected to server localhost:8137 json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} 2024-11-21T08:52:17.390108Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:17.390137Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:17.390149Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 61926, node 5 TClient is connected to server localhost:4407 json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2024-11-21T08:52:14.170302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:14.170402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:14.170412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 5264, node 1 TClient is connected to server localhost:29127 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:15.207413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:15.207474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:15.207487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13797, node 2 TClient is connected to server localhost:21510 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:16.232064Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:16.232103Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:16.232111Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8641, node 3 TClient is connected to server localhost:9181 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:17.217624Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:17.217701Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:17.217731Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19939, node 4 TClient is connected to server localhost:5171 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} 2024-11-21T08:52:18.217941Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.217983Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.218001Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11210, node 5 TClient is connected to server localhost:3795 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteSplitInFly >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/0016a3/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 20561, MsgBus: 28429 2024-11-21T08:52:09.539381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652710962257338:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:09.539412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016a3/r3tmp/tmpcEpb3B/pdisk_1.dat 2024-11-21T08:52:09.596580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20561, node 1 2024-11-21T08:52:09.607664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:09.607676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:09.607678Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2024-11-21T08:52:09.607717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28429 2024-11-21T08:52:09.640427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:09.640459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:09.642116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:09.675294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.678580Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:09.686960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.706700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.740006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.809417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.889063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652710962258903:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:09.889097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:09.921069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.931835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.948796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.956552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.970787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.987978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.009801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652715257226700:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.009841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.009953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652715257226705:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.010953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:10.014884Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:52:10.015039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652715257226707:2427], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:14.539881Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652710962257338:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:14.539962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"6ace51c7-7278e0a9-6eda0483-88cfb07b") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"2e081e2b-593284d0-e81e87e-28828df8") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"c94f8772-264987ee-fb374ce0-1885b2ee")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2024-11-21T08:52:20.806096Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976715971. Error: [TEvError] Spilling Service not started 2024-11-21T08:52:20.807157Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652758206905106:4311], TxId: 281474976715971, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yr85deptfermsjw5w6gwh. SessionId : ydb://session/3?node_id=1&id=YzhkZDA2MTctZDdmNDY3MWMtNGJhMmYxY2EtNjYyZjc2YzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] Spilling Service not started }. 2024-11-21T08:52:20.807348Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652758206905107:4312], TxId: 281474976715971, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yr85deptfermsjw5w6gwh. SessionId : ydb://session/3?node_id=1&id=YzhkZDA2MTctZDdmNDY3MWMtNGJhMmYxY2EtNjYyZjc2YzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T08:52:20.807391Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652758206905105:4310], TxId: 281474976715971, task: 1. Ctx: { TraceId : 01jd6yr85deptfermsjw5w6gwh. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzhkZDA2MTctZDdmNDY3MWMtNGJhMmYxY2EtNjYyZjc2YzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T08:52:20.810475Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzhkZDA2MTctZDdmNDY3MWMtNGJhMmYxY2EtNjYyZjc2YzI=, ActorId: [1:7439652758206905092:4306], ActorState: ExecuteState, TraceId: 01jd6yr85deptfermsjw5w6gwh, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/001660/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 5116, MsgBus: 17487 2024-11-21T08:52:09.555610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652714009727693:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:09.555660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001660/r3tmp/tmpEswgqQ/pdisk_1.dat 2024-11-21T08:52:09.614115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5116, node 1 2024-11-21T08:52:09.632414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:09.632429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:09.632431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:09.632472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17487 2024-11-21T08:52:09.656818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:09.656845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:09.657965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:09.693496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:09.700578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:09.711015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:09.734643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.809554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.829250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:09.940441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652714009729122:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:09.940508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:09.945714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.958493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.970522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:09.985675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.005450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.026420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.038088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652718304696921:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.038120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.038134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652718304696926:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.038823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:10.046031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652718304696928:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:14.555572Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652714009727693:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:14.556231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"46ae71e-6786b6c1-e95f488c-b434267d") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"343fabcd-b5d9fdb2-41494f01-6b86ea7b") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"1666f54f-418ca550-40563b82-b45d74de")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) |87.6%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DeclareAndDefine >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::SimpleFeatureFlags |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Yq_1::ModifyQuery [GOOD] >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> Viewer::SimpleFeatureFlags [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2024-11-21T08:52:15.472378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652736872711668:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:15.472575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:15.521551606 414808 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:15.521593606 414808 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003bc2/r3tmp/tmpGLsAph/pdisk_1.dat 2024-11-21T08:52:15.884282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:15.884664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652736872712145:2276], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 6744, node 1 TClient is connected to server localhost:17773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:15.935017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:15.935029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:15.935031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:15.935084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:15.935197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:15.935202Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:16.179879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.180972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:16.180991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.181758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:16.181842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:16.181855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:52:16.182319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:16.182330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:16.182687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:16.183129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:16.183723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179136233, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:16.183737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:16.183822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:16.184256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.184318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.184329Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:16.184342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:16.184352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:16.184365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:16.184863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:16.184871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:16.184875Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:16.184889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:16.242224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:16.242259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:16.249863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:16.523622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.523696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:16.524385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T08:52:16.524431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.524481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.524499Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 
2024-11-21T08:52:16.524776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.524792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.524796Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:16.524840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.524847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.524849Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:16.525808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179136569, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:16.525824Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179136569, at schemeshard: 72057594046644480 2024-11-21T08:52:16.525850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:16.526314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.526398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.526412Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:16.526423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:16.526432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:16.526450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:16.526605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.526622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.526625Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:16.526657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.526660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.526661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, 
txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:52:16.526668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T08:52:16.526834Z node 1 :FLAT_TX_SCHEMESHARD W ... _id, String : utqueoiuerq7cmg4ops8)], RetryAttempt: 0, ResolveAttempt: 0 } 2024-11-21T08:52:21.543686Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:21.543687Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. BEFORE: 1.0 2024-11-21T08:52:21.543699Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. Send EvRead to shardId: 72075186224037895, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T08:52:21.543714Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. AFTER: 0.1 2024-11-21T08:52:21.543715Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T08:52:21.543730Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:21.543732Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T08:52:21.543735Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T08:52:21.543953Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. Recv TEvReadResult from ShardID=72075186224037895, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2024-11-21T08:52:21.543956Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. Taken 0 locks 2024-11-21T08:52:21.543958Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:21.543962Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T08:52:21.543966Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2024-11-21T08:52:21.543970Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:21.543973Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:21.543980Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 1 freeSpace: 8387510 2024-11-21T08:52:21.543984Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. returned 1 rows; processed 1 rows 2024-11-21T08:52:21.543998Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. dropping batch for read #0 2024-11-21T08:52:21.543999Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:21.544001Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:21.544003Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1, CA Id [4:7439652764584744241:2856]. returned async data processed rows 1 left freeSpace 8387510 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:21.544054Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:21.544056Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:21.544060Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2024-11-21T08:52:21.544061Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:21.544072Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. Finish input channelId: 1, from: [4:7439652764584744241:2856] 2024-11-21T08:52:21.544080Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646927 2024-11-21T08:52:21.544081Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:21.544085Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:21.544087Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:21.544089Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1. Tasks execution finished 2024-11-21T08:52:21.544090Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744241:2856], TxId: 281474976715782, task: 1. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:21.544119Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 1. pass away 2024-11-21T08:52:21.544137Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:21.544139Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:21.544143Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:21.544146Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:21.544153Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715782;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:21.544184Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:21.544186Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:21.544188Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:21.544189Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. Tasks execution finished 2024-11-21T08:52:21.544191Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652764584744242:2857], TxId: 281474976715782, task: 2. Ctx: { TraceId : 01jd6yr8wdbp142g8qx2a4rec0. SessionId : ydb://session/3?node_id=4&id=NTc3NDQ5NDktNjgxODIzYWMtN2Y4OTgxZmYtZjhlZDFlMmM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:21.544224Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715782, task: 2. pass away 2024-11-21T08:52:21.544241Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715782;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:22.002384Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:30473: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:30473 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: 2024-11-21T08:52:14.605675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:14.605755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:14.605765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17875, node 1 TClient is connected to server localhost:28451 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":1},"NodeId":1,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"10","AvailableSize":"90","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[1],"Overall":"Red"}]} 2024-11-21T08:52:15.478516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:15.478558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:15.478570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19280, node 2 TClient is connected to server localhost:19672 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":2},"NodeId":2,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"90","AvailableSize":"10","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[2],"Overall":"Red"}]} 2024-11-21T08:52:16.494205Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:16.494232Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:16.494238Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 5524, node 3 TClient is connected to server localhost:12789 json result: {"TotalGroups":"1","FoundGroups":"0"} 2024-11-21T08:52:17.525178Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:17.525248Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:17.525276Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 23441, node 4 TClient is connected to server localhost:29259 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":4},"NodeId":4,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"10","AvailableSize":"90","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[4],"Overall":"Red"}]} 2024-11-21T08:52:18.480439Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.480484Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.480502Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 9342, node 5 TClient is connected to server localhost:23182 json result: {"TotalGroups":"1","FoundGroups":"0"} 2024-11-21T08:52:19.560675Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:19.560710Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:19.560718Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21976, node 6 TClient is connected to server localhost:16177 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.8","Used":"80","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":6},"NodeId":6,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"80","AvailableSize":"20","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[6],"Overall":"Red"}]} 2024-11-21T08:52:20.575769Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:20.575850Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:20.575858Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29884, node 7 TClient is connected to server localhost:3510 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":7},"NodeId":7,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"90","AvailableSize":"10","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[7],"Overall":"Red"}]} 2024-11-21T08:52:21.631094Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:452:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:21.631152Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:21.631165Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:21.736556Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:21.823076Z node 8 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:21.826650Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:21.886880Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 12007, node 8 TClient is connected to server localhost:63650 2024-11-21T08:52:21.916878Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:21.916903Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:21.916908Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:21.917014Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":13,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100000111","FieldsRequired":"0000000000000000000000000000101","Nodes":[{"NodeId":9,"Database":"/Root/shared","UptimeSeconds":-1732179141,"Disconnected":true,"CpuUsage":"nan","SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[170.359375,145.7788086,95.45947266],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-qcxhsi27zq.auto.internal","Version":".c239bee","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0}}]} 2024-11-21T08:52:22.661194Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439652767202650337:2065];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:22.661420Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:22.676541Z node 11 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13089, node 11 2024-11-21T08:52:22.684099Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:22.684110Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:22.684112Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:22.684157Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5131 2024-11-21T08:52:22.764218Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:22.764252Z node 11 :HIVE WARN: 
HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:22.765224Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> TSequenceReboots::CopyTableWithSequence [GOOD] >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > 0, a, false, 0 | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > 1, b, true, 10 | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > 2, c, false, 20 | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > 3, d, true, 30 | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > 4, e, false, 40 | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > 5, f, true, 50 | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > 6, g, false, 60 | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > 7, h, true, 70 | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > 8, i, false, 80 | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > 9, j, true, 90 | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, a, false, 0 | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, b, true, 10 | | | 
PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > 2, c, false, 20 | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, d, true, 30 | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, e, false, 40 | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > 5, f, true, 50 | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, g, false, 60 | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, h, true, 70 | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > 8, i, false, 80 | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, j, true, 90 | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, k, false, 100 | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, l, true, 110 | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, m, false, 120 | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, n, true, 130 | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > 14, o, false, 140 | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, p, true, 150 | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, q, false, 160 | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > 17, r, true, 170 | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, s, false, 180 | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, t, true, 190 | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, x, NULL, NULL | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, xx, NULL, NULL | | | PageId: 
10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > 2, xxx, NULL, NULL | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, xxxx, NULL, NULL | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, xxxxx, NULL, NULL | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > 5, xxxxxx, NULL, NULL | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, xxxxxxx, NULL, NULL | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, xxxxxxxx, NULL, NULL | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > 8, xxxxxxxxx, NULL, NULL | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, xxxxxxxxxx, NULL, NULL | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, xxxxxxxxxx.., NULL, NULL | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, xxxxxxxxxx.., NULL, NULL | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, xxxxxxxxxx.., NULL, NULL | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > 14, xxxxxxxxxx.., NULL, NULL | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, xxxxxxxxxx.., NULL, NULL | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, xxxxxxxxxx.., NULL, NULL | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > 17, xxxxxxxxxx.., NULL, NULL | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, xxxxxxxxxx.., NULL, NULL | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, xxxxxxxxxx.., NULL, NULL | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > 20, xxxxxxxxxx.., NULL, NULL | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > 21, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > 22, xxxxxxxxxx.., NULL, NULL | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > 23, xxxxxxxxxx.., NULL, NULL | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > 24, xxxxxxxxxx.., NULL, NULL | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > 25, xxxxxxxxxx.., NULL, NULL | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > 26, xxxxxxxxxx.., NULL, NULL | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > 27, xxxxxxxxxx.., NULL, NULL | | | PageId: 10028 RowCount: 3306 DataSize: 29406 
GroupDataSize: 58406 ErasedRowCount: 1276 | | | > 28, xxxxxxxxxx.., NULL, NULL | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > 29, xxxxxxxxxx.., NULL, NULL | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > 30, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > 31, xxxxxxxxxx.., NULL, NULL | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > 32, xxxxxxxxxx.., NULL, NULL | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > 33, xxxxxxxxxx.., NULL, NULL | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > 34, xxxxxxxxxx.., NULL, NULL | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > 35, xxxxxxxxxx.., NULL, NULL | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > 36, xxxxxxxxxx.., NULL, NULL | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > 37, xxxxxxxxxx.., NULL, NULL | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > 38, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > 39, xxxxxxxxxx.., NULL, NULL | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > 40, xxxxxxxxxx.., NULL, NULL | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > 41, xxxxxxxxxx.., NULL, NULL | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > 42, xxxxxxxxxx.., NULL, NULL | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > 43, xxxxxxxxxx.., NULL, NULL | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > 44, xxxxxxxxxx.., NULL, NULL | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > 45, xxxxxxxxxx.., NULL, NULL | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > 46, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 rev 1, 674b} | | | PageId: 10047 RowCount: 5928 DataSize: 49128 GroupDataSize: 97128 ErasedRowCount: 2568 | | | > 47, xxxxxxxxxx.., NULL, NULL | | | Pa ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 
Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} >> Viewer::TabletMerging [GOOD] >> Viewer::TabletMergingPacked >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] [GOOD] >> SubDomainWithReboots::CreateWithStoragePools >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] Test command err: 2024-11-21T08:52:18.841852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.842240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.842256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042e4/r3tmp/tmpCSsfi2/pdisk_1.dat 2024-11-21T08:52:18.946121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:18.963404Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:19.005839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:19.005870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:19.016554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:19.123255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:19.138074Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:19.138243Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:19.138314Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:52:19.138354Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:19.148064Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:19.148305Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:19.148338Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:19.148508Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:52:19.148526Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:52:19.148534Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:52:19.148581Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:19.152448Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:52:19.152534Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:19.152564Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:52:19.152569Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:52:19.152574Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:52:19.152580Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:19.152744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:19.152751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:19.152901Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:52:19.152921Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:52:19.152932Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:19.152936Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:19.152941Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:52:19.152948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:19.152955Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:19.152961Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:52:19.152966Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:52:19.152969Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:52:19.152975Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:52:19.152980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:52:19.152995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:52:19.153000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:19.153026Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:52:19.153071Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:52:19.153081Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:52:19.153098Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:52:19.153104Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:52:19.153108Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:52:19.153113Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:52:19.153117Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:19.153163Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:19.153166Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:52:19.153170Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:52:19.153172Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:19.153182Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:52:19.153185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:52:19.153189Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:52:19.153192Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:19.153197Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:19.153482Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:52:19.153497Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:52:19.164537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:52:19.164570Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:52:19.164578Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:52:19.164594Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:52:19.164610Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:19.342371Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:19.342403Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:19.342416Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:52:19.342437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:52:19.342443Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:19.342473Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:52:19.342483Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:52:19.342488Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:52:19.342494Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:52:19.345849Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:52:19.345894Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:52:19.346144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:19.346156Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:19.346170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:52:19.346181Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:19.346191Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:52:19.346205Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... lved key sets: 1 2024-11-21T08:52:24.422400Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T08:52:24.422415Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T08:52:24.422452Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T08:52:24.422486Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2024-11-21T08:52:24.422494Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T08:52:24.422499Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2024-11-21T08:52:24.422507Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:52:24.422512Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T08:52:24.422581Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T08:52:24.422593Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1206:2961], 2024-11-21T08:52:24.422597Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1206:2961], 2024-11-21T08:52:24.422600Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T08:52:24.422710Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1206:2961], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T08:52:24.422714Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1206:2961], 2024-11-21T08:52:24.422717Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1206:2961], 2024-11-21T08:52:24.422799Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1208:2961], Recipient [2:1130:2913]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2024-11-21T08:52:24.422823Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:52:24.422832Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v4001/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2024-11-21T08:52:24.422837Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2024-11-21T08:52:24.422845Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T08:52:24.422859Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:24.422862Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:52:24.422867Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:24.422869Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:52:24.422882Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T08:52:24.422885Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:24.422887Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:24.422890Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:52:24.422893Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:52:24.422901Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T08:52:24.422936Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1208:2961], 0} after executionsCount# 1 2024-11-21T08:52:24.422941Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1208:2961], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:52:24.422953Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1208:2961], 0} finished in read 2024-11-21T08:52:24.422959Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:24.422961Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:52:24.422964Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:52:24.422966Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2024-11-21T08:52:24.422974Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T08:52:24.422976Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:52:24.422979Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T08:52:24.422982Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:52:24.423099Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1208:2961], Recipient [2:1130:2913]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T08:52:24.423104Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T08:52:24.423195Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1206:2961], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 181 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 40 FinishTimeMs: 1732179144423 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 30 WaitInputTimeUs: 286 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732179144422 } MaxMemoryUsage: 1048576 } 2024-11-21T08:52:24.423200Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1206:2961] 2024-11-21T08:52:24.423224Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:52:24.423230Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd6yrbpx4gz0z1mwj2jwfmz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0YWE0MGUtNzA5ZWYyM2ItYWM5N2VhZjAtM2M1NTVjZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000181s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:40.133994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:40.134010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.134014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:40.134017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:40.134027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:40.134029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:40.134036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:40.134098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:40.142842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:40.142857Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.144826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:40.144908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:40.144929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:40.147455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:40.147520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:40.147634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.147833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.148602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.148909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.148924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.148938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:40.148945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.148952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:40.148992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:40.150442Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:40.169195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:40.169274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.169332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:40.169400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:40.169409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.170212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.170237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:40.170279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.170288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:40.170292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:40.170297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:40.170751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.170765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:40.170770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:40.171151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.171162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.171167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.171174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.171768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:40.172226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:40.172281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:40.172481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:40.172505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:40.172513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.172568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:40.172575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:40.172601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:40.172613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:40.173060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:40.173071Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:40.173111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:40.173117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:40.173199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:40.173206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:40.173217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:40.173221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.173227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:40.173232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:40.173237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:40.173240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:40.173251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:40.173257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:40.173261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:24.208560Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:24.208605Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 50us result status StatusSuccess 2024-11-21T08:52:24.208715Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:24.208756Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:24.208773Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 19us result status StatusSuccess 2024-11-21T08:52:24.208834Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> Yq_1::CreateQuery_Without_Connection [GOOD] >> SubDomainWithReboots::RootWithStoragePoolsAndTable |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/jptk/001637/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 17604, MsgBus: 1743 2024-11-21T08:52:09.962201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652710406488788:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:09.962527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001637/r3tmp/tmpKW3enR/pdisk_1.dat 2024-11-21T08:52:10.026669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17604, node 1 2024-11-21T08:52:10.043563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:10.043576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:10.043578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:10.043617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:1743 2024-11-21T08:52:10.064549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:10.064584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:10.067775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:10.098448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.102011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:10.111809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.127191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.149559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.165860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:10.273251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652714701457604:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.273276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.309671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.365266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.375029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.382376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.437810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.446462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:10.460974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652714701458124:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.461009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.461013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652714701458129:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:10.461735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:10.465478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652714701458131:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:14.962441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652710406488788:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:14.962508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"d5bcb526-3188ff20-1a627e12-d53144a") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"606bad1a-9edfa31e-4356433c-d271473d") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"43cc4375-663dc8a3-20cde290-f1545abe")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2024-11-21T08:52:25.020194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:52:25.020233Z node 1 :IMPORT WARN: Table profiles were not loaded |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2024-11-21T08:52:15.416295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652736375630884:2204];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:15.416339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:15.452314836 414567 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:15.452361097 414567 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:15.453542Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11425: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11425 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003bb9/r3tmp/tmpGdcKtB/pdisk_1.dat 2024-11-21T08:52:15.742934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652736375631106:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:15.742977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:15.773980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:15.774008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:15.778526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11425, node 1 TClient is connected to server localhost:3522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:15.841870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:15.841884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:15.841885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:15.841927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:15.842301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:15.842307Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:16.117565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.118760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:16.118786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.120756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:16.120824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:16.120830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:52:16.121786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 
2024-11-21T08:52:16.121796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:16.122204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.123164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179136170, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:16.123176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:16.123240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:16.123650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.123698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.123712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:16.123722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:16.123729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:16.123740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 waiting... 2024-11-21T08:52:16.124459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:16.124481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:16.124485Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:16.124500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T08:52:16.128627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:16.459448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:16.459534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:16.460500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T08:52:16.460558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.460605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.460617Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:16.460842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, 
msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.460846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.460851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:16.460882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.460884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.460886Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:16.461895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179136506, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:16.461910Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179136506, at schemeshard: 72057594046644480 2024-11-21T08:52:16.461941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:16.462326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:16.462364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:16.462372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:16.462382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:16.462391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:16.462401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:16.462549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.462553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.462556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:16.462576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:16.462579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:16.462580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublis ... 
.743536Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ... 2024-11-21T08:52:23.744192Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2024-11-21T08:52:23.744201Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744221Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744226Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744239Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744248Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744252Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744256Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744259Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744266Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744273Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744281Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744293Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:23.744301Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> Yq_1::Basic_EmptyDict [GOOD] >> ForceDropWithReboots::ForceDelete >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter |87.7%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> ForceDropWithReboots::DoNotLostDeletedTablets |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> TCdcStreamWithRebootsTests::Attributes[TabletReboots] [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2024-11-21T08:52:14.659179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652733143200566:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:14.659239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:14.694475002 413074 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:14.694537205 413074 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:14.697068Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17001: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17001 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003bea/r3tmp/tmp83D2SN/pdisk_1.dat 2024-11-21T08:52:15.032623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 17001, node 1 TClient is connected to server localhost:6008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:15.070590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:15.070604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:15.070606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:15.070659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:15.070689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:15.070701Z node 1 :IMPORT WARN: Table profiles were not loaded Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:15.298447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:15.299439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:15.299451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:15.300261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:15.300346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:15.300351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:15.301006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:15.301024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:15.301520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:15.301815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:15.302644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179135351, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:15.302660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:15.302722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:15.303279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:15.303338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:15.303351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:15.303372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:15.303385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:15.303403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:15.304021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:15.304041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:15.304045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:15.304062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:15.477300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:15.477336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:15.479024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:15.706802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:15.706872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:15.708340Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2024-11-21T08:52:15.708346Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:15.708349Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:15.709867Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2024-11-21T08:52:15.709887Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T08:52:15.709890Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T08:52:15.710126Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2024-11-21T08:52:15.710129Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T08:52:15.710131Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T08:52:15.710283Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2024-11-21T08:52:15.710285Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:15.710287Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:15.710432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2024-11-21T08:52:15.710434Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T08:52:15.710435Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T08:52:15.710543Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2024-11-21T08:52:15.710545Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2024-11-21T08:52:15.710547Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2024-11-21T08:52:15.710710Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2024-11-21T08:52:15.710713Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T08:52:15.710714Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T08:52:15.710828Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2024-11-21T08:52:15.710830Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T08:52:15.710831Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T08:52:15.710942Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2024-11-21T08:52:15.710945Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:15.710946Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:15.711508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T08:52:15.711572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:15.711640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:15.711657Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:15.712095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:15.712104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:15.712108Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:15.712162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046 ... ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988748Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988754Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988760Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988766Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988772Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988782Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988790Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988796Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988805Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988812Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988821Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988828Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988833Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988844Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988851Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988860Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988866Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988875Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988881Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988887Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.988897Z node 7 :FQ_QUOTA_SERVICE ERROR: 
SyncQuota finished with error: ... 2024-11-21T08:52:25.989549Z node 7 :FQ_QUOTA_SERVICE ERROR: 
SyncQuota finished with error: 2024-11-21T08:52:25.989558Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989566Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989574Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989584Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989590Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989597Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989605Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989613Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989619Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989628Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989636Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989642Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989647Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989656Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989661Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989668Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989674Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989683Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989690Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989693Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989702Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989707Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989713Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989722Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989727Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989736Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989744Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989751Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989757Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989779Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:25.989789Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2024-11-21T08:52:13.902866Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:13.902926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:13.902932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12613, node 1 TClient is connected to server localhost:22545 2024-11-21T08:52:14.016107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:592:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:14.016127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:602:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:14.016189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:14.029681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:14.029709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:14.029714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:14.029772Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:14.030067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:14.073018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:14.073056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:14.074006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:52:14.086477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:14.192852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:606:2512], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:52:14.250193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd6yr1hz2bdk4zbf84c5gc2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFiNTY5OTEtNzVkZTM1OTItY2IwN2M1MmQtZjI5ZDc0NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root json result: {"column0":"SGVsbG8="}] library/cpp/json/json_reader.cpp:427: Offset: 22, Code: 2, Error: The document root must not be followed by other values. 2024-11-21T08:52:15.114560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:15.114630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:15.114648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8801, node 2 TClient is connected to server localhost:12514 2024-11-21T08:52:15.234581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:590:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:15.234614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:601:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:15.234624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:15.253137Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:15.253685Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:15.253705Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:15.253711Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:15.253793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:15.296125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:15.296157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:15.297194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:52:15.311063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:15.414995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:604:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:52:15.453880Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd6yr2r23aw40psb9tp6xagj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTU3MmFiNjYtOWI3ZjYzNzMtYzA3M2Q0YmItZDYyYzMyNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root json result: {"version":4,"result":[{"column0":"SGVsbG8="}]} 2024-11-21T08:52:16.307360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:16.307398Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:16.307405Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19688, node 3 TClient is connected to server localhost:29945 2024-11-21T08:52:16.416013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:586:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:16.416045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:599:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:16.416056Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:16.432401Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:16.432823Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:16.432835Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:16.432840Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:16.432929Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:16.476785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:16.476828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:16.477937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:52:16.491990Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:16.596193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:52:16.636686Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd6yr3wz572zyzhztpkv9qmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTJkM2FlOGYtODA2NDk1MDQtMzQyZjVhYWQtOTEwOWZiZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root json result: {"version":4,"columns":[{"name":"column0","type":"String"}],"result":[["SGVsbG8="]]} 2024-11-21T08:52:17.571098Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:17.571180Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:17.571211Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 20178, node 4 TClient is connected to server localhost:28958 2024-11-21T08:52:17.679887Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:590:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:17.679910Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:600:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_ ... 24-11-21T08:52:21.564812Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:52:21.603972Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd6yr8rddhgbemkb9g33jf3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTRlOTk4MTMtYTgyOGJhZTItZWNjNWQ5NGEtYjcxNTA5ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root json result: {"version":4,"columns":[{"name":"column0","type":"String"}],"result":[["Hello"]]} 2024-11-21T08:52:22.421813Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:22.421840Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:22.421852Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 64270, node 8 TClient is connected to server localhost:4830 2024-11-21T08:52:22.516626Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:588:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:22.516647Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:598:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:22.516654Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:22.531712Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:22.532004Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:22.532011Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:22.532014Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:22.532092Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:22.574002Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:22.574037Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:22.574862Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:52:22.586595Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:22.691305Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:52:22.728525Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd6yr9vmb7bqmem72nwvrjzr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MjYxZmZlZTAtY2I3NjYzMmEtMzRlM2RhODctZGI2NjUzMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root json result: {"version":4,"result":[{"rows":[["Hello"]],"columns":[{"name":"column0","type":"String"}]}]} 2024-11-21T08:52:23.695904Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:23.695995Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:23.696035Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:23.788847Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:23.873603Z node 9 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:23.878476Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:23.923281Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 3675, node 9 TClient is connected to server localhost:64164 2024-11-21T08:52:23.950920Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:23.950941Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:23.950946Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:23.951070Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":13,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100100111","FieldsRequired":"0000000000000000000000000100101","Problems":["no-database-board-info"],"Nodes":[{"NodeId":10,"Database":"/Root/shared","UptimeSeconds":-1732179143,"Disconnected":true,"CpuUsage":"nan","SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[171.4521484,146.4135742,95.93505859],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-qcxhsi27zq.auto.internal","Version":".c239bee","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0}}]} 2024-11-21T08:52:25.160158Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:452:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.160251Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.160270Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:25.257225Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:25.342095Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:25.345327Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:25.388505Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17104, node 11 TClient is connected to server localhost:1610 2024-11-21T08:52:25.422185Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:25.422213Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:25.422218Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:25.422382Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":13,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100000111","FieldsRequired":"0000000000000000000000000000101","Nodes":[{"NodeId":13,"Database":"/Root/serverless","UptimeSeconds":-1732179145,"Disconnected":true,"CpuUsage":"nan","SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[171.4521484,146.4135742,95.93505859],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-qcxhsi27zq.auto.internal","Version":".c239bee","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0}}]} 2024-11-21T08:52:26.808225Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:510:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:26.808292Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:26.808309Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:26.886041Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:26.970394Z node 14 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:26.973364Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:27.014188Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 26316, node 14 TClient is connected to server localhost:29966 2024-11-21T08:52:27.038098Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:27.038119Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:27.038124Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:27.038296Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":13,"TotalNodes":"2","FoundNodes":"2","FieldsAvailable":"0000000010100110111111100100111","FieldsRequired":"0000000000000000000000000100101","Nodes":[{"NodeId":16,"UptimeSeconds":-1732179147,"Disconnected":true,"CpuUsage":"nan","SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[171.4521484,146.4135742,95.93505859],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-qcxhsi27zq.auto.internal","Version":".c239bee","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0}},{"NodeId":17,"UptimeSeconds":-1732179147,"Disconnected":true,"CpuUsage":"nan","SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[171.4521484,146.4135742,95.93505859],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-qcxhsi27zq.auto.internal","Version":".c239bee","Location":{"DataCenter":"4","Module":"4","Rack":"4","Unit":"4"},"CoresUsed":0,"CoresTotal":0},"Tablets":[{"Type":"DataShard","State":"Green","Count":1}]}]} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:130:2153] sender: [1:132:2057] recipient: [1:106:2138] 2024-11-21T08:52:13.263429Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:13.264497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:13.264505Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:13.265671Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 
268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:13.265777Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:13.265835Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:13.274860Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:13.276913Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:13.277058Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:13.277197Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:13.277209Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:13.277215Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:13.277250Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:13.280698Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:13.280747Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:13.280786Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:13.280792Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:13.280796Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:13.280801Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.280860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.280878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.280918Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:13.280932Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:13.280939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.280945Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:13.280952Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:13.280956Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:13.280960Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:13.280965Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:13.280969Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:130:2153] sender: [1:205:2057] recipient: [1:14:2061] 2024-11-21T08:52:13.289149Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.289167Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.289176Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# 
[1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:13.289621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:13.289629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:13.289655Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:13.289682Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:13.289692Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:13.289700Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:13.289709Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.289713Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:13.289718Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:13.289721Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.289785Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:13.289789Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:13.289792Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:13.289796Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.289806Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:13.289809Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:13.289812Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:13.289815Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.289820Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:13.311577Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:13.311612Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.311620Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.311632Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:13.311648Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:13.311795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.311807Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.311815Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:13.311839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:13.311844Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:13.311903Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.311913Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:13.311917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:13.311922Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:13.312755Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:13.312777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.312852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.312859Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.312869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.312876Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:13.312881Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:13.312891Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T08:52:13.312895Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:13.312902Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:13.312906Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:13.312910Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:13.312914Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:13.312976Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T08:52:13.312982Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:13.312985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:13.312989Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:13.312993Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:13.313008Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.313011Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:13.313014Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to 
execution unit BuildAndWaitDependencies 2024-11-21T08:52:13.313018Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:13.313031Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T08:52:13.313034Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:13.313038Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T08:52:13.313043Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:13.313046Z node 1 :TX_DATASHARD TRACE: Adv ... ard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 228 RawX2: 94489282735 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2024-11-21T08:52:27.277485Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [22:270:2258], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:52:27.277490Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T08:52:27.277503Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [22:119:2145], Recipient [22:228:2223]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2024-11-21T08:52:27.277507Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T08:52:27.277511Z node 22 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2024-11-21T08:52:27.277517Z node 22 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2024-11-21T08:52:27.308791Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [22:270:2258], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:52:27.308812Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:52:27.350068Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:281:2267], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.350096Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.350106Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:279:2266], serverId# [22:281:2267], sessionId# [0:0:0] 2024-11-21T08:52:27.350141Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [22:278:2265], Recipient [22:228:2223]: NKikimrTabletBase.TEvGetCounters 2024-11-21T08:52:27.351929Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [22:97:2132], Recipient [22:228:2223]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 94489282644 } 2024-11-21T08:52:27.351948Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:27.352020Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:283:2269], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.352025Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.352029Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:282:2268], serverId# [22:283:2269], sessionId# [0:0:0] 2024-11-21T08:52:27.352074Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [22:97:2132], Recipient 
[22:228:2223]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 94489282644 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2024-11-21T08:52:27.352081Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:27.352116Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:27.352381Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2024-11-21T08:52:27.352404Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:27.352408Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:27.352414Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:27.352418Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:27.352431Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.352450Z node 22 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2024-11-21T08:52:27.352455Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:27.352462Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:27.352467Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:27.352471Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:27.352481Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.352490Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2024-11-21T08:52:27.352495Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T08:52:27.352583Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:27.352588Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:27.352592Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.353223Z node 22 :TX_DATASHARD 
TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2024-11-21T08:52:27.353267Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T08:52:27.353276Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T08:52:27.353337Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:27.353344Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:27.353503Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T08:52:27.353514Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.353651Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2024-11-21T08:52:27.353666Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T08:52:27.353671Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T08:52:27.353711Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:27.353716Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:27.353787Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T08:52:27.353793Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.353901Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2024-11-21T08:52:27.353911Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T08:52:27.353916Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T08:52:27.353940Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:27.353943Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:27.353994Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T08:52:27.353999Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T08:52:27.414327Z node 22 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:27.414380Z node 22 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:27.414408Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:27.414418Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:27.414425Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:27.414431Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
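The MemoryUsageImmediateHugeTx trace above shows the datashard's memory-grant retry loop: ExecuteDataTx runs against a small initial limit, reports how much more memory it needs, releases the transaction data ("tx 2 released its data"), and is restarted with a larger grant ("tx 2 at 9437184 restored its data") until the UpdateRow program fits. Reading the numbers off this trace, each retry asks for 8x the current limit as extra memory (132502 -> 1192518 -> 10732662 -> ...). The sketch below only reproduces that observed growth pattern; the 8x factor is inferred from this log rather than taken from the datashard source, and the required-bytes figure is a made-up stand-in:

    // Illustrative sketch only: mirrors the growth visible in the trace above
    // ("exceeded memory limit L and requests 8*L more for the next try").
    // The real logic lives in the datashard execution units and may differ.
    #include <cstdint>
    #include <iostream>

    int main() {
        std::uint64_t limit = 132502;          // first limit printed in the trace
        const std::uint64_t needed = 11000000; // hypothetical tx demand (assumption)
        int restarts = 0;
        while (limit < needed) {
            std::uint64_t extra = 8 * limit;   // observed size of the next request
            std::cout << "exceeded memory limit " << limit
                      << " and requests " << extra << " more for the next try\n";
            limit += extra;                    // grant used by the restarted ExecuteDataTx
            ++restarts;
        }
        std::cout << "fits after " << restarts << " restarts, limit=" << limit << "\n";
        return 0;
    }

With those inputs the loop restarts three times, matching the three "exceeded memory limit" entries above; the release/restore pair around each restart is what makes the retry cheap.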
2024-11-21T08:52:27.414477Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:27.414482Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:27.414486Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:27.414490Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T08:52:27.414508Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T08:52:27.414512Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:27.414516Z node 22 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T08:52:27.425683Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:27.425715Z node 22 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T08:52:27.425730Z node 22 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T08:52:27.425764Z node 22 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:27.426018Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:288:2274], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.426027Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:27.426033Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:287:2273], serverId# [22:288:2274], sessionId# [0:0:0] 2024-11-21T08:52:27.426057Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [22:286:2272], Recipient [22:228:2223]: NKikimrTabletBase.TEvGetCounters >> SubDomainWithReboots::SplitTabletInsideWithStoragePools >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::Attributes[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:41.619147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
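For scale, the BackgroundCompactionQueue values in the NOTICE entry above are plain seconds: Timeout# 600.000000s is a 10-minute per-item timeout (600 / 60 = 10), RoundInterval# 172800.000000s is a full two-day round (172800 = 2 * 86400), and InflightLimit# 1 reads as allowing a single background compaction in flight at a time. This is a reading of the logged values only, not of the schemeshard configuration code.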
2024-11-21T08:51:41.619165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.619169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:41.619172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:41.619183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:41.619186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:41.619192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.619251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:41.629459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:41.629479Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.631495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:41.631594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:41.631622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:41.634124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:41.634190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:41.634323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.634460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.635092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.635343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.635354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.635366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:41.635372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.635378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:41.635417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:41.636544Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 
2024-11-21T08:51:41.649813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:41.649879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.649922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:41.649955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:41.649962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.650503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.650528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:41.650564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.650572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:41.650576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:41.650580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:41.650923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.650934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:41.650937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:41.651219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.651228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.651233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.651239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.651807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:41.652186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:41.652243Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:41.652409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.652434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:41.652441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.652493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:41.652501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.652524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:41.652535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.652887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.652896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.652923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.652928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:41.652991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.652997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:41.653006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:41.653010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.653016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:41.653020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.653024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:41.653028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:41.653038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:41.653043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:41.653047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 94046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.353920Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.353927Z node 145 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:27.353931Z node 145 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:52:27.353935Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:52:27.354090Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T08:52:27.354095Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:52:27.354108Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T08:52:27.354117Z node 145 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T08:52:27.354190Z node 145 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.354196Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.354198Z node 145 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:27.354200Z node 145 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:27.354219Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:27.354325Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 622770260237 } Origin: 72075186233409546 
State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:27.354328Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:52:27.354338Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 622770260237 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:27.354342Z node 145 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:27.354346Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 622770260237 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:27.354357Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.354359Z node 145 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.354363Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:27.354367Z node 145 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:52:27.354435Z node 145 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.354440Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.354442Z node 145 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:27.354445Z node 145 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:52:27.354447Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:52:27.354452Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:52:27.355976Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.356008Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.356349Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.356386Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.356472Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 
72057594046678944 2024-11-21T08:52:27.356480Z node 145 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:52:27.356492Z node 145 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:52:27.356496Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:27.356502Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:52:27.356517Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [145:403:2378] message: TxId: 1003 2024-11-21T08:52:27.356523Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:27.356528Z node 145 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:27.356533Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:27.356544Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:27.356553Z node 145 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:27.356556Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:27.356572Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:27.356577Z node 145 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:27.356580Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:27.356588Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:27.356673Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:27.357250Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:27.357263Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [145:601:2533] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:27.357348Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:27.357417Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 58us result status StatusSuccess 2024-11-21T08:52:27.357540Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: 
"streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "key" Value: "value" } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false UserAttributes { Key: "key" Value: "value" } AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SubDomainWithReboots::RootWithStoragePools >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] [GOOD] >> SubDomainWithReboots::DeleteWithStoragePools |87.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |87.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CopyTableWithSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:50:51.836241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:50:51.836270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:51.836275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:50:51.836280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:50:51.836294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:50:51.836298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:50:51.836309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:50:51.836409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:50:51.869130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:50:51.869163Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:50:51.872171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:50:51.872318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:50:51.872354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:50:51.897544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:50:51.897701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:50:51.897849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:51.901229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T08:50:51.915184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:51.915638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:51.915650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:50:51.915666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:50:51.915675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:51.915681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:50:51.915733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:50:51.930077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:50:51.968693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:50:51.968798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.968880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:50:51.968933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:50:51.968943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.972715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:51.972764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:50:51.972848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.972869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:50:51.972874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:50:51.972880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:50:51.973576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.973590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:50:51.973595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:50:51.974011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.974021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.974028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.974036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.974830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:50:51.975235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:50:51.975289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:50:51.975504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:50:51.975533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:50:51.975541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.975611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:50:51.975618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:50:51.975656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:50:51.975671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:50:51.976075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:50:51.976086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:50:51.976132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:50:51.976137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:50:51.976244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:50:51.976253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:50:51.976265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:50:51.976269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.976276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:50:51.976282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:50:51.976286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:50:51.976291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:50:51.976305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:50:51.976311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:50:51.976316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2024-11-21T08:52:24.171548Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2024-11-21T08:52:24.171553Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence HandleReply TEvGetSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2024-11-21T08:52:24.171558Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence ProgressState sending TEvRestoreSequence to tablet 72075186233409546 operationId# 1003:3 at tablet 72057594046678944 2024-11-21T08:52:24.171561Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:52:24.171564Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:6 2024-11-21T08:52:24.171802Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:24.171808Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:52:24.171811Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 msg type: 276299787 2024-11-21T08:52:24.171853Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxRestoreSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Record# PathId { OwnerId: 72057594046678944 LocalId: 10 } TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 
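The restored sequence record above carries MinValue: 1 and MaxValue: 9223372036854775807, i.e. the full positive int64 range (2^63 - 1 = 9223372036854775807), while NextValue: 2 suggests the copy resumes right after the single value the source sequence has already handed out. A trivial, illustrative check of that bound:

    // Confirms that the MaxValue printed in the trace above equals the int64 maximum.
    #include <cstdint>
    #include <iostream>
    #include <limits>

    int main() {
        std::cout << std::numeric_limits<std::int64_t>::max() << "\n"; // 9223372036854775807
        return 0;
    }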
2024-11-21T08:52:24.171877Z node 191 :SEQUENCESHARD NOTICE: [sequenceshard 72075186233409546] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Record# PathId { OwnerId: 72057594046678944 LocalId: 10 } TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2024-11-21T08:52:24.192995Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxRestoreSequence.Complete 2024-11-21T08:52:24.193097Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276299788, Sender [191:349:2331], Recipient [191:121:2147]: NKikimrTxSequenceShard.TEvRestoreSequenceResult Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2024-11-21T08:52:24.193106Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSequenceShard::TEvSequenceShard::TEvRestoreSequenceResult 2024-11-21T08:52:24.193113Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvRestoreSequenceResult, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2024-11-21T08:52:24.193140Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2024-11-21T08:52:24.193150Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence HandleReply TEvRestoreSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2024-11-21T08:52:24.193190Z node 191 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 140 -> 240 2024-11-21T08:52:24.193213Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:52:24.193220Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 2024-11-21T08:52:24.193672Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:24.193681Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:52:24.193688Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1003:3 2024-11-21T08:52:24.193721Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [191:121:2147], Recipient [191:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:52:24.193725Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:52:24.193731Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:24.193737Z node 191 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2024-11-21T08:52:24.193747Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:52:24.193751Z node 191 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 4/4 2024-11-21T08:52:24.193754Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:24.193758Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:52:24.193768Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to 
actorId: [191:462:2417] message: TxId: 1003 2024-11-21T08:52:24.193773Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:24.193784Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:24.193788Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:24.193822Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:52:24.193825Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:24.193829Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:24.193830Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:24.193834Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T08:52:24.193836Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:24.193838Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:24.193842Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T08:52:24.193844Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:52:24.193847Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:52:24.193849Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:52:24.193853Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2024-11-21T08:52:24.193856Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:52:24.194228Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:52:24.194249Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [191:462:2417] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2024-11-21T08:52:24.194290Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:24.194295Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [191:650:2578] 2024-11-21T08:52:24.194330Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [191:652:2580], Recipient [191:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:52:24.194334Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:52:24.194336Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:24.194427Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [191:754:2679], 
Recipient [191:121:2147]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true } 2024-11-21T08:52:24.194432Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:52:24.194446Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:24.194495Z node 191 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/copy/myseq" took 44us result status StatusSuccess 2024-11-21T08:52:24.194554Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/copy/myseq" PathDescription { Self { Name: "myseq" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 10 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:24.194950Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Cache# 1 2024-11-21T08:52:24.194970Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] AllocationStart# 2 AllocationCount# 1 AllocationIncrement# 1 2024-11-21T08:52:24.205736Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Complete >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2024-11-21T08:52:12.853974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:12.853993Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:12.854006Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:12.856174Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:12.856309Z node 1 :TX_DATASHARD 
INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:12.856362Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:12.857038Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:12.863737Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:12.863862Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:12.863961Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:12.863970Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:12.863974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:12.863997Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:12.866378Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:12.866426Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:12.866456Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:12.866460Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:12.866463Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:12.866467Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:12.866529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.866544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.866562Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:12.866579Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:12.866607Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:12.866611Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:12.866614Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:12.866618Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:12.866620Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:12.866623Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:12.866626Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:12.872834Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.872850Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.872857Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:12.873193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: 
TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:12.873202Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:12.873215Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:12.873235Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:12.873244Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:12.873251Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:12.873257Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:12.873260Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:12.873263Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:12.873265Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:12.873307Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:12.873309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:12.873311Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:12.873313Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:12.873320Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:12.873322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:12.873324Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:12.873326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:12.873329Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:12.894288Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:12.894311Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:12.894316Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:12.894325Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:12.894339Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:12.894475Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.894485Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:12.894492Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 
2024-11-21T08:52:12.894511Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:12.894517Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:12.894564Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:12.894575Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.894579Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:12.894584Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:12.895107Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:12.895115Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:12.895158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.895162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:12.895166Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:12.895171Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:12.895174Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:12.895179Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:12.895182Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:12.895186Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.895188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:12.895191Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:12.895193Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:12.895224Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:12.895227Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.895229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:12.895231Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:12.895233Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:12.895240Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:12.895242Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:12.895244Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:12.895247Z node 1 
:TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:12.895255Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:12.895257Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:12.895259Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:12.895264Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:12.895267Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:12.895271Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... llReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2024-11-21T08:52:28.325098Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:28.325118Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:28.325207Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2024-11-21T08:52:28.325220Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2024-11-21T08:52:28.325223Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:28.325226Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:28.325229Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:28.325237Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:28.325246Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2024-11-21T08:52:28.325249Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2024-11-21T08:52:28.325251Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:28.325254Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:28.325256Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:28.325262Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:28.325331Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:28.325337Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, 
NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:28.325347Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:28.325349Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:28.325351Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:28.325353Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2024-11-21T08:52:28.325358Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 2024-11-21T08:52:28.325360Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:28.325363Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:28.325366Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2024-11-21T08:52:28.325372Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2024-11-21T08:52:28.325374Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:28.325376Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2024-11-21T08:52:28.337180Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:28.337215Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2024-11-21T08:52:28.337227Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T08:52:28.337265Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2024-11-21T08:52:28.338679Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T08:52:28.338694Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:28.338858Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4518:6448], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:28.338864Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:28.338872Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4517:6447], serverId# [3:4518:6448], sessionId# [0:0:0] 2024-11-21T08:52:28.338946Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2024-11-21T08:52:28.338951Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:28.338979Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:28.339115Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2024-11-21T08:52:28.339133Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T08:52:28.339138Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:28.339143Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:28.339151Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:28.339162Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:28.339175Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2024-11-21T08:52:28.339180Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T08:52:28.339186Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:28.339191Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:28.339194Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:28.339202Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:28.339292Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:28.339303Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:28.339315Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:28.339319Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:28.339323Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:28.339327Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2024-11-21T08:52:28.339335Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2024-11-21T08:52:28.339338Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:28.339342Z node 3 
:TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:28.339346Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2024-11-21T08:52:28.339353Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T08:52:28.339357Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:28.339361Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2024-11-21T08:52:28.345805Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T08:52:28.345828Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2024-11-21T08:52:28.346209Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:28.346223Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2024-11-21T08:52:28.346233Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2024-11-21T08:52:28.346260Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:28.346815Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvFollowerGcApplied .2024-11-21T08:52:28.347550Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4532:6461], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:28.347562Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:28.347570Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4531:6460], serverId# [3:4532:6461], sessionId# [0:0:0] 2024-11-21T08:52:28.347651Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4530:6459], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1713 LastUpdateTime: 1713 } >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly >> SubDomainWithReboots::Fake [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] [GOOD] >> SubDomainWithReboots::Create |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] 
Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:38.567480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:38.567498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:38.567501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:38.567505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:38.567517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:38.567519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:38.567526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:38.567601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:38.578243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:38.578271Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:38.581647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:38.581770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:38.581805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:38.584078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:38.584129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:38.584255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.584429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:38.584900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.585132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:38.585140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.585151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:38.585156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:38.585161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:38.585205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader 
for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:38.586573Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:38.605726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:38.605836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.605901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:38.605950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:38.605959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.606718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.606739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:38.606782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.606791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:38.606796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:38.606801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:38.607146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.607156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:38.607161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:38.607452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.607460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.607465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.607472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:38.608126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:38.608551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:38.608596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:38.608788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:38.608814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:38.608821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.608876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:38.608884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:38.608916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:38.608927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:38.609290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:38.609299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:38.609334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:38.609340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:38.609426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:38.609433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:38.609444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:38.609449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:38.609454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:38.609459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2024-11-21T08:51:38.609463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:38.609468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:38.609479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:38.609485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:38.609489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... nges ack message, operation: 1003:3, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:28.654601Z node 150 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 129 -> 240 2024-11-21T08:52:28.654760Z node 150 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.654778Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.654782Z node 150 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:28.654787Z node 150 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:52:28.654793Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:52:28.654847Z node 150 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.654855Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.654859Z node 150 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:28.654862Z node 150 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:28.654870Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:28.655011Z node 150 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655027Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655032Z node 150 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:28.655036Z node 150 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 
72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:52:28.655042Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:52:28.655666Z node 150 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655686Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655690Z node 150 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:28.655694Z node 150 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:52:28.655698Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 6 2024-11-21T08:52:28.655940Z node 150 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655959Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.655964Z node 150 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:28.655969Z node 150 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:52:28.655974Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:52:28.655988Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: true 2024-11-21T08:52:28.657566Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:28.657594Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:28.657659Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.657684Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:52:28.657695Z node 150 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2024-11-21T08:52:28.657712Z node 150 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 5/5 2024-11-21T08:52:28.657716Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T08:52:28.657721Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: true 2024-11-21T08:52:28.657734Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: 
[150:460:2425] message: TxId: 1003 2024-11-21T08:52:28.657739Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T08:52:28.657745Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:28.657751Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:28.657763Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:52:28.657768Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:28.657771Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:28.657776Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:28.657779Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:28.657781Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:28.657785Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:52:28.657788Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:52:28.657790Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:52:28.657804Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:28.657807Z node 150 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:4 2024-11-21T08:52:28.657811Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:4 2024-11-21T08:52:28.657820Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:52:28.657984Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.658002Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.658365Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.658385Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:28.658882Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:28.658893Z node 150 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [150:657:2579] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:28.658997Z node 150 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:28.659075Z node 150 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" 
took 86us result status StatusSuccess 2024-11-21T08:52:28.659205Z node 150 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateScan SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Fake [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableWithVirtualTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is 
[1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:41.570795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:41.570810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.570813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:41.570816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:41.570826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:41.570828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:41.570834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.570881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:41.578216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:41.578232Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.580242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:41.580362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:41.580390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:41.583193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:41.583270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:41.583399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.583589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.584287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.584526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.584537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.584547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:41.584553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.584558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:41.584590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:41.585840Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.603343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:41.603412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.603462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:41.603507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:41.603515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.606345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.606376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:41.606416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.606426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:41.606430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:41.606434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:41.607009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.607024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:41.607030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:41.607380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.607390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.607394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.607400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.607986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:41.608383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:41.608429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:41.608589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.608615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:41.608622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.608680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:41.608687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.608715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:41.608727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.609099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.609110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.609138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.609142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:41.609202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.609208Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:41.609217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:41.609221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.609225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:41.609230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.609234Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:41.609237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:41.609246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:41.609251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:41.609254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... tablet: 72075186233409546, partId: 2 2024-11-21T08:52:29.975566Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:29.975569Z node 148 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:29.975574Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:29.975580Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:29.975583Z node 148 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:29.975585Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:29.975589Z node 148 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T08:52:29.975637Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975643Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975645Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:29.975648Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:29.975650Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:29.975698Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975704Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 
PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975706Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:29.975709Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T08:52:29.975712Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:52:29.975763Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975771Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.975775Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:29.975778Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T08:52:29.975782Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:52:29.976184Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.976195Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.976198Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:29.976201Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:52:29.976219Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:52:29.976232Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:52:29.976885Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:29.977544Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.977571Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:29.977638Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:29.977645Z node 148 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T08:52:29.977656Z node 148 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T08:52:29.977659Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:29.977664Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:52:29.977676Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [148:465:2430] message: TxId: 1003 2024-11-21T08:52:29.977681Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:52:29.977686Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:29.977690Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:29.977701Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:29.977705Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:29.977708Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:29.977712Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:52:29.977715Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:29.977718Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:29.977731Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:29.977734Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:52:29.977737Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:52:29.977745Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:52:29.977812Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.977849Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.977884Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.978195Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:29.978576Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:29.978587Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [148:665:2587] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:29.978683Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:29.978732Z node 148 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 56us result status StatusSuccess 2024-11-21T08:52:29.978837Z node 148 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots] [GOOD] >> SubDomainWithReboots::Delete [GOOD] >> Yq_1::Basic_Null >> Yq_1::CreateConnection_With_Existing_Name >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> ForceDropWithReboots::Fake [GOOD] >> Yq_1::ListConnections >> PrivateApi::PingTask |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> Yq_1::DeleteConnections |87.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |87.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::Fake [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |87.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |87.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Delete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:15.698490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:15.698518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:15.698522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:15.698528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:15.698544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:15.698547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:15.698556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:15.698626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:15.711183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:15.711210Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:15.716266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:15.716396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:15.716430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:15.734683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:15.734837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:15.734970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:52:15.738591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:15.744683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.745083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:15.745098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.745122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:15.745131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:15.745137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:15.745190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:15.748342Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:15.772435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:15.772523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.772610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:15.772685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:15.772694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.775806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:15.775854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:15.775946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.775960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:15.775965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2024-11-21T08:52:15.775972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:15.776695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.776714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:15.776721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:15.777121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.777133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.777140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.777150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:15.777877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:15.778312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:15.778376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:15.778586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:15.778614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:15.778622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.778681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:15.778689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.778724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:15.778737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:15.779148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:15.779162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:15.779216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.779223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:15.779334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.779341Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:15.779356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:15.779362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:15.779369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:15.779376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:15.779382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:15.779386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:15.779401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:15.779408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:15.779412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 2 2024-11-21T08:52:31.225298Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:52:31.225321Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:31.225325Z node 63 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:52:31.225331Z node 63 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:31.225333Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:31.225337Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:52:31.225339Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:31.225342Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:31.225345Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:31.225363Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:31.225367Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T08:52:31.225369Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 7 
2024-11-21T08:52:31.225371Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:52:31.225485Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.225493Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.225498Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:31.225500Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T08:52:31.225503Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:31.225722Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.225731Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.225734Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:31.225736Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:31.225739Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:31.225746Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:52:31.225749Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [63:424:2391] 2024-11-21T08:52:31.226121Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:31.226137Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:31.226304Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.226348Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:52:31.226637Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.226693Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:31.226801Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T08:52:31.227175Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:31.227218Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:31.227333Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:31.227338Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:31.227356Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:31.227453Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:31.227459Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:31.227470Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:31.227584Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.227643Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:31.227649Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [63:429:2396] 2024-11-21T08:52:31.227896Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:31.227905Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:52:31.228345Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:31.228358Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:31.228386Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:31.228393Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:52:31.228438Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:31.228446Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T08:52:31.228491Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:31.228516Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 33us result status StatusPathDoesNotExist 2024-11-21T08:52:31.228537Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:31.228565Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:31.228579Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 14us result status StatusSuccess 2024-11-21T08:52:31.228612Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] 
recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:41.623354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:41.623372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.623375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:41.623378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:41.623389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:41.623391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:41.623397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:41.623454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:41.633870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:41.633888Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.636118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:41.636232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:41.636260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:41.639116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:41.639187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:41.639311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.639532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.640153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.640370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.640377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.640385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2024-11-21T08:51:41.640389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.640394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:41.640426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:41.641721Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:41.659900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:41.659967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.660019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:41.660064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:41.660073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.660758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.660781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:41.660814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.660822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:41.660826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:41.660831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:41.661233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.661242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:41.661247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:41.661632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:51:41.661642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.661647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.661653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.662307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:41.662697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:41.662735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:41.662901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:41.662923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:41.662929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.662985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:41.662992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:41.663018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:41.663030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:41.663422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:41.663430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:41.663455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:41.663459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:41.663518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:41.663524Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:41.663534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:41.663538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.663544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:41.663549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:41.663553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:41.663558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:41.663567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:41.663573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:41.663577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 163921Z node 159 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.163934Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.163943Z node 159 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:31.163949Z node 159 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:52:31.163956Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:52:31.164192Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 271 } } 2024-11-21T08:52:31.164226Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:52:31.164248Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 271 } } 2024-11-21T08:52:31.164259Z node 159 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 271 } } 2024-11-21T08:52:31.164422Z node 159 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.164433Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.164437Z node 159 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:31.164441Z node 159 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:31.164446Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:31.164563Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 682899802379 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:31.164571Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T08:52:31.164583Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 682899802379 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:31.164589Z node 159 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:31.164596Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 682899802379 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:31.164609Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.164613Z node 159 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.164617Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:31.164623Z node 159 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T08:52:31.164751Z node 159 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.164763Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.164766Z node 159 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:31.164770Z node 159 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 
72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:52:31.164774Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T08:52:31.164785Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:52:31.166822Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.166938Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.167373Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.167406Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.167493Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:31.167541Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T08:52:31.167548Z node 159 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T08:52:31.167565Z node 159 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T08:52:31.167569Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:31.167576Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:52:31.167593Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [159:399:2374] message: TxId: 1003 2024-11-21T08:52:31.167599Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:31.167606Z node 159 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:31.167611Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:31.167626Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:31.167631Z node 159 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:31.167634Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:31.167650Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:31.167655Z node 159 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:31.167658Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:31.167667Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:31.168946Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:31.168964Z node 159 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [159:598:2530] TestWaitNotification: OK 
eventTxId 1003 2024-11-21T08:52:31.169965Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:31.170040Z node 159 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 90us result status StatusSuccess 2024-11-21T08:52:31.170145Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, b) | 2 6 86b (2, NULL) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, b) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, NULL) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, baaaa) | 2 6 86b (2, aaa) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, baaaa) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, aaa) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 
DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0,1), [1,2), [2,4), [4,5), [5,7), [7,9), [9,9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccccd) | 1 1 41b (ccccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 43b (ccccccd) | 1 1 43b (ccccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page 
Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccccd) | 1 1 40b (cccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 42b (cccccd) | 1 1 42b (cccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 39b (ccccd) | 1 1 39b (ccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccd) | 1 1 41b (ccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 38b (cccd) | 1 1 38b (cccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccd) | 1 1 40b (cccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 35b (d) | 1 1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 35b (d) | 1 1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | ... 
et 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 
GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | 
| + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 
1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> Yq_1::DescribeJob >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] Test command err: 2024-11-21T08:52:13.043779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:13.043863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:13.043872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6081, node 1 TClient is connected to server localhost:5223 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:14.005781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:14.005829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:14.005841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22074, node 2 TClient is connected to server localhost:18531 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2024-11-21T08:52:18.878285Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2763:2391], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.878789Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:864:2138], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.878856Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.879040Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.879065Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.879285Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.879455Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1487:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.879731Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2760:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.879792Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.880004Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.880124Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.880181Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2769:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.880401Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.880783Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2767:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.880858Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1489:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.880885Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:1491:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.880961Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881110Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881126Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881135Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881146Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881238Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881249Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.881256Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.882105Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2765:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.882350Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.882603Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:18.976226Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:19.079256Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:19.084115Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:19.139288Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 32676, node 3 TClient is connected to server localhost:5991 2024-11-21T08:52:19.176354Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:19.176376Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:19.176380Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:19.176522Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:25.356573Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2728:2387], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.356905Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.357226Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.357594Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2730:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.357645Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2734:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.357850Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2732:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.358033Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358071Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2736:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.358100Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1465:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .met ... kupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.358530Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358546Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358562Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358771Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358792Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358803Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.358854Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.359420Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:1469:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.359646Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.359825Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2725:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:25.359862Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:25.360135Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:25.360266Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:25.444597Z node 12 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:25.543808Z node 12 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:25.547252Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:25.592844Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27182, node 12 TClient is connected to server localhost:15137 2024-11-21T08:52:25.621139Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:25.621166Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:25.621171Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:25.621338Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:31.699672Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [21:2752:2388], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.699850Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.699994Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.700531Z node 25 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [25:1900:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.700613Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [22:2749:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.700655Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [23:2754:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.700856Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.700875Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [26:1902:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.701199Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:1908:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.701319Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701336Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701362Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701371Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701429Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:1906:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.701544Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701556Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701571Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701617Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701695Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701709Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.701845Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.702078Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:1904:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.702095Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.702101Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:31.702226Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:2756:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:31.702427Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:31.702539Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:31.783262Z node 21 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:31.884234Z node 21 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:31.888059Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:31.935182Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 22865, node 21 TClient is connected to server localhost:8955 2024-11-21T08:52:31.964694Z node 21 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:31.964716Z node 21 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:31.964721Z node 21 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:31.964792Z node 21 :NET_CLASSIFIER ERROR: got bad distributable configuration >> Yq_1::DescribeConnection |87.8%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.8%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> VDiskBalancing::TestRandom_Block42 [GOOD] >> TTxDataShardMiniKQL::ReadSpecialColumns >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpTx::CommitRoTx_TLI ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 6067350500867321037 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2024-11-21T08:51:58.407003Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2024-11-21T08:51:58.440062Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] 
Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2024-11-21T08:51:58.677183Z 3 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T08:51:58.677235Z 6 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T08:51:58.677261Z 5 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7635:15] ServerId# [1:7644:1087] TabletId# 72057594037932033 PipeClientId# [5:7635:15] 2024-11-21T08:51:58.677279Z 4 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T08:51:58.677303Z 2 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T08:51:58.677323Z 7 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 
SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# 
[1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Status ... 
6 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Stop node 3 2024-11-21T08:52:25.906528Z 1 00h28m30.809429s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 4 2024-11-21T08:52:25.961669Z 1 00h28m40.809710s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 7 2024-11-21T08:52:26.234282Z 1 00h29m10.813584s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 
1 2024-11-21T08:52:26.307014Z 1 00h29m20.814096s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 1 2024-11-21T08:52:26.381185Z 1 00h29m40.815632s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Starting nodes Start compaction 1 Start checking >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> IndexBuildTestReboots::DropIndexWithDataColumns [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> DataShardTxOrder::RandomPoints_DelayData >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> KqpTx::CommitRequired >> SubDomainWithReboots::CreateTabletInsideWithStoragePools [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> KqpTx::CommitRoTx_TLI [GOOD] >> KqpTx::CommitStats >> ScrubFast::SingleBlob [GOOD] >> SnapshotTesting::Compaction >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::DropIndexWithDataColumns [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:07.828058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:07.828102Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:07.828108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:07.828113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:07.828129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:07.828133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:07.828144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:07.828259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:07.839765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:07.839797Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:07.842601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:07.842725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:07.842758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:07.845356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:07.845448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:07.845544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.845700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:07.846334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.846586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:07.846592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:07.846597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:07.846631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:07.847802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:07.872914Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:07.873000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.873054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:07.873097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:07.873106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.873770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.873797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:07.873839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.873848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:07.873853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:07.873858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:07.874232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.874243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:07.874248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:07.874548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.874558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.874563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.874570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.875193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:07.875563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:07.875611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:07.875813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:07.875840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:07.875847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.875906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:07.875913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:07.875937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:07.875950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:07.876349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:07.876360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:07.876393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:07.876398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:07.876464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:07.876471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:07.876481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:07.876485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.876508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:07.876514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:07.876519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:07.876523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:07.876533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:07.876539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, 
subscribers: 0 2024-11-21T08:52:07.876543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 65062Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:35.065080Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:52:35.065229Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:35.065238Z node 107 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:35.065306Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:52:35.065329Z node 107 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 3/3 2024-11-21T08:52:35.065332Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:35.065337Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:52:35.065341Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:35.065347Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:35.065351Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:35.065371Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:35.065375Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:35.065378Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:35.065383Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:35.065386Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:35.065389Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:35.065411Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:52:35.065937Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.065956Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.065961Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.066786Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.067182Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 338 RawX2: 459561502995 } TabletId: 72075186233409546 State: 4 2024-11-21T08:52:35.067200Z node 107 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: 
Offline, at schemeshard: 72057594046678944 2024-11-21T08:52:35.067603Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:35.067684Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:52:35.068263Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:35.068323Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:52:35.068410Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:35.068418Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:35.068430Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:35.068435Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:35.068440Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:35.069126Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:35.069140Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T08:52:35.069233Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:52:35.069282Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:35.069288Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:35.069332Z node 107 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069345Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:35.069347Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [107:563:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:35.069428Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069465Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 50us result status StatusSuccess 2024-11-21T08:52:35.069566Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069628Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069649Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0" took 24us result status StatusPathDoesNotExist 2024-11-21T08:52:35.069669Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/UserDefinedIndexByValue0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069713Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:35.069727Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" took 28us result status StatusPathDoesNotExist 2024-11-21T08:52:35.069744Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:17.110776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:17.110801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:17.110806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:17.110810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:17.110823Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:17.110827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:17.110837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:17.110912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:17.119600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:17.119619Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:17.121560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:17.121647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:17.121669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:17.124088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:17.124172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:17.124319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:17.124500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:17.125200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:17.125482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:17.125494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:17.125514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:17.125521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:17.125527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:17.125561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:17.126986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:17.139328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:17.139406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:52:17.139463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:17.139515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:17.139521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.140176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:17.140197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:17.140261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.140270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:17.140273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:17.140276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:17.140736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.140748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:17.140752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:17.141063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.141069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.141073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:17.141078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:17.141602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:17.142069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:17.142135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:17.142340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:17.142371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:17.142379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:17.142436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:17.142444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:17.142473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:17.142485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:17.142931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:17.142942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:17.142979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:17.142982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:17.143079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:17.143085Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:17.143094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:17.143097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:17.143101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:17.143104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:17.143108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:17.143111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:17.143123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:17.143129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:17.143132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
AT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:52:35.463969Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.463991Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.463996Z node 73 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:35.464002Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:52:35.464010Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:35.464170Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.464180Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.464184Z node 73 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:35.464187Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:52:35.464191Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:52:35.464219Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:52:35.464624Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 341 } } 2024-11-21T08:52:35.464640Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T08:52:35.464662Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 341 } } 2024-11-21T08:52:35.464676Z node 73 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 341 } } 2024-11-21T08:52:35.464797Z node 73 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:35.464803Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T08:52:35.464816Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:35.464822Z node 73 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:35.464829Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:35.464841Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:35.464845Z node 73 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:35.464849Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:52:35.464857Z node 73 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T08:52:35.465614Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.465690Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:35.467031Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:35.467090Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:35.467186Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:35.467197Z node 73 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:52:35.467216Z node 73 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:35.467220Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:35.467226Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:52:35.467242Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [73:423:2392] message: TxId: 1003 2024-11-21T08:52:35.467249Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:35.467255Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:35.467259Z node 73 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:35.467286Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:35.467850Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:35.467878Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [73:496:2449] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:35.468026Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:35.468074Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 58us result status StatusSuccess 2024-11-21T08:52:35.468190Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:35.468287Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:35.468342Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 59us result status StatusSuccess 2024-11-21T08:52:35.468399Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { 
Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2024-11-21T08:52:31.894566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652807025952331:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:31.895320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:31.949999198 456661 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:31.950048959 456661 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:31.954041Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9196: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9196 } ] 2024-11-21T08:52:31.954091Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9196: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:9196 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ba3/r3tmp/tmp14ytOG/pdisk_1.dat 2024-11-21T08:52:32.229811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:32.229945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652811320920017:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 9196, node 1 TClient is connected to server localhost:5154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:32.332951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:32.332966Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:32.333183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:32.333193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:32.333194Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:32.333244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:32.577661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.578529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.578550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.579163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:32.579215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:32.579223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:32.579685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.579702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:32.579756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:32.580133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.581406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152627, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.581422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:32.581489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:32.582020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.582080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.582095Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:32.582107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:32.582119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:32.582138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:32.582604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:32.582627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:32.582632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:32.582647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:32.615816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:32.615841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:32.617288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:32.954873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.954938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.955636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:32.955697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.955753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.955774Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.955895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.955906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.955911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:32.955943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.955949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.955951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:32.956626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179153005, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.956637Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179153005, at schemeshard: 72057594046644480 2024-11-21T08:52:32.956653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:32.957021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.957071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.957086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:32.957104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:32.957118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:32.957136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:32.957236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.957247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.957250Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:32.957271Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 7205759404 ... ardState{ TabletId: 72075186224037893, Last Key , Ranges: [#0: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 } 2024-11-21T08:52:35.152277Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. effective maxinflight 1 sorted 1 2024-11-21T08:52:35.152278Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. BEFORE: 1.0 2024-11-21T08:52:35.152289Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/connections, ranges: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T08:52:35.152299Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. AFTER: 0.1 2024-11-21T08:52:35.152304Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T08:52:35.152320Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152326Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T08:52:35.152328Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T08:52:35.152475Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2024-11-21T08:52:35.152480Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. Taken 0 locks 2024-11-21T08:52:35.152481Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:35.152484Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T08:52:35.152486Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152488Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:35.152489Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:35.152491Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:35.152492Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. returned 0 rows; processed 0 rows 2024-11-21T08:52:35.152501Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. dropping batch for read #0 2024-11-21T08:52:35.152502Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. effective maxinflight 1 sorted 1 2024-11-21T08:52:35.152503Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:35.152504Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439652824596903341:2467]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:35.152514Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152516Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.152518Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T08:52:35.152519Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:35.152525Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Finish input channelId: 1, from: [4:7439652824596903341:2467] 2024-11-21T08:52:35.152536Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2024-11-21T08:52:35.152538Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152539Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152540Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.152541Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. Tasks execution finished 2024-11-21T08:52:35.152543Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903341:2467], TxId: 281474976715685, task: 1. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:35.152556Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152560Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. pass away 2024-11-21T08:52:35.152563Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.152567Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:35.152570Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:35.152584Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715685;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:35.152601Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.152612Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.152615Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:35.152616Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished 2024-11-21T08:52:35.152618Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652824596903343:2468], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd6yrp5p2tvac4tk006ytzre. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NjVmZjk4YWUtNzVmMzMwZjYtYWZjYTg3YmItMjVjYjcxYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:35.152630Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. pass away 2024-11-21T08:52:35.152644Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715685;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:35.320127Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> KqpTx::CommitRequired [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpTx::CommitRoTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2024-11-21T08:52:31.925857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652807363144292:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:31.925920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:31.956040054 456952 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:31.956077532 456952 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003b73/r3tmp/tmpsX2dhM/pdisk_1.dat 2024-11-21T08:52:32.232276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652811658111887:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:32.232311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:32.252285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:32.252312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:32.255520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31272, node 1 TClient is connected to server localhost:21126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:32.357378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:32.357416Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:32.357671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:32.357681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:32.357683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:32.357741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:32.608952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.610053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.610072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.610750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:32.610807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:32.610817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:32.611177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.611189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:32.611394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:32.611461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.612387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152655, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.612414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:32.612476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:32.612982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.613047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.613068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:32.613080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:32.613089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:32.613102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:32.613612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:32.613630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:32.613635Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:32.613651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:32.958734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.958787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.959440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T08:52:32.959486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.959531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.959551Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.959683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.959693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.959696Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:32.959731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.959739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.959741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:32.960304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179153005, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.960316Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179153005, at schemeshard: 72057594046644480 2024-11-21T08:52:32.960337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:32.960804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.960845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.960858Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:32.960884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:32.960895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:32.960907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:32.960990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.961007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.961014Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:32.961039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.961046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.961048Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:52:32.961053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T08:52:32.961510Z node 1 :YQ_CONTROL_PLANE_STO ... n/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:35.473363Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. Send EvRead to shardId: 72075186224037899, tablePath: Root/yq/connections, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1732179155413), lockTxId = 281474976715699, lockNodeId = 4 2024-11-21T08:52:35.473374Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. AFTER: 0.1 2024-11-21T08:52:35.473375Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T08:52:35.473387Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.473393Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T08:52:35.473406Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T08:52:35.474008Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715699 DataShard: 72075186224037899 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 15, BrokenTxLocks= 2024-11-21T08:52:35.474017Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. Taken 1 locks 2024-11-21T08:52:35.474019Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:35.474023Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T08:52:35.474027Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2024-11-21T08:52:35.474030Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:35.474033Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:35.474041Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 1 freeSpace: 8388557 2024-11-21T08:52:35.474044Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. returned 1 rows; processed 1 rows 2024-11-21T08:52:35.474057Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. dropping batch for read #0 2024-11-21T08:52:35.474064Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:35.474065Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:35.474067Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439652821932371304:2534]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:35.474136Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.474143Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.474148Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:35.474151Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2024-11-21T08:52:35.474165Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Finish input channelId: 1, from: [4:7439652821932371304:2534] 2024-11-21T08:52:35.474180Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 271646922 2024-11-21T08:52:35.474181Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2024-11-21T08:52:35.474185Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:35.474188Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.474189Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. Tasks execution finished 2024-11-21T08:52:35.474191Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371304:2534], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:35.474221Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. pass away 2024-11-21T08:52:35.474232Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:35.474239Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.474243Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:35.474244Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:35.474252Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715704;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:35.474278Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:35.474283Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:35.474285Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:35.474286Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished 2024-11-21T08:52:35.474305Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652821932371305:2535], TxId: 281474976715704, task: 2. Ctx: { TraceId : 01jd6yrpfv7y8vs6fdb7b204ws. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:35.474323Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. pass away 2024-11-21T08:52:35.474346Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715704;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:35.474659Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715705. Ctx: { TraceId: 01jd6yrpfv7y8vs6fdb7b204ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTA5Mjg3ZTYtYjFkN2FhZDItNDZiMTViNTAtNTZlMmUxOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2024-11-21T08:52:31.898102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652807477033338:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:31.898153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:31.930443960 456716 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:31.930477383 456716 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:31.937893Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26484: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26484 2024-11-21T08:52:31.937931Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26484: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26484 } ] 2024-11-21T08:52:32.190506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:32.190629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652811772000806:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003b8f/r3tmp/tmpg8jAoV/pdisk_1.dat 2024-11-21T08:52:32.249425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652811772000806:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 26484, node 1 TClient is connected to server localhost:18742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:32.288229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:32.288250Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:32.288404Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:32.288413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:32.288415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:32.288452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:32.587258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.588362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.588383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.588779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:32.588824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:32.588831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:32.589187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.589192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:32.589315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:32.589483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.590210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152634, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.590220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:32.590262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:32.590571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.590610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.590620Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:32.590632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:32.590641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:32.590652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:32.591099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:32.591118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:32.591127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:32.591142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:32.668595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:32.668621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:32.670451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:32.934056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.934123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.934962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:32.935031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.935104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.935129Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.935269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.935285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.935290Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:32.935338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.935341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.935343Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:32.936255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152984, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.936271Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179152984, at schemeshard: 72057594046644480 2024-11-21T08:52:32.936311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:32.936789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.936832Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2024-11-21T08:52:32.936843Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:32.936845Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:32.936859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.936875Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:32.936891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:32.936904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:32.936920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:32.937161Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create ... 
ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515621Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515630Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515635Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515642Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515647Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515653Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515660Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515664Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515671Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515678Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515686Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515690Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515698Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515702Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515709Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515716Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515720Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515727Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515731Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515738Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515744Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515754Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515759Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515765Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515775Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515779Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515785Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515795Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515798Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515808Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515811Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515821Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515823Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515833Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515835Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515845Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515848Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515857Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515860Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515870Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515875Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515886Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515890Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515897Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515901Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515907Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515917Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515919Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515929Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515932Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515942Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515945Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515954Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515959Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515963Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515968Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515978Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515982Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515987Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515995Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.515999Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516008Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516013Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516018Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516026Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516031Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516036Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516044Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516048Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516053Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516060Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516065Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516070Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516076Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516084Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516088Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516097Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516100Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516110Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516112Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516123Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516126Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516137Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516139Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516150Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516152Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516162Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516165Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516175Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516178Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516188Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516191Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516200Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516485Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516497Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516522Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516524Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516532Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516541Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516551Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516558Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516566Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516578Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516587Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516594Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516598Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516606Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516610Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516617Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516627Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516631Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516640Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516643Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516654Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516656Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516665Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516668Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516679Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516681Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516692Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516696Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516702Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516707Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516711Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516721Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516724Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516734Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516736Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516746Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:35.516748Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> IndexBuildTestReboots::DropIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2024-11-21T08:52:34.983047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:34.983071Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:34.983089Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:34.992866Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:34.993116Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:34.993207Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:34.994368Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:35.003762Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:35.003950Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:35.004118Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:35.004132Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:35.004139Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:35.004179Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:35.007575Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:35.007646Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:35.007697Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:35.007703Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:35.007707Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: 
WaitScheme 2024-11-21T08:52:35.007712Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:35.007814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.007832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.007859Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:35.007881Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:35.007932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:35.007938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:35.007945Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:35.007951Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:35.007954Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:35.007958Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:35.007963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:35.018895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.018923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.018934Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:35.019281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:35.019289Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:35.019311Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:35.019337Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:35.019346Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:35.019353Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:35.019360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:35.019363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:35.019367Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:35.019369Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:35.019425Z node 1 :TX_DATASHARD TRACE: Execution 
status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:35.019427Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:35.019429Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:35.019431Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:35.019440Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:35.019442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:35.019444Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:35.019446Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:35.019449Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:35.041024Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:35.041054Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:35.041061Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:35.041075Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:35.041093Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:35.041254Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.041273Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.041281Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:35.041302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:35.041307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:35.041357Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:35.041366Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:35.041371Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:35.041376Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:35.042171Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:35.042186Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:35.042253Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.042258Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.042266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:35.042273Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:35.042277Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:35.042284Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:35.042289Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:35.042296Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:35.042300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:35.042304Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:35.042308Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:35.042358Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:35.042363Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:35.042366Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:35.042369Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:35.042373Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:35.042383Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:35.042386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:35.042389Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:35.042398Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:35.042411Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:35.042415Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:35.042418Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:35.042423Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:35.042426Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:5 ... 
: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:36.044513Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2024-11-21T08:52:36.044516Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:36.045018Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T08:52:36.045025Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:36.045055Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:290:2276], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:36.045057Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:36.045061Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:289:2275], serverId# [3:290:2276], sessionId# [0:0:0] 2024-11-21T08:52:36.045078Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2024-11-21T08:52:36.045663Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:36.045670Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:36.045716Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2024-11-21T08:52:36.045723Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T08:52:36.045726Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:36.045728Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:36.045730Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:36.045733Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:36.045738Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2024-11-21T08:52:36.045742Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T08:52:36.045744Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:36.045746Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:36.045748Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:36.045843Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:36.045849Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:36.045853Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T08:52:36.045855Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:36.045857Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:36.045859Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2024-11-21T08:52:36.045862Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:52:36.045868Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2024-11-21T08:52:36.045870Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:36.045871Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:36.045873Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2024-11-21T08:52:36.045877Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T08:52:36.045879Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 
2024-11-21T08:52:36.045881Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2024-11-21T08:52:36.045885Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:36.045888Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2024-11-21T08:52:36.045891Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpTx::CommitStats [GOOD] |87.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 17417, MsgBus: 1243 2024-11-21T08:52:35.061882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652824947302469:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:35.061902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dbf/r3tmp/tmpvx41fn/pdisk_1.dat 2024-11-21T08:52:35.114864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17417, node 1 2024-11-21T08:52:35.124176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:35.124188Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:35.124190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:35.124237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1243 TClient is connected to server localhost:1243 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:35.163427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:35.163451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:35.164580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:35.193122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.202074Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:35.218308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.246610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:35.265731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.277021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.371564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652824947304034:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.371594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.399184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.406261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.414246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.421608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.435788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.450131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.468830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652824947304539:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.468869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.468977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652824947304544:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.469978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:35.477833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652824947304546:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 5961, MsgBus: 14241 2024-11-21T08:52:35.926056Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652823581617938:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:35.926138Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dbf/r3tmp/tmp2amhPH/pdisk_1.dat 2024-11-21T08:52:35.936426Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5961, node 2 2024-11-21T08:52:35.946712Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:35.946726Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:35.946728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:35.946766Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14241 TClient is connected to server localhost:14241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:36.027762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:36.027805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:36.028338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.028876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:52:36.030223Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.035222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.095623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:52:36.121171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.134394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.257721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652827876586779:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.257749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.262691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.275097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.282648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.289050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.296371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.303344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.312008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652827876587283:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.312033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.312076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652827876587288:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.312715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:36.316467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652827876587290:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::DropIndex [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:09.133010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:09.133034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:09.133039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:09.133043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:09.133058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:09.133063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:09.133071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:09.133148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:09.144535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:09.144562Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:09.146660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:09.146769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:09.146799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:09.149174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:09.149248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:09.149356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.149538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T08:52:09.150137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.150406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:09.150416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.150428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:09.150436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:09.150442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:09.150492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:09.151637Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:09.169440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:09.169529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.169595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:09.169638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:09.169645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.170607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.170650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:09.170722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.170739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:09.170749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:09.170758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:09.171275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.171289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:09.171294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:09.171717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.171728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.171734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.171742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.172360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:09.172739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:09.172786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:09.172915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.172936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:09.172942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.172984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:09.172989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.173012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:09.173020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:09.173404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:09.173415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:09.173456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.173459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:09.173521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.173526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:09.173535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:09.173538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.173541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:09.173545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.173547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:09.173550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:09.173559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:09.173563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:09.173565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 41867Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:52:36.341880Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:52:36.341997Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:52:36.342002Z node 107 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:36.342047Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:52:36.342063Z node 107 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 3/3 2024-11-21T08:52:36.342065Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:36.342068Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:52:36.342070Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:52:36.342074Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:36.342076Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:36.342088Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:36.342091Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:52:36.342093Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:52:36.342095Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:36.342097Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:52:36.342099Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:52:36.342102Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:52:36.342510Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:36.342521Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:36.342524Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:36.343061Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:36.343432Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 338 RawX2: 459561502995 } TabletId: 72075186233409546 State: 4 2024-11-21T08:52:36.343449Z node 107 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:52:36.343715Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:36.343769Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:52:36.344274Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:36.344321Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:52:36.344383Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:36.344386Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:36.344395Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:36.344400Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:36.344405Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:36.344837Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:36.344848Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T08:52:36.344931Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath 
for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:52:36.344982Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:36.344987Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:36.345039Z node 107 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345053Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:36.345058Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [107:563:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:36.345119Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345163Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 55us result status StatusSuccess 2024-11-21T08:52:36.345262Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345315Z node 
107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345331Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0" took 17us result status StatusPathDoesNotExist 2024-11-21T08:52:36.345347Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/UserDefinedIndexByValue0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345375Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:52:36.345383Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" took 10us result status StatusPathDoesNotExist 2024-11-21T08:52:36.345394Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |87.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> KqpTx::CommitRoTx [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpTx::LocksAbortOnCommit >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4950, MsgBus: 25558 2024-11-21T08:52:35.264629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652821707613680:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d67/r3tmp/tmpIZ3yG3/pdisk_1.dat 2024-11-21T08:52:35.287508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:35.307535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4950, node 1 2024-11-21T08:52:35.323099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:35.323111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:35.323113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:35.323142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25558 TClient is connected to server localhost:25558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:35.385829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:35.385852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:35.386644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.386889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:52:35.391983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.406603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:35.425141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:35.436516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.608425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652821707615053:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.608489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.613065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.624233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.680305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.694468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.701562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.715662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.730924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652821707615567:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.730947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652821707615572:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.730948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.731533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:35.735237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652821707615574:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:35.962980Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2024-11-21T08:52:35.963863Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652821707616006:2453], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7439652821707615916:2453]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 2]`. ShardID=72075186224037888, Sink=[1:7439652821707616006:2453].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:35.965236Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652821707615999:2453], SessionActorId: [1:7439652821707615916:2453], Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439652821707615916:2453]. isRollback=0 2024-11-21T08:52:35.965300Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZmNWQyOGMtMWVhZDkwZmItNjE3MWFhMDgtMjU2N2E4ZWM=, ActorId: [1:7439652821707615916:2453], ActorState: ExecuteState, TraceId: 01jd6yrpzk1y485ge1v8eahjmr, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439652821707616000:2453] from: [1:7439652821707615999:2453] 2024-11-21T08:52:35.965366Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652821707616000:2453] TxId: 281474976715673. Ctx: { TraceId: 01jd6yrpzk1y485ge1v8eahjmr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZmNWQyOGMtMWVhZDkwZmItNjE3MWFhMDgtMjU2N2E4ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:35.966363Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZmNWQyOGMtMWVhZDkwZmItNjE3MWFhMDgtMjU2N2E4ZWM=, ActorId: [1:7439652821707615916:2453], ActorState: ExecuteState, TraceId: 01jd6yrpzk1y485ge1v8eahjmr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6496, MsgBus: 61855 2024-11-21T08:52:36.133248Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652826509122783:2142];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d67/r3tmp/tmpnV1ga8/pdisk_1.dat 2024-11-21T08:52:36.142215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:36.146749Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6496, node 2 2024-11-21T08:52:36.171172Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:36.171193Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:36.171195Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:36.171244Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61855 TClient is connected to server localhost:61855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:36.234804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:36.234848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:36.235878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:36.236545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.250922Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:36.260981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:36.279642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.299955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.313812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.472283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652826509124216:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.472323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.476575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.486394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.499646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.555479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.563973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.577201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.596816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652826509124722:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.596876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.596985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652826509124727:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.597819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:36.603678Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652826509124729:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:36.855371Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTIzNzQ4MmYtMzllNDZiNmUtMmQ1YmZiNzktNWQ4MmJhNDU=, ActorId: [2:7439652826509125023:2454], ActorState: ExecuteState, TraceId: 01jd6yrqvc5csxh49qw6tzkd8s, Create QueryResponse for error on request, msg: >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> KqpLocks::TwoPhaseTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 25240, MsgBus: 10225 2024-11-21T08:52:35.450619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652823325063894:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:35.450640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d47/r3tmp/tmpcTVzJT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25240, node 1 2024-11-21T08:52:35.521552Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:35.522552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:35.522561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:35.522563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:35.522597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10225 2024-11-21T08:52:35.551930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:35.551957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:35.553024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:35.587476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:35.589766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:35.599676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.616904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.636923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.693450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.801012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652823325065463:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.801095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.826679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.836067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.848667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.863128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.918478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.932889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.950711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652823325065978:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.950743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.950758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652823325065983:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.951461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:35.953798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652823325065985:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 63894, MsgBus: 7562 2024-11-21T08:52:36.355729Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652828569635070:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:36.355895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d47/r3tmp/tmpphwaZG/pdisk_1.dat 2024-11-21T08:52:36.370151Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63894, node 2 2024-11-21T08:52:36.376771Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:36.376786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:36.376788Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:36.376831Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7562 TClient is connected to server localhost:7562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:36.456627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:36.456659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:36.457002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.457659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:36.466452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.484821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:36.505162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.515354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.699850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652828569636631:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.699892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.704553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.713185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.769475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.780951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.794358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.807911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.823203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652828569637136:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.823229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.823229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652828569637141:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.823894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:36.827124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652828569637143:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSinkLocks::TInvalidate >> PrivateApi::Nodes [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared >> SubDomainWithReboots::RootWithStoragePools [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2024-11-21T08:52:31.862046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652808257842337:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:31.862596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:31.890162400 456387 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:31.890202589 456387 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:31.892510Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14068: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14068 } ] 2024-11-21T08:52:31.893391Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14068: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14068 2024-11-21T08:52:32.167208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:32.167394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652812552809800:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003b92/r3tmp/tmpozt8XZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14068, node 1 TClient is connected to server localhost:20292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:32.318647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:32.318659Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:32.318859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:32.318870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:32.318871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:32.318916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:32.522251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.523339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.523363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.523914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:32.523973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:32.523982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:32.524397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.524408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:32.524665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:32.524819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.525962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152571, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.525978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:32.526043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:32.526481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.526531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.526546Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:32.526562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:32.526575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:32.526592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:32.526981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:32.526997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:32.527001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:32.527016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:32.584388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:32.584415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:32.586282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:32.895018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.895056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.895817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:32.895862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.895905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.895921Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.896028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.896043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.896049Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:32.896073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.896078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.896079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:32.896657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152942, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.896668Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179152942, at schemeshard: 72057594046644480 2024-11-21T08:52:32.896684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:32.896995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.897031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.897042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:32.897052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:32.897063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:32.897071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:32.897136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.897144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.897146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:32.897162Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 7205 ... ing : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1732179157548), lockTxId = 281474976715680, lockNodeId = 7 2024-11-21T08:52:37.502500Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. AFTER: 0.1 2024-11-21T08:52:37.502507Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T08:52:37.502529Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:37.502537Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T08:52:37.502541Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T08:52:37.502710Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. Recv TEvReadResult from ShardID=72075186224037900, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= LockId: 281474976715680 DataShard: 72075186224037900 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 10, BrokenTxLocks= 2024-11-21T08:52:37.502721Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. Taken 1 locks 2024-11-21T08:52:37.502723Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:37.502727Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2024-11-21T08:52:37.502733Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:37.502736Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:37.502739Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. enter pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:37.502741Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. 
exit pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:37.502743Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. returned 0 rows; processed 0 rows 2024-11-21T08:52:37.502755Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. dropping batch for read #0 2024-11-21T08:52:37.502774Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:37.502780Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:37.502783Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1, CA Id [7:7439652831979116564:2448]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:37.502803Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:37.502804Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T08:52:37.502807Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. Finish input channelId: 1, from: [7:7439652831979116564:2448] 2024-11-21T08:52:37.502810Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:37.502811Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:37.502816Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:37.502818Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2024-11-21T08:52:37.502820Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. 
TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:37.502821Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:37.502824Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:37.502826Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:37.502831Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:37.502832Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:37.502832Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:37.502833Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:37.502834Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished 2024-11-21T08:52:37.502834Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:37.502835Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. Tasks execution finished 2024-11-21T08:52:37.502835Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116564:2448], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:37.502836Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439652831979116565:2449], TxId: 281474976715680, task: 2. Ctx: { SessionId : ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=. 
TraceId : 01jd6yrrc88zgahyb7vyp60bag. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:37.502846Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 2. pass away 2024-11-21T08:52:37.502854Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. pass away 2024-11-21T08:52:37.502856Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715680;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:37.502877Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715680;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:37.518645Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yrrg4142tg92kyg1ctd3m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWVlNWUwZTMtYzRhNDJiMjctMTM2ODI0ZDItYWNiN2Q1Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:37.518674Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yrrg06h1ye9mghwhvx1ax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDcwMmJhZmEtNTRjNDVhNDEtYWU4YTEyZWQtMzAwMzE0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpTx::ExplicitTcl >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::InvalidateOnError >> KqpSinkMvcc::SnapshotExpiration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:28.801222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:28.801245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.801251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:28.801256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:28.801269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:28.801273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:28.801281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.801373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:28.812451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:28.812470Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] 
recipient: [1:15:2062] 2024-11-21T08:52:28.814698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:28.814802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:28.814830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:28.817229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:28.817306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:28.817437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.817630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.818246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.818483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.818492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.818511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:28.818518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.818524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:28.818559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:28.819691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:28.838462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:28.838543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.838616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:28.838694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:28.838703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.839416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.839444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:28.839495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.839505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:28.839509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:28.839514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:28.839962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.839972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:28.839976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:28.840315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.840324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.840330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.840336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.840947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:28.841368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:28.841425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:28.841592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.841616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:28.841623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.841680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:28.841686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.841708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:28.841720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.842084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.842093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.842127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.842131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:28.842210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.842216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:28.842226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:28.842230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.842235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:28.842240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.842244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:28.842248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:28.842258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:28.842263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:28.842267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T08:52:38.152443Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } } } TxId: 1002 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:38.152478Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.152519Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:38.152559Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1002:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:38.152566Z node 38 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.152724Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T08:52:38.152734Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T08:52:38.152739Z node 38 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153012Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1002, response: Status: StatusAccepted TxId: 1002 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153033Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1002, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:38.153066Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153077Z node 38 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:38.153081Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 ProgressState no shards to create, do next state 2024-11-21T08:52:38.153084Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T08:52:38.153457Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153468Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153472Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T08:52:38.153899Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153911Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.153917Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:38.153923Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T08:52:38.153948Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:38.154293Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T08:52:38.154322Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T08:52:38.154382Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:38.154401Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 163208759400 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:38.154407Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:38.154456Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T08:52:38.154463Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:38.154486Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:38.154499Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:52:38.154929Z node 38 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:38.154936Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:38.154967Z node 38 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:38.154971Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [38:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T08:52:38.155022Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:38.155029Z node 38 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T08:52:38.155040Z node 38 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:52:38.155044Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 
2024-11-21T08:52:38.155049Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T08:52:38.155054Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:38.155058Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:52:38.155061Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:52:38.155071Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:52:38.155076Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 1 2024-11-21T08:52:38.155081Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2024-11-21T08:52:38.155151Z node 38 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:38.155161Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:38.155165Z node 38 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:38.155169Z node 38 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:52:38.155172Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:38.155184Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:52:38.155189Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [38:299:2291] 2024-11-21T08:52:38.155740Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:38.155763Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:52:38.155768Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [38:300:2292] TestWaitNotification: OK eventTxId 1002 2024-11-21T08:52:38.155856Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:38.155884Z node 38 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 35us result status StatusSuccess 2024-11-21T08:52:38.155972Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink |87.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |87.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut >> TCdcStreamTests::VirtualTimestamps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 6199, MsgBus: 27622 2024-11-21T08:52:36.401107Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652828196888643:2127];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:36.401835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1c/r3tmp/tmpVOBzxz/pdisk_1.dat 2024-11-21T08:52:36.484447Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6199, node 1 2024-11-21T08:52:36.502654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:36.502690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:36.503814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:36.506453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:36.506466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T08:52:36.506468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:36.506510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27622 TClient is connected to server localhost:27622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:36.560378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.572993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.595159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.616750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.677495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:36.735845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652828196890105:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.735893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.766119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.772860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.786656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.801173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.856482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.870508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:36.878869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652828196890620:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.878894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652828196890625:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.878899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:36.879532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:36.883566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652828196890627:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21821, MsgBus: 25239 2024-11-21T08:52:37.481667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652832318075902:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.481729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1c/r3tmp/tmpt95OQV/pdisk_1.dat 2024-11-21T08:52:37.494761Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21821, node 2 2024-11-21T08:52:37.507657Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.507675Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.507677Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.507707Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25239 TClient is connected to server localhost:25239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.582469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.582502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.583174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:37.583880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.589091Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:37.594977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.616154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:37.657430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.687971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.864838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652832318077439:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.864870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.872850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.889594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.944662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.952981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.962978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.977116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.993842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652832318077953:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.993873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.993940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652832318077958:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.994559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:37.996827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652832318077960:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpTx::CommitPrepared [GOOD] >> KqpTx::InvalidateOnError [GOOD] |87.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::InteractiveTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:47.675107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:47.675135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:47.675139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:47.675145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:47.675152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:47.675156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:47.675175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:47.675264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:47.686304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:47.686330Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:47.688935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:47.689042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:47.689073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:47.692096Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:47.692200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:47.692322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.692531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:47.693287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:47.693597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:47.693608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:47.693621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:47.693629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:47.693636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:47.693682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:47.694992Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:47.710320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:47.710398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.710456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:47.710491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:47.710496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.711396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.711417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:47.711455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:51:47.711462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:47.711465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:47.711469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:47.712766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.712786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:47.712794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:47.719862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.719906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.719916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:47.719931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:47.720878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:47.721912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:47.721997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:47.722275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:47.722320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:47.722331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:47.722430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:47.722445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:47.722493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:47.722511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:47.723164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:47.723177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:47.723239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:47.723245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:47.723345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:47.723354Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:47.723369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:47.723373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:47.723380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:47.723385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:47.723391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:47.723396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:47.723412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:47.723419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:47.723423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
6678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.931315Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.931318Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T08:52:37.931321Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T08:52:37.931325Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:52:37.931335Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T08:52:37.931410Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931416Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:37.931425Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:37.931471Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T08:52:37.931491Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:52:37.931510Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931512Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T08:52:37.931516Z node 146 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T08:52:37.931554Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931567Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 627065227369 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931574Z node 146 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931590Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T08:52:37.931596Z node 146 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T08:52:37.931598Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T08:52:37.931605Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:52:37.931610Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:37.931613Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T08:52:37.931618Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T08:52:37.931621Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T08:52:37.931623Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T08:52:37.931629Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:37.931633Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T08:52:37.931635Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:52:37.931638Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:52:37.932028Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:52:37.932038Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:52:37.932051Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932422Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:52:37.932433Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:52:37.932451Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932465Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T08:52:37.932511Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:37.932515Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:37.932541Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:52:37.932558Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:37.932563Z node 146 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T08:52:37.932567Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 2024-11-21T08:52:37.932667Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932673Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932676Z node 146 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T08:52:37.932679Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:52:37.932681Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:52:37.932741Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932747Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.932749Z node 146 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T08:52:37.932752Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:52:37.932754Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:37.932760Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T08:52:37.932763Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [146:122:2148] 2024-11-21T08:52:37.932789Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:37.932792Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:37.932798Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:52:37.933327Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.933409Z node 
146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T08:52:37.933420Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T08:52:37.933443Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T08:52:37.933494Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T08:52:37.933774Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:37.933781Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:37.933832Z node 146 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:37.933843Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:37.933846Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [146:867:2801] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 18796, MsgBus: 22762 2024-11-21T08:52:37.348977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652830805079836:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.349001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d05/r3tmp/tmpG1aKd6/pdisk_1.dat 2024-11-21T08:52:37.398917Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18796, node 1 2024-11-21T08:52:37.409556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.409573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.409576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.409619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22762 TClient is connected to server localhost:22762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:37.450165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.450196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.451202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.457160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.467599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.484476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.504303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.517687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.674902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830805081393:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.674935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.714862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.727516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.741274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.755197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.770588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.785970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.811715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830805081899:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.811748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.811864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830805081904:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.812817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:37.815452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652830805081906:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 2892, MsgBus: 8951 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d05/r3tmp/tmpgmwvrp/pdisk_1.dat 2024-11-21T08:52:38.300317Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:38.309748Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2892, node 2 2024-11-21T08:52:38.320894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:38.320914Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:38.320916Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:38.320967Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8951 TClient is connected to server localhost:8951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:38.383868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:38.383902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:38.384318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.385098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:52:38.389519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.402670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.445491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:38.461463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.650495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652836568201813:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.650523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.655675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.667817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.677788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.693569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.751333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.765565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.781505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652836568202328:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.781530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.781691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652836568202333:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.782524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:38.790752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652836568202335:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> ForceDropWithReboots::ForceDropDeleteInFly [GOOD] >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TCdcStreamTests::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 18243, MsgBus: 29729 2024-11-21T08:52:37.465384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652830576309515:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.465660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cbd/r3tmp/tmpumuq20/pdisk_1.dat 2024-11-21T08:52:37.514720Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18243, node 1 2024-11-21T08:52:37.535824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.535861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.535863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.535911Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29729 2024-11-21T08:52:37.567069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.567098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.570250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.612764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:37.614835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:37.633679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:37.710081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.780533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.845555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.906316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830576311052:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.906407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.912548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.920448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.935320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.949771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.956909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.969515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:37.980478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830576311551:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.980507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.980512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652830576311556:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.981331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:37.990358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652830576311558:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:38.274542Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjExMmVhNjgtMTljY2M5Yi04NjdhZDJkYS00YjUwOTMyNA==, ActorId: [1:7439652834871279142:2454], ActorState: ExecuteState, TraceId: 01jd6yrs7g6eswtwdpz303j8rr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T08:52:38.278588Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjExMmVhNjgtMTljY2M5Yi04NjdhZDJkYS00YjUwOTMyNA==, ActorId: [1:7439652834871279142:2454], ActorState: ReadyState, TraceId: 01jd6yrs86a5s0qns10qaqygym, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28123, MsgBus: 23119 2024-11-21T08:52:38.589873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652837898994872:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cbd/r3tmp/tmpKx5o1x/pdisk_1.dat 2024-11-21T08:52:38.596242Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 28123, node 2 2024-11-21T08:52:38.611194Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:38.612191Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:38.612202Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:38.612219Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:38.612257Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23119 TClient is connected to server localhost:23119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:38.688998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:38.689031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:38.690196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:38.692700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:38.700914Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:38.704970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.722794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.746742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.765427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.927197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652837898996261:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.927237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.931491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.949029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.959511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.971449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.986971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.003929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.017166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652842193964060:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.017202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.017331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652842193964065:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.018267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:39.026618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652842193964067:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:39.322102Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652842193964411:2466], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd6yrt6m3hnzbnc7s1dy8697. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmY3MDVjODAtMTkwN2IyZWItMWZkZDk3MC04NTEyYjQ1Yw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T08:52:39.323610Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652842193964412:2467], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmY3MDVjODAtMTkwN2IyZWItMWZkZDk3MC04NTEyYjQ1Yw==. TraceId : 01jd6yrt6m3hnzbnc7s1dy8697. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439652842193964408:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T08:52:39.323861Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY3MDVjODAtMTkwN2IyZWItMWZkZDk3MC04NTEyYjQ1Yw==, ActorId: [2:7439652842193964356:2454], ActorState: ExecuteState, TraceId: 01jd6yrt6m3hnzbnc7s1dy8697, Create QueryResponse for error on request, msg: 2024-11-21T08:52:39.337245Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY3MDVjODAtMTkwN2IyZWItMWZkZDk3MC04NTEyYjQ1Yw==, ActorId: [2:7439652842193964356:2454], ActorState: ExecuteState, TraceId: 01jd6yrt8ybgx74h77a4nq2mv1, Create QueryResponse for error on request, msg: >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> Yq_1::Basic_TaggedLiteral [GOOD] >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDropDeleteInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:16.673917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:16.673946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:16.673951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:16.673956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:16.673970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:16.673975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:16.673984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:16.674071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:16.686401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:16.686428Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 
72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:16.690528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:16.690689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:16.690725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:16.696324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:16.696441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:16.696575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:16.696816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:16.697557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:16.697815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:16.697822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:16.697838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:16.697843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:16.697847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:16.697883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:16.699049Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:16.718559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:16.718657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.718740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:16.718830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:16.718838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.721272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:16.721313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:16.721383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.721411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:16.721416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:16.721421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:16.721957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.721967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:16.721972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:16.722276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.722285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.722291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:16.722297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:16.722964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:16.723331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:16.723383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:16.723587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:16.723612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:16.723618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:16.723676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:16.723683Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:16.723714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:16.723726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:16.724076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:16.724086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:16.724132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:16.724136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:16.724244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:16.724251Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:16.724262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:16.724266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:16.724272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:16.724276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:16.724281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:16.724284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:16.724295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:16.724300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:16.724304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
aths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:39.508446Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:39.508458Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:39.508482Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:39.508488Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:39.508544Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:39.508823Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:52:39.508910Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:39.508921Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:52:39.508927Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T08:52:39.509000Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:39.509036Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 43us result status StatusPathDoesNotExist 2024-11-21T08:52:39.509073Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:39.509134Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:39.509152Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 21us result status StatusSuccess 
2024-11-21T08:52:39.509205Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:39.509387Z node 76 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:609:2058] recipient: [76:99:2134] Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:612:2058] recipient: [76:611:2559] Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:613:2058] recipient: [76:15:2062] Leader for TabletID 72057594046678944 is [76:614:2560] sender: [76:615:2058] recipient: [76:611:2559] 2024-11-21T08:52:39.514889Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:39.514907Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.514910Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:39.514913Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:39.514917Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:39.514919Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:39.514925Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.514967Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:39.515715Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:39.515958Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:39.515979Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:39.515996Z node 76 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:39.515999Z node 76 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:39.516015Z node 76 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T08:52:39.516100Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516112Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: DirA, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:52:39.516118Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516129Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516168Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516187Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516196Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516219Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516234Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516246Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516265Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516288Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516296Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516334Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516342Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516365Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516373Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516381Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516398Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516405Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516420Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516437Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516447Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.516451Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 
72057594046678944 2024-11-21T08:52:39.516455Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.517926Z node 76 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:39.517945Z node 76 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.518056Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:39.518067Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:39.518075Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:39.518311Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Leader for TabletID 72057594046678944 is [76:614:2560] sender: [76:669:2058] recipient: [76:15:2062] Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 >> KqpTx::RollbackTx2 >> KqpTx::InteractiveTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2024-11-21T08:52:31.863086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652807244732927:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:31.863112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:31.895804855 456467 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:31.895843762 456467 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:31.899253Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18128: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:18128 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003bab/r3tmp/tmpssMu8N/pdisk_1.dat 2024-11-21T08:52:32.193874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:32.193893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652811539700709:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 18128, node 1 TClient is connected to server localhost:8850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:32.270655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:32.270673Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:32.270793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:32.270802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:32.270803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:32.270841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:32.565217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.566443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.566463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.567174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:32.567251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:32.567262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:32.567732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.567742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:32.567848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:32.568110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.569190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152613, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.569208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:32.569310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:32.569754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.569814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.569834Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:32.569851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:32.569868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:32.569885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:32.570447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:32.570466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:32.570473Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:32.570487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:32.593776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:32.593810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:32.595247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:32.898909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:32.898944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:32.899522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:32.899565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.899601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.899618Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:32.899735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.899748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.899751Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:32.899773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.899775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.899777Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:32.900366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179152949, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:32.900379Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179152949, at schemeshard: 72057594046644480 2024-11-21T08:52:32.900397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:52:32.900788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:32.900828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:32.900843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:52:32.900860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:52:32.900873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:52:32.900884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T08:52:32.900899Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2024-11-21T08:52:32.900911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:32.900913Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:32.901004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:32.901019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:32.901022Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:52:32.901044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 720575940466 ... ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526368Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526374Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526381Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526385Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526390Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526395Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526400Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526405Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526419Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526422Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526431Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526433Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526443Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526445Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526455Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526458Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526468Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526472Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526478Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526484Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526488Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526492Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526499Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526505Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526511Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526515Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526522Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2024-11-21T08:52:39.526526Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526531Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526540Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526544Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526553Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526556Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526565Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526569Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526573Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526583Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526586Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526596Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526601Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526606Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526613Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526617Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526622Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526632Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526635Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526643Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526647Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526655Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526659Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526665Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526672Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526676Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526681Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526687Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526695Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526699Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526705Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526711Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526715Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526724Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526730Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526735Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526743Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526747Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526753Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2024-11-21T08:52:39.526762Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526764Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526773Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526777Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526781Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526787Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526791Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526797Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526805Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526808Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526817Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526820Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526829Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526832Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526841Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526844Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526854Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526856Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526865Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526869Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526875Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526879Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526886Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526893Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526897Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526902Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526910Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526914Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526921Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526928Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526931Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526936Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526943Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526951Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526955Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526960Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526966Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526974Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526979Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2024-11-21T08:52:39.526987Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526992Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.526998Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527002Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527009Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527015Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527021Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527026Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527032Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527039Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527043Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527048Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527054Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527062Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527067Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527074Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527078Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527082Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527088Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527097Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527101Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527106Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527113Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T08:52:39.527122Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25435, MsgBus: 6039 2024-11-21T08:52:37.683559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652832914310965:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.683633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b6d/r3tmp/tmpE4pIJN/pdisk_1.dat 2024-11-21T08:52:37.783354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:37.786262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.786280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.800873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25435, node 1 2024-11-21T08:52:37.813556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2024-11-21T08:52:37.813570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.813572Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.813606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6039 TClient is connected to server localhost:6039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.871800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.874504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:37.879992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.942377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.962330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.971775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.017595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652837209279663:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.017625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.048091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.055358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.067394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.074358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.081690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.096297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.112160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652837209280157:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.112197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.112246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652837209280162:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.113123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:38.115775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652837209280164:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:38.292699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.303755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.366610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24648, MsgBus: 11937 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b6d/r3tmp/tmpqLK9kB/pdisk_1.dat 2024-11-21T08:52:39.005100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:39.012658Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24648, node 2 2024-11-21T08:52:39.022488Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:39.022500Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:39.022503Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:39.022540Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11937 TClient is connected to server localhost:11937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:39.099063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:39.099084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:39.099938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:39.101889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:39.109588Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:39.124992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.163586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.193968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:39.206469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.373644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652842109776462:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.373679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.380618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.387828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.397810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.412029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.427343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.443918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.459777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652842109776963:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.459814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.459834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652842109776968:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.460642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:39.466921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652842109776970:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:39.701454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.718726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.733559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> KqpSinkLocks::InvalidateOnCommit >> KqpTx::DeferredEffects >> KqpSnapshotRead::TestSnapshotExpiration+withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::InteractiveTx [GOOD] Test command err: Trying to start YDB, gRPC: 19413, MsgBus: 6781 2024-11-21T08:52:38.545301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652836985556639:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:38.545395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aee/r3tmp/tmpLKVrOy/pdisk_1.dat 2024-11-21T08:52:38.595709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19413, node 1 2024-11-21T08:52:38.603304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:38.603316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:38.603317Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:38.603355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6781 2024-11-21T08:52:38.644738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:38.644761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:38.645810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:38.661458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.663922Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:52:38.669977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.685920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.704067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.716891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.872199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652836985558034:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.872243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.911249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.919949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.929555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.946545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.006809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.025526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.037466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652841280525848:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.037488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.037612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652841280525853:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.038483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:39.041860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:52:39.041966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652841280525855:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:39.344534Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTA0NTUwN2EtZTg0NTgxYmUtYmNmYjI4NzAtZThjNjc5OWI=, ActorId: [1:7439652841280526153:2454], ActorState: ReadyState, TraceId: 01jd6yrt9e2jkgwv269hpgcyzp, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29554, MsgBus: 5909 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aee/r3tmp/tmpmwMmV2/pdisk_1.dat 2024-11-21T08:52:39.690954Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:39.691265Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 29554, node 2 2024-11-21T08:52:39.703468Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:39.703484Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:39.703487Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:39.703526Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5909 TClient is connected to server localhost:5909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:39.779892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:39.779918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:39.780185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.780905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:39.781948Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:39.784270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:39.797957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.818741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.834551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.042549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652846023655527:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.042585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.047616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.054905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.062110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.117797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.125782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.140430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.155197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652846023656029:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.155232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.155286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652846023656034:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.155997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:40.159775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652846023656036:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> Yq_1::DescribeQuery [GOOD] >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink >> KqpTx::RollbackTx2 [GOOD] >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> KqpLocksTricky::TestNoLocksIssue-withSink |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |87.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut >> SubDomainWithReboots::DeleteWithStoragePools [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2024-11-21T08:52:33.785808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652815149033803:2081];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:33.786042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:33.814746806 462936 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:33.814794254 462936 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:33.817105Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26580: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26580 } ] 2024-11-21T08:52:34.088793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652819444001362:2276], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:34.088826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003b5e/r3tmp/tmpXraSMd/pdisk_1.dat 2024-11-21T08:52:34.142818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652819444001362:2276], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 26580, node 1 TClient is connected to server localhost:19751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:34.195647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:52:34.195664Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:34.195951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:34.195957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:34.195959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:34.196007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:34.421986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:34.423242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:34.423274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:34.423926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:34.423997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:34.424008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:34.424542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:34.424618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:34.424626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:34.425024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:34.425993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179154475, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:34.426010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:34.426100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:34.426589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:34.426642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:34.426659Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:34.426675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:34.426687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:34.426704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:34.427265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:34.427297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:34.427303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:34.427321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:34.551994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:34.552041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:34.553564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:34.817758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:34.817832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:34.818748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:34.818814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:34.818865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:34.818890Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:34.819150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:34.819185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:34.819191Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:34.819292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:34.819301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:34.819303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:34.819802Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2024-11-21T08:52:34.819816Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:34.819818Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:34.819894Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2024-11-21T08:52:34.819908Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T08:52:34.819909Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T08:52:34.820118Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2024-11-21T08:52:34.820132Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2024-11-21T08:52:34.820133Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2024-11-21T08:52:34.820154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2024-11-21T08:52:34.820156Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T08:52:34.820158Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T08:52:34.820277Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2024-11-21T08:52:34.820286Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T08:52:34.820287Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T08:52:34.820334Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2024-11-21T08:52:34.820343Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T08:52:34.820344Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T08:52:34.820365Z node 1 :YQ_CONTROL_PLANE_STORA ... ution timeout 299.973049s 2024-11-21T08:52:40.720072Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2024-11-21T08:52:40.720097Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7439652844549207763 RawX2: 4503616807242493 } } DstEndpoint { ActorId { RawX1: 7439652844549207764 RawX2: 4503616807242494 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7439652844549207764 RawX2: 4503616807242494 } } DstEndpoint { ActorId { RawX1: 7439652844549207759 RawX2: 4503616807242036 } } InMemory: true } 2024-11-21T08:52:40.720098Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7439652844549207763:2813] 2024-11-21T08:52:40.720110Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:40.720319Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2024-11-21T08:52:40.720328Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. Taken 0 locks 2024-11-21T08:52:40.720329Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:40.720332Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2024-11-21T08:52:40.720335Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. 
SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.720337Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:40.720339Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:40.720345Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8386500 2024-11-21T08:52:40.720349Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. returned 1 rows; processed 1 rows 2024-11-21T08:52:40.720362Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. dropping batch for read #0 2024-11-21T08:52:40.720363Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:40.720365Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:40.720367Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1, CA Id [4:7439652844549207763:2813]. returned async data processed rows 1 left freeSpace 8386500 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:40.720409Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.720411Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.720417Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:40.720426Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T08:52:40.720463Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. Finish input channelId: 1, from: [4:7439652844549207763:2813] 2024-11-21T08:52:40.720493Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:40.720547Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2024-11-21T08:52:40.720567Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.720572Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:40.720575Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.720578Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.720581Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:40.720581Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. Tasks execution finished 2024-11-21T08:52:40.720583Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:40.720584Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207763:2813], TxId: 281474976715771, task: 1. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:40.720602Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T08:52:40.720604Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. 
Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.720606Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:40.720608Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. Tasks execution finished 2024-11-21T08:52:40.720610Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652844549207764:2814], TxId: 281474976715771, task: 2. Ctx: { TraceId : 01jd6yrvkmez4151p0jmhhn8jp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZGZkYTlmMDQtN2I4ZDM0ODgtYjU0ZjYxYjUtZmU5OGJkYjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:40.720612Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 1. pass away 2024-11-21T08:52:40.720630Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715771, task: 2. pass away 2024-11-21T08:52:40.720639Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715771;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:40.720659Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715771;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> TSchemeShardTest::RmDirTwice >> TCdcStreamTests::ReplicationAttribute [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 16331, MsgBus: 4050 2024-11-21T08:52:40.437413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652846426148886:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:40.437523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aad/r3tmp/tmp2UuaSe/pdisk_1.dat 2024-11-21T08:52:40.493068Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16331, node 1 2024-11-21T08:52:40.507602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:40.507619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:40.507621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:40.507661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4050 2024-11-21T08:52:40.537268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:40.537298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:40.538371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:40.569135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.574705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:40.590840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.605411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.616527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.800860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652846426150295:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.800892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.839514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.846369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.853166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.860063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.916139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.926237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.948381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652846426150805:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.948415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.948518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652846426150810:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.949523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:40.958450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652846426150812:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:41.248584Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2JkN2MyMDgtZGM1NjUzYWQtYTM2ZTE1N2ItODdjOWVkNw==, ActorId: [1:7439652850721118407:2454], ActorState: ReadyState, TraceId: 01jd6yrw4y749j5bp6mrn51hxx, Create QueryResponse for error on request, msg: >> TCdcStreamTests::Negative >> Yq_1::DeleteQuery [GOOD] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode |87.9%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DeleteWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:28.910019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:28.910036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.910039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:28.910042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:28.910056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:28.910059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:28.910064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.910126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:28.917975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:28.917991Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:28.919612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:28.919698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:28.919720Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:28.921615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:28.921669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:28.921767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.921916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.922600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.922820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.922828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.922844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:28.922849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.922853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:28.922888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:28.924011Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:28.938724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:28.938806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.938869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:28.938933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:28.938939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.939594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.939618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T08:52:28.939664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.939672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:28.939675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:28.939679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:28.940008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.940016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:28.940020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:28.940283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.940290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.940295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.940300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.940801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:28.941170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:28.941213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:28.941382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.941419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:28.941425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.941473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:28.941478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.941499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T08:52:28.941510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.941812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.941819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.941844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.941848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:28.941914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.941920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:28.941928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:28.941932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.941936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:28.941940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.941944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:28.941947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:28.941955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:28.941960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:28.941963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
024-11-21T08:52:41.568621Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:41.568628Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:52:41.568636Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:41.568640Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:41.568646Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:52:41.568650Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:41.568656Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:41.568663Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:41.568695Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:41.568700Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T08:52:41.568704Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T08:52:41.568707Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:52:41.568821Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.568832Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.568837Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:41.568841Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T08:52:41.568845Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:41.568946Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.568956Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.568960Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:41.568965Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:41.568969Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:41.568977Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:52:41.569579Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:41.569593Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:41.569706Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.569723Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:41.569753Z node 50 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:52:41.569993Z node 50 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:52:41.570086Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:41.570149Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409547 2024-11-21T08:52:41.570379Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:41.570414Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:41.570487Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:41.570493Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:41.570540Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:41.570652Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:41.570659Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:41.570671Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:41.570969Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:41.570982Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:52:41.571375Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:41.571389Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:41.571432Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:41.571444Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:52:41.571495Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:41.571502Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:41.571561Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:41.571584Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:41.571589Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [50:462:2429] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:52:41.571640Z node 50 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:41.571652Z node 50 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T08:52:41.571715Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:41.571749Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 45us result status StatusPathDoesNotExist 2024-11-21T08:52:41.571788Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:41.571833Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:41.571847Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 16us result status StatusSuccess 2024-11-21T08:52:41.571905Z node 50 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2024-11-21T08:52:34.422528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652820739945196:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:34.422600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 08:52:34.453183058 465174 dns_resolver.cc:162] no server name supplied in dns URI E1121 08:52:34.453223634 465174 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T08:52:34.454311Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11826: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11826 } ] 2024-11-21T08:52:34.760092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:34.760157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652820739945508:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003b48/r3tmp/tmpI0G1xU/pdisk_1.dat 2024-11-21T08:52:34.790018Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11826, node 1 2024-11-21T08:52:34.802137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:34.802150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:34.802151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:34.802209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:35.125455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.126222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:35.126240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.127481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:35.127560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:35.127571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:52:35.128075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:35.128088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:35.128153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:35.128515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.129560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179155175, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:35.129575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:35.129655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:35.130097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:35.130156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:35.130173Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:35.130192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:35.130206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:35.130220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:35.130672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:52:35.130694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:52:35.130699Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:52:35.130729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:52:35.192653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:35.192702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:35.200557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:35.456198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.456284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:35.457443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T08:52:35.457518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:35.457592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:35.457608Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:35.458081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:35.458094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:35.458099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:52:35.458164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:52:35.458168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:52:35.458169Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:52:35.459093Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2024-11-21T08:52:35.459097Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:35.459100Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T08:52:35.459605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:52:35.459836Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2024-11-21T08:52:35.459839Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T08:52:35.459841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T08:52:35.460043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2024-11-21T08:52:35.460047Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:35.460049Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T08:52:35.460102Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2024-11-21T08:52:35.460104Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2024-11-21T08:52:35.460105Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2024-11-21T08:52:35.460338Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2024-11-21T08:52:35.460342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T08:52:35.460344Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T08:52:35.460404Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2024-11-21T08:52:35.460420Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:35.460421Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T08:52:35.460526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2024-11-21T08:52:35.460539Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2024-11-21T08:52:35.460541Z node 1 :YQ_CONTROL_PLANE_S ... 75855Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. Send EvRead to shardId: 72075186224037892, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T08:52:40.275870Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. AFTER: 0.1 2024-11-21T08:52:40.275871Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T08:52:40.275890Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.275892Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T08:52:40.275895Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T08:52:40.276102Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2024-11-21T08:52:40.276111Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. Taken 0 locks 2024-11-21T08:52:40.276114Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. new data for read #0 seqno = 1 finished = 1 2024-11-21T08:52:40.276118Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2024-11-21T08:52:40.276121Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276123Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T08:52:40.276126Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:40.276128Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T08:52:40.276129Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. returned 0 rows; processed 0 rows 2024-11-21T08:52:40.276140Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. dropping batch for read #0 2024-11-21T08:52:40.276141Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. effective maxinflight 1024 sorted 0 2024-11-21T08:52:40.276143Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T08:52:40.276145Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1, CA Id [4:7439652843577497699:2853]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T08:52:40.276161Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276163Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.276168Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T08:52:40.276176Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2024-11-21T08:52:40.276181Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Finish input channelId: 1, from: [4:7439652843577497699:2853] 2024-11-21T08:52:40.276186Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276194Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276194Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2024-11-21T08:52:40.276201Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.276219Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276220Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:40.276222Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T08:52:40.276227Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.276231Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. Tasks execution finished 2024-11-21T08:52:40.276233Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T08:52:40.276234Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497699:2853], TxId: 281474976715783, task: 1. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:40.276235Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T08:52:40.276237Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T08:52:40.276238Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. Tasks execution finished 2024-11-21T08:52:40.276240Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439652843577497700:2854], TxId: 281474976715783, task: 2. Ctx: { TraceId : 01jd6yrv5reb91kbgpc744k5xf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MjkxMGRiZDYtY2JmOTRmYmQtMmM0ZjEyMzItMjJiOWQyNjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T08:52:40.276255Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 2. pass away 2024-11-21T08:52:40.276261Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715783, task: 1. pass away 2024-11-21T08:52:40.276276Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715783;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:40.276289Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715783;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T08:52:40.278533Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqueoiue9s19bo6rg2p" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:561: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2024-11-21T08:52:40.856146Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:2350: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:2350 >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> KqpTx::EmptyTxOnCommit [GOOD] >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> TSchemeShardTest::AlterTableKeyColumns >> ForceDropWithReboots::ForceDelete [GOOD] >> TSchemeShardTest::CreateTable |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TSchemeShardTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] Test command err: Trying to start YDB, gRPC: 14826, MsgBus: 31100 2024-11-21T08:52:40.955982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652843952877854:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:40.956000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a77/r3tmp/tmpVRYVIW/pdisk_1.dat 2024-11-21T08:52:41.013217Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14826, node 1 2024-11-21T08:52:41.023069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:41.023085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:41.023087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:41.023139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31100 2024-11-21T08:52:41.056914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:41.056953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:41.057991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31100 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:41.091046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.101166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.116519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.183545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.208333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:41.220085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.330712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848247846685:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.330778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.338237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.345188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.359361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.372751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.386762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.401234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.423919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848247847199:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.423971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.424039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848247847204:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.425012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:41.428683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:52:41.428769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652848247847206:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22998, MsgBus: 12077 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a77/r3tmp/tmpF4v3vU/pdisk_1.dat 2024-11-21T08:52:41.976933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:41.985756Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22998, node 2 2024-11-21T08:52:42.012605Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:42.012624Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:42.012626Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:42.012672Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12077 TClient is connected to server localhost:12077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:52:42.077262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:42.077292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:42.077727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.078744Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:42.079296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:42.087342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.099072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.121149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:42.134350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.306902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652853697722417:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.306938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.313806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.322038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.331636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.345150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.359673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.374112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.389935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652853697722920:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.389962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652853697722925:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.389968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.390910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:42.399956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652853697722927:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TSchemeShardTest::SchemeErrors >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> DSProxyCounters::MultiPutGeneratedSubrequestBytes [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TCdcStreamTests::DropStream [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TCdcStreamTests::AlterStreamImplShouldFail >> TDSProxyGetTest::TestBlock42GetSpecific2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TDSProxyPatchTest::SecuredOk_ErasureNone >> TSchemeShardTest::DropBlockStoreVolume2 |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDelete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:27.089827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:27.089844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:27.089848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:27.089851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T08:52:27.089863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:27.089866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:27.089872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:27.089935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:27.097271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:27.097286Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:27.098932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:27.099011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:27.099030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:27.100957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:27.101021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:27.101098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.101244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:27.101744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:27.101926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:27.101932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:27.101949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:27.101956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:27.101963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:27.102007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:27.103030Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:27.114201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:27.114259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.114307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:27.114369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:27.114374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.114834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.114850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:27.114882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.114889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:27.114892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:27.114895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:27.115236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.115249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:27.115253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:27.115549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.115555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.115559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.115563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.115953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:27.116273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:27.116317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:27.116446Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.116466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:27.116471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.116506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:27.116510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.116529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:27.116537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:27.116859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:27.116865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:27.116890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:27.116893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:27.116946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.116951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:27.116958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:27.116961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.116964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:27.116968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.116971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:27.116973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:27.116980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:27.116985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:27.116988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
erId: 72057594046678944, LocalPathId: 3] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 1002 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:43.013788Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:43.013831Z node 63 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 130 2024-11-21T08:52:43.013870Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:43.013884Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:43.014096Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:52:43.014422Z node 63 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:43.014430Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:43.014463Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:43.014486Z node 63 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.014491Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T08:52:43.014496Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T08:52:43.014564Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.014574Z node 63 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1002:0 ProgressState 2024-11-21T08:52:43.014585Z node 63 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:52:43.014589Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:43.014594Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T08:52:43.014598Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:43.014602Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:52:43.014606Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:52:43.014635Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:43.014640Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T08:52:43.014644Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T08:52:43.014647Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, 
LocalPathId: 3], 18446744073709551615 2024-11-21T08:52:43.014773Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:43.014784Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:43.014789Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:43.014793Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T08:52:43.014797Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:43.014906Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:43.014915Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:43.014919Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:43.014923Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:43.014926Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:43.014934Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:52:43.014939Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [63:424:2391] 2024-11-21T08:52:43.015302Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:43.015314Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:43.015617Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:52:43.020063Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.020131Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:43.020271Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:52:43.020326Z 
node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 Forgetting tablet 72075186233409547 2024-11-21T08:52:43.020783Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:43.020832Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:43.020934Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:43.020941Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:43.020966Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:43.021018Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:43.021023Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:43.021036Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:43.021352Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:43.021413Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:52:43.021420Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [63:431:2398] 2024-11-21T08:52:43.021525Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:43.021535Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:52:43.021972Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:43.021985Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:43.022028Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:43.022039Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:52:43.022119Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:43.022128Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T08:52:43.022202Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:43.022243Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 49us result status StatusPathDoesNotExist 2024-11-21T08:52:43.022280Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 >> TDSProxyPatchTest::SecuredOk_ErasureNone [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> SubDomainWithReboots::RootWithStoragePoolsAndTable [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> KqpSinkLocks::TInvalidate [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TBlobStorageProxySequenceTest::TestGivenMirror3DCGetWithFirstSlowDisk >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::AlterTableSizeToSplit >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> SubDomainWithReboots::DeclareAndDefine [GOOD] >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkLocks::TInvalidateOlap >> KqpSinkTx::OlapExplicitTcl [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> KqpSinkTx::OlapInteractive >> TBlobStorageProxySequenceTest::TestGivenMirror3DCGetWithFirstSlowDisk [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TSchemeShardTest::AlterTableSplitSchema >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 >> 
TSchemeShardTest::CreateWithIntermediateDirs >> KqpSinkMvcc::OlapNamedStatement >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::SimultaneousDropForceDrop >> TCdcStreamTests::MoveTableShouldFail >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::SplitAlterCopy >> SnapshotTesting::Compaction [GOOD] >> SpaceCheckForDiskReassign::Basic >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize |87.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 11122, MsgBus: 3740 2024-11-21T08:52:37.767410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652831861196456:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.810097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c4d/r3tmp/tmpggFKuW/pdisk_1.dat 2024-11-21T08:52:37.851962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11122, node 1 2024-11-21T08:52:37.877265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.877280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.877281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.877314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3740 2024-11-21T08:52:37.917147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.917187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.918294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3740 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.936919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:37.944650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.007664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.031619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.041706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.120641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652836156165155:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.120689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.163577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.171809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.179691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.186547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.193303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.201311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.219763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652836156165658:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.219790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.219811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652836156165663:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.220727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:38.227548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652836156165665:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:38.540729Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ0ZjQ2ZmEtMzg2ODdkZTYtZjdmZmJjZDEtNDgzZWVjMzU=, ActorId: [1:7439652836156165960:2454], ActorState: ExecuteState, TraceId: 01jd6yrsg6d12mrt6txxrfbsyp, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 25330, MsgBus: 4081 2024-11-21T08:52:39.254764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:39.254814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:39.254866Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c4d/r3tmp/tmpjfu6Ho/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25330, node 2 TClient is connected to server localhost:4081 TClient is connected to server localhost:4081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:39.406997Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:39.407017Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:39.407022Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:39.407148Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:39.449486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:39.449531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:39.449905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.450734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:39.569573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.778097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.075722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:40.341377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.654822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1724:3344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.654859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.657158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.851429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.103361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.346917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.614707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.878701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.194651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2294:3787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.194697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.194763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2299:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:42.195956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:42.370360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2301:3794], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:42.576586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:42.823087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.120087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePoolsAndTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:25.571557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:25.571580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:25.571585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:25.571590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:25.571604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:25.571608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:25.571616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:25.571694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:25.582811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:25.582830Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:25.585150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:25.585255Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:25.585291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:25.588201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:25.588323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:25.588451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:25.588650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:25.589370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:25.589659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:25.589669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:25.589692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:25.589699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:25.589705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:25.589743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:25.591322Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:25.605193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:25.605269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.605328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:25.605420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:25.605430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:25.606206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:25.606218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:25.606222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:25.606577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:25.606890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.606903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:25.606910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:25.607496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:25.607897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:25.607941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:25.608113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:25.608133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:25.608138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:25.608179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:25.608183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:25.608224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:25.608236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:25.608746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:25.608761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:25.608795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:25.608800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:25.608871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:25.608879Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:25.608889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:25.608894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:25.608899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:25.608904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:25.608909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:25.608913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:25.608925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:25.608931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:25.608934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:43.985210Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:43.985213Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:43.985215Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:52:43.985218Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:43.985227Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:52:43.985863Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 306 } } 2024-11-21T08:52:43.985876Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:43.985892Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 306 } } 2024-11-21T08:52:43.985905Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 306 } } 2024-11-21T08:52:43.986100Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:43.986107Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T08:52:43.986121Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:52:43.986127Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:43.986134Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 
1003 Step: 0 Generation: 2 2024-11-21T08:52:43.986147Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.986151Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.986156Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:52:43.986162Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T08:52:43.986991Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:43.987034Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:43.987919Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.987955Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.988033Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.988044Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:52:43.988063Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:43.988068Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:43.988075Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:52:43.988085Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:43.988093Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:43.988098Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:43.988126Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:52:43.988805Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:43.988815Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:43.988881Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:43.988900Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:43.988906Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [72:426:2401] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:43.988980Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:43.989030Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 64us result status StatusSuccess 2024-11-21T08:52:43.989147Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:43.989214Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:43.989249Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table_0" took 37us result status StatusSuccess 2024-11-21T08:52:43.989331Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 
TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedAllOk >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::TopicReserveSize [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DeclareAndDefine [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:22.389664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:22.389692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:22.389698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2024-11-21T08:52:22.389704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:22.389719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:22.389724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:22.389733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:22.389833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:22.402297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:22.402323Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:22.405039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:22.405172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:22.405211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:22.408378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:22.408463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:22.408584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:22.408814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:22.409550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:22.409771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:22.409777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:22.409793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:22.409797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:22.409802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:22.409838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:22.410785Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:22.422042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2024-11-21T08:52:22.422124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.422198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:22.422268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:22.422276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.422854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:22.422878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:22.422924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.422934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:22.422938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:22.422943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:22.423269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.423277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:22.423281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:22.423529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.423537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.423542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:22.423548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:22.424116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:22.424467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:22.424512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:22.424692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:22.424714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:22.424720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:22.424769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:22.424775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:22.424802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:22.424814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:22.425140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:22.425149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:22.425189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:22.425194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:22.425276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:22.425283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:22.425294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:22.425298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:22.425303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:22.425308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:22.425313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:22.425317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:22.425326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:22.425332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:22.425336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T08:52:44.571572Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2024-11-21T08:52:44.571577Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1003:0 HandleReply TEvConfigureStatus operationId:1003:0 at schemeshard:72057594046678944 2024-11-21T08:52:44.571583Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#1003:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2024-11-21T08:52:44.571592Z node 89 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 3 -> 128 2024-11-21T08:52:44.571990Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:44.572019Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:44.572024Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:44.572029Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1003:0, at tablet 72057594046678944 2024-11-21T08:52:44.572035Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T08:52:44.572067Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:44.572453Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T08:52:44.572476Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:52:44.572538Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:44.572556Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 382252091497 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:44.572563Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T08:52:44.572627Z node 89 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:52:44.572637Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T08:52:44.572666Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] 
was 5 2024-11-21T08:52:44.572680Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:52:44.573055Z node 89 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:44.573063Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:44.573103Z node 89 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:44.573108Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [89:200:2203], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:52:44.573184Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:44.573194Z node 89 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:52:44.573206Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:44.573211Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:44.573216Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:52:44.573220Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:44.573225Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:44.573229Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:44.573262Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:52:44.573266Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 1, subscribers: 0 2024-11-21T08:52:44.573270Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2024-11-21T08:52:44.573359Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:44.573368Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:44.573372Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:44.573376Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:52:44.573381Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:44.573393Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:52:44.574077Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:52:44.574139Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:52:44.574144Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:52:44.574190Z node 89 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:52:44.574204Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:44.574206Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [89:497:2452] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:52:44.574258Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:44.574285Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 34us result status StatusSuccess 2024-11-21T08:52:44.574349Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:44.574400Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:44.574410Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 12us result status StatusSuccess 2024-11-21T08:52:44.574441Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: 
"DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> Viewer::TenantInfo5kkTablets [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |87.9%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |87.9%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedError >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 19894, MsgBus: 3957 2024-11-21T08:52:40.159684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652844838819693:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:40.159704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002acd/r3tmp/tmpBnpjHj/pdisk_1.dat 2024-11-21T08:52:40.210299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19894, node 1 2024-11-21T08:52:40.222236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:40.222251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:40.222254Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:40.222296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3957 TClient is connected to server localhost:3957 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:52:40.260046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:40.260085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:40.261140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:40.273245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.281035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:40.346409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.369475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.381055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:40.466110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652844838821226:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.466134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.502418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.509324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.517632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.531206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.538497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.545762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:40.554856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652844838821732:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.554901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.554907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652844838821737:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:40.555890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:40.558603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652844838821739:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:52:42.788965Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2024-11-21T08:52:42.789613Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652853428756921:2453], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [1:7439652844838822062:2453]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 3]`. ShardID=72075186224037891, Sink=[1:7439652853428756921:2453].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:42.789768Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652853428756902:2453], SessionActorId: [1:7439652844838822062:2453], Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439652844838822062:2453]. isRollback=0 2024-11-21T08:52:42.789797Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI2OThlNzItMmU2MGRiZDItZDI3MzU5YWItMjZkNDM0Yg==, ActorId: [1:7439652844838822062:2453], ActorState: ExecuteState, TraceId: 01jd6yrxks36xvhhn2jfvxcf0a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439652853428756903:2453] from: [1:7439652853428756902:2453] 2024-11-21T08:52:42.789864Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652853428756903:2453] TxId: 281474976710673. Ctx: { TraceId: 01jd6yrxks36xvhhn2jfvxcf0a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI2OThlNzItMmU2MGRiZDItZDI3MzU5YWItMjZkNDM0Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:42.790500Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2024-11-21T08:52:42.790530Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1732179162833 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" } 2024-11-21T08:52:42.791194Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI2OThlNzItMmU2MGRiZDItZDI3MzU5YWItMjZkNDM0Yg==, ActorId: [1:7439652844838822062:2453], ActorState: ExecuteState, TraceId: 01jd6yrxks36xvhhn2jfvxcf0a, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23877, MsgBus: 31462 2024-11-21T08:52:43.069704Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652857769940929:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:43.070034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002acd/r3tmp/tmpF31rbn/pdisk_1.dat 2024-11-21T08:52:43.081780Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23877, node 2 2024-11-21T08:52:43.090243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:43.090262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:43.090264Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:43.090314Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31462 TClient is connected to server localhost:31462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:52:43.170167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:43.170202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:43.171280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:43.171973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.173687Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:43.178210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.187512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.207069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.217312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.368979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652857769942467:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.369010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.374641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.383560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.394639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.408739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.414974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.430059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.445176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652857769942980:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.445206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652857769942985:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.445209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.445831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:43.449589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652857769942987:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:45.735154Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjYzYjk4NmEtNmRhYTAwNGEtOTYxNDM0LTlhYmZjN2Rl, ActorId: [2:7439652857769943272:2454], ActorState: ExecuteState, TraceId: 01jd6ys0ga58jh0rxyzz7xeagb, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken |87.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> TBlobStorageProxySequenceTest::TestGivenBlock42GetThenVGetResponseParts2523Nodata4ThenGetOk >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> TDSProxyGetTest::TestBlock42GetIntervalsAllOk >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits |87.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> TBlobStorageProxySequenceTest::TestGivenStripe42WhenGet2PartsOfBlobThenGetOk >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> TBlobStorageProxySequenceTest::TestGivenBlock42GetThenVGetResponseParts2523Nodata4ThenGetOk [GOOD] >> TDSProxyGetTest::TestBlock42WipedErrorWithTwoBlobs >> TDSProxyGetTest::TestBlock42GetIntervalsAllOk [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureNone >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly [GOOD] >> TBlobStorageProxySequenceTest::TestGivenStripe42WhenGet2PartsOfBlobThenGetOk [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureNone ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:42.159913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:42.159941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:42.159945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:42.159949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:42.159963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:42.159966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2024-11-21T08:52:42.159972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:42.160040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:42.170070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:42.170088Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:42.175838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:42.176715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:42.176752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:42.178334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:42.178592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:42.178692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.178790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:42.179901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.180228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:42.180244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.180287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:42.180295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:42.180300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:42.180315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.182416Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:42.196405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:42.196484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.196536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:42.196572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:42.196577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:52:42.197249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.197267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:42.197301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.197309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:42.197311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:42.197315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:42.197675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.197687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:42.197691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:42.197992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.197999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.198003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.198018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.198435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:42.198762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:42.198801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:42.198932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.198950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:42.198963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.199008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:42.199013Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.199035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:42.199046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:42.199480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:42.199489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:42.199527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.199532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:42.199609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.199616Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:42.199630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:42.199634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.199639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:42.199645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.199648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:42.199651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:42.199659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:42.199662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:42.199665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:42.199924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:42.199936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:42.199941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:42.199945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:42.199950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:42.199962Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.079769Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2024-11-21T08:52:46.079776Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2024-11-21T08:52:46.079835Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2024-11-21T08:52:46.079847Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T08:52:46.079854Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T08:52:46.079859Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.079862Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2024-11-21T08:52:46.079865Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:52:46.082666Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.082721Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.082791Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.082821Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T08:52:46.090172Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T08:52:46.090185Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T08:52:46.090259Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:52:46.090265Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T08:52:46.090270Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:52:46.123707Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409552, partId: 0 2024-11-21T08:52:46.123774Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 
72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T08:52:46.123785Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T08:52:46.123798Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.123801Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:52:46.123854Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T08:52:46.123885Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T08:52:46.124524Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.124574Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:46.124578Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:46.124642Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:46.124645Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:202:2205], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T08:52:46.124658Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.124664Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:52:46.124674Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T08:52:46.124677Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:52:46.124682Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T08:52:46.124685Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:52:46.124691Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:52:46.124694Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:52:46.124731Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2024-11-21T08:52:46.124735Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2024-11-21T08:52:46.124738Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:52:46.124941Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 
72057594046678944, cookie: 104 2024-11-21T08:52:46.124951Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:52:46.124960Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:52:46.124963Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:52:46.124967Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T08:52:46.124977Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T08:52:46.124980Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [13:433:2389] 2024-11-21T08:52:46.125743Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:52:46.125764Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:52:46.125769Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1551:3355] TestWaitNotification: OK eventTxId 104 2024-11-21T08:52:46.129035Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:46.129086Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 63us result status StatusSuccess 2024-11-21T08:52:46.129222Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } 
Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] Test command err: BASE_PERF = 0.779757379 Build = 3.245385974 Merge = 5.456454685 Destroy = 3.222304906 Build = 0.148094505 Merge = 0.6265865951 Destroy = 0.004103330812 Data has built Merge = 0.05375116091 Data has merged 2024-11-21T08:52:27.759294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1519:2384], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:27.759403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759431Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:680:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:27.759487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759526Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759555Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1517:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:27.759605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1521:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:27.759654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759664Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:27.759710Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:27.760092Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1523:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:27.760175Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:27.760191Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:27.838939Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:27.930874Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:27.935130Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:27.979312Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20002, node 1 TClient is connected to server localhost:28777 2024-11-21T08:52:28.007276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:28.007293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:28.007296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:28.007376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:28.725965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:28.726001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:28.728190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:28.728244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:28.728380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:28.728389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:28.728816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:28.728842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:28.728921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:28.728931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:28.740929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:28.741065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:52:28.741126Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T08:52:28.741156Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T08:52:28.741242Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:28.741368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:28.741430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:28.741465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:28.741486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Request timer = 16.46398768 BASE_PERF = 0.779757379 2024-11-21T08:52:45.803951Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439652864887119726:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:45.807692Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:45.813773Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10267, node 6 2024-11-21T08:52:45.825187Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:45.825199Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:45.825200Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:45.825237Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:45.906741Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:45.906775Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:45.907237Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.907791Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:52:45.910948Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T08:52:46.095410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652869182087503:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:46.095429Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652869182087518:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:46.095435Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:46.096137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:46.097692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652869182087521:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:46.181678Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureNone [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureNone [GOOD] >> TDSProxyPatchTest::MovedError_Erasure4Plus2Block >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureMirror3dc >> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> TDSProxyPatchTest::MovedError_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_1_1_1_VdiskErrors >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors >> ForceDropWithReboots::ForceDeleteSplitInFly [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors >> TSchemeShardTest::CreateTableWithConfig >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated |87.9%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:30.177423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:30.177440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:30.177443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:30.177447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:30.177460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:30.177462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:30.177468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:30.177524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:30.184977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:30.184991Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:30.186433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:30.186497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:30.186513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:30.188166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:30.188229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:30.188339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.188524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T08:52:30.189013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:30.189195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:30.189201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:30.189215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:30.189220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:30.189225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:30.189249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:30.190103Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:30.201447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:30.201513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.201564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:30.201618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:30.201623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:30.202144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:30.202154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:30.202157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:30.202382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:30.202570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.202579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.202583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.202935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:30.203177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:30.203209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:30.203332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.203349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:30.203354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.203386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:30.203390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.203412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:30.203424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:30.203689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:30.203695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:30.203722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:52:30.203726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:30.203780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.203785Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:30.203792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:30.203795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.203799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:30.203802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.203805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:30.203807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:30.203813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:30.203817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:30.203819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:46.659581Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:46.659721Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:46.660023Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:46.660055Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:52:46.660061Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [65:296:2288] 2024-11-21T08:52:46.660762Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.660793Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:52:46.660851Z node 65 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:46.660858Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:46.660892Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:46.660919Z node 65 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:46.660924Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [65:197:2200], at schemeshard: 72057594046678944, txId: 1003, path id: 2 2024-11-21T08:52:46.660929Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [65:197:2200], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:52:46.660977Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.660985Z node 65 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:52:46.660997Z node 65 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:52:46.661002Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:46.661009Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:52:46.661017Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:52:46.661022Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:52:46.661026Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:52:46.661064Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:46.661071Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T08:52:46.661074Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2024-11-21T08:52:46.661077Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:52:46.661238Z node 65 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.661251Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.661256Z node 65 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:46.661261Z node 65 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2024-11-21T08:52:46.661265Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:46.661377Z node 65 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.661387Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.661392Z node 65 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:52:46.661396Z node 65 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:46.661419Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:46.661429Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:52:46.661433Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [65:295:2287] 2024-11-21T08:52:46.662138Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:46.662151Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:46.662454Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 Leader for TabletID 72057594037968897 is [65:209:2209] sender: [65:331:2058] recipient: [65:15:2062] 2024-11-21T08:52:46.662599Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:52:46.662635Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:52:46.662640Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [65:296:2288] 2024-11-21T08:52:46.662701Z node 65 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:52:46.662744Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:46.662825Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:46.662892Z node 65 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T08:52:46.662922Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:46.662945Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:46.662977Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:46.662981Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:46.663014Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:46.663070Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:46.663076Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:46.663088Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:46.663622Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:46.664153Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:46.664478Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:46.664503Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:52:46.664573Z node 65 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:46.664585Z node 65 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T08:52:46.664669Z node 65 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:46.664713Z node 65 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 52us result status StatusPathDoesNotExist 2024-11-21T08:52:46.664760Z node 65 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:42.521082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:42.521112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:42.521117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:42.521123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:42.521141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:42.521147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:42.521158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:42.521238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:42.533592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:42.533613Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:42.535969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:42.536549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:42.536583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:42.537715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:42.537864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:42.537935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.538002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:42.538835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.539089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:42.539097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.539133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:42.539139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:42.539144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:42.539156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.540547Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:42.556890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" 
Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:42.556980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.557045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:42.557085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:42.557090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.558075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.558109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:42.558167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.558179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:42.558184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:42.558188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:42.558732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.558746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:42.558750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:42.559198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.559209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.559217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.559252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.559868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:42.560537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:42.560610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:42.560861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:42.560895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:42.560920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.560984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:42.560992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:42.561029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:42.561043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:42.561568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:42.561578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:42.561637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:42.561643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:42.561745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:42.561752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:42.561766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:42.561771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.561778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:42.561784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:42.561789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:42.561794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:42.561806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:42.561813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:42.561817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:42.562186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:42.562204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:42.562209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:42.562216Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:42.562221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:42.562239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2024-11-21T08:52:46.853637Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T08:52:46.854164Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.854202Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.854209Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.854215Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T08:52:46.854222Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T08:52:46.854258Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:46.854656Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T08:52:46.854685Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T08:52:46.854758Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:46.854781Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 64424511593 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:46.854787Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T08:52:46.854884Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T08:52:46.854894Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 101:0, at tablet 72057594046678944 2024-11-21T08:52:46.854927Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:46.854939Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:52:46.854948Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:52:46.855333Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:46.855344Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:46.855389Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:46.855413Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:46.855419Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:52:46.855423Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:52:46.855487Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.855494Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:52:46.855508Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:52:46.855513Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:52:46.855519Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:52:46.855524Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:52:46.855533Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:52:46.855536Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:52:46.855567Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:52:46.855573Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:52:46.855578Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:52:46.855581Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:52:46.855792Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:52:46.855805Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:52:46.855810Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:52:46.855814Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:52:46.855819Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:46.856032Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:52:46.856047Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:52:46.856052Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:52:46.856056Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:52:46.856060Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:52:46.856073Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:52:46.857611Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:52:46.858630Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:52:46.858701Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:52:46.858708Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:52:46.858785Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:52:46.858807Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:52:46.858812Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:404:2372] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:52:46.859690Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:46.859768Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: 
/MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:52:46.859825Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402, at schemeshard: 72057594046678944 2024-11-21T08:52:46.863502Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:46.863558Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:52:46.863656Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:52:46.863665Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:52:46.863760Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:52:46.863788Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:52:46.863794Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:411:2379] TestWaitNotification: OK eventTxId 102 >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_1_1_1_VdiskErrors [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> ForceDropWithReboots::ForceDeleteCreateTableInFly [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3dc [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_1_1_1_VdiskErrors [GOOD] Test command err: 2024-11-21T08:52:47.270482Z node 4 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [4:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:52:47.270564Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# 
BPG33 2024-11-21T08:52:47.270571Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:52:47.270576Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.270580Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.270584Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:52:47.270587Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:52:47.278175Z node 4 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T08:52:47.278237Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.278243Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.278325Z node 4 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T08:52:47.278333Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:52:47.278338Z node 4 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:52:47.278362Z node 4 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T08:52:47.278377Z node 4 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T08:52:47.278387Z node 4 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_0_0_VdiskErrors [GOOD] Test command err: 2024-11-21T08:52:47.256660Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:52:47.256754Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown 
to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:52:47.256763Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:52:47.256768Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.256771Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.256775Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:52:47.256778Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:52:47.259073Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T08:52:47.259116Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.259120Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.259197Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T08:52:47.259233Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T08:52:47.259258Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T08:52:47.259264Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.259267Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.259289Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:52:47.259303Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T08:52:47.259309Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteSplitInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:20.523527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:20.523554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:20.523559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:20.523563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:20.523578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:20.523582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:20.523590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:20.523668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:20.534704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:20.534724Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:20.537632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:20.537755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:20.537788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:20.543205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:20.543294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:20.543396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:20.543725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:20.544377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:52:20.544673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:20.544685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:20.544709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:20.544717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:20.544723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:20.544768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:20.546083Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:20.562536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:20.562634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.562720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:20.562813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:20.562822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.563614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:20.563645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:20.563708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.563719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:20.563724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:20.563729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:20.564173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.564186Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:20.564191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:20.564593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.564605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.564611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:20.564617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:20.565219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:20.565698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:20.565755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:20.565975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:20.566007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:20.566014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:20.566069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:20.566076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:20.566106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:20.566118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:20.566572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:20.566586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:20.566633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:20.566639Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:20.566727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:20.566736Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:20.566747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:20.566751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:20.566757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:20.566762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:20.566766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:20.566770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:20.566781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:20.566787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:20.566791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 4-11-21T08:52:47.104796Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:47.104943Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:52:47.105272Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:52:47.105348Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2024-11-21T08:52:47.105422Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:52:47.105491Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:52:47.105554Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:52:47.105660Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:47.105694Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186233409546 2024-11-21T08:52:47.105966Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:52:47.106590Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 
2024-11-21T08:52:47.106638Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:52:47.106784Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T08:52:47.106857Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T08:52:47.106938Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:47.106966Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:47.107265Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:52:47.107295Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:52:47.107380Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.107385Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:47.107397Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:47.107403Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:47.107408Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:47.107481Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.107485Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:47.107506Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:47.107644Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:52:47.107666Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:52:47.107681Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:52:47.107711Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [94:573:2521] 2024-11-21T08:52:47.108373Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:52:47.108416Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:47.108421Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:52:47.108431Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:52:47.108434Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:52:47.108509Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:47.108514Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:47.108524Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:52:47.108673Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:47.108729Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.108735Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:47.108749Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:47.108800Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:47.109135Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T08:52:47.109227Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:47.109239Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:52:47.109246Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:52:47.109253Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:52:47.109261Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 2024-11-21T08:52:47.109348Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:47.109392Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 52us result status StatusPathDoesNotExist 2024-11-21T08:52:47.109443Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: 
path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:47.109500Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:47.109520Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 21us result status StatusSuccess 2024-11-21T08:52:47.109581Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] Test command err: Trying to start YDB, gRPC: 20111, MsgBus: 27482 2024-11-21T08:52:35.035095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652824408178007:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:35.035342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/002dac/r3tmp/tmpUqfTFk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20111, node 1 2024-11-21T08:52:35.093073Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:35.096564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:35.096580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:35.096582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:35.096621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27482 TClient is connected to server localhost:27482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:35.136838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:35.136864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:35.138006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:35.162913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:35.377941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652824408178609:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.377971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.378047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652824408178636:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:35.378762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:35.380678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652824408178638:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:35.529322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.593836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:35.694915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:40.035449Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652824408178007:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:40.035482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21841, MsgBus: 16614 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dac/r3tmp/tmpoxMJ9A/pdisk_1.dat 2024-11-21T08:52:41.379701Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:41.387568Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21841, node 2 2024-11-21T08:52:41.404962Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:41.404979Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:41.404982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:41.405029Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16614 TClient is connected to server localhost:16614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:52:41.479677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:41.479719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:41.480009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.482115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:41.484418Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:41.701606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652849294565993:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.701631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652849294566004:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.701639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.702453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:41.705041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652849294566014:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:41.777576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.787889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.914348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3dc [GOOD] Test command err: 2024-11-21T08:52:47.421108Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:52:47.421169Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421176Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421179Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421182Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421186Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421189Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421193Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421196Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421199Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421202Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421206Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421209Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421212Z node 3 
:BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421216Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421219Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421222Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421225Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421229Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.421234Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:52:47.421247Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:52:47.421254Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:52:47.421259Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:52:47.421263Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:52:47.421267Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:52:47.421270Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:52:47.421275Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T08:52:47.421278Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T08:52:47.421281Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T08:52:47.421284Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T08:52:47.421291Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T08:52:47.421294Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T08:52:47.424192Z node 3 
:BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:52:47.424252Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T08:52:47.424260Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T08:52:47.424265Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T08:52:47.424268Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T08:52:47.424272Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T08:52:47.424276Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T08:52:47.424280Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424284Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424288Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424293Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424296Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424300Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424303Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424307Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424310Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424314Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424318Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424321Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T08:52:47.424326Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:52:47.424340Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:52:47.424345Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:52:47.424409Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T08:52:47.424421Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T08:52:47.424432Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T08:52:47.424441Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T08:52:47.424453Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T08:52:47.424491Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2024-11-21T08:52:47.424498Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T08:52:47.424502Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2024-11-21T08:52:47.424507Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T08:52:47.424510Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T08:52:47.424514Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T08:52:47.424517Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T08:52:47.424521Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T08:52:47.424524Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424527Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# 
ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424531Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424534Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424537Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424540Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:52:47.424544Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:52:47.424550Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:52:47.424553Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:52:47.424585Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2024-11-21T08:52:47.424603Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T08:52:47.424610Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateTableInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 
72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:15.799101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:15.799126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:15.799132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:15.799137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:15.799153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:15.799157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:15.799166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:15.799241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:15.811926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:15.811947Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:15.814384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:15.814491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:15.814520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:15.819972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:15.820077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:15.820200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:15.821022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:15.822727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.823048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:15.823060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.823083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:15.823091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:15.823099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:15.823150Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:15.824770Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:15.843235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:15.843329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.843409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:15.843490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:15.843498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.844539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:15.844567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:15.844637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.844690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:15.844695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:15.844701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:15.845206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.845217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:15.845222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:15.846214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.846227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.846234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.846242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T08:52:15.846836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:15.847249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:15.847304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:15.847501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:15.847530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:15.847538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.847594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:15.847600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:15.847634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:15.847648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:15.848039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:15.848050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:15.848097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:15.848102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:15.848188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:15.848196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:15.848226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:15.848231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:15.848237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2024-11-21T08:52:15.848243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:15.848250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:15.848254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:15.848265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:15.848272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:15.848278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... d: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:47.450780Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:47.451059Z node 125 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.451077Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.451081Z node 125 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:52:47.451085Z node 125 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:52:47.451089Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:47.451101Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 1 2024-11-21T08:52:47.451105Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [125:457:2424] 2024-11-21T08:52:47.451227Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:47.451236Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:47.451239Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:47.451950Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.452082Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.452108Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:52:47.452149Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:47.452188Z 
node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:47.452267Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 Forgetting tablet 72075186233409546 2024-11-21T08:52:47.452485Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:52:47.452519Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:52:47.452567Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:52:47.452645Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.452649Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:47.452656Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:47.452660Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:47.452665Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409547 2024-11-21T08:52:47.452721Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:47.452738Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:47.453069Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.453081Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:47.453104Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:47.453323Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.453349Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:47.453360Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:52:47.453365Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [125:458:2425] 2024-11-21T08:52:47.453904Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:47.453916Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe 
to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:52:47.453930Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:52:47.453944Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:47.453958Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:47.453962Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:47.453973Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:47.454000Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:47.454005Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:52:47.454032Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:47.454320Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 Ok notification wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:52:47.454404Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:47.454415Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:52:47.454425Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T08:52:47.454491Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:47.454524Z node 125 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 43us result status StatusPathDoesNotExist 2024-11-21T08:52:47.454556Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:47.454600Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:47.454617Z node 125 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 18us result status StatusSuccess 2024-11-21T08:52:47.454666Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TDataShardTrace::TestTraceDistributedUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T08:52:48.189832Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.189842Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.189846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.189948Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.190142Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.191678Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.191788Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T08:52:48.192153Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.192158Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.192160Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.192243Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.192376Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.192432Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.192490Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:52:48.192569Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T08:52:48.192831Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.192836Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.192839Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.192892Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.193008Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.193033Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.193070Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:52:48.193325Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.193454Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:52:48.193479Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:52:48.193486Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T08:52:48.193790Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.193795Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.193799Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.193866Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.194008Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.194047Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.194090Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T08:52:48.194386Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:52:48.194439Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:52:48.194523Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:52:48.194544Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:52:48.194579Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:52:48.194586Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:52:48.194595Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:52:48.194627Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T08:52:48.194633Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:52:48.194637Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:52:48.194640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:52:48.194656Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T08:52:48.194672Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T08:52:48.194675Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T08:52:48.194677Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:52:48.194702Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T08:52:48.194707Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T08:52:48.194710Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T08:52:48.194713Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:52:48.194726Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T08:52:48.195047Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.195051Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.195054Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.195099Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.195204Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.195250Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.195318Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T08:52:48.195448Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:52:48.195472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:52:48.195517Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:52:48.195529Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:52:48.195561Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:52:48.195566Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2024-11-21T08:52:48.195569Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:52:48.195572Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:52:48.195577Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:52:48.195610Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T08:52:48.195623Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T08:52:48.195626Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T08:52:48.195629Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T08:52:48.195632Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T08:52:48.195635Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:52:48.195653Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2024-11-21T08:52:48.195988Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.195993Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.195996Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:48.196063Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:52:48.196142Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:52:48.196165Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:48.196228Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:52:48.196340Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:52:48.196379Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:52:48.196433Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T08:52:48.196447Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T08:52:48.196475Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:52:48.196481Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:52:48.196486Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T08:52:48.196489Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T08:52:48.196495Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T08:52:48.196500Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T08:52:48.196524Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T08:52:48.196546Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> SubDomainWithReboots::CreateWithStoragePools [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TPersQueueTest::ReadFromSeveralPartitions >> TPersQueueTest::SetupLockSession2 >> TopicService::OneConsumer_TheRangesDoNotOverlap >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:24.686811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:24.686830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:24.686833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:24.686836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:24.686846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:24.686849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:24.686855Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:24.686914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:24.694605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:24.694622Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:24.696625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:24.696756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:24.696787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:24.700064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:24.700162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:24.700330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.700581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:24.701367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.701670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:24.701686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.701715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:24.701724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:24.701732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:24.701781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:24.703351Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:24.725125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:24.725232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.725315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:24.725419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:24.725431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.726216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.726248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:24.726310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.726325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:24.726329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:24.726335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:24.726864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.726882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:24.726887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:24.727311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.727325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.727331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.727337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.728026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:24.728620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:24.728683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:24.728977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.729018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 
4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:24.729029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.729089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:24.729097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.729127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:24.729143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:24.729763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:24.729778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:24.729826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.729832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:24.729922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.729929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:24.729941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:24.729944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.729950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:24.729955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.729960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:24.729964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:24.729977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:24.729982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:24.729986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
72057594046678944 2024-11-21T08:52:48.543535Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T08:52:48.543570Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:48.544056Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T08:52:48.544086Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T08:52:48.544159Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:48.544181Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 403726927976 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.544188Z node 94 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:48.544305Z node 94 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T08:52:48.544319Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:48.544351Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:48.544362Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:48.544372Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:52:48.544861Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:48.544870Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:48.544907Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:48.544943Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:48.544949Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T08:52:48.544954Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, 
txId: 1002, path id: 3 2024-11-21T08:52:48.545018Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:48.545023Z node 94 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T08:52:48.545034Z node 94 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:52:48.545037Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:48.545041Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T08:52:48.545044Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:48.545047Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:52:48.545050Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:52:48.545073Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:48.545077Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T08:52:48.545099Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:52:48.545102Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:52:48.545226Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.545236Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.545239Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:48.545242Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:52:48.545245Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:48.545357Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.545365Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.545368Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:48.545370Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:52:48.545374Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 4 2024-11-21T08:52:48.545381Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:52:48.545383Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [94:297:2289] 2024-11-21T08:52:48.546274Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.546330Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:48.546345Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:52:48.546351Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [94:298:2290] TestWaitNotification: OK eventTxId 1002 2024-11-21T08:52:48.546452Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.546490Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 46us result status StatusSuccess 2024-11-21T08:52:48.546599Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.546651Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.546675Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 13us result status StatusSuccess 2024-11-21T08:52:48.546712Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:43.201535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:43.201555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:43.201558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:43.201562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:43.201576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:43.201579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:43.201586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:43.201645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:43.210813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:43.210828Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:43.212944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:43.213621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:43.213654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:52:43.214808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:43.215002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:43.215063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.215119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:43.215752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.215976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:43.215982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.216009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:43.216014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:43.216018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:43.216027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.217220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:43.230948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:43.231060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.231144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:43.231198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:43.231208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.231996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.232030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:43.232086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.232099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:43.232105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:43.232111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:43.235561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.235581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:43.235587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:43.236130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.236155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.236162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.236184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.236893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:43.237320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:43.237372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:43.237557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.237580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:43.237599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.237653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:43.237661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.237690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:43.237703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:43.238133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:43.238141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:43.238181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.238186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:43.238281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.238288Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:43.238297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:43.238301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.238307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:43.238313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.238317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:43.238320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:43.238330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:43.238336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:43.238340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:43.238697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:43.238713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:43.238718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:43.238723Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:43.238728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:43.238741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Z_full/Table1" took 33us result status StatusSuccess 2024-11-21T08:52:48.585246Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/Table1" PathDescription { Self { Name: "Table1" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585321Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585340Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" took 21us result status StatusSuccess 2024-11-21T08:52:48.585380Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" PathDescription { Self { Name: "DirA" PathId: 17 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } 
Children { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 17 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585457Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585479Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" took 23us result status StatusSuccess 2024-11-21T08:52:48.585533Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 23 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585590Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585603Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" took 14us result status StatusSuccess 2024-11-21T08:52:48.585633Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 18 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585683Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:48.585699Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" took 18us result status StatusSuccess 2024-11-21T08:52:48.585740Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 24 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> Donor::MultipleEvicts >> TSchemeShardTest::CreateSystemColumn [GOOD] >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> Donor::MultipleEvicts [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:43.306383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:43.306411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:43.306417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:43.306422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:43.306442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:43.306447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:43.306457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:43.306540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:43.316021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2024-11-21T08:52:43.316043Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:43.319141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:43.319972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:43.320009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:43.321615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:43.321880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:43.321983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.322071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:43.323202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.323510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:43.323525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.323591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:43.323601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:43.323609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:43.323622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.325125Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:43.342006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:43.342101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.342176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:43.342227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:43.342235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.343195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.343226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T08:52:43.343281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.343293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:43.343297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:43.343302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:43.343895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.343907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:43.343912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:43.344358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.344368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.344375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.344396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.344905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:43.345256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:43.345313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:43.345480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:43.345499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:43.345516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.345564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:43.345572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:43.345606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:43.345620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:43.346023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:43.346031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:43.346082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:43.346087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:43.346182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:43.346189Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:43.346202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:43.346207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.346213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:43.346219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:43.346224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:43.346228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:43.346240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:43.346246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:43.346250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:43.346548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:43.346562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:43.346567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:43.346572Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:43.346578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:43.346592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944] TDone opId# 102:0 ProgressState 2024-11-21T08:52:49.557017Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:52:49.557021Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:49.557028Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T08:52:49.557041Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:342:2317] message: TxId: 102 2024-11-21T08:52:49.557048Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:52:49.557054Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:52:49.557059Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:52:49.557077Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:52:49.557496Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:52:49.557508Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:343:2318] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2024-11-21T08:52:49.558146Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:49.558212Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:52:49.558321Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:49.558335Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T08:52:49.558339Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:49.558346Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:49.558367Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:49.558386Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:49.558504Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:49.558513Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:49.559074Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2024-11-21T08:52:49.559112Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2024-11-21T08:52:49.559154Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:49.559160Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:49.559199Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:49.559215Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:49.559219Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T08:52:49.559224Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T08:52:49.559354Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:52:49.559364Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet72057594046678944 2024-11-21T08:52:49.559415Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T08:52:49.559555Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:52:49.559566Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:52:49.559570Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:52:49.559575Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:52:49.559579Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:52:49.559771Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:52:49.559782Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:52:49.559786Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:52:49.559790Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2024-11-21T08:52:49.559798Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:49.559811Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T08:52:49.560545Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2024-11-21T08:52:49.560588Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2024-11-21T08:52:49.560595Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T08:52:49.560764Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T08:52:49.560828Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2024-11-21T08:52:49.560850Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:52:49.560856Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T08:52:49.560872Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:52:49.560879Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T08:52:49.560884Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T08:52:49.560920Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2024-11-21T08:52:49.561418Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:52:49.561546Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:52:49.562123Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:52:49.562155Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2024-11-21T08:52:49.562164Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2024-11-21T08:52:49.562173Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2024-11-21T08:52:49.563031Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T08:52:49.563066Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2024-11-21T08:52:49.563087Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2024-11-21T08:52:49.563091Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 >> Donor::SlayAfterWiping ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 13784685250849975099 0 donors: 2024-11-21T08:52:49.945386Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.945660Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.946277Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 1:1000 2024-11-21T08:52:49.952823Z 1 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.953068Z 1 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.953549Z 1 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2024-11-21T08:52:49.959041Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.959262Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.959750Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 1:1000 2024-11-21T08:52:49.965165Z 1 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.965387Z 1 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.965807Z 1 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2024-11-21T08:52:49.971065Z 4 
00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.971313Z 4 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.971741Z 4 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 1:1000 2024-11-21T08:52:49.977260Z 1 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.977526Z 1 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.977948Z 1 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2024-11-21T08:52:49.983674Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.983926Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.984499Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 1:1000 2024-11-21T08:52:49.990704Z 1 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.990959Z 1 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.991397Z 1 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2024-11-21T08:52:49.997182Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:49.997442Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18188036457889519934] 2024-11-21T08:52:49.997932Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 1:1000 |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> Donor::SlayAfterWiping [GOOD] |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> KqpSinkLocks::TInvalidateOlap [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit 
[GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 2580951391227598629 2024-11-21T08:52:50.512692Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:52:50.513038Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12317940705615282594] 2024-11-21T08:52:50.513920Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> KqpSinkTx::OlapInteractive [GOOD] >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpScanArrowFormat::AggregateWithFunction >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 29909, MsgBus: 12065 2024-11-21T08:52:37.490800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652831841659596:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.491098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cd1/r3tmp/tmp2looIo/pdisk_1.dat 2024-11-21T08:52:37.552778Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29909, node 1 2024-11-21T08:52:37.581129Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.581145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.581147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.581188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12065 2024-11-21T08:52:37.596637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.596665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.597618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:37.690323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.693534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:37.893368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652831841660191:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.893499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652831841660183:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.893524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:37.894365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:37.897590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652831841660220:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:37.995486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.010144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:52:38.010183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:52:38.010218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:52:38.010231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:52:38.010248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:52:38.010262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:52:38.010275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:52:38.010287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:52:38.010296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:52:38.010298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:52:38.010308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:52:38.010310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:52:38.010381Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:52:38.010396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439652836136627722:2315];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:52:38.010397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:52:38.010427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:52:38.010448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:52:38.010465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:52:38.010480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:52:38.010615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:52:38.010635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:52:38.010653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:52:38.010670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:52:38.011097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439652836136627724:2317];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:52:38.011693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:52:38.011704Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:52:38.011717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:52:38.011723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAM ... 186224037988;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.355977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356230Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356495Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037983;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.356704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.357746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T08:52:46.730661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7439652867633331144:2389];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.732491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;task_id=fab53c8a-a7e511ef-b7c29ae9-417d2275;fline=with_appended.cpp:80;portions=3,;task_id=fab53c8a-a7e511ef-b7c29ae9-417d2275; 2024-11-21T08:52:46.738779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7439652867633331206:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.739847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7439652867633331103:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.740755Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;task_id=fab6a5ca-a7e511ef-94e12f79-e580c259;fline=with_appended.cpp:80;portions=;task_id=fab6a5ca-a7e511ef-94e12f79-e580c259; 2024-11-21T08:52:46.740790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;task_id=fab67b36-a7e511ef-97fea317-c988a793;fline=with_appended.cpp:80;portions=;task_id=fab67b36-a7e511ef-97fea317-c988a793; 2024-11-21T08:52:46.742939Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037913;self_id=[2:7439652867633331216:2420];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.744343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7439652867633331218:2421];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.744873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=fab71d3e-a7e511ef-acf72dc9-f68efd3;fline=with_appended.cpp:80;portions=;task_id=fab71d3e-a7e511ef-acf72dc9-f68efd3; 2024-11-21T08:52:46.745495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;task_id=fab7557e-a7e511ef-b4773962-3bc9cb3a;fline=with_appended.cpp:80;portions=;task_id=fab7557e-a7e511ef-b4773962-3bc9cb3a; 2024-11-21T08:52:46.745598Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:7439652867633331203:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.747589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;task_id=fab7868e-a7e511ef-8ca5e109-916cf96d;fline=with_appended.cpp:80;portions=;task_id=fab7868e-a7e511ef-8ca5e109-916cf96d; 2024-11-21T08:52:46.751624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439652867633331190:2402];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.753507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=fab86d92-a7e511ef-a0e5fa91-c51f86f7;fline=with_appended.cpp:80;portions=;task_id=fab86d92-a7e511ef-a0e5fa91-c51f86f7; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:49.924194Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652863338361909:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.924252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] Test command err: 2024-11-21T08:52:48.853936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:48.854329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:48.854348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046c4/r3tmp/tmpOWL57n/pdisk_1.dat 2024-11-21T08:52:48.955500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.971763Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:49.014305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.014360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.025041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.129004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:51.789612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:51.789642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:51.789653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:51.790513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:52:51.805355Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T08:52:52.007983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:52:52.085261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ys6edc02zj7ngf6jjfdtv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlkYmE5NmItMmNiNjIwZDUtM2M1NTQ3YjMtMTkyZjJkNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] >> SubDomainWithReboots::Create [GOOD] >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain [GOOD] >> TopicService::OneConsumer_TheRangesOverlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: 2024-11-21T08:52:49.827005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:49.827377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:49.827393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042e0/r3tmp/tmpRFBPVw/pdisk_1.dat 2024-11-21T08:52:49.922538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.939998Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:49.982288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.982321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.992828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:50.096631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.309179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.562921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.562948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.562958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.563780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:52:50.740196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:52:50.801874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ys582e7vnnvcew1pvr3xq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI0OWUyYTktZTZlOWE3MGYtNzM5OGQwZmQtYWNkZjBlMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 890 RawX2: 4294969940 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\tz\003\000\000\000\000\000\000\021T\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZjI0OWUyYTktZTZlOWE3MGYtNzM5OGQwZmQtYWNkZjBlMGQ=\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jd6ys582e7vnnvcew1pvr3xq\222\001\014\n\010Database\022\000\230\001\000\"\n\010\255\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 62 } } 2024-11-21T08:52:50.821442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ys5fya78s5pq7pvecdhqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE1NzA2ZC1kZjZjMDY5Mi00ODdhY2YyOC1mMjgwMjZhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 912 RawX2: 4294970013 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\220\003\000\000\000\000\000\000\021\235\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jd6ys5fya78s5pq7pvecdhqr\222\001\026\n\022CustomerSuppliedId\022\000\222\001\014\n\010Database\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\021\n\006PoolId\022\007default\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZDE1NzA2ZC1kZjZjMDY5Mi00ODdhY2YyOC1mMjgwMjZhMw==\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 719 RawX2: 4294969895 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 60 } } 2024-11-21T08:52:51.127562Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ys5rjecfyrxrb8bz1qffh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T08:52:51.479389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ys64efxwn8kn8sh0g6wf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM2MjA0NTQtODU0OTRkNWUtZTgwZTc0ZTItNmVkMDY0YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1003 RawX2: 4294970093 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\353\003\000\000\000\000\000\000\021\355\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\006\006\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd6ys64efxwn8kn8sh0g6wf9\222\001\014\n\010Database\022\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=YjM2MjA0NTQtODU0OTRkNWUtZTgwZTc0ZTItNmVkMDY0YmE=\222\001\023\n\nDatabaseId\022\005/Root\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 OrderId: 281474976715664 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 21 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 71 } } 2024-11-21T08:52:51.498067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ys652cymken3sy61j3t8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:51.507254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ys65b79f48k23v1kggvyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:52:51.515337Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, ActorId: [1:930:2747], ActorState: ExecuteState, TraceId: 01jd6ys65m35bxbezqc98qxj35, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T08:52:51.525803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ys65m35bxbezqc98qxj35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1052 RawX2: 4294970043 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } 2024-11-21T08:52:51.525983Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888;tx_id=281474976715667; 2024-11-21T08:52:51.525991Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1052 RawX2: 4294970043 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } EvWriteResult 281474976715667 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_BAD_REQUEST Issues { message: "MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888" } Origin: 72075186224037888 TxId: 281474976715667 2024-11-21T08:52:51.526860Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=YWY1NWRhZDktYWI2MTQ4YTYtOWE3NTkyZTYtNTNjYjJjMmM=, ActorId: [1:930:2747], ActorState: CleanupState, TraceId: 01jd6ys65m35bxbezqc98qxj35, Failed to cleanup:
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005 2024-11-21T08:52:51.527041Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037889;tx_id=281474976715667; 2024-11-21T08:52:51.527048Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented 2024-11-21T08:52:51.877575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.877607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.877626Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042e0/r3tmp/tmppBJfIx/pdisk_1.dat 2024-11-21T08:52:51.953009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:51.967512Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:52.010427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:52.010460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:52.020941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:52.127123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.336472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.589873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.589899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.589908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.590497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:52:52.765792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:52:52.811181Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ys77deqqy4x3t0rce8zxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA2MDVjZS0zOGJiMzQ0NS0zYTVlZDUxNi02ODI3ODdmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:52.831793Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ys7eq36h7hqdktyb55s4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MDU0ODYtMzkyMThhYjctZTAwMDMzMTctNmJjODEyOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:53.141605Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ys7q1fwrmvf1e5gtnrgzk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI1NTI0MzEtNTQyZTFkYzAtYjBlODc3OGUtZmMyZDc1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T08:52:53.502839Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ys83q60g7aatdya8p9fbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDhmMDQzOWItYTZjZWZiZjQtNTk3M2YwYmUtNjI2MmMwNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:53.522908Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ys84af01ge5pn4cnsc79s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI1NTI0MzEtNTQyZTFkYzAtYjBlODc3OGUtZmMyZDc1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:53.531865Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ys84k8vhnx4k28khma43f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI1NTI0MzEtNTQyZTFkYzAtYjBlODc3OGUtZmMyZDc1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:53.539673Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmI1NTI0MzEtNTQyZTFkYzAtYjBlODc3OGUtZmMyZDc1MDY=, ActorId: [2:928:2745], ActorState: ExecuteState, TraceId: 01jd6ys84w9jswthdvrg17fq7q, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T08:52:53.550196Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ys84w9jswthdvrg17fq7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmI1NTI0MzEtNTQyZTFkYzAtYjBlODc3OGUtZmMyZDc1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> TPersQueueTest::SetupLockSession2 [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> TPersQueueTest::SetupLockSession >> OperationMapping::IndexBuildRejected [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> OperationMapping::IndexBuildSuccess [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |88.0%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T08:52:49.618633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:49.619164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:49.619196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042dc/r3tmp/tmpnyIhyS/pdisk_1.dat 2024-11-21T08:52:49.720609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.737687Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:49.780305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.780335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.791008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.895249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.108306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T08:52:50.366438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.366467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.366478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.367434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:52:50.545391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:52:50.614350Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ys51y8rx29sj296c1xdqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY2NDMxZTctZDlmNjczMzktYjQ3NjU4NTUtYzljMjcxZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:52:50.633756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ys5a2cpv8symfam6s3fxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc0NTU5N2MtNmYxNGM3MzgtN2NhN2M1MDAtYmY0YTAzYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T08:52:50.687146Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ys5aqcs8zdhjpmf18nnck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE4NjQ0YjgtNTY1NGY0ZTItZWNhOTAzNC1kMzBiNWY1NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T08:52:50.701003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ys5c3bc7crfh5a28agn1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE4NjQ0YjgtNTY1NGY0ZTItZWNhOTAzNC1kMzBiNWY1NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T08:52:51.000128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ys5nabv0ddhwce2vt99c0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAwNjI4ZmEtYjU5Y2NjZmQtZWQ2NGM4ZWItNDdjYzkyZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T08:52:51.490603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.490650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.490683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042dc/r3tmp/tmpzs7x0V/pdisk_1.dat 2024-11-21T08:52:51.577006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:52:51.594640Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:51.637236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:51.637274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:51.647875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:51.754402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:51.981700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T08:52:52.238281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.238298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.238305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.238891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:52:52.415816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:52:52.457299Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ys6we41dswdjw9frcczhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2QwZmM3NDUtZTExYTcwMWEtNjY2Nzk0ZDUtNjZkMmM4NTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 8589937235 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\ty\003\000\000\000\000\000\000\021S\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=M2QwZmM3NDUtZTExYTcwMWEtNjY2Nzk0ZDUtNjZkMmM4NTg=\222\001\014\n\010Database\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jd6ys6we41dswdjw9frcczhg\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\242\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 86 } } 2024-11-21T08:52:52.477085Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ys73nekgs5155zhnhsbj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2YzZDNkYmMtYTkxYzMyNmQtYjE3MDcxZTgtYjQ3YzJhMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 911 RawX2: 8589937308 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\217\003\000\000\000\000\000\000\021\234\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jd6ys73nekgs5155zhnhsbj6\222\001\021\n\006PoolId\022\007default\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=M2YzZDNkYmMtYTkxYzMyNmQtYjE3MDcxZTgtYjQ3YzJhMWM=\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 80 } } ===== Begin SELECT 2024-11-21T08:52:52.530671Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ys74973evpkhd97vpenck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE0Y2I2NzMtYzdhMzUzZDQtYjIyN2NlZS0zYjM4NDU2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T08:52:52.543138Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ys75n6dfhbx94460pzm4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE0Y2I2NzMtYzdhMzUzZDQtYjIyN2NlZS0zYjM4NDU2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd6ys75n6dfhbx94460pzm4r\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZTE0Y2I2NzMtYzdhMzUzZDQtYjIyN2NlZS0zYjM4NDU2Yw==\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd6ys75n6dfhbx94460pzm4r\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\014\n\010Database\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZTE0Y2I2NzMtYzdhMzUzZDQtYjIyN2NlZS0zYjM4NDU2Yw==\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2024 MaxStep: 32024 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 64 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2024 MaxStep: 32024 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 36 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 141 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 193 } } ===== Waiting for commit response ===== Last SELECT 2024-11-21T08:52:52.841755Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd6ys7ev1ce5kz31xnmx72k5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjQzMGZkYWYtYjc3ZTllYTctMWY2YmUwNGMtYjllODQ0Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |88.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::TInvalidateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28365, MsgBus: 2592 2024-11-21T08:52:37.861829Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652830994858263:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:37.861959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b40/r3tmp/tmpfgbOqp/pdisk_1.dat 2024-11-21T08:52:37.913657Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28365, node 1 2024-11-21T08:52:37.928266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:37.928279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:37.928281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:37.928313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2592 TClient is connected to server localhost:2592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:37.964406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:37.964441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:37.968599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:52:37.990861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:37.993053Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:38.188637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652835289826143:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.188696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.189257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652835289826178:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.190187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:38.192045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652835289826180:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:38.304018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.373968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.595506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.892878Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2024-11-21T08:52:38.892941Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T08:52:38.892971Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T08:52:38.893026Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652835289833944:2931], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7439652835289833873:2931]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[1:7439652835289833944:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:38.893125Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652835289833930:2931], SessionActorId: [1:7439652835289833873:2931], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439652835289833873:2931]. isRollback=0 2024-11-21T08:52:38.893158Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmRlZTJjMzctOWVjYjE4NmItOWRmNTg1ODUtMzBjMjFkYWE=, ActorId: [1:7439652835289833873:2931], ActorState: ExecuteState, TraceId: 01jd6yrsv24ngdp5gdgd9rpe9a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439652835289833931:2931] from: [1:7439652835289833930:2931] 2024-11-21T08:52:38.893221Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652835289833931:2931] TxId: 281474976715665. Ctx: { TraceId: 01jd6yrsv24ngdp5gdgd9rpe9a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRlZTJjMzctOWVjYjE4NmItOWRmNTg1ODUtMzBjMjFkYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:38.894390Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmRlZTJjMzctOWVjYjE4NmItOWRmNTg1ODUtMzBjMjFkYWE=, ActorId: [1:7439652835289833873:2931], ActorState: ExecuteState, TraceId: 01jd6yrsv24ngdp5gdgd9rpe9a, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:42.862019Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652830994858263:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:42.862062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15122, MsgBus: 19336 2024-11-21T08:52:44.285938Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652862077833305:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:44.285959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b40/r3tmp/tmp0m6Lj8/pdisk_1.dat 2024-11-21T08:52:44.302802Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15122, node 2 2024-11-21T08:52:44.312349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:44.312361Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:44.312363Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:44.312391Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19336 TClient is connected to server localhost:19336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:44.386344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:44.386374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:44.387427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:44.388723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:44.591292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652862077833875:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.591316Z node 2 :KQP_WORKLOAD_SERVICE WARN: ... e=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.331984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332471Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038096;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.332616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.333051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.373014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.373103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.373158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.373202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.374710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.374769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.374819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.375104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.375533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.497653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 5061756C ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"1;Paul;"}}]}; 2024-11-21T08:52:45.525531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=25;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=interaction.h:353;batch=Group: [ 1 ] Name: [ 
5061756C ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665},{"inc":{"count_include":1},"id":281474976715666}],"finishes":[{"inc":{"count_include":1},"id":281474976715665},{"inc":{"count_include":1},"id":281474976715666}]},"p":{"include":0,"pk":"1;Paul;"}}]}; 2024-11-21T08:52:45.530138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:89;broken_lock_id=281474976715665; 2024-11-21T08:52:45.530289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:45.546663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037893;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 536572676579 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"1;Paul;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"11;Sergey;"}}]}; 2024-11-21T08:52:45.547759Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037897;self_id=[2:7439652862077834150:2320];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2024-11-21T08:52:45.547915Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652866372809355:3253], SessionActorId: [2:7439652866372809336:3253], Got LOCKS BROKEN for table. ShardID=72075186224037897, Sink=[2:7439652866372809355:3253].{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T08:52:45.547933Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652866372809355:3253], SessionActorId: [2:7439652866372809336:3253], Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7439652866372809336:3253]. isRollback=0 2024-11-21T08:52:45.547956Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdjMWZkZjAtMzAyMWU4ZmQtZDk4YmRkMWYtYzAwNzgzNTI=, ActorId: [2:7439652866372809336:3253], ActorState: ExecuteState, TraceId: 01jd6ys0awa2gr2szjhq2gec38, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439652866372809918:3253] from: [2:7439652866372809355:3253] 2024-11-21T08:52:45.547983Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439652866372809918:3253] TxId: 281474976715669. Ctx: { TraceId: 01jd6ys0awa2gr2szjhq2gec38, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTdjMWZkZjAtMzAyMWU4ZmQtZDk4YmRkMWYtYzAwNzgzNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T08:52:45.548067Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdjMWZkZjAtMzAyMWU4ZmQtZDk4YmRkMWYtYzAwNzgzNTI=, ActorId: [2:7439652866372809336:3253], ActorState: ExecuteState, TraceId: 01jd6ys0awa2gr2szjhq2gec38, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 2024-11-21T08:52:45.717814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7439652862077834150:2320];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=6; 2024-11-21T08:52:45.721039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=fa1ab1ce-a7e511ef-8c395b8f-cf9bcf95;fline=with_appended.cpp:80;portions=3,;task_id=fa1ab1ce-a7e511ef-8c395b8f-cf9bcf95; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:49.286071Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652862077833305:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.286100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 29994, MsgBus: 26301 2024-11-21T08:52:38.302189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652834629557864:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:38.302259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b0d/r3tmp/tmp9JeFg1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29994, node 1 2024-11-21T08:52:38.366095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:52:38.366110Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:52:38.369350Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:38.372311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:38.372324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:38.372325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:38.372370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26301 2024-11-21T08:52:38.431571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:38.431596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:38.432493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26301 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:38.463179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:38.465647Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:38.629896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652834629558314:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.629940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.630090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652834629558349:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:38.630778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:38.632984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:52:38.633030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652834629558351:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:38.741719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:38.755696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:52:38.755753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:52:38.755802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:52:38.755835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:52:38.755861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:52:38.755885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:52:38.755906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:52:38.755927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:52:38.755951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:52:38.755969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:52:38.755987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:52:38.756010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439652834629558554:2312];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:52:38.757294Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:52:38.757315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:52:38.757333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:52:38.757341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:52:38.757366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:52:38.757378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:52:38.757391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:52:38.757419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:52:38.757438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:52:38.757448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:52:38.757455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:52:38.757468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:52:38.757541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:52:38.757552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:52:38.757569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:52:38.757578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:52:38.757595Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 08:52:46.207306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207399Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207714Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 
2024-11-21T08:52:46.207735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.207995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208045Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208086Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208348Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037997;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.208653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:46.209216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T08:52:46.761859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439652867802930571:2374];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T08:52:46.765586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=faba003a-a7e511ef-b80f3c73-7bb0aa53;fline=with_appended.cpp:80;portions=3,;task_id=faba003a-a7e511ef-b80f3c73-7bb0aa53; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:50.009128Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652867802928936:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:50.009165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2024-11-21T08:52:18.580694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:2711:2331], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:52:18.603875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:2724:2388], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.604607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.604630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.605175Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2736:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605502Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2726:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2728:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605565Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2730:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605603Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2734:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605626Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.605650Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2738:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605798Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2732:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:18.605855Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606373Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606398Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606430Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606448Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606535Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606641Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606654Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606663Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606681Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606689Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.606762Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:18.607336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:18.607403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:18.756200Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:18.855229Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:18.860380Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:18.905174Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19800, node 1 TClient is connected to server localhost:23657 2024-11-21T08:52:18.935149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:18.935170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:18.935174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:18.935274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:24.966836Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2736:2388], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967138Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967419Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967570Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:2733:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967600Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:2738:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967631Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:1607:2268], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967664Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1609:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967703Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1613:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967805Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:2744:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967832Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2746:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.967856Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967943Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967970Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967982Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:24.967995Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1611:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:24.968013Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:24.968418Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:24.968447Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:24.968521Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_ ... LookupError; 2024-11-21T08:52:44.699055Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.699073Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.699088Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.699107Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.699986Z node 45 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [45:1623:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:44.700328Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:44.700421Z node 38 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [38:2735:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:44.700459Z node 42 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [42:1617:2268], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:44.700560Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.700735Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:44.700816Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.700826Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:44.700836Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:44.801060Z node 37 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:44.897085Z node 37 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:44.900509Z node 37 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:44.947588Z node 37 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20913, node 37 TClient is connected to server localhost:17122 2024-11-21T08:52:44.977487Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:44.977510Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:44.977515Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:44.977645Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:51.871128Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [46:2744:2388], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.871372Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.871433Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.871523Z node 49 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [49:2101:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.871693Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.871771Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872011Z node 52 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [52:2107:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.872074Z node 50 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [50:2103:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.872129Z node 53 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [53:2109:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.872151Z node 54 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [54:2111:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.872268Z node 52 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872358Z node 50 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872387Z node 52 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872394Z node 53 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872402Z node 54 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872524Z node 50 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872551Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [51:2105:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.872572Z node 53 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872581Z node 54 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872691Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.872786Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.873495Z node 47 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [47:2741:2333], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.873861Z node 47 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:51.874015Z node 47 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.874091Z node 48 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [48:854:2138], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:51.874272Z node 48 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:52:51.874396Z node 48 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T08:52:51.980173Z node 46 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:52.082488Z node 46 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T08:52:52.086172Z node 46 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T08:52:52.142583Z node 46 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1524, node 46 TClient is connected to server localhost:16552 2024-11-21T08:52:52.173968Z node 46 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:52.173992Z node 46 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:52.173996Z node 46 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:52.174138Z node 46 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:29.089859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:29.089882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:29.089888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:29.089893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2024-11-21T08:52:29.089907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:29.089911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:29.089919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:29.089996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:29.101452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:29.101471Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:29.103545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:29.103638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:29.103664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:29.106302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:29.106379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:29.106486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:29.106716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:29.107555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:29.107769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:29.107776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:29.107794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:29.107801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:29.107807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:29.107845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:29.108946Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:29.124084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:29.124139Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.124182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:29.124258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:29.124265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.124781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:29.124800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:29.124833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.124839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:29.124842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:29.124845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:29.125162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.125169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:29.125172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:29.125390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.125407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.125411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:29.125414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:29.125805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:29.126102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:29.126133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2024-11-21T08:52:29.126252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:29.126268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:29.126272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:29.126305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:29.126309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:29.126324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:29.126331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:29.126623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:29.126629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:29.126649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:29.126652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:29.126696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:29.126700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:29.126706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:29.126708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:29.126711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:29.126714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:29.126716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:29.126718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:29.126724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:29.126728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:29.126730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
tion: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.410648Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:52:51.410652Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:52:51.410655Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:51.410917Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.410929Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.410933Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:52:51.410937Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:52:51.410941Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:52:51.410950Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 1 2024-11-21T08:52:51.410955Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [89:300:2292] 2024-11-21T08:52:51.411189Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:51.411199Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:51.411203Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:52:51.411256Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 Leader for TabletID 72057594037968897 is [89:212:2212] sender: [89:348:2058] recipient: [89:15:2062] 2024-11-21T08:52:51.411602Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:52:51.411673Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:51.411723Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:51.411764Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T08:52:51.411808Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T08:52:51.411828Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.411853Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:52:51.411878Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:52:51.411939Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:52:51.411960Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:52:51.412113Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.412160Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:51.412166Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:52:51.412178Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:52:51.412185Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:52:51.412190Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:52:51.412232Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:51.412238Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:51.412267Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:52:51.412386Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:51.412403Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:52:51.412408Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [89:301:2293] 2024-11-21T08:52:51.412942Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:52:51.412967Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:52:51.412979Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:52:51.412994Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413022Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413027Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:52:51.413038Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:51.413093Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413396Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 Ok notification wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:52:51.413491Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:52:51.413503Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:52:51.413509Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T08:52:51.413578Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413607Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 37us result status StatusPathDoesNotExist 2024-11-21T08:52:51.413641Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413696Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:51.413714Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 20us result status StatusSuccess 2024-11-21T08:52:51.413766Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:39.164725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:39.164749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.164755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:39.164759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:39.164765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:39.164769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:39.164778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.164853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:39.174265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:39.174288Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:39.177326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:39.178256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:39.178292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:39.179469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:39.179612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:39.179690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.179776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:39.180632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.180920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:39.180930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.180970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:39.180977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:39.180983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:39.180999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.182410Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:39.199270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:39.199366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.199435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:39.199524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:39.199534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.200376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.200410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:39.200474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.200485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:39.200489Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:39.200495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:39.200983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.201000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:39.201004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:39.201427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.201441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.201448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.201455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.202060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:39.202451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:39.202504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:39.202685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.202710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:39.202720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.202776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:39.202783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.202813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:39.202825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:39.203210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:39.203218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:39.203260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.203266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:39.203351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.203359Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:39.203370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:39.203374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.203381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:39.203386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.203391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:39.203395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:39.203407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:39.203412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:39.203417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:39.203723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:39.203737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:39.203743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:39.203748Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:39.203753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:39.203769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2024-11-21T08:52:47.879300Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2024-11-21T08:52:47.879311Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:47.879361Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T08:52:47.879372Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T08:52:47.879377Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T08:52:47.879560Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.879583Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.879588Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T08:52:47.879594Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2024-11-21T08:52:47.879602Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2024-11-21T08:52:47.880124Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.880148Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.880154Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T08:52:47.880159Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2024-11-21T08:52:47.880165Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2024-11-21T08:52:47.880185Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2024-11-21T08:52:47.881560Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.881670Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T08:52:47.893102Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 
281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T08:52:47.893140Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T08:52:47.893186Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T08:52:47.893205Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T08:52:47.893592Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 744 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:52:47.893610Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T08:52:47.893631Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 744 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:52:47.893640Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2024-11-21T08:52:47.893648Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 744 RawX2: 77309413964 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T08:52:47.893666Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2024-11-21T08:52:47.893670Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T08:52:47.893675Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2024-11-21T08:52:47.893683Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T08:52:47.895003Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T08:52:47.895171Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 
2024-11-21T08:52:47.895295Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T08:52:47.895312Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2024-11-21T08:52:47.895336Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T08:52:47.895341Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:52:47.895349Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T08:52:47.895355Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T08:52:47.895363Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:47.895370Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:52:47.895389Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2024-11-21T08:52:47.895400Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T08:52:47.895404Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T08:52:47.895427Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2024-11-21T08:52:47.895431Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T08:52:47.895436Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T08:52:47.895441Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T08:52:50.197468Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T08:52:50.197545Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 91us result status StatusNameConflict 2024-11-21T08:52:50.197589Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T08:52:52.326308Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at 
schemeshard: 72075186233409546 2024-11-21T08:52:52.326388Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 90us result status StatusNameConflict 2024-11-21T08:52:52.326431Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 15538, MsgBus: 11429 2024-11-21T08:52:40.857516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652844899319802:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:40.857534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a84/r3tmp/tmpwFN9a9/pdisk_1.dat 2024-11-21T08:52:40.935431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15538, node 1 2024-11-21T08:52:40.950968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:40.950983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:40.950984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:40.951035Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:40.959076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:40.959104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:40.960088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11429 TClient is connected to server localhost:11429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:41.008530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.010831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:41.193750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652849194287707:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.193777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.193945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652849194287727:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.194747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:41.197534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:52:41.197617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652849194287729:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:41.314589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.379170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.593718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.923680Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2024-11-21T08:52:41.923745Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T08:52:41.923767Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T08:52:41.923817Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652849194295619:2932], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7439652849194295466:2932]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[1:7439652849194295619:2932].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:41.923925Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652849194295608:2932], SessionActorId: [1:7439652849194295466:2932], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439652849194295466:2932]. isRollback=0 2024-11-21T08:52:41.923959Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE1MGIzOTQtOTllZTk2MzItZGUxMGNmMzYtNjI0OWU3OGE=, ActorId: [1:7439652849194295466:2932], ActorState: ExecuteState, TraceId: 01jd6yrwszf161jsbyjcdx63jx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439652849194295609:2932] from: [1:7439652849194295608:2932] 2024-11-21T08:52:41.924007Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652849194295609:2932] TxId: 281474976715665. Ctx: { TraceId: 01jd6yrwszf161jsbyjcdx63jx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE1MGIzOTQtOTllZTk2MzItZGUxMGNmMzYtNjI0OWU3OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T08:52:41.924864Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE1MGIzOTQtOTllZTk2MzItZGUxMGNmMzYtNjI0OWU3OGE=, ActorId: [1:7439652849194295466:2932], ActorState: ExecuteState, TraceId: 01jd6yrwszf161jsbyjcdx63jx, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:45.857713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652844899319802:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:45.857763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 29473, MsgBus: 8037 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a84/r3tmp/tmpxwyq13/pdisk_1.dat 2024-11-21T08:52:47.254376Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652873332520395:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:47.254514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:47.274911Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29473, node 2 2024-11-21T08:52:47.284371Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:47.284400Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:47.284403Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:47.284453Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8037 TClient is connected to server localhost:8037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:47.354495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:47.354533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:47.355812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:47.356806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:47.363027Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:47.558966Z node 2 :KQP_WORKLOAD_SER ... ine=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223970Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.223992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224011Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224071Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224089Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224109Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.224293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253238Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.253954Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:52:48.266319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037892;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 54455354 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"11;TEST;"}}]}; WAIT_INDEXATION: 0 2024-11-21T08:52:48.336478Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[2:7439652873332521207:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336505Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[2:7439652873332521213:2316];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336511Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[2:7439652873332521215:2318];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336517Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037892;self_id=[2:7439652873332521214:2317];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336520Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037893;self_id=[2:7439652873332521216:2319];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336526Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037894;self_id=[2:7439652873332521227:2320];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336535Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7439652873332521206:2313];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336535Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037895;self_id=[2:7439652873332521208:2315];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336543Z 
node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037896;self_id=[2:7439652873332521291:2321];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336543Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037897;self_id=[2:7439652873332521204:2312];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T08:52:48.336571Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652877627496325:3252], SessionActorId: [2:7439652877627496298:3252], Got BAD REQUEST for table. ShardID=72075186224037888, Sink=[2:7439652877627496325:3252].{
: Fatal: only single operation is supported } 2024-11-21T08:52:48.336580Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652877627496325:3252], SessionActorId: [2:7439652877627496298:3252], Bad request. {
: Fatal: only single operation is supported }. statusCode=BAD_REQUEST. subIssues=
: Fatal: only single operation is supported . sessionActorId=[2:7439652877627496298:3252]. isRollback=0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:52.254653Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652873332520395:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:52.254789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:30.442595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:30.442613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:30.442617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:30.442620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:30.442630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:30.442633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:30.442638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:30.442693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:30.450827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:30.450851Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:30.452818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:30.452886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:30.452909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T08:52:30.454904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:30.454959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:30.455047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.455183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:30.455780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:30.455994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:30.456000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:30.456016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:30.456020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:30.456025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:30.456051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:30.457150Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:30.469461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:30.469549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.469632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:30.469690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:30.469696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.470380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.470401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:30.470446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:52:30.470454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:30.470456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:30.470460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:30.470767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.470775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:30.470778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:30.471045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.471052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.471056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.471060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.471578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:30.471882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:30.471924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:30.472080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:30.472102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:30.472107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.472146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:30.472150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:30.472173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:30.472182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, 
pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:30.472626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:30.472634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:30.472670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:30.472673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:30.472752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:30.472758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:30.472766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:30.472769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.472773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:30.472776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:30.472779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:30.472782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:30.472790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:30.472794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:30.472796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
perationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:54.413797Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T08:52:54.413824Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:54.414098Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T08:52:54.414118Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T08:52:54.414169Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:54.414185Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 403726927976 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:54.414190Z node 94 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:54.414249Z node 94 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T08:52:54.414255Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T08:52:54.414277Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:52:54.414285Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:54.414291Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:52:54.414570Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:54.414577Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:52:54.414610Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:52:54.414628Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:54.414631Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T08:52:54.414635Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at 
schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T08:52:54.414687Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:52:54.414692Z node 94 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T08:52:54.414701Z node 94 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:52:54.414703Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:54.414707Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T08:52:54.414710Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:52:54.414713Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:52:54.414716Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:52:54.414736Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:52:54.414740Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T08:52:54.414743Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:52:54.414745Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:52:54.414831Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.414838Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.414841Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:54.414844Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:52:54.414847Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:52:54.414941Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.414948Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.414950Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:52:54.414953Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:52:54.414957Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:52:54.414963Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:52:54.414967Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [94:297:2289] 2024-11-21T08:52:54.415487Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.415584Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:52:54.415602Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:52:54.415608Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [94:298:2290] TestWaitNotification: OK eventTxId 1002 2024-11-21T08:52:54.415696Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:54.415724Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 35us result status StatusSuccess 2024-11-21T08:52:54.415787Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:54.415836Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:54.415844Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 9us result status StatusSuccess 2024-11-21T08:52:54.415872Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 22527, MsgBus: 29162 2024-11-21T08:52:42.377152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:42.377740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:42.377776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a3e/r3tmp/tmpO4eD2i/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22527, node 1 TClient is connected to server localhost:29162 TClient is connected to server localhost:29162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:42.523621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:42.523648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:42.523653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:42.528631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:42.563949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:42.563998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:42.564618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.569161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:42.693702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.928310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.225242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:43.488437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.809799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1728:3347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.809848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.812688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.022658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.296424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.527100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.805073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.059468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.357086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2301:3793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:45.357125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:45.357177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2306:3798], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:45.358241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:45.537648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2308:3800], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:45.734117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.957928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:52:46.245593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14597, MsgBus: 11632 2024-11-21T08:52:47.052621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652873317619623:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a3e/r3tmp/tmpuHChhD/pdisk_1.dat 2024-11-21T08:52:47.058298Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:47.064683Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14597, node 2 2024-11-21T08:52:47.078806Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:47.078825Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:47.078827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:47.078887Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11632 TClient is connected to server localhost:11632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:47.154500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:47.154527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:47.154950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:47.155483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:52:47.160687Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:47.359328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652873317620072:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:47.359378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:47.359435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652873317620084:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:47.360418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:47.362744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652873317620086:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:47.445807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:47.454990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:47.546021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:52:52.052118Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652873317619623:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:52.052174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2024-11-21T08:52:13.679861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:13.679880Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:13.679899Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:13.682276Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:13.682385Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:13.682431Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:13.683137Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:13.690535Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:13.690723Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:13.690908Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:13.690926Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:13.690935Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:13.691006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:13.695236Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:13.695326Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:13.695381Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:13.695389Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:13.695393Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:13.695400Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2024-11-21T08:52:13.695520Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.695538Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.695568Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:13.695594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:13.695675Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.695683Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:13.695690Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:13.695695Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:13.695699Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:13.695703Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:13.695709Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:13.707652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.707674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.707683Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:13.708159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:13.708172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:13.708195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:13.708250Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:13.708268Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:13.708279Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:13.708288Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.708295Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:13.708302Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:13.708306Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.708376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:13.708381Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit 
StoreSchemeTx 2024-11-21T08:52:13.708385Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:13.708388Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.708399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:13.708402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:13.708406Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:13.708409Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.708415Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:13.731334Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:13.731358Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:13.731365Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:13.731376Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:13.731389Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:13.731502Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.731509Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:13.731515Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:13.731534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:13.731539Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:13.731578Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:13.731586Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.731591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:13.731596Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:13.732362Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:13.732379Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:13.732432Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.732437Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:13.732443Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:13.732449Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:13.732453Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:13.732459Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:13.732463Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:13.732469Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.732473Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:13.732478Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:13.732485Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:13.732525Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:13.732530Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.732534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:13.732537Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:13.732541Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:13.732552Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:13.732555Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:13.732558Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:13.732562Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:13.732570Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:13.732574Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:13.732577Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:13.732585Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:13.732588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:13.732592Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
propose latency: 58 ms, status: COMPLETE 2024-11-21T08:52:49.411616Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2024-11-21T08:52:49.411621Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:49.411625Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:49.411629Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2024-11-21T08:52:49.411644Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2024-11-21T08:52:49.411648Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:49.411652Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2024-11-21T08:52:49.416970Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:49.417002Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2024-11-21T08:52:49.417025Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:50.218449Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T08:52:50.218479Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T08:52:50.218582Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:627:2606], Recipient [3:227:2222]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:50.218589Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:50.218598Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:626:2605], serverId# [3:627:2606], sessionId# [0:0:0] 2024-11-21T08:52:50.218655Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2024-11-21T08:52:50.218661Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:50.218698Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.218896Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2024-11-21T08:52:50.220025Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T08:52:50.220041Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2024-11-21T08:52:50.220048Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:50.220052Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:50.220067Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T08:52:50.220086Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2024-11-21T08:52:50.220091Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T08:52:50.220094Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:50.220098Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:52:50.220102Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.222144Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.222201Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.222209Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.241725Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.241765Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.242022Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.244923Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.244999Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.245015Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.271989Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.272017Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.272226Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.273012Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.273043Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.273052Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.294119Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.294154Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.294347Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.300884Z node 3 :TX_DATASHARD TRACE: Operation 
[0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2024-11-21T08:52:50.300956Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.300967Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.301107Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.301114Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.301247Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.395728Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2024-11-21T08:52:50.395897Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.395908Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.408682Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.408719Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.408921Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.411220Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.411291Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.411304Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.414558Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.414583Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.414782Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.415240Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.415261Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.415270Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.419205Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.419226Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.419399Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.420264Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T08:52:50.420293Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T08:52:50.420302Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T08:52:50.467848Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:50.467891Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T08:52:50.468150Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T08:52:50.819269Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2024-11-21T08:52:50.819323Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:52:50.819351Z node 3 :TX_DATASHARD 
TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T08:52:50.819361Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:52:50.819367Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:50.819375Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2024-11-21T08:52:50.819389Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2024-11-21T08:52:50.819446Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2024-11-21T08:52:50.819452Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:50.819457Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2024-11-21T08:52:50.819462Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2024-11-21T08:52:50.819478Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T08:52:50.819482Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2024-11-21T08:52:50.819487Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2024-11-21T08:52:50.825715Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:50.825749Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2024-11-21T08:52:50.825774Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 7751, MsgBus: 19829 2024-11-21T08:52:48.986960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652879933456438:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.987083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483b/r3tmp/tmpjC6vup/pdisk_1.dat 2024-11-21T08:52:49.039948Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7751, node 1 2024-11-21T08:52:49.049313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:49.049330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:49.049332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:49.049376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19829 TClient is connected to server localhost:19829 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:49.087891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.087913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.089020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:49.120305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.125838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.194029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.255329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.271149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.332406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652884228425292:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.332468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.338305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.345843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.402591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.458392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.471194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.484721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.492638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652884228425810:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.492660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652884228425815:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.492667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.493271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:49.497439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652884228425817:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:49.675898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.745500Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439652884228426238:2476] TxId: 281474976715676. Ctx: { TraceId: 01jd6ys4e12r4v79rsq073nfzm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY1Mjc5ZGQtZDU1OTU3NjQtNGNlOGUxODYtMzg4OGMxNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:52:49.747403Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179169791, txId: 281474976715675] shutting down 864000000000 Trying to start YDB, gRPC: 8561, MsgBus: 21712 2024-11-21T08:52:49.862589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652883251793632:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.862791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483b/r3tmp/tmpeJeSB9/pdisk_1.dat 2024-11-21T08:52:49.870826Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8561, node 2 2024-11-21T08:52:49.879342Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:49.879356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:49.879357Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:49.879407Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21712 TClient is connected to server localhost:21712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:52:49.964901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.964927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.965269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.966101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.972572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.981785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.997274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.010831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.13 ... ... 2024-11-21T08:52:50.657746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.668694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.685153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.696430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:50.838132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439652889259646229:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.838157Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.844031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.852294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.864068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.877895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.891927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.906144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:50.924463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439652889259646741:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.924497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.924514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439652889259646746:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:50.925533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:50.932737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439652889259646748:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:51.538345Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179171233, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 8228, MsgBus: 26135 2024-11-21T08:52:51.907877Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439652893678130522:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:51.907944Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483b/r3tmp/tmpVIe30C/pdisk_1.dat 2024-11-21T08:52:51.924700Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8228, node 4 2024-11-21T08:52:51.932881Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:51.932898Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:51.932900Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:51.932942Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26135 TClient is connected to server localhost:26135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:52.012022Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:52.012058Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:52.012505Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.013056Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:52:52.014049Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:52.024617Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:52.082481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:52.102608Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:52.117477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:52.247338Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652897973099358:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.247400Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.252181Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.259204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.271008Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.284675Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.291361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.298683Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:52.314692Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652897973099872:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.314710Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439652897973099877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.314720Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:52.315402Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:52.318609Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439652897973099879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:52.591125Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179172556, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:26.493992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:26.494014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:26.494019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:26.494024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:26.494039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:26.494043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:26.494052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:26.494131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:26.504361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:26.504381Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:26.506567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:26.506668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:26.506700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:26.509603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:26.509681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:26.509812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:26.510036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:26.510874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:26.511177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:26.511188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:26.511210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:26.511217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:26.511225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:26.511269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:26.512714Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:26.527765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:26.527841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.527903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:26.527961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:26.527967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.528570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:26.528592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:26.528637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.528649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:26.528652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T08:52:26.528655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:26.528988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.528996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:26.528999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:26.529272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.529278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.529283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:26.529288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:26.529741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:26.530127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:26.530172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:26.530357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:26.530381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:26.530388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:26.530439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:26.530445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:26.530474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:26.530487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:26.530867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:26.530875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:26.530915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:26.530924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:26.531012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:26.531019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:26.531030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:26.531034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:26.531040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:26.531045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:26.531049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:26.531053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:26.531064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:26.531078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:26.531082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:52:55.755326Z node 103 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:52:55.755330Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:52:55.755334Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:52:55.755346Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:52:55.755442Z node 103 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.755450Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.755454Z node 103 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:52:55.755458Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 9 2024-11-21T08:52:55.755461Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 7 2024-11-21T08:52:55.755553Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 182 } } 2024-11-21T08:52:55.755560Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409549, partId: 0 2024-11-21T08:52:55.755576Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 182 } } 2024-11-21T08:52:55.755586Z node 103 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 182 } } 2024-11-21T08:52:55.755638Z node 103 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.755646Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.755652Z node 103 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:52:55.755657Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T08:52:55.755660Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:52:55.755669Z node 103 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2024-11-21T08:52:55.755674Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [103:426:2393] 2024-11-21T08:52:55.755744Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:52:55.755749Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409549, partId: 0 2024-11-21T08:52:55.755760Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:52:55.755764Z node 103 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:52:55.755771Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1005:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:52:55.755779Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1005:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:55.755782Z node 103 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:52:55.755786Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:52:55.755790Z node 103 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T08:52:55.756491Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:55.756583Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:52:55.757003Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.757038Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:52:55.757052Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:52:55.757063Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:52:55.757068Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [103:620:2560] 2024-11-21T08:52:55.757093Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:52:55.757175Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:52:55.757183Z node 103 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T08:52:55.757195Z node 103 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:52:55.757198Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:52:55.757203Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T08:52:55.757215Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [103:426:2393] message: TxId: 1005 2024-11-21T08:52:55.757221Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:52:55.757225Z node 103 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:52:55.757229Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:52:55.757255Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:52:55.757779Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:52:55.757789Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [103:620:2560] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2024-11-21T08:52:55.757903Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:52:55.757941Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 48us result status StatusSuccess 2024-11-21T08:52:55.758043Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 6 SchemeshardId: 72057594046678944 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 200 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "dir" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 200 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 |88.0%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |88.0%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots |88.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |88.0%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |88.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> Donor::ConsistentWritesWhenSwitchingToDonorMode |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TopicService::DifferentConsumers_TheRangesOverlap >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::Init |88.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TBackupCollectionWithRebootsTests::ParallelCreateDrop |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateWithReboots |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> 
TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> KqpPg::CopyTableSerialColumns |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> KqpPg::TypeCoercionBulkUpsert >> TTxAllocatorClientTest::InitiatingRequest >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::CopyTableSerialColumns [GOOD] >> KqpPg::CreateIndex >> TTxAllocatorClientTest::InitiatingRequest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:130:2153] sender: [1:132:2057] recipient: [1:106:2138] 2024-11-21T08:52:35.308179Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:35.309361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:35.309382Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:35.310664Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:35.310789Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:35.310887Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:35.317917Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:35.319868Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:35.320025Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:35.320193Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:35.320223Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:35.320232Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:35.320279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:35.323786Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:35.323871Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:35.323920Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:35.323926Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:35.323931Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:35.323937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:35.324023Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.324039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.324090Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:35.324113Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:35.324121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:35.324127Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:35.324134Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:35.324140Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:35.324145Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:35.324150Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:35.324155Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:130:2153] sender: [1:205:2057] recipient: [1:14:2061] 2024-11-21T08:52:35.332776Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.332798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.332808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:35.333288Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:35.333308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:35.333343Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:35.333381Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:35.333393Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:35.333417Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:35.333428Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:35.333433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:35.333439Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:35.333443Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:35.333523Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:35.333530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:35.333534Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:35.333537Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2024-11-21T08:52:35.333553Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:35.333556Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:35.333559Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:35.333562Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:35.333568Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:35.354879Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:35.354912Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:35.354920Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:35.354933Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:35.354950Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:35.355095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.355104Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.355112Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:35.355135Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:35.355141Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:35.355190Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:35.355202Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:35.355207Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:35.355213Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:35.355912Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:35.355926Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:35.355989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.355996Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.356004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:35.356013Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:35.356017Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:35.356026Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 
9437184 2024-11-21T08:52:35.356031Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:35.356038Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:35.356042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:35.356046Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:35.356050Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:35.356105Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T08:52:35.356109Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:35.356112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:35.356116Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:35.356119Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:35.356130Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:35.356133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:35.356136Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:35.356140Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:35.356155Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T08:52:35.356158Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:35.356162Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T08:52:35.356167Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:52:35.356170Z node 1 :TX_DATASHARD TRACE: Adv ... 
9437184 to execution unit ExecuteDataTx 2024-11-21T08:53:02.465450Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2024-11-21T08:53:02.465492Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2024-11-21T08:53:02.465499Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:53:02.465505Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2024-11-21T08:53:02.465509Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:53:02.465512Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2024-11-21T08:53:02.465516Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2024-11-21T08:53:02.465554Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2024-11-21T08:53:02.465558Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2024-11-21T08:53:02.465561Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2024-11-21T08:53:02.465565Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2024-11-21T08:53:02.465569Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2024-11-21T08:53:02.465572Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2024-11-21T08:53:02.465575Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437184 has finished 2024-11-21T08:53:02.465579Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:02.465583Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:53:02.465586Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:53:02.465589Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:53:02.465626Z node 41 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [41:434:2384], Recipient [41:434:2384]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:02.465631Z node 41 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:02.465637Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T08:53:02.465641Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:02.465645Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T08:53:02.465648Z node 41 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437186 2024-11-21T08:53:02.465652Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PlanQueue 2024-11-21T08:53:02.465659Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465662Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2024-11-21T08:53:02.465665Z node 41 :TX_DATASHARD 
TRACE: Add [7:6] at 9437186 to execution unit LoadTxDetails 2024-11-21T08:53:02.465669Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2024-11-21T08:53:02.465756Z node 41 :TX_DATASHARD DEBUG: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2024-11-21T08:53:02.465761Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465764Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2024-11-21T08:53:02.465768Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2024-11-21T08:53:02.465771Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2024-11-21T08:53:02.465775Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465778Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2024-11-21T08:53:02.465782Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2024-11-21T08:53:02.465785Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2024-11-21T08:53:02.465792Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437186 2024-11-21T08:53:02.465796Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437186 2024-11-21T08:53:02.465800Z node 41 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437186 2024-11-21T08:53:02.465805Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465809Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2024-11-21T08:53:02.465812Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2024-11-21T08:53:02.465816Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildDataTxOutRS 2024-11-21T08:53:02.465822Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465825Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2024-11-21T08:53:02.465828Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2024-11-21T08:53:02.465831Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2024-11-21T08:53:02.465835Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465839Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2024-11-21T08:53:02.465842Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2024-11-21T08:53:02.465845Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2024-11-21T08:53:02.465849Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465852Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2024-11-21T08:53:02.465856Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2024-11-21T08:53:02.465859Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2024-11-21T08:53:02.465862Z node 41 :TX_DATASHARD TRACE: 
Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465866Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T08:53:02.465871Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2024-11-21T08:53:02.465875Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2024-11-21T08:53:02.465917Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2024-11-21T08:53:02.465922Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:53:02.465930Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465933Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2024-11-21T08:53:02.465937Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation 2024-11-21T08:53:02.465940Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2024-11-21T08:53:02.465979Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2024-11-21T08:53:02.465983Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2024-11-21T08:53:02.465986Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2024-11-21T08:53:02.465990Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2024-11-21T08:53:02.465994Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T08:53:02.465997Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2024-11-21T08:53:02.466000Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2024-11-21T08:53:02.466004Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:02.466007Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T08:53:02.466010Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T08:53:02.466013Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T08:53:02.477581Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2024-11-21T08:53:02.477611Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2024-11-21T08:53:02.477626Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:53:02.477637Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2024-11-21T08:53:02.477665Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:53:02.477687Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:53:02.477803Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2024-11-21T08:53:02.477808Z node 41 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2024-11-21T08:53:02.477815Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:53:02.477820Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2024-11-21T08:53:02.477831Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:53:02.477835Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:53:02.477956Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2024-11-21T08:53:02.477964Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2024-11-21T08:53:02.477971Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:53:02.477976Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2024-11-21T08:53:02.477985Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:53:02.477990Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2024-11-21T08:53:03.054606Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:53:03.054689Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:53:03.054774Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:53:03.055069Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.055153Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:53:03.056596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056610Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056616Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056628Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:53:03.056649Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056660Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:53:03.056671Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:53:03.056758Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T08:53:03.056816Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056821Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:53:03.056828Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T08:53:03.056832Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::Insert_Serial >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.615841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.615868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.615874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.615879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.615892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.615896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.615906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.615982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.625335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.625355Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.627816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.627884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.627912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.629679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.629739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.629848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.629916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.630340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.630600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.630611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.630643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.630650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.630656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.630675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.632031Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.645111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.645187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:53:00.645237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.645272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.645278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.646267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.646278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.646282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.646617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.646892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.646902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.646906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.647329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.647829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.647868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.648012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:53:00.648030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.648041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.648082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.648086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.648108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.648116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.648767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.648788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.648850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.648855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.648958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.648968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.648980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.648983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.648988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.648993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.648997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.649001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.649031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.649038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.649043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.649472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.649491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.649497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.649502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.649508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.649526Z node 1 ... 2024-11-21T08:53:04.469963Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:04.469968Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T08:53:04.469973Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:04.470251Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.470264Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.470268Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:04.470273Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T08:53:04.470277Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:53:04.470289Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:53:04.470608Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T08:53:04.470642Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T08:53:04.470732Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:04.470753Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:04.470760Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1004:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T08:53:04.470776Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:04.470795Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 1 -> 240 2024-11-21T08:53:04.470821Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:04.470829Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:53:04.470993Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.471229Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:53:04.471517Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:04.471524Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:04.471545Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:53:04.471568Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:04.471573Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:53:04.471581Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T08:53:04.471632Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:04.471638Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:53:04.471650Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:53:04.471654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:04.471660Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:53:04.471665Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:04.471670Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:53:04.471674Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:53:04.471684Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:04.471690Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T08:53:04.471694Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T08:53:04.471698Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T08:53:04.471755Z node 16 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.471763Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.471767Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:04.471772Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:53:04.471775Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:53:04.471810Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:04.471815Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:53:04.471823Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:04.471854Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.471861Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.471864Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:04.471871Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T08:53:04.471874Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:04.471881Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:53:04.472644Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:04.472668Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:04.472679Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:53:04.472731Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:53:04.472739Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:53:04.472815Z 
node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:53:04.472832Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:53:04.472837Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:382:2374] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:53:04.472907Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:04.472934Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 39us result status StatusPathDoesNotExist 2024-11-21T08:53:04.472971Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.656382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.656410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.656416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.656421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.656433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.656437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2024-11-21T08:53:00.656448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.656536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.669131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.669161Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.671750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.671813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.671844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.673932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.673991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.674111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.674169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.674615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.674893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.674906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.674943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.674950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.674956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.674975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.676563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.696167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.696292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.696365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.696418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.696428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.697284Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.697313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.697375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.697387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.697391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.697397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.697838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.697852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.697857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.698200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.698211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.698217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.698224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.698837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.699221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.699270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.699446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.699474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.699493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.699554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.699561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.699591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.699606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.700036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.700045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.700107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.700112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.700186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.700193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.700220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.700225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.700230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.700236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.700241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.700245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.700257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.700263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.700267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.700575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.700589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.700594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.700600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.700605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.700620Z node 1 ... 
11-21T08:53:04.606970Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:04.606975Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 10 2024-11-21T08:53:04.606979Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:04.607090Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.607099Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.607103Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:04.607107Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T08:53:04.607111Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:53:04.607121Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T08:53:04.607608Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T08:53:04.607634Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T08:53:04.607899Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:04.607918Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:04.607925Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T08:53:04.607942Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:53:04.607958Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T08:53:04.607985Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:04.607994Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:53:04.608058Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.608103Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T08:53:04.608427Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:04.608433Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:04.608451Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T08:53:04.608471Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:04.608475Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T08:53:04.608480Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T08:53:04.608526Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:53:04.608532Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T08:53:04.608543Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:53:04.608547Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:04.608553Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T08:53:04.608561Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:04.608566Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:53:04.608570Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:53:04.608579Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:53:04.608585Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T08:53:04.608589Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T08:53:04.608593Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T08:53:04.608646Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.608654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.608658Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
1006 2024-11-21T08:53:04.608662Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T08:53:04.608666Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:53:04.608704Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:04.608708Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:53:04.608717Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:04.608742Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.608749Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.608752Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:04.608756Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T08:53:04.608759Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:04.608766Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:53:04.609783Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:04.609820Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:04.609836Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:53:04.609905Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:53:04.609914Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:53:04.610001Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:53:04.610022Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:53:04.610027Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:437:2429] TestWaitNotification: OK eventTxId 1006 2024-11-21T08:53:04.610108Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:04.610142Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 49us result status StatusPathDoesNotExist 2024-11-21T08:53:04.610186Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> KqpPg::Insert_Serial [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert >> TBackupCollectionWithRebootsTests::ParallelCreateDrop [GOOD] >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T08:53:02.313829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:02.314202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:02.314221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f51/r3tmp/tmpOGZpf0/pdisk_1.dat 2024-11-21T08:53:02.407921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.408893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:02.414531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.414863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:02.415253Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:53:02.415267Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T08:53:02.429030Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:02.429813Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T08:53:02.473095Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T08:53:02.473166Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T08:53:02.473203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:02.473209Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.473214Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.473220Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.473223Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.473238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T08:53:02.473301Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:53:02.473307Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T08:53:02.473311Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T08:53:02.473316Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:53:02.473351Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T08:53:02.483661Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T08:53:02.483691Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T08:53:02.483707Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:53:02.483854Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T08:53:02.483867Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T08:53:02.483872Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.483887Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T08:53:02.483944Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797645824 } 2024-11-21T08:53:02.483949Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T08:53:02.483954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:02.483999Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T08:53:02.484006Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.484008Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.484030Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:53:02.484034Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T08:53:02.484037Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T08:53:02.484041Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:53:02.494368Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T08:53:02.494401Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:53:02.586248Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T08:53:02.586290Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:53:02.586395Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T08:53:02.586542Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T08:53:02.586557Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T08:53:02.586798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T08:53:02.587122Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T08:53:02.587147Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T08:53:02.587152Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T08:53:02.587157Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T08:53:02.587344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:02.587362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:53:02.587561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:53:02.588158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.588417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:02.588426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.588527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T08:53:02.589054Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T08:53:02.590613Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T08:53:02.590644Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T08:53:02.590710Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T08:53:02.590722Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T08:53:02.590731Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T08:53:02.590760Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T08:53:02.590886Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T08:53:02.590924Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2024-11-21T08:53:02.591084Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T08:53:02.591184Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T08:53:02.591208Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{78572105663584}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T08:53:02.591223Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{78572105663584}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T08:53:02.591247Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{78572105663584}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T08:53:02.591256Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{78572105663584}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 5186224037888 OK) 2024-11-21T08:53:05.324466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:53:05.324488Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2024-11-21T08:53:05.324556Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T08:53:05.324563Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T08:53:05.324619Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2024-11-21T08:53:05.335064Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:05.335415Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:53:05.335425Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3001 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T08:53:05.345752Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2024-11-21T08:53:05.458859Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2024-11-21T08:53:05.458893Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:53:05.458898Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T08:53:05.458966Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2024-11-21T08:53:05.459114Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2024-11-21T08:53:05.459128Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T08:53:05.459268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:05.459422Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:05.459430Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:05.459467Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:05.459473Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:05.459482Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:05.459525Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T08:53:05.459549Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:05.459585Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:05.459600Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T08:53:05.459684Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:05.459926Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T08:53:05.459931Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T08:53:05.459939Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:05.460072Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:05.460089Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:05.460100Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2024-11-21T08:53:05.460109Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:05.460122Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T08:53:05.460135Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T08:53:05.460138Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T08:53:05.460141Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2024-11-21T08:53:05.460149Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2024-11-21T08:53:05.460333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2024-11-21T08:53:05.460955Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2024-11-21T08:53:05.460974Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:05.461137Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2024-11-21T08:53:05.461164Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 1 2024-11-21T08:53:05.461283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2024-11-21T08:53:05.461363Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:05.461604Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T08:53:05.461628Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T08:53:05.461639Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T08:53:05.461643Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T08:53:05.461650Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 TRACE ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T08:53:05.461663Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T08:53:05.461683Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T08:53:05.461687Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T08:53:05.461690Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 INFO ydb-core-tx-datashard-ut_minstep(pid=515237, 
tid=0x00007FDE5A7A5BC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T08:53:05.461712Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 NOTE ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2024-11-21T08:53:05.461716Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 NOTE ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2024-11-21T08:53:05.461719Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ODQyMzlmZDEtNTA5MjBjZjktODU5YmUxYjEtOGFkZDE4MjE= 2024-11-21 08:53:05.461 NOTE ydb-core-tx-datashard-ut_minstep(pid=515237, tid=0x00007FDE5A7A5BC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2024-11-21T08:53:05.472779Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T08:53:05.472878Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T08:53:05.473298Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T08:53:05.473543Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T08:53:05.473654Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T08:53:05.473664Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T08:53:05.473690Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T08:53:05.473712Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T08:53:05.473732Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TBackupCollectionWithRebootsTests::CreateWithReboots [GOOD] >> TUserAttrsTestWithReboots::InSubdomain |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> SubDomainWithReboots::SplitTabletInsideWithStoragePools [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:01.962212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:01.962235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:01.962238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:01.962242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:01.962252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:01.962256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2024-11-21T08:53:01.962265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:01.962350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:01.971881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:01.971904Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:01.974120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:01.974192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:01.974221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:01.976133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:01.976231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:01.976338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:01.976402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:01.976854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:01.977106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:01.977114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:01.977147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:01.977151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:01.977156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:01.977169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.978489Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:01.990724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:01.990800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.990857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:01.990896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:01.990901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.991633Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:01.991654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:01.991707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.991717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:01.991722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:01.991727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:01.992059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.992067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:01.992070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:01.992398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.992409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.992415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:01.992420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:01.992876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:01.993211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:01.993255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:01.993392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:01.993428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:01.993443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:01.993485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:01.993489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:01.993510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:01.993518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:01.993880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:01.993884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:01.993919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:01.993922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:01.993978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.993982Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:01.993990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:01.993992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:01.993996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:01.993999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:01.994002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:01.994005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:01.994012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:01.994016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:01.994019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:01.994256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:01.994267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:01.994270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:01.994273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:01.994277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:01.994288Z node 1 ... 
2024-11-21T08:53:05.815634Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:05.815638Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T08:53:05.815642Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:05.815750Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.815761Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.815764Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:05.815768Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T08:53:05.815771Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:53:05.815779Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:53:05.816169Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T08:53:05.816198Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000005 2024-11-21T08:53:05.816298Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:05.816314Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:05.816322Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1005:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T08:53:05.816335Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:05.816348Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 1 -> 240 2024-11-21T08:53:05.816369Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:05.816376Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:53:05.816456Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.816713Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T08:53:05.816945Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:05.816951Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:05.816969Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:53:05.816987Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:05.816991Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:53:05.816998Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2024-11-21T08:53:05.817033Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:53:05.817038Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T08:53:05.817050Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:53:05.817054Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:05.817059Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T08:53:05.817063Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:05.817068Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:53:05.817071Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:53:05.817081Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:05.817086Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T08:53:05.817090Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T08:53:05.817093Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T08:53:05.817151Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.817159Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.817163Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
1005 2024-11-21T08:53:05.817167Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:53:05.817170Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:53:05.817200Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:05.817204Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:53:05.817211Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:05.817233Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.817239Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.817242Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:05.817247Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T08:53:05.817250Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:05.817256Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T08:53:05.817677Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:05.817863Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:05.817874Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:53:05.817913Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:53:05.817918Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:53:05.818006Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:53:05.818022Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:53:05.818026Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [16:388:2380] TestWaitNotification: OK eventTxId 1005 2024-11-21T08:53:05.818083Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:05.818101Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 26us result status StatusPathDoesNotExist 2024-11-21T08:53:05.818129Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:02.081873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:02.081917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:02.081924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:02.081929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:02.081942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:02.081945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:02.081953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:02.082033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:02.091187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:02.091207Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:02.093177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T08:53:02.093231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:02.093256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:02.094392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:02.094432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:02.094513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:02.094557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:02.094837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:02.095035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:02.095042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:02.095065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:02.095070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:02.095075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:02.095087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.095959Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:02.107234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:02.107307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.107359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:02.107398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:02.107404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:02.108118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108127Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:02.108131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:02.108136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:02.108525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:02.108881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.108897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:02.108904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:02.109364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:02.109726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:02.109765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:02.109890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:02.109907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:02.109917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:02.109955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:02.109959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:02.109981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:02.109990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:02.110255Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:02.110259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:02.110288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:02.110291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:02.110342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:02.110346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:02.110354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:02.110357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:02.110360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:02.110363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:02.110366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:02.110368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:02.110374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:02.110379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:02.110382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:02.110566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:02.110575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:02.110578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:02.110581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:02.110585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:02.110593Z node 1 ... 
11-21T08:53:05.951697Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:53:05.951701Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:53:05.951705Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:05.951715Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:53:05.951719Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [16:299:2291] 2024-11-21T08:53:05.952004Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:05.952015Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateBackupCollection TPropose, operationId: 1003:0ProgressState 2024-11-21T08:53:05.952022Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T08:53:05.952045Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:05.952137Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:53:05.952486Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:53:05.952527Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:53:05.952543Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:53:05.952548Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [16:300:2292] 2024-11-21T08:53:05.952728Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T08:53:05.952753Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:53:05.952816Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:05.952833Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:05.952841Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateBackupCollection TPropose, operationId: 1003:0HandleReply 
TEvOperationPlan: step# 5000004 2024-11-21T08:53:05.952862Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:53:05.952888Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:05.952898Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:53:05.953221Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:05.953228Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:05.953249Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:53:05.953265Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:53:05.953276Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:05.953280Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:53:05.953285Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T08:53:05.953288Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T08:53:05.953328Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:05.953335Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:53:05.953345Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:53:05.953348Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:05.953354Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:53:05.953359Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:05.953364Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:53:05.953368Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:53:05.953377Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:53:05.953382Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T08:53:05.953386Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T08:53:05.953390Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 1 2024-11-21T08:53:05.953550Z node 16 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.953560Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.953564Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:05.953573Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:53:05.953577Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:05.953773Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.953784Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.953788Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:05.953791Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T08:53:05.953795Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:05.953803Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:53:05.953808Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [16:299:2291] 2024-11-21T08:53:05.954146Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.954350Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:05.954368Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:53:05.954373Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [16:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:53:05.954475Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:05.954509Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 41us result status StatusSuccess 2024-11-21T08:53:05.954591Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 BackupCollectionVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::InsertValuesFromTableWithDefaultText [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 22568, MsgBus: 14098 2024-11-21T08:52:38.873056Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652837133402368:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:38.873159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002adb/r3tmp/tmpCevAhj/pdisk_1.dat 2024-11-21T08:52:38.934993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22568, node 1 2024-11-21T08:52:38.950074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:38.950086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:38.950088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:38.950119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14098 2024-11-21T08:52:38.972190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:38.972235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:38.973116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14098 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:39.029305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:39.032683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:39.239505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652841428370127:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.239701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652841428370119:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.239738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:39.240365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:52:39.242248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652841428370133:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:52:39.369858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.431424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:52:39.556402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.872536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652837133402368:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:43.872582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:53.935369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:52:53.935427Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:54.849144Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652905852889380:3251], TxId: 281474976715680, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODRlMDdmYTMtMzE5Yzk4MDktNGZiNDI1NGUtMzBkNzBmNmE=. TraceId : 01jd6ys9dz6d9bxfz9r0wmecng. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1732179159739/18446744073709551615 shard 72075186224037889 with lowWatermark v1732179159760/18446744073709551615 (node# 1 state# Ready) } } 2024-11-21T08:52:54.849754Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652905852889380:3251], TxId: 281474976715680, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODRlMDdmYTMtMzE5Yzk4MDktNGZiNDI1NGUtMzBkNzBmNmE=. TraceId : 01jd6ys9dz6d9bxfz9r0wmecng. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1732179159739/18446744073709551615 shard 72075186224037889 with lowWatermark v1732179159760/18446744073709551615 (node# 1 state# Ready) } }. 2024-11-21T08:52:54.849862Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652905852889383:3253], TxId: 281474976715680, task: 3. Ctx: { TraceId : 01jd6ys9dz6d9bxfz9r0wmecng. SessionId : ydb://session/3?node_id=1&id=ODRlMDdmYTMtMzE5Yzk4MDktNGZiNDI1NGUtMzBkNzBmNmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439652905852889374:2931], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T08:52:54.851928Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODRlMDdmYTMtMzE5Yzk4MDktNGZiNDI1NGUtMzBkNzBmNmE=, ActorId: [1:7439652841428377567:2931], ActorState: ExecuteState, TraceId: 01jd6ys9dz6d9bxfz9r0wmecng, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 5300, MsgBus: 19622 2024-11-21T08:53:00.153627Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652933032525643:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:00.156725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002adb/r3tmp/tmpkPCnHD/pdisk_1.dat 2024-11-21T08:53:00.184367Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5300, node 2 2024-11-21T08:53:00.207243Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:00.207260Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:00.207262Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:00.207303Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19622 TClient is connected to server localhost:19622 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:00.253648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:00.253684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:00.254692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:00.258600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:00.456751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652933032526240:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:00.456770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652933032526252:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:00.456775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:00.457466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:00.458988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652933032526255:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:00.518199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:53:00.525943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:53:00.631930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:53:00.917768Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDU5ZDc2ZTMtZmZiZjZiMmMtYTBkODQ5OTItYTQ1MThkNzI=, ActorId: [2:7439652933032533924:2931], ActorState: ExecuteState, TraceId: 01jd6ysfbg9997ewjef2jc5m98, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:53:05.153826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652933032525643:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:05.153889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::SplitTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:28.490476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:28.490498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.490502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:28.490505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2024-11-21T08:52:28.490517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:28.490520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:28.490526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:28.490597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:28.498241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:28.498264Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:28.500574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:28.500685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:28.500718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:28.503149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:28.503233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:28.503353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.503518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.504193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.504497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.504509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.504530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:28.504537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.504542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:28.504583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:28.505643Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:28.518565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:28.518644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.518699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:28.518760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:28.518765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:28.519350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:28.519359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:28.519363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:28.519667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:28.519967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.519979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.519983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.520500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:28.521022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:28.521091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:28.521301Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:28.521337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:28.521347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.521427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:28.521438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:28.521466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:28.521480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:28.521962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:28.521977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:28.522010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:28.522013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:28.522077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:28.522084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:28.522094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:28.522097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.522102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:28.522107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:28.522111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:28.522114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:28.522125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:28.522131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:28.522135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TabletId: 72075186233409548 2024-11-21T08:53:06.221321Z node 135 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#1004:0 HandleReply TEvSplitAck, at schemeshard: 72057594046678944, message: OperationCookie: 1004 TabletId: 72075186233409548 2024-11-21T08:53:06.221385Z node 135 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 131 -> 132 2024-11-21T08:53:06.221422Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T08:53:06.221724Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.221757Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:06.221761Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:06.221814Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:06.221820Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [135:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:53:06.221886Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.221891Z node 135 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:06.221897Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409548 on partitioning changed splitOp# 1004 at tablet 72057594046678944 2024-11-21T08:53:06.221986Z node 135 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:06.221994Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:06.221997Z node 135 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:06.222000Z node 135 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:53:06.222004Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 7 2024-11-21T08:53:06.222014Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:53:06.222487Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269553158 2024-11-21T08:53:06.222784Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:06.223097Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: OperationCookie: 1004 TabletId: 72075186233409548 2024-11-21T08:53:06.223106Z node 135 
:FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 1004:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:53:06.223117Z node 135 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:53:06.223120Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:06.223124Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:53:06.223128Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:06.223132Z node 135 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:53:06.223135Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:53:06.223162Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T08:53:06.223854Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.223865Z node 135 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 1004:0 2024-11-21T08:53:06.224046Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 459 RawX2: 579820587383 } TabletId: 72075186233409548 State: 4 2024-11-21T08:53:06.224061Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:53:06.224445Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:06.224529Z node 135 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:53:06.224588Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:53:06.224658Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 Forgetting tablet 72075186233409548 2024-11-21T08:53:06.225489Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:53:06.225505Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:53:06.225597Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:53:06.225602Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:53:06.225654Z node 135 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:53:06.225671Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:53:06.225675Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter 
[135:755:2667] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:53:06.225750Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:06.225791Z node 135 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 66us result status StatusSuccess 2024-11-21T08:53:06.225884Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409548 is deleted 2024-11-21T08:53:06.225933Z node 135 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409548 2024-11-21T08:53:06.225973Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:06.225985Z node 135 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 14us result status StatusSuccess 2024-11-21T08:53:06.226026Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 
SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TopicService::UnknownConsumer |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull [GOOD] >> KqpPg::LongDomainName |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpPg::LongDomainName [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |88.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 64366, MsgBus: 22915 2024-11-21T08:53:03.209346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652942323611513:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:03.209546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00450a/r3tmp/tmphZuQb4/pdisk_1.dat 2024-11-21T08:53:03.255978Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64366, node 1 2024-11-21T08:53:03.272795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:03.272813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:03.272816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:03.272858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22915 TClient is connected to server localhost:22915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:03.309429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:03.309456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:03.310582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:03.335172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:03.461181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652942323612111:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.461205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.470677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.532892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652942323612216:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.532918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.535758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.543250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652942323612292:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.543276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.543289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652942323612297:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.543952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:53:03.546085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652942323612299:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 61906, MsgBus: 27412 2024-11-21T08:53:03.889713Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652942573485211:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:03.889920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00450a/r3tmp/tmpId0W7l/pdisk_1.dat 2024-11-21T08:53:03.901019Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61906, node 2 2024-11-21T08:53:03.922041Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:03.922057Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:03.922059Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:03.922106Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27412 TClient is connected to server localhost:27412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:03.989872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:03.989900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:03.990956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:03.992135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:04.262493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652946868453105:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.262553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.264072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:04.322601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652946868453210:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.322628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.325400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:04.336781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652946868453287:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.336809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652946868453292:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.336816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:04.337528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:53:04.344726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652946868453294:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 31663, MsgBus: 62548 2024-11-21T08:53:04.806586Z node 3 :METADATA_PROVIDER WARN: flin ... 9446Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439652957654706217:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:06.849474Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:06.849552Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439652957654706229:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:06.850324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:06.854709Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:06.854831Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439652957654706231:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:06.962448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11014, MsgBus: 14390 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00450a/r3tmp/tmpj7gcTk/pdisk_1.dat 2024-11-21T08:53:07.223329Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:07.246020Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11014, node 6 2024-11-21T08:53:07.269059Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:07.269075Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:07.269077Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:07.269133Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14390 2024-11-21T08:53:07.314577Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:07.314601Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:07.316143Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:07.332908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:07.352411Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:07.566953Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652959319188878:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:07.566975Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652959319188890:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:07.566982Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:07.567759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:07.569762Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439652959319188892:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:07.664088Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439652959319188960:2306], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. 2024-11-21T08:53:07.664591Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTc1YWY4NWYtMjU4N2UzNWItMjhmZDNjYjItN2E4NDJhNDk=, ActorId: [6:7439652959319188876:2297], ActorState: ExecuteState, TraceId: 01jd6ysnmz67yfwmeacs6j1qw2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. Trying to start YDB, gRPC: 29009, MsgBus: 65357 2024-11-21T08:53:07.928364Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439652962910093352:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:07.928382Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00450a/r3tmp/tmpDdpN0v/pdisk_1.dat 2024-11-21T08:53:07.941555Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29009, node 7 2024-11-21T08:53:07.955137Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:07.955151Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:07.955153Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:07.955195Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65357 TClient is connected to server localhost:65357 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Processi... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. waiting... 2024-11-21T08:53:08.029040Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:08.029085Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:08.030221Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:08.031052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:08.239644Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439652967205061257:2301], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:08.239662Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439652967205061246:2298], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:08.239803Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:08.240572Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:08.242887Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439652967205061260:2302], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:08.323398Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> SpaceCheckForDiskReassign::Basic [GOOD] >> VDiskAssimilation::Test >> TUserAttrsTestWithReboots::Reboots |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::CheckPgAutoParams [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::Init [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TUserAttrsTestWithReboots::AllowedSymbolsReboots >> TPersQueueTest::NoDecompressionMemoryLeaks >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams [GOOD] Test command err: Trying to start YDB, gRPC: 6560, MsgBus: 23697 2024-11-21T08:53:02.303481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652939990675304:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:02.303674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e4/r3tmp/tmpv9BibI/pdisk_1.dat 2024-11-21T08:53:02.358437Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6560, node 1 2024-11-21T08:53:02.369952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:02.369968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:02.369970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:02.370017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23697 TClient is connected to server localhost:23697 2024-11-21T08:53:02.403794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:02.403825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:02.404853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:02.431928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:02.593941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652939990675903:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:02.593964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:02.603351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.619761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652939990676032:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:02.619788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:02.619814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652939990676037:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:02.620550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:02.621918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652939990676039:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:02.761453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715665:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7837, MsgBus: 12840 2024-11-21T08:53:03.202513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652945667988940:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:03.202784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e4/r3tmp/tmp0kHH2a/pdisk_1.dat 2024-11-21T08:53:03.213704Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7837, node 2 2024-11-21T08:53:03.232869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:03.232886Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:03.232887Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:03.232922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12840 TClient is connected to server localhost:12840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:03.302828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:03.302863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:03.303916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:03.305626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:03.604880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652945667989536:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.604901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652945667989547:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.604908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:03.605530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:03.607161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652945667989550:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:03.688963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.700250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:53:03.708727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.728987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2024-11-21T08:53:03.736439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.755455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:03.763786Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRjNWExNmQtNGRiOWY0OWUtMTNlMzNmMGItNjZmY2VmODU=, ActorId: [2:7439652945667989517:2297], ActorState: ExecuteState, TraceId: 01jd6ysj46f1rwmyxtcahgadr4, Create QueryResponse for error on request, msg: 2024-11-21T08:53:03.769215Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRjNWExNmQtNGRiOWY0OWUtMTNlMzNmMGItNjZmY2VmODU=, ActorId: [2:7439652945667989517:2297], ActorState: ExecuteState, TraceId: 01jd6ysj4n3jjt6tc9e0vczs0j, Create QueryResponse for error on request, msg: 2024-11-21T08:53:03.773526Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRjNWExNmQtNGRiOWY0OWUtMTNlMzNmMGItNjZmY2VmODU=, ActorId: [2:7439652945667989517:2297], ActorState: ExecuteState, TraceId: 01jd6ysj4tbwthaea9nj4qtjcf, Crea ... : NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:08.903180Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439652964746722450:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:08.981592Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15584, MsgBus: 26137 2024-11-21T08:53:09.277067Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439652970815030411:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:09.277399Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e4/r3tmp/tmpHP9hNG/pdisk_1.dat 2024-11-21T08:53:09.309627Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15584, node 9 2024-11-21T08:53:09.327162Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:09.327191Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:09.327193Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:09.327247Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26137 TClient is connected to server localhost:26137 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:09.379938Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:09.379964Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:09.384163Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:09.385323Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:09.387599Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:09.637636Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439652970815031013:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:09.637670Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439652970815031004:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:09.637756Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:09.638561Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:09.644379Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439652970815031018:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:09.748372Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:53:09.813723Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:09.909919Z node 9 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [9:7439652970815031391:2361], owner: [9:7439652970815030970:2290], statement id: 0 2024-11-21T08:53:09.909993Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=M2MxYzBmMTctMzczMDFjMjEtZjcxNzI3ODYtNGRjMzdiOGI=, ActorId: [9:7439652970815031389:2360], ActorState: ExecuteState, TraceId: 01jd6ysr4m7d4xajp65b4n68vf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:53:09.953656Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439652970815031417:2371], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2024-11-21T08:53:09.954385Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OWIzYTAxY2UtYmVmOGMzMWUtZTg3ZTU5ZjMtZGFjYTFiZjA=, ActorId: [9:7439652970815031414:2369], ActorState: ExecuteState, TraceId: 01jd6ysr5w477fgeyd0qwe347e, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:53:09.961397Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439652970815031430:2377], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2024-11-21T08:53:09.961541Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NDgwOTljY2MtYjIzNmExNGEtNDY2ZWU2ZmMtNmE0M2MwY2U=, ActorId: [9:7439652970815031427:2375], ActorState: ExecuteState, TraceId: 01jd6ysr633bajhhy4h7ytsdre, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:53:09.964872Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ysr6a7sskqcjr11e3y1b5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NjY4ZjlkOTQtMmI0YmZjZTYtZDQ1ZTZkYTktNGQwMmU4ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T08:53:09.965227Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NjY4ZjlkOTQtMmI0YmZjZTYtZDQ1ZTZkYTktNGQwMmU4ZDc=, ActorId: [9:7439652970815031439:2381], ActorState: ExecuteState, TraceId: 01jd6ysr6a7sskqcjr11e3y1b5, Create QueryResponse for error on request, msg: 2024-11-21T08:53:09.971825Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:53:09.985070Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:53:09.996587Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439652970815031605:2406], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2024-11-21T08:53:09.996680Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OTRiYzA2YTUtMjI1OWMyYjItZWQ2MmIxMjUtNTdlMTg1MWM=, ActorId: [9:7439652970815031602:2404], ActorState: ExecuteState, TraceId: 01jd6ysr79dqrzc455js78p3va, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:53:10.000585Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439652970815031617:2412], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2024-11-21T08:53:10.001028Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZjBhMjg3Zi1iMDZkZTc2Mi02ZGJmYjgyZi00MzI0N2E5YQ==, ActorId: [9:7439652970815031614:2410], ActorState: ExecuteState, TraceId: 01jd6ysr7d5ffwdgenmn17fmm4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:53:10.037092Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd6ysr7j334hnq26hjwnd671, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MmU2NzcyNjktYWZmOTljYmMtM2VhMDE5MTItNmYyMjAwZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T08:53:10.037263Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MmU2NzcyNjktYWZmOTljYmMtM2VhMDE5MTItNmYyMjAwZmU=, ActorId: [9:7439652975109998922:2416], ActorState: ExecuteState, TraceId: 01jd6ysr7j334hnq26hjwnd671, Create QueryResponse for error on request, msg: 2024-11-21T08:53:10.042480Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:53:10.085282Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 9, TabletId: 72075186224037892 not found 2024-11-21T08:53:10.086401Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 >> ForceDropWithReboots::DoNotLostDeletedTablets [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> TCdcStreamWithRebootsTests::MergeTable[TabletReboots] [GOOD] >> LocalTableWriter::SupportedTypes >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> LocalTableWriter::WriteTable >> KqpPg::TypeCoercionInsert [GOOD] >> LocalTableWriter::ConsistentWrite >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> LocalTableWriter::ConsistentWrite [GOOD] >> KqpPg::TableSelect >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> LocalTableWriter::WriteTable [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> VDiskAssimilation::Test [GOOD] >> TopicService::UnknownConsumer [GOOD] >> 
KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> TopicService::UnknownTopic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:156:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:156:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:159:2057] recipient: [10:157:2177] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:161:2057] recipient: [10:157:2177] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:160:2178] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:230:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:160:2180] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:164:2057] recipient: [11:160:2180] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:163:2181] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 7 ... etID 72057594037927937 is [106:105:2137] sender: [106:106:2057] recipient: [106:99:2133] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:139:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:234:2057] recipient: [106:97:2132] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:237:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:238:2057] recipient: [106:236:2246] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:240:2057] recipient: [106:236:2246] !Reboot 72057594037927937 (actor [106:105:2137]) rebooted! !Reboot 72057594037927937 (actor [106:105:2137]) tablet resolver refreshed! new actor is[106:239:2247] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:292:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:106:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:139:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:239:2057] recipient: [107:97:2132] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:242:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:243:2057] recipient: [107:241:2251] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:245:2057] recipient: [107:241:2251] !Reboot 72057594037927937 (actor [107:105:2137]) rebooted! !Reboot 72057594037927937 (actor [107:105:2137]) tablet resolver refreshed! new actor is[107:244:2252] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:314:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:106:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:139:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:239:2057] recipient: [108:97:2132] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:242:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:243:2057] recipient: [108:241:2251] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:245:2057] recipient: [108:241:2251] !Reboot 72057594037927937 (actor [108:105:2137]) rebooted! !Reboot 72057594037927937 (actor [108:105:2137]) tablet resolver refreshed! new actor is[108:244:2252] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:314:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:106:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:139:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:240:2057] recipient: [109:97:2132] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:243:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:244:2057] recipient: [109:242:2251] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:246:2057] recipient: [109:242:2251] !Reboot 72057594037927937 (actor [109:105:2137]) rebooted! !Reboot 72057594037927937 (actor [109:105:2137]) tablet resolver refreshed! 
new actor is[109:245:2252] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:293:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:106:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:139:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:242:2057] recipient: [110:97:2132] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:245:2057] recipient: [110:244:2253] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:246:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:248:2057] recipient: [110:244:2253] !Reboot 72057594037927937 (actor [110:105:2137]) rebooted! !Reboot 72057594037927937 (actor [110:105:2137]) tablet resolver refreshed! new actor is[110:247:2254] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:317:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:106:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:139:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:242:2057] recipient: [111:97:2132] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:245:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:246:2057] recipient: [111:244:2253] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:248:2057] recipient: [111:244:2253] !Reboot 72057594037927937 (actor [111:105:2137]) rebooted! !Reboot 72057594037927937 (actor [111:105:2137]) tablet resolver refreshed! new actor is[111:247:2254] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:317:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:106:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:139:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:243:2057] recipient: [112:97:2132] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:246:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:247:2057] recipient: [112:245:2253] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:249:2057] recipient: [112:245:2253] !Reboot 72057594037927937 (actor [112:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [112:105:2137]) tablet resolver refreshed! new actor is[112:248:2254] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:318:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:106:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:139:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:248:2057] recipient: [113:97:2132] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:250:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:252:2057] recipient: [113:251:2258] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:254:2057] recipient: [113:251:2258] !Reboot 72057594037927937 (actor [113:105:2137]) rebooted! !Reboot 72057594037927937 (actor [113:105:2137]) tablet resolver refreshed! new actor is[113:253:2259] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:323:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:106:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:139:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:248:2057] recipient: [114:97:2132] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:251:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:252:2057] recipient: [114:250:2258] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:254:2057] recipient: [114:250:2258] !Reboot 72057594037927937 (actor [114:105:2137]) rebooted! !Reboot 72057594037927937 (actor [114:105:2137]) tablet resolver refreshed! new actor is[114:253:2259] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:323:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:106:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:139:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:249:2057] recipient: [115:97:2132] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:252:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:253:2057] recipient: [115:251:2258] Leader for TabletID 72057594037927937 is [115:254:2259] sender: [115:255:2057] recipient: [115:251:2258] !Reboot 72057594037927937 (actor [115:105:2137]) rebooted! !Reboot 72057594037927937 (actor [115:105:2137]) tablet resolver refreshed! new actor is[115:254:2259] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:106:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:139:2057] recipient: [116:14:2061] |88.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |88.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> TPersQueueTest::SetupLockSession [GOOD] >> ColumnStatistics::CountMinSketchStatistics >> KqpPg::TableSelect [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> BasicStatistics::TwoNodes >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpPg::V1CreateTable |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |88.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |88.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:32.742724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:32.742745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.742749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:32.742752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:32.742764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:32.742767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:32.742773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.742832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:32.751937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:32.751954Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:32.754146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:32.754251Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:32.754276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:32.756450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:32.756524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:32.756658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.756836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.757404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.757650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.757659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.757670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:32.757677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.757683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:32.757717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:32.758846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:32.771150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:32.771217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.771269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:32.771307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:32.771313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.771827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.771854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:32.771894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.771904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:32.771908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:32.771913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:32.772243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.772253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:32.772258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:32.772580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.772591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.772597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.772604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.773028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:32.773366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:32.773418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:32.773575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.773593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:32.773598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.773641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:32.773645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.773667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:32.773676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.774175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.774191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.774235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.774240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:32.774333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.774341Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:32.774353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:32.774358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.774365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:32.774370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.774375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:32.774379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:32.774392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:32.774398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:32.774402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:11.035082Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:11.035164Z node 133 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 94us result status StatusSuccess 2024-11-21T08:53:11.035377Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: 
false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:11.035486Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:11.035521Z node 133 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 38us result status StatusSuccess 2024-11-21T08:53:11.035618Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 
PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2024-11-21T08:53:12.192175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652980919887117:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:12.192372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004500/r3tmp/tmpbgueIs/pdisk_1.dat 2024-11-21T08:53:12.249320Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29970 TServer::EnableGrpc on GrpcPort 29056, node 1 2024-11-21T08:53:12.280086Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:12.280099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:12.280101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:12.280137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:12.292182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:12.292232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:12.293324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29970 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:12.327443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:12.330560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179192429 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T08:53:12.391127Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handshake: worker# [1:7439652980919887672:2278] 2024-11-21T08:53:12.391241Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:53:12.391302Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:53:12.391426Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.391468Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2024-11-21T08:53:12.391520Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652980919887765:2337] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:53:12.391532Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.391548Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652980919887765:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T08:53:12.392536Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652980919887765:2337] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:53:12.392560Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.392569Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652980919887762:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2024-11-21T08:53:12.149857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652984303388154:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:12.150119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044fb/r3tmp/tmpAPmz6d/pdisk_1.dat 2024-11-21T08:53:12.206671Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15154 TServer::EnableGrpc on GrpcPort 30654, node 1 2024-11-21T08:53:12.236310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:12.236321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:12.236323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:12.236358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:12.249996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:12.250016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:15154 2024-11-21T08:53:12.251094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:12.268448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:12.272464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732179192373 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "uint32_value" Type: "... (TRUNCATED) 2024-11-21T08:53:12.335024Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handshake: worker# [1:7439652984303388711:2278] 2024-11-21T08:53:12.335109Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:53:12.335169Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:53:12.335322Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 45b CreateTime: 
1970-01-01T00:00:00Z },{ Offset: 5 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 44b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 9 Data: 66b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 11 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 12 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 13 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 14 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 15 Data: 58b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 16 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 17 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 18 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 19 Data: 76b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 20 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 21 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 22 Data: 61b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 23 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 24 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 25 Data: 46b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 26 Data: 47b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 27 Data: 50b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 28 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 29 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 30 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 31 Data: 64b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.335459Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2024-11-21T08:53:12.335534Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652984303388804:2337] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:53:12.335545Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.335586Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652984303388804:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 
Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2024-11-21T08:53:12.343648Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652984303388804:2337] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:53:12.343677Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.343688Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652984303388801:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 
2024-11-21T08:52:48.692905Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732179168692895 2024-11-21T08:52:48.834268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652877433364886:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.834343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:48.835916Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652880996067602:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.836297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042ef/r3tmp/tmpIlfahY/pdisk_1.dat 2024-11-21T08:52:48.862113Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:48.868493Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:48.887162Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63720, node 1 2024-11-21T08:52:48.902913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0042ef/r3tmp/yandexccMNAV.tmp 2024-11-21T08:52:48.902929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0042ef/r3tmp/yandexccMNAV.tmp 2024-11-21T08:52:48.903006Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0042ef/r3tmp/yandexccMNAV.tmp 2024-11-21T08:52:48.903054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:48.908253Z INFO: TTestServer started on Port 2685 GrpcPort 63720 TClient is connected to server localhost:2685 PQClient connected to localhost:63720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:48.930672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:48.933910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:48.933943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:48.935236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:52:48.960774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:48.960796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:52:48.961980Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:48.962252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.146652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652881728332939:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.146678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652881728332950:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.146686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.147445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:52:49.148041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652881728332982:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.148062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.152948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652881728332953:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:52:49.182065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.202869Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652885291035249:2284], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:49.203004Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTY1NDdiMjUtZmU0ZDg4YTQtNTYyMWM1MGEtMzc1MzllNTg=, ActorId: [2:7439652885291035185:2277], ActorState: ExecuteState, TraceId: 01jd6ys3xb8nr79aemyw9x5hen, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:49.203742Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:49.211560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.238139Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652881728333228:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:49.238589Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjFlNDVjY2EtMWI2M2JkZDUtYzFjODdiNjItMzdiZjJlZmE=, ActorId: [1:7439652881728332936:2299], ActorState: ExecuteState, TraceId: 01jd6ys3vt1555ftqmf667pym8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:49.238844Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:49.292658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:63720", true, true, 1000); 2024-11-21T08:52:49.332062Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd6ys40zb4gskavzfeqv6tms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Y0MzFkZWYtYTc0M2I3YmItNjFmYzdlOTktZmM5YWFkOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652881728333424:2932] 2024-11-21T08:52:53.834135Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652877433364886:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:53.834185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:53.836198Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652880996067602:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:53.836246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:52:54.403503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:63720 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:52:54.415247Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63720 MetaRequest { CmdCr ... 
21T08:53:10.850753Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:26307 2024-11-21T08:53:10.851110Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:53:10.851545Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:10.851562Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T08:53:10.851765Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:53:10.851811Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:38274 2024-11-21T08:53:10.851825Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38274 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:53:10.851830Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:10.852298Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:53:10.852333Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:53:10.852335Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:10.852337Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:10.852343Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:53:10.852851Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:53:10.873368Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T08:53:10.873441Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439652973881825043:2470] connected; active server actors: 1 2024-11-21T08:53:10.873449Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T08:53:10.873453Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T08:53:10.875731Z node 5 :PERSQUEUE_READ_BALANCER INFO: 
[72075186224037893][rt3.dc1--test-topic] pipe [5:7439652973881825043:2470] disconnected; active server actors: 1 2024-11-21T08:53:10.875744Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439652973881825043:2470] disconnected no session 2024-11-21T08:53:10.887313Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:53:10.887344Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T08:53:10.887348Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439652973881825006:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T08:53:10.887359Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:10.887625Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:10.887652Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439652973881825063:2470], now have 1 active actors on pipe 2024-11-21T08:53:10.887696Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T08:53:10.887763Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:53:10.887779Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:53:10.887822Z node 6 :PERSQUEUE INFO: new Cookie src|831645cd-1782db45-672b816e-d9a0037e_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:53:10.887868Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:53:10.887900Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:10.888080Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:53:10.888091Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:53:10.888122Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:10.888194Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|831645cd-1782db45-672b816e-d9a0037e_0 2024-11-21T08:53:10.888649Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179190888 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:10.888680Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|831645cd-1782db45-672b816e-d9a0037e_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:53:10.888818Z :INFO: [] MessageGroupId [src] SessionId [src|831645cd-1782db45-672b816e-d9a0037e_0] Write session: close. 
Timeout = 0 ms 2024-11-21T08:53:10.888823Z :INFO: [] MessageGroupId [src] SessionId [src|831645cd-1782db45-672b816e-d9a0037e_0] Write session will now close 2024-11-21T08:53:10.888830Z :DEBUG: [] MessageGroupId [src] SessionId [src|831645cd-1782db45-672b816e-d9a0037e_0] Write session: aborting 2024-11-21T08:53:10.888939Z :INFO: [] MessageGroupId [src] SessionId [src|831645cd-1782db45-672b816e-d9a0037e_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:53:10.888943Z :DEBUG: [] MessageGroupId [src] SessionId [src|831645cd-1782db45-672b816e-d9a0037e_0] Write session: destroy 2024-11-21T08:53:10.890431Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|831645cd-1782db45-672b816e-d9a0037e_0 grpc read done: success: 0 data: 2024-11-21T08:53:10.890442Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|831645cd-1782db45-672b816e-d9a0037e_0 grpc read failed 2024-11-21T08:53:10.890449Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|831645cd-1782db45-672b816e-d9a0037e_0 grpc closed 2024-11-21T08:53:10.890454Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|831645cd-1782db45-672b816e-d9a0037e_0 is DEAD 2024-11-21T08:53:10.890808Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:10.890938Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:10.890965Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439652973881825063:2470] destroyed 2024-11-21T08:53:10.890981Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:53:10.891944Z :INFO: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Starting read session 2024-11-21T08:53:10.891958Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Starting session to cluster null (localhost:26307) 2024-11-21T08:53:10.892284Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:10.892289Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:10.892293Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T08:53:10.892355Z :ERROR: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T08:53:10.892360Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:10.892362Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:10.892373Z :INFO: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T08:53:10.892409Z :NOTICE: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:10.892413Z :DEBUG: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T08:53:10.892421Z :INFO: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:10.892426Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:53:10.892431Z :INFO: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:10.892437Z :NOTICE: [/Root] [/Root] [ed7122c1-a477588c-ed89ede1-f3c10100] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::DoNotLostDeletedTablets [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:27.398826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:27.398842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:27.398846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:27.398849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:27.398859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:27.398861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:27.398867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:27.398930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:27.407395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:27.407413Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:27.409357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:27.409465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:27.409491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:27.412181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:27.412288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:27.412419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.412664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
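
For readability only: the three source-id bookkeeping statements quoted verbatim inside the PQ_PARTITION_CHOOSER entries of the write-session test output above (TTableHelper SelectQuery, UpdateQuery and UpdateAccessTimeQuery against `/Root/PQ/SourceIdMeta2`) are reproduced below with whitespace added and nothing else changed; the $Hash, $Topic, $SourceId, $Partition, $CreateTime, $AccessTime and $SeqNo parameters are bound by the test at run time.

    --!syntax_v1
    -- SelectQuery: look up an existing partition mapping for a source id
    DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `/Root/PQ/SourceIdMeta2`
    WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId;

    --!syntax_v1
    -- UpdateQuery: record (or replace) the mapping chosen for the source id
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `/Root/PQ/SourceIdMeta2`
        (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    -- UpdateAccessTimeQuery: refresh the access time of an existing mapping
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
    UPDATE `/Root/PQ/SourceIdMeta2`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition;

Read together, they show the partition chooser's flow as logged above: select an existing mapping, upsert a new one when none is found, and otherwise only refresh the access time.
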
2024-11-21T08:52:27.413359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:27.413645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:27.413653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:27.413667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:27.413671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:27.413676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:27.413702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:27.414814Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:27.428183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:27.428280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.428337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:27.428425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:27.428431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:27.429077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:27.429088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:27.429091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:27.429406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:27.429644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.429653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.429658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.430054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:27.430377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:27.430413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:27.430549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:27.430567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:27.430573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.430627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:27.430633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:27.430656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:27.430669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:27.430942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:27.430948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:27.430968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:52:27.430973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:27.431023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:27.431027Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:27.431034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:27.431037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.431040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:27.431043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:27.431046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:27.431048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:27.431055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:27.431059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:27.431063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ublication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:11.053570Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:11.053574Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:53:11.053582Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:53:11.054006Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054019Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054023Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054027Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054030Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054034Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:11.054206Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:11.054473Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T08:53:11.055107Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 
TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:53:11.055156Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:53:11.055210Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 Forgetting tablet 72075186233409546 2024-11-21T08:53:11.055418Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:11.055459Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:53:11.055529Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:53:11.055752Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T08:53:11.056316Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:53:11.056362Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409548 2024-11-21T08:53:11.056974Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:53:11.057007Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T08:53:11.057033Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409551 2024-11-21T08:53:11.057242Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2024-11-21T08:53:11.057351Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:53:11.057375Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186233409549 2024-11-21T08:53:11.057970Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:11.057978Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:53:11.057989Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:11.058037Z node 94 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:11.058104Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:53:11.058124Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:11.058158Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:11.058196Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:53:11.058201Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:53:11.058812Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:53:11.058823Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:53:11.058884Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:53:11.058888Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:53:11.059044Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T08:53:11.059049Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T08:53:11.059070Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:53:11.059073Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:53:11.059107Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:11.059123Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:11.059128Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:53:11.059140Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:11.059182Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:11.059187Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:11.059210Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:11.059236Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:53:11.059241Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:53:11.059613Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 
2024-11-21T08:53:11.059634Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:11.059643Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:11.059647Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:11.059663Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:11.059971Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:53:11.060034Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:53:11.060041Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:53:11.060103Z node 94 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:53:11.060115Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:53:11.060118Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [94:917:2833] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted 2024-11-21T08:53:11.060174Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:53:11.060187Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:53:11.060192Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:53:11.060202Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:53:11.060227Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:53:11.060232Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2024-11-21T08:53:12.244826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652982235629434:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:12.245028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/0044f7/r3tmp/tmpmSgTbZ/pdisk_1.dat 2024-11-21T08:53:12.296825Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32228 TServer::EnableGrpc on GrpcPort 26460, node 1 2024-11-21T08:53:12.323762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:12.323774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:12.323776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:12.323814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32228 2024-11-21T08:53:12.344936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:12.344971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:12.345971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:12.375637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:12.378416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179192478 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T08:53:12.437174Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handshake: worker# [1:7439652982235629987:2278] 2024-11-21T08:53:12.437274Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:53:12.437329Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:53:12.437395Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.438146Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2024-11-21T08:53:12.438174Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2024-11-21T08:53:12.438220Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:53:12.438228Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.438240Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 
48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2024-11-21T08:53:12.439580Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:53:12.439596Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.439603Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2024-11-21T08:53:12.439690Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.439739Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 5 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.439777Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 7 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.439823Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2024-11-21T08:53:12.439842Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2024-11-21T08:53:12.439859Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T08:53:12.441099Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:53:12.441123Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.441132Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2024-11-21T08:53:12.441196Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 9 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T08:53:12.441229Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2024-11-21T08:53:12.441250Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T08:53:12.442065Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439652982235630080:2337] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:53:12.442083Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T08:53:12.442088Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2024-11-21T08:53:12.442183Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439652982235630077:2337] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 11 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T08:53:02.489319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:02.489890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:02.489925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f5e/r3tmp/tmpaH7GGC/pdisk_1.dat 2024-11-21T08:53:02.595533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.596634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:02.603650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.603899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:02.604318Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:53:02.604335Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T08:53:02.618011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:02.618700Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T08:53:02.661323Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T08:53:02.661373Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T08:53:02.661399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:02.661415Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.661418Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.661422Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.661424Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.661435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
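
For reference, the LocalTableWriter::ConsistentWrite output above prints the scheme description of `/Root/Table` (columns `key` Uint32 and `value` Utf8, key column `key`). A minimal YQL sketch of an equivalent table, reconstructed only from that PathDescription and not taken from the test source, would be:

    -- Sketch reconstructed from the scheme dump above; the test itself creates
    -- the table through schemeshard transactions, not through this DDL.
    CREATE TABLE `/Root/Table` (
        key Uint32,
        value Utf8,
        PRIMARY KEY (key)
    );
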
2024-11-21T08:53:02.661481Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:53:02.661487Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T08:53:02.661491Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T08:53:02.661497Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:53:02.661529Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T08:53:02.671829Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T08:53:02.671860Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T08:53:02.671876Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:53:02.672022Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T08:53:02.672035Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T08:53:02.672040Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.672058Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T08:53:02.672100Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797645824 } 2024-11-21T08:53:02.672105Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T08:53:02.672110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:02.672156Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T08:53:02.672163Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:53:02.672165Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:53:02.672186Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:53:02.672190Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T08:53:02.672192Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T08:53:02.672196Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:53:02.682481Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T08:53:02.682508Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:53:02.774673Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T08:53:02.774713Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:53:02.774824Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T08:53:02.774953Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T08:53:02.774966Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T08:53:02.775209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T08:53:02.775493Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T08:53:02.775510Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T08:53:02.775514Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T08:53:02.775519Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T08:53:02.775685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:02.775700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:53:02.775864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T08:53:02.776449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.776732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:02.776742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:02.776870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T08:53:02.777347Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T08:53:02.778831Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T08:53:02.778855Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T08:53:02.778912Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T08:53:02.778921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T08:53:02.778931Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T08:53:02.778961Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T08:53:02.779061Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T08:53:02.779098Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2024-11-21T08:53:02.779217Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T08:53:02.779300Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T08:53:02.779319Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{77685194916960}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T08:53:02.779332Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{77685194916960}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T08:53:02.779354Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{77685194916960}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T08:53:02.779363Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{77685194916960}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 2:4 2024-11-21T08:53:12.866313Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2024-11-21T08:53:12.866350Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:12.866374Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:12.866384Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:12.866388Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:12.866392Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2024-11-21T08:53:12.876774Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:12.876824Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:12.877194Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2024-11-21T08:53:12.877208Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 32501 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T08:53:12.969208Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2024-11-21T08:53:12.969256Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T08:53:12.969261Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T08:53:12.969326Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2024-11-21T08:53:12.969455Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2024-11-21T08:53:12.969465Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T08:53:12.969595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, 
transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:12.969761Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:12.969768Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:12.969821Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:12.969827Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:12.969834Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:12.969879Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2024-11-21T08:53:12.969903Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:12.969928Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:12.969943Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T08:53:12.970023Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:12.970377Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 33000 txid# 281474976715667} 2024-11-21T08:53:12.970384Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2024-11-21T08:53:12.970390Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:12.970420Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T08:53:12.970431Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T08:53:12.970437Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T08:53:12.970441Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2024-11-21T08:53:12.970445Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2024-11-21T08:53:12.970490Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:12.970505Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:12.970515Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2024-11-21T08:53:12.970529Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:12.970630Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 2024-11-21T08:53:12.971033Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2024-11-21T08:53:12.971042Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:12.971144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2024-11-21T08:53:12.971159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 2, subscribers: 1 2024-11-21T08:53:12.971271Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2024-11-21T08:53:12.971338Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:12.971580Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T08:53:12.971608Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T08:53:12.971621Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T08:53:12.971627Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T08:53:12.971636Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 TRACE ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T08:53:12.971651Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T08:53:12.971671Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T08:53:12.971677Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T08:53:12.971683Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 INFO ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T08:53:12.971707Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 NOTE ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2024-11-21T08:53:12.971711Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 NOTE ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2024-11-21T08:53:12.971715Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZGNmNzBiMDgtZWYxYWY5ZDYtYmVhMWEwY2EtM2I3YTU5ZjY= 2024-11-21 08:53:12.971 NOTE ydb-core-tx-datashard-ut_minstep(pid=515377, tid=0x00007F440B2E2BC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2024-11-21T08:53:12.982726Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T08:53:12.982818Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T08:53:12.983283Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T08:53:12.983500Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T08:53:12.983599Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T08:53:12.983609Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T08:53:12.983633Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T08:53:12.983655Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T08:53:12.983673Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22517, MsgBus: 12417 2024-11-21T08:52:41.142103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652848949724591:2194];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:41.168589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a6b/r3tmp/tmpBbruEJ/pdisk_1.dat 2024-11-21T08:52:41.205412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22517, node 1 2024-11-21T08:52:41.236351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:41.236364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:41.236365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:41.236396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12417 2024-11-21T08:52:41.271913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:41.271948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:41.274942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12417 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:41.316089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.324851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:41.338738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.416971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.444182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.501773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:41.562309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848949725989:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.562360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.566627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.576754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.643132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.652663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.667425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.680110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:41.695236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848949726507:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.695283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.695422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652848949726512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:41.696297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:41.699220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652848949726514:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:46.139995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652848949724591:2194];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:46.140022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:56.194117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:52:56.194152Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:56.425310Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652913374237098:2629], TxId: 281474976715686, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTI1NTNkMDUtYjNmNjg2N2UtNWRkN2IzMDItOTc1YmJmZmM=. CustomerSuppliedId : . TraceId : 01jd6ysayj78eghbza3k84rf83. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732179162000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732179162056/18446744073709551615 (node# 1 state# Ready) } } 2024-11-21T08:52:56.425554Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652913374237098:2629], TxId: 281474976715686, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTI1NTNkMDUtYjNmNjg2N2UtNWRkN2IzMDItOTc1YmJmZmM=. CustomerSuppliedId : . TraceId : 01jd6ysayj78eghbza3k84rf83. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732179162000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732179162056/18446744073709551615 (node# 1 state# Ready) } }. 2024-11-21T08:52:56.425667Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439652913374237099:2630], TxId: 281474976715686, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTI1NTNkMDUtYjNmNjg2N2UtNWRkN2IzMDItOTc1YmJmZmM=. CustomerSuppliedId : . TraceId : 01jd6ysayj78eghbza3k84rf83. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439652913374237094:2453], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T08:52:56.426798Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTI1NTNkMDUtYjNmNjg2N2UtNWRkN2IzMDItOTc1YmJmZmM=, ActorId: [1:7439652848949726855:2453], ActorState: ExecuteState, TraceId: 01jd6ysayj78eghbza3k84rf83, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2397, MsgBus: 23464 2024-11-21T08:52:56.758479Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652913905405029:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:56.758512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a6b/r3tmp/tmpNBG2NV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2397, node 2 2024-11-21T08:52:56.773899Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:56.775714Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:56.775724Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:56.775726Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:56.775767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23464 TClient is connected to server localhost:23464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:56.858906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:56.858944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:56.860045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:56.863612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:56.865017Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:56.872672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:56.888171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:56.906444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:56.917325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:57.065744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652918200373872:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:57.065778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:57.072662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.083032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.094142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.108041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.126290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.135897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:57.153466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652918200374384:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:57.153505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:57.153990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652918200374389:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:57.154728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:57.162667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652918200374391:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:53:01.759039Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652913905405029:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:01.759070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:11.763564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:53:11.763595Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:12.870441Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652982624884987:2644], TxId: 281474976715687, task: 1. Ctx: { TraceId : 01jd6ysv0f54jf595jmx264mna. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Yzk3OGQwMDEtMzFkNzg4YTEtMzc5ODcwODktYWI3YTMyMzg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732179177379/18446744073709551615 shard 72075186224037888 with lowWatermark v1732179177407/18446744073709551615 (node# 2 state# Ready) } } 2024-11-21T08:53:12.870462Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652982624884987:2644], TxId: 281474976715687, task: 1. Ctx: { TraceId : 01jd6ysv0f54jf595jmx264mna. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Yzk3OGQwMDEtMzFkNzg4YTEtMzc5ODcwODktYWI3YTMyMzg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732179177379/18446744073709551615 shard 72075186224037888 with lowWatermark v1732179177407/18446744073709551615 (node# 2 state# Ready) } }. 2024-11-21T08:53:12.870530Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439652982624884988:2645], TxId: 281474976715687, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Yzk3OGQwMDEtMzFkNzg4YTEtMzc5ODcwODktYWI3YTMyMzg=. CustomerSuppliedId : . TraceId : 01jd6ysv0f54jf595jmx264mna. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439652982624884983:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T08:53:12.870789Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzk3OGQwMDEtMzFkNzg4YTEtMzc5ODcwODktYWI3YTMyMzg=, ActorId: [2:7439652918200374676:2454], ActorState: ExecuteState, TraceId: 01jd6ysv0f54jf595jmx264mna, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2024-11-21T08:51:07.844148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652444261988753:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:07.844351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6f/r3tmp/tmpw7aI0H/pdisk_1.dat 2024-11-21T08:51:07.906294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2429, node 1 2024-11-21T08:51:07.923092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:51:07.923105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:51:07.923108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:51:07.923148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63494 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:51:07.943373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:51:07.943398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:51:07.944482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:51:07.978850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:51:07.981894Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:51:07.988493Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:51:08.259672Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:51:08.259842Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652448556956490:2298], Start check tables existence, number paths: 2 2024-11-21T08:51:08.262486Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:51:08.262501Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:51:08.263188Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T08:51:08.263207Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652448556956490:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:51:08.263216Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652448556956490:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:51:08.263220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439652448556956490:2298], Successfully finished 2024-11-21T08:51:08.263240Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:51:08.264137Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDMxNDJhMDMtNmE3ODNlMWMtNGRlMzJmNmItYzQ4NWQ5NDI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDMxNDJhMDMtNmE3ODNlMWMtNGRlMzJmNmItYzQ4NWQ5NDI= 2024-11-21T08:51:08.265762Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDMxNDJhMDMtNmE3ODNlMWMtNGRlMzJmNmItYzQ4NWQ5NDI=, ActorId: [1:7439652448556956506:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:08.268486Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:08.269313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:51:08.269860Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T08:51:08.270368Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T08:51:08.271485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:51:08.368482Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T08:51:08.369503Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652448556956508:2284], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T08:51:08.370229Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY= 2024-11-21T08:51:08.370305Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY=, ActorId: [1:7439652448556956568:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:51:08.370357Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY=, ActorId: [1:7439652448556956568:2300], ActorState: ReadyState, TraceId: 01jd6yp1ejazsg45x9wmg3maex, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439652448556956567:2323] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T08:51:08.370377Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:51:08.370379Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T08:51:08.370391Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439652448556956568:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY= 2024-11-21T08:51:08.370401Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652448556956570:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:08.370417Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652448556956571:2302], Database: /Root, Start database fetching 2024-11-21T08:51:08.370742Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439652448556956571:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T08:51:08.370773Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652448556956570:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:08.370779Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T08:51:08.370786Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T08:51:08.370789Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T08:51:08.370861Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652448556956581:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: 
ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY=, Start pool fetching 2024-11-21T08:51:08.370865Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652448556956583:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T08:51:08.370878Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652448556956582:2304], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T08:51:08.371091Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652448556956583:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T08:51:08.371098Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652448556956582:2304], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T08:51:08.371107Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439652448556956581:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY=, Pool info successfully resolved 2024-11-21T08:51:08.371119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY= 2024-11-21T08:51:08.371137Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439652448556956582:2304], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439652448556956568:2300], session id: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY= 2024-11-21T08:51:08.371147Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MjQ3MGRlMjktZjk4YjliYzUtYTFjMWU3Y2UtMTI2YjdlZDY= 2024-11-21T08:51:08.371155Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request ... 
Id: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: ExecuteState, TraceId: 01jd6yq0ak9wnks0cr2a8ajv1t, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7439652585052862877:2256] 2024-11-21T08:51:39.990646Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, TxId: 2024-11-21T08:51:39.990671Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, TxId: 2024-11-21T08:51:39.990706Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652585052863335:2305], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T08:51:39.990739Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:51:39.990748Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:51:39.990751Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:51:39.990753Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:51:39.990764Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NWJhZDk2YzgtNDdmY2NhZmYtNmFiYzVhZDctN2Y1YWIxMjc=, ActorId: [6:7439652585052863732:2399], ActorState: unknown state, Session actor destroyed 2024-11-21T08:51:39.995354Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652585052863524:2328], DatabaseId: /Root, PoolId: default, Got delete notification 2024-11-21T08:51:39.995388Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T08:51:39.995412Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652585052863824:2417], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T08:51:39.995515Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439652585052863824:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:39.995545Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:51:40.001258Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: ExecuteState, TraceId: 01jd6yq0af0sje11s5y3ydffse, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7439652585052863770:2299] WorkloadServiceCleanup: 0 2024-11-21T08:51:40.001955Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: CleanupState, TraceId: 01jd6yq0af0sje11s5y3ydffse, EndCleanup, isFinal: 0 2024-11-21T08:51:40.001983Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: CleanupState, TraceId: 01jd6yq0af0sje11s5y3ydffse, Sent query response back to proxy, proxyRequestId: 17, proxyId: [6:7439652585052862877:2256] Wait pool handlers 0.000007s: number handlers = 2 Wait pool handlers 1.000098s: number handlers = 2 Wait pool handlers 2.000201s: number handlers = 2 Wait pool handlers 3.000308s: number handlers = 2 Wait pool handlers 4.000409s: number handlers = 2 2024-11-21T08:51:44.139208Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439652585052862665:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:51:44.140012Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Wait pool handlers 5.000527s: number handlers = 2 Wait pool handlers 6.000637s: number handlers = 2 Wait pool handlers 7.001553s: number handlers = 2 Wait pool handlers 8.005561s: number handlers = 2 Wait pool handlers 9.005704s: number handlers = 2 Wait pool handlers 10.006166s: number handlers = 2 Wait pool handlers 11.009553s: number handlers = 2 Wait pool handlers 12.010296s: number handlers = 2 Wait pool handlers 13.012216s: number handlers = 2 Wait pool handlers 14.012945s: number handlers = 2 2024-11-21T08:51:54.147519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:51:54.147536Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:51:54.849167Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652585052863335:2305], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 15.013046s: number handlers = 2 Wait pool handlers 16.013527s: number handlers = 2 Wait pool handlers 17.013750s: number handlers = 2 Wait pool handlers 18.017567s: number handlers = 2 Wait pool handlers 19.021561s: number handlers = 2 Wait pool handlers 20.022903s: number handlers = 2 Wait pool handlers 21.025560s: number handlers = 2 Wait pool handlers 22.026234s: number handlers = 2 Wait pool handlers 23.026336s: number handlers = 2 Wait pool handlers 24.029597s: number handlers = 2 Wait pool handlers 25.032435s: number handlers = 2 Wait pool handlers 26.033641s: number handlers = 2 Wait pool handlers 27.037276s: number handlers = 2 Wait pool handlers 28.038835s: number handlers = 2 Wait pool handlers 29.041565s: number handlers = 2 Wait pool handlers 30.043803s: number handlers = 2 Wait pool handlers 31.043907s: number handlers = 2 Wait pool handlers 32.044033s: number handlers = 2 Wait pool handlers 
33.044135s: number handlers = 2 [the "Wait pool handlers" poll repeats at roughly 1-second intervals with number handlers = 2 up to 89.064570s; the intermediate entries are omitted] 2024-11-21T08:53:09.535599Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652585052863524:2328], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2024-11-21T08:53:09.535731Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439652585052863335:2305], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2024-11-21T08:53:09.535792Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2024-11-21T08:53:09.535827Z node 
6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T08:53:10.069254Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:53:10.069276Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:53:10.069279Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:53:10.069293Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:53:10.069358Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzkyZmVlOGQtMjc1Mzk4OGEtNDVhZjJmNjQtNjM3NWU0Nzc=, ActorId: [6:7439652585052863259:2299], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> VDiskAssimilation::Test [GOOD] Test command err: RandomSeed# 11411380314115105572 2024-11-21T08:51:22.378213Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: data is too large; id# [1:1:0:0:0:20971520:1] size# 20971520 chunkSize# 134217728 Marker# BSVS02 2024-11-21T08:51:22.513092Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: buffer size does not match with part size; buffer size# 99 PartSize# 100 id# [1:1:0:0:0:100:1] Marker# BSVS01 2024-11-21T08:51:22.955545Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: ingress mismatch; id# [1:1:0:0:0:100:2] Marker# BSVS11 2024-11-21T08:51:23.082068Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:23.082525Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS41 2024-11-21T08:51:23.351708Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:23.352049Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 2 Marker# BSVS41 2024-11-21T08:51:23.471814Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:23.472137Z 1 00h00m06.060512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS41 2024-11-21T08:51:23.682964Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:23.683388Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: ydb/core/erasure/erasure.cpp:2116: Unknown crcMode = 2 Marker# BSVS41 2024-11-21T08:51:23.896533Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: 
TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:23.896945Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: ydb/core/erasure/erasure.cpp:2116: Unknown crcMode = 3 Marker# BSVS41 2024-11-21T08:51:24.042919Z 2 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:24.043357Z 3 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 2 Marker# BSVS41 2024-11-21T08:51:24.189064Z 2 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:24.189528Z 3 00h00m21.210512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS41 2024-11-21T08:51:24.405798Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:24.406253Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 2 Marker# BSVS41 2024-11-21T08:51:24.624766Z 2 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:24.625268Z 7 00h00m41.410512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS41 2024-11-21T08:51:24.875062Z 1 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:24.875465Z 8 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:2:1:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 2 Marker# BSVS41 2024-11-21T08:51:25.123602Z 1 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:0:0:0:0:0:0] Marker# BSVS43 2024-11-21T08:51:25.124043Z 8 00h00m46.460512s :BS_VDISK_PUT ERROR: VDISK[82000000:_:2:1:0]: ydb/core/erasure/erasure.cpp:2103: Unknown crcMode = 3 Marker# BSVS41 *** PUT BLOB [72075186270680851:57:3905:6:786432:4194304:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:4194304:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:4194304:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:4194304:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:4194304:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** 0 5 1 6 2 7 3 0 4 1 5 2 6 3 7 4 2024-11-21T08:51:25.461959Z 8 00h02m00.060512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72075186270680851:57:3905:6:786432:4194304:3] barrier# {Soft# {Gen# 57 Step# 3905} Hard# } 2024-11-21T08:51:25.462005Z 2 00h02m00.060512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72075186270680851:57:3905:6:786432:4194304:5] barrier# {Soft# {Gen# 57 Step# 3905} Hard# } 2024-11-21T08:51:25.463288Z 8 00h02m00.060512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72075186270680851:57:3905:6:786432:4194304:3] barrier# {Soft# {Gen# 57 Step# 3905} Hard# } 2024-11-21T08:51:25.463818Z 2 00h02m00.060512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting 
blob beyond the barrier id# [72075186270680851:57:3905:6:786432:4194304:5] barrier# {Soft# {Gen# 57 Step# 3905} Hard# } 0 5 1 6 2 7 3 0 4 1 5 2 6 3 7 4 BlobsWritten# 18144 step 0 waiting for replies scanning parts step 1 waiting for replies scanning parts step 2 waiting for replies scanning parts step 3 waiting for replies scanning parts step 4 waiting for replies scanning parts [the scan then reports "empty@ 4 <index>" for index ranges 0-47, 120-167, 240-287, 432-479, 552-599, 672-767, 840-887, 960-1007 and 1440-1472; the individual entries are omitted and the captured log is truncated by the test runner at this point] ... 
[counters of the form 0/<n>/45 count down one at a time from 0/1280/45 to 0/128/45; the intermediate entries are omitted] 
0/127/45 0/126/45 0/125/45 0/124/45 0/123/45 0/122/45 0/121/45 0/120/45 0/119/45 0/118/45 0/117/45 0/116/45 0/115/45 0/114/45 0/113/45 0/112/45 0/111/45 0/110/45 0/109/45 0/108/45 0/107/45 0/106/45 0/105/45 0/104/45 0/103/45 0/102/45 0/101/45 0/100/45 0/99/45 0/98/45 0/97/45 0/96/45 0/95/45 0/94/45 0/93/45 0/92/45 0/91/45 0/90/45 0/89/45 0/88/45 0/87/45 0/86/45 0/85/45 0/84/45 0/83/45 0/82/45 0/81/45 0/80/45 0/79/45 0/78/45 0/77/45 0/76/45 0/75/45 0/74/45 0/73/45 0/72/45 0/71/45 0/70/45 0/69/45 0/68/45 0/67/45 0/66/45 0/65/45 0/64/45 0/63/45 0/62/45 0/61/45 0/60/45 0/59/45 0/58/45 0/57/45 0/56/45 0/55/45 0/54/45 0/53/45 0/52/45 0/51/45 0/50/45 0/49/45 0/48/45 0/47/45 0/46/45 0/45/45 0/44/45 0/43/45 0/42/45 0/41/45 0/40/45 0/39/45 0/38/45 0/37/45 0/36/45 0/35/45 0/34/45 0/33/45 0/32/45 0/31/45 0/30/45 0/29/45 0/28/45 0/27/45 0/26/45 0/25/45 0/24/45 0/23/45 0/22/45 0/21/45 0/20/45 0/19/45 0/18/45 0/17/45 0/16/45 0/15/45 0/14/45 0/13/45 0/12/45 0/11/45 0/10/45 0/9/45 0/8/45 0/7/45 0/6/45 0/5/45 0/4/45 0/3/45 0/2/45 0/1/45 0/0/45 0/0/44 0/0/43 0/0/42 0/0/41 0/0/40 0/0/39 0/0/38 0/0/37 0/0/36 0/0/35 0/0/34 0/0/33 0/0/32 0/0/31 0/0/30 0/0/29 0/0/28 0/0/27 0/0/26 0/0/25 0/0/24 0/0/23 0/0/22 0/0/21 0/0/20 0/0/19 0/0/18 0/0/17 0/0/16 0/0/15 0/0/14 0/0/13 0/0/12 0/0/11 0/0/10 0/0/9 0/0/8 0/0/7 0/0/6 0/0/5 0/0/4 0/0/3 0/0/2 0/0/1 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |88.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::Status >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> BasicStatistics::NotFullStatisticsDatashard >> HttpRequest::AnalyzeServerless |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TopicService::UnknownTopic [GOOD] >> TExternalDataSourceTestReboots::ParallelCreateDrop |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TopicService::UseDoubleSlashInTopicPath |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable |88.3%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::MergeTableResolvedTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: 
[1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:04.381305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:04.381331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.381337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:04.381342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:04.381356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:04.381361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:04.381371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:04.381469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:04.393777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:04.393805Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.396362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:04.396479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:04.396510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:04.399128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:04.399206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:04.399356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.399517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.400158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.400502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.400517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.400530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:04.400537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.400544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:04.400591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] 
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:04.401942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:04.421139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:04.421226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.421316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:04.421366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:04.421387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.422162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.422190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:04.422242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.422252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:04.422257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:04.422262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:04.422863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.422878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:04.422883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:04.423235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.423246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.423251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.423259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.423828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:04.424297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:04.424352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:04.424535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:04.424557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:04.424565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.424641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:04.424648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:04.424682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:04.424695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:04.425035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:04.425045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:04.425091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:04.425096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:04.425212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:04.425217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:04.425229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:04.425233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.425238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:04.425243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:04.425247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 1:0 2024-11-21T08:51:04.425252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:04.425262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:04.425268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:04.425272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: 
ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:17.657732Z node 172 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:17.657764Z node 172 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 37us result status StatusSuccess 2024-11-21T08:53:17.657864Z node 172 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:17.657909Z node 172 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:17.657926Z node 172 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 18us result status StatusSuccess 2024-11-21T08:53:17.657985Z node 172 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" 
PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 2 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\001\000\004\000\000\000\377\377\377\177" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 20456, MsgBus: 5989 2024-11-21T08:52:41.978926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:52:41.979472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:52:41.979507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a50/r3tmp/tmpFVqZms/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20456, node 1 TClient is connected to server localhost:5989 TClient is connected to server localhost:5989 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:52:42.137561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:52:42.137583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:52:42.137587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:52:42.137659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:42.165798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:42.165844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:42.166224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.168396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:42.284325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.499463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:42.813890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:52:43.055647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:43.396883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1726:3347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.396931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:43.400707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.605975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.867361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.105675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.354455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.593843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.893012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2296:3791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.893041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.893103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2301:3796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.893942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:52:45.062475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2303:3798], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:52:45.278977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.537390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:52:45.862268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 GRpc shutdown warning: left infly: 1, spent: 3.050418 sec GRpc shutdown warning: left infly: 1, spent: 6.105885 sec GRpc shutdown warning: left infly: 1, spent: 9.137809 sec GRpc shutdown warning: left infly: 1, spent: 12.167253 sec GRpc shutdown warning: left infly: 1, spent: 15.2115 sec GRpc shutdown warning: left infly: 1, spent: 18.261012 sec GRpc shutdown warning: left infly: 1, spent: 21.363135 sec GRpc shutdown warning: left infly: 1, spent: 24.506249 sec GRpc shutdown warning: left infly: 1, spent: 27.548416 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.009446 sec Trying to start YDB, gRPC: 21713, MsgBus: 24367 2024-11-21T08:53:16.878999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:16.879032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:53:16.879052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a50/r3tmp/tmpduWTcP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21713, node 2 TClient is connected to server localhost:24367 TClient is connected to server localhost:24367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:16.991015Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:16.991036Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:16.991040Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:16.991132Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:17.032909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:17.032942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:17.033318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:17.034112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:17.148111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:17.345372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:17.631849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:53:17.888253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:18.174884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1725:3344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.174926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.176979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:53:18.390046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:18.647337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:53:18.893377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:19.138565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:53:19.376043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:53:19.668049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2296:3787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:19.668087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:19.668128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2301:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:19.669303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:53:19.848622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2303:3794], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:53:20.007828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:53:20.236131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:53:20.541953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceAndDropWithReboots >> TUserAttrsTestWithReboots::Reboots [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::Reboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:09.517201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:09.517226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:09.517231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:09.517236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:09.517249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:09.517253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:09.517263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:09.517341Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:09.528577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:09.528606Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:09.530884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:09.530970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:09.530996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:09.533432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:09.533489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:09.533599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:09.533836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:09.534576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:09.534821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:09.534829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:09.534839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:09.534846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:09.534852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:09.534886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:09.536076Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:09.551813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:09.551871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.551916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:09.551954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:09.551961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.552474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:09.552492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:09.552518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.552537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:09.552541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:09.552545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:09.552852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.552860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:09.552864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:09.553143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.553150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.553155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:09.553159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:09.553636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:09.553970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:09.554008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:09.554154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:09.554175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:09.554181Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:09.554223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:09.554229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:09.554253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:09.554263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:09.554599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:09.554607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:09.554631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:09.554635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:09.554692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:09.554697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:09.554706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:09.554710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:09.554715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:09.554719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:09.554723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:09.554726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:09.554735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:09.554739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:09.554743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
-- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:53:24.189622Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:53:24.189659Z node 60 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:53:24.189668Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:53:24.189671Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [60:363:2355] TestWaitNotification: OK eventTxId 1005 2024-11-21T08:53:24.189709Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:24.189727Z node 60 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 24us result status StatusSuccess 2024-11-21T08:53:24.189776Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 1006 2024-11-21T08:53:24.190095Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirB" UserAttributes { Key: "AttrA3" } UserAttributes { Key: "AttrA1" } } } TxId: 1006 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:24.190106Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirB, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190115Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T08:53:24.190126Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:24.190129Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 1006:0, at 
schemeshard: 72057594046678944 2024-11-21T08:53:24.190454Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190468Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirB 2024-11-21T08:53:24.190484Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190487Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190491Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T08:53:24.190502Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:24.190768Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T08:53:24.190784Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000006 2024-11-21T08:53:24.190835Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190845Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 257698039914 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190849Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 1006:0, stepId:5000006, at schemeshard: 72057594046678944 2024-11-21T08:53:24.190865Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:53:24.190867Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:24.190872Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:24.190877Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T08:53:24.190881Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:24.190884Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:24.190886Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:53:24.190888Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:53:24.190893Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:24.190896Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 1, subscribers: 0 2024-11-21T08:53:24.190899Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 6 2024-11-21T08:53:24.191223Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:24.191229Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:53:24.191244Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.191246Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 3 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T08:53:24.191293Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:24.191298Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:24.191301Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:24.191303Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2024-11-21T08:53:24.191306Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:24.191314Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:53:24.191619Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:53:24.191651Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:53:24.191654Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:53:24.191691Z node 60 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:53:24.191698Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:53:24.191701Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [60:380:2372] TestWaitNotification: OK eventTxId 1006 2024-11-21T08:53:24.191740Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T08:53:24.191754Z node 60 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 19us result status StatusSuccess 2024-11-21T08:53:24.191784Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TopicService::RelativePath >> HttpRequest::Status [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> KqpPg::V1CreateTable [GOOD] >> KqpPg::TempTablesSessionsIsolation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2024-11-21T08:53:19.548130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:19.548169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:19.548177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0038cc/r3tmp/tmp0ZmeLo/pdisk_1.dat 2024-11-21T08:53:19.635222Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22846, node 1 2024-11-21T08:53:19.732744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:19.732765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:19.732769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:19.732878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:19.739443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:19.816143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:19.816177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:19.828036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31262 2024-11-21T08:53:20.239863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:21.097567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.097600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.131078Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:21.131995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.182425Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:21.190513Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:21.190534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:21.196265Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:21.196379Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:21.196393Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:21.196396Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:21.196400Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:21.196404Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:21.196407Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:21.196411Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:21.196490Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:21.372336Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.372359Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.374044Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:21.376625Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:21.376759Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:21.377690Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:21.382717Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:21.382740Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:21.382753Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:21.385910Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.385943Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.387360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:21.389163Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:21.389194Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:21.392139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:21.405255Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.427845Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:21.550994Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:21.718897Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:22.454540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.454579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.458253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:22.515243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:22.515310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:22.515360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:22.515388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:22.515408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:22.515428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:22.515448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:22.515468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:22.515495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:22.515516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:22.515536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:22.515557Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:22.523621Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:22.523657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:22.523707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:22.523728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:22.523749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:22.523771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... ::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:22.577453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:22.577480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:22.577485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:22.577501Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:22.577507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:22.577518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:22.577524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:22.577541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:22.577547Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:22.577559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:53:22.577564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:22.577606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:22.577612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:22.577621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:22.577627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:22.577642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:22.577647Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:22.577656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:22.577662Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:22.577671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:22.577677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:22.577686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:22.577691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:22.577735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:22.577740Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:22.577756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:22.577762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:22.577773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:22.577778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:22.577793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:22.577799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:22.577809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:53:22.577814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:23.597578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2925:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:23.597647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:23.598842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T08:53:24.258700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3072:3165], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:24.258739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:24.260994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000012s 2024-11-21T08:53:26.084758Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3400:3610] 2024-11-21T08:53:26.085442Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> 
TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TDSProxyGetTest::TestBlock42WipedErrorWithTwoBlobs [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_Erasure4Plus2Block >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> TDSProxyPatchTest::NaiveErrorOnPut_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestBlock42PutAllOk >> TDSProxyPutTest::TestBlock42PutAllOk [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block >> TDSProxyGetTest::TestBlock42GetIntervalsWipedError [GOOD] >> TDSProxyPatchTest::SecuredOk_Erasure4Plus2Block >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> TDSProxyPatchTest::SecuredOk_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureMirror3dc >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === 
Server->StartServer(false); 2024-11-21T08:53:26.337313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653044653357576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.337495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.353566Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653042592153862:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.353759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.373943Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.375183Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e42/r3tmp/tmpPmzY2I/pdisk_1.dat 2024-11-21T08:53:26.413695Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26090, node 1 2024-11-21T08:53:26.435168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002e42/r3tmp/yandex0sYSSD.tmp 2024-11-21T08:53:26.435181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002e42/r3tmp/yandex0sYSSD.tmp 2024-11-21T08:53:26.435264Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002e42/r3tmp/yandex0sYSSD.tmp 2024-11-21T08:53:26.435310Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.436636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.436659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.437883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.438352Z INFO: TTestServer started on Port 17472 GrpcPort 26090 TClient is connected to server localhost:17472 PQClient connected to localhost:26090 === TenantModeEnabled() = 1 === Init PQ - start server on port 26090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:26.478362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.478386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.480515Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.488277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.492631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:26.492686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.492758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:26.492812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:26.492825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:26.500651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:26.500662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:26.500666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T08:53:26.501307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T08:53:26.501753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.501776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.502594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:26.503144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T08:53:26.503176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:26.503763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179206548, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439653044653358130 RawX2: 4294969646 } } Step: 1732179206548 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.503873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T08:53:26.503883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.503911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:26.503919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:26.504118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.504125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: false 2024-11-21T08:53:26.504129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is 
registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.504392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:26.504400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:26.504446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:26.504449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653044653358158:2374], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T08:53:26.504456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.504461Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T08:53:26.504473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T08:53:26.504475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.504485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T08:53:26.504489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.504491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T08:53:26.504494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T08:53:26.504505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:26.504509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T08:53:26.504512Z node 1 :FLAT_TX_SCHEMESHAR ... node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.061254Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653052942170928:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.061257Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.061443Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.061478Z node 3 :PERSQUEUE INFO: new Cookie 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T08:53:28.061694Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 2024-11-21T08:53:28.062213Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 grpc read done: success: 0 data: 2024-11-21T08:53:28.062229Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 grpc read failed 2024-11-21T08:53:28.062319Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 2024-11-21T08:53:28.062328Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8d24f082-ad5ade0e-662d85fe-dffafc2f_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2024-11-21T08:53:28.062443Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2024-11-21T08:53:28.070208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:42590" , at schemeshard: 72057594046644480 2024-11-21T08:53:28.070271Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.070306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T08:53:28.070316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.070384Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:28.070402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.070432Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2024-11-21T08:53:28.070441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T08:53:28.070463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T08:53:28.070488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2024-11-21T08:53:28.070508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2024-11-21T08:53:28.070528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T08:53:28.070534Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2024-11-21T08:53:28.070540Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2024-11-21T08:53:28.070543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2024-11-21T08:53:28.071389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:28.071447Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, add access: -():test_user_0@builtin:-, add access: -():test_user_1@builtin:-, add access: -():test_user_2@builtin:- 2024-11-21T08:53:28.071517Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:28.071525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.071579Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:28.071583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439653048647202781:2351], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2024-11-21T08:53:28.071750Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.071760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.071762Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:53:28.071766Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T08:53:28.071771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T08:53:28.071801Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2024-11-21T08:53:28.072263Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.072561Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.072574Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T08:53:28.072687Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2024-11-21T08:53:28.072709Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:42578 2024-11-21T08:53:28.072719Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42578 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T08:53:28.072722Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.072953Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T08:53:28.073004Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:28.073005Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.073007Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.073021Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653052942170958:2343] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.073025Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.073147Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.073196Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2024-11-21T08:53:28.073299Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 2024-11-21T08:53:28.074695Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T08:53:28.074768Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T08:53:28.074778Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 2024-11-21T08:53:28.074862Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|2d9c82e3-17471b8a-294462a0-30fa07ca_0 is DEAD 2024-11-21T08:53:28.074955Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:26.078145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653041340227542:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.078778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.082816Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653042185514426:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e51/r3tmp/tmpAvHDRK/pdisk_1.dat 2024-11-21T08:53:26.108483Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.110607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.108448Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.142136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11671, node 1 2024-11-21T08:53:26.163901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002e51/r3tmp/yandextuEl1P.tmp 2024-11-21T08:53:26.163916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002e51/r3tmp/yandextuEl1P.tmp 2024-11-21T08:53:26.163979Z node 1 :NET_CLASSIFIER WARN: 
successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002e51/r3tmp/yandextuEl1P.tmp 2024-11-21T08:53:26.163996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.167027Z INFO: TTestServer started on Port 28642 GrpcPort 11671 TClient is connected to server localhost:28642 PQClient connected to localhost:11671 === TenantModeEnabled() = 1 === Init PQ - start server on port 11671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:26.184276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.184309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:26.191175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.209567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.209604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.212603Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.212990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.219094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:26.219159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.219241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:26.219294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:26.219310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.220158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 
SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:26.220190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:26.220270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.220279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:26.220281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:26.220286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:53:26.220746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.220751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:26.220754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:26.220808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.220813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T08:53:26.220816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.221118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.221128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.221131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.221135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.221916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:26.222364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T08:53:26.222395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:26.222955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179206268, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.222985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439653041340227958 RawX2: 4294969649 } } Step: 1732179206268 
MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:26.222990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.223053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:26.223058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.223084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:26.223092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:26.223465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:26.223472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:26.223509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:26.223513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653041340227979:2371], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T08:53:26.223520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.223524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:26.223534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:26.223537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.223541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T08:53:26.223544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.223547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:26.223549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:53:26.223558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:26.223562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:26.223564Z node 1 :FLAT_TX_SCHEMESHARD ... 
Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.006476Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.006486Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653049912462612:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.006489Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.006593Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.006630Z node 3 :PERSQUEUE INFO: new Cookie 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T08:53:28.006746Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 2024-11-21T08:53:28.007077Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 grpc read done: success: 0 data: 2024-11-21T08:53:28.007085Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 grpc read failed 2024-11-21T08:53:28.007170Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 2024-11-21T08:53:28.007179Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|be6d7bab-7c553754-88befa3c-5d4f3b77_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2024-11-21T08:53:28.007254Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2024-11-21T08:53:28.014291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:49562" , at schemeshard: 72057594046644480 2024-11-21T08:53:28.014348Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.014376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T08:53:28.014383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.014427Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:28.014437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.014459Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2024-11-21T08:53:28.014467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T08:53:28.014484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T08:53:28.014506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2024-11-21T08:53:28.014513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2024-11-21T08:53:28.014516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T08:53:28.014519Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2024-11-21T08:53:28.014523Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2024-11-21T08:53:28.014526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2024-11-21T08:53:28.015161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:28.015206Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, add access: -():test_user@builtin:- 2024-11-21T08:53:28.015252Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:28.015260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.015301Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:28.015310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439653045617494493:2373], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2024-11-21T08:53:28.015446Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.015461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.015464Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:53:28.015467Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T08:53:28.015470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T08:53:28.015493Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2024-11-21T08:53:28.015882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2024-11-21T08:53:28.016742Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.016753Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T08:53:28.016879Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2024-11-21T08:53:28.016908Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:49560 2024-11-21T08:53:28.016912Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49560 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T08:53:28.016916Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.017186Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T08:53:28.017234Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:28.017242Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.017243Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.017256Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653049912462642:2343] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.017260Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.017447Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.017484Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2024-11-21T08:53:28.017553Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 2024-11-21T08:53:28.017944Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T08:53:28.018046Z node 3 :PQ_WRITE_PROXY INFO: updating token 2024-11-21T08:53:28.018060Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.018203Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 describe result for acl check 2024-11-21T08:53:28.018224Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 2024-11-21T08:53:28.018302Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|8f4d57f-471aa1e-49ce8f5b-8c0580ce_0 is DEAD 2024-11-21T08:53:28.018388Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:26.353836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653042607059192:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.354368Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653044264671302:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e16/r3tmp/tmpAuJEkL/pdisk_1.dat 2024-11-21T08:53:26.384345Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.385248Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.386201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.387882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.420105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1469, node 1 2024-11-21T08:53:26.440238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002e16/r3tmp/yandexHu7cgO.tmp 2024-11-21T08:53:26.440250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/jptk/002e16/r3tmp/yandexHu7cgO.tmp 2024-11-21T08:53:26.440734Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002e16/r3tmp/yandexHu7cgO.tmp 2024-11-21T08:53:26.440779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.445389Z INFO: TTestServer started on Port 28218 GrpcPort 1469 2024-11-21T08:53:26.450433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.450461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.453564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28218 PQClient connected to localhost:1469 === TenantModeEnabled() = 1 === Init PQ - start server on port 1469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:26.484379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.484409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.485088Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.488368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.495978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:26.496040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.496104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:26.496153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:26.496160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.499984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:26.500058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:26.500075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:26.500078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T08:53:26.500882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:26.500897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T08:53:26.501289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.501303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.501309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.502202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:26.502600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T08:53:26.502647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:26.503149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T08:53:26.503158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179206548, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439653044264671699 RawX2: 4294969647 } } Step: 1732179206548 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.503343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T08:53:26.503350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:26.503380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:26.503400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:26.503805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:26.503858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653044264671719:2368], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T08:53:26.503868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.503872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T08:53:26.503883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T08:53:26.503885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.503891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T08:53:26.503895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:26.503898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T08:53:26.503900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T08:53:26.503909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:26.503913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T08:53:26.503915Z node 1 :FLAT_TX_SCHEMESHARD DEB ... 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:27.975629Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975652Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976720663 2024-11-21T08:53:27.975655Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T08:53:27.975658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T08:53:27.975692Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975710Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976720663 2024-11-21T08:53:27.975711Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720663, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T08:53:27.975712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2024-11-21T08:53:27.975725Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.975731Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720663 2024-11-21T08:53:27.975732Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720663, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2024-11-21T08:53:27.975733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2024-11-21T08:53:27.975738Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720663, subscribers: 1 2024-11-21T08:53:27.975740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, 
to actorId: [3:7439653044999066669:2324] 2024-11-21T08:53:27.976261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.976281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720663 2024-11-21T08:53:27.976289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720663 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2024-11-21T08:53:28.079155Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.079176Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T08:53:28.079366Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2024-11-21T08:53:28.079394Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:42038 2024-11-21T08:53:28.079406Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42038 proto=v1 topic=Root/acc/topic1 durationSec=0 2024-11-21T08:53:28.079411Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.079946Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:53:28.080015Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:28.080024Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.080026Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.080043Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653049294034165:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.080049Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.080225Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.080297Z node 3 :PERSQUEUE INFO: new Cookie 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T08:53:28.080514Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 Finish: 0 === InitializeWritePQService done === PersQueueClient 2024-11-21T08:53:28.081011Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 grpc read done: success: 0 data: 2024-11-21T08:53:28.081019Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 grpc read failed 2024-11-21T08:53:28.081086Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 2024-11-21T08:53:28.081091Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|36b71292-c24d33c-ed0e70d0-427ef821_0 is DEAD 2024-11-21T08:53:28.081207Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2024-11-21T08:53:28.084161Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.084175Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T08:53:28.084341Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2024-11-21T08:53:28.084364Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:42038 2024-11-21T08:53:28.084374Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42038 proto=v1 topic=topic1 durationSec=0 2024-11-21T08:53:28.084378Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.084646Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T08:53:28.084699Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:28.084706Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.084707Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND 
ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.084722Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653049294034176:2341] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.084732Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.084878Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T08:53:28.084916Z node 3 :PERSQUEUE INFO: new Cookie 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T08:53:28.085055Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 2024-11-21T08:53:28.085482Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 grpc read done: success: 0 data: 2024-11-21T08:53:28.085494Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 grpc read failed 2024-11-21T08:53:28.085500Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 grpc closed 2024-11-21T08:53:28.085503Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|a0e2da6d-89a6ca98-57aca09a-cc3bd502_0 is DEAD 2024-11-21T08:53:28.085732Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] >> TUserAttrsTestWithReboots::InSubdomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_0_0_VdiskErrors [GOOD] Test command err: 2024-11-21T08:53:28.359541Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:53:28.359607Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:53:28.359612Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:53:28.359615Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:28.359618Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:53:28.359621Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:53:28.359623Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:53:28.362460Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# 
[0:1:0:1:0] Marker# BPP01 2024-11-21T08:53:28.362508Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:28.362513Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:53:28.362552Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T08:53:28.362585Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T08:53:28.362612Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T08:53:28.362617Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:28.362620Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:53:28.362646Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:53:28.362653Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:53:28.362657Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:53:28.362662Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:53:28.362666Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:53:28.362699Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T08:53:28.362707Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:2:0] Marker# BPP01 2024-11-21T08:53:28.362720Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T08:53:28.362726Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> KqpPg::TempTablesWithCache [GOOD] |88.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> HttpRequest::AnalyzeServerless [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache [GOOD] Test command err: Trying to start YDB, gRPC: 22109, MsgBus: 14644 2024-11-21T08:53:02.965734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652940963496517:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:02.965881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004504/r3tmp/tmpcD2hoi/pdisk_1.dat 2024-11-21T08:53:03.009820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22109, node 1 2024-11-21T08:53:03.028979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:03.028996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:03.028998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:03.029037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14644 TClient is connected to server localhost:14644 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:03.065810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:03.065848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:53:03.066974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:03.074992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:53:03.244111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2024-11-21T08:53:03.313729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2024-11-21T08:53:03.323644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 abcd 2024-11-21T08:53:03.389796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T08:53:03.417839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2024-11-21T08:53:03.437774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2024-11-21T08:53:03.508068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2024-11-21T08:53:03.523291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2024-11-21T08:53:03.538241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2024-11-21T08:53:03.565068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T08:53:03.584881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2024-11-21T08:53:03.606267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T08:53:03.635273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2024-11-21T08:53:03.650811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2024-11-21T08:53:03.665379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2024-11-21T08:53:03.683907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T08:53:03.711047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2024-11-21T08:53:03.770881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2024-11-21T08:53:03.830784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2024-11-21T08:53:03.842127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2024-11-21T08:53:03.854705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2024-11-21T08:53:03.880927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T08:53:03.909901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2024-11-21T08:53:03.936683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T08:53:03.965026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgnumeric_17472595041006102391_7644398022171395976'Unable to coerce value for pgnumeric: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: numeric field overflow DETAIL: A field with precision 2, scale 0 must round to an absolute value less than 10^2. 2024-11-21T08:53:03.978627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... de 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:27.661110Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:27.661112Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:27.661173Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1717 TClient is connected to server localhost:1717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:27.732576Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.732606Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.733679Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.736053Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:27.927954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653047659457825:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.927954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653047659457836:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.927993Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.928684Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:27.930867Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653047659457839:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:28.006043Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2024-11-21T08:53:28.043343Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found 2024-11-21T08:53:28.044991Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653051954425445:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:28.045079Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzI0MDk4NmQtNjQ2MWI3NGQtNmJjOGY0YzEtZjA4ZjFjNDk=, ActorId: [6:7439653047659457806:2297], ActorState: ExecuteState, TraceId: 01jd6yt9vb4nppwe67xzqj0cy8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:28.051417Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653051954425457:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:28.051498Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Y2QyMzk4YTgtYTUyYmYwNGUtMzY5ZTYxYjgtNDFhMjRmYWQ=, ActorId: [6:7439653051954425453:2337], ActorState: ExecuteState, TraceId: 01jd6yt9vhewgkfejk56dye23m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 14755, MsgBus: 2187 2024-11-21T08:53:28.334431Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653050551546740:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004504/r3tmp/tmpdIf3LH/pdisk_1.dat 2024-11-21T08:53:28.340224Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 14755, node 7 2024-11-21T08:53:28.350683Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:28.358458Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:28.358474Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:28.358476Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:28.358522Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2187 TClient is connected to server localhost:2187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:28.432981Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:28.433010Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:28.434045Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:28.437346Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:28.672653Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653050551547192:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:28.672654Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653050551547203:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:28.672682Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:28.673466Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:28.675313Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653050551547206:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:28.741570Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.762703Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2024-11-21T08:53:28.767447Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.821598Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.860339Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480 2024-11-21T08:53:28.930650Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2024-11-21T08:53:28.944997Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2024-11-21T08:53:28.948307Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439653050551547977:2414], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:28.948390Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=N2RkNGQwN2QtOWQyNGQzYmQtNzRjMzA4LTk3YmFlOGE1, ActorId: [7:7439653050551547975:2413], ActorState: ExecuteState, TraceId: 01jd6ytaqjb6vgnxkkkwb093y8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::InSubdomain [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:06.271838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:06.271865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:06.271870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:06.271874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:06.271890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:06.271894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:06.271902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:06.272000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:06.283118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:06.283143Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:06.285906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:06.286023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:06.286056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:06.289087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:53:06.289163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:06.289281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:06.289494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:06.290226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:06.290513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:06.290523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:06.290536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:06.290545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:06.290551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:06.290593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:06.291855Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:06.307065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:06.307138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.307229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:06.307276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:06.307284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.308430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:06.308457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:06.308507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.308528Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:06.308532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:06.308537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:06.308988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.308997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:06.309001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:06.309312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.309321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.309327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:06.309334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:06.309920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:06.310673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:06.310740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:06.310959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:06.310987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:06.310995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:06.311051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:06.311058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:06.311094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:06.311108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T08:53:06.312349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:06.312364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:06.312410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:06.312416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:06.312505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:06.312513Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:06.312526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:06.312531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:06.312538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:06.312543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:06.312548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:06.312552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:06.312565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:06.312572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:06.312576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
eady parts: 1/1 2024-11-21T08:53:28.794669Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:53:28.794673Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:28.794678Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:53:28.794682Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:53:28.794712Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:53:28.794719Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T08:53:28.794725Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T08:53:28.794729Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:53:28.794835Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.794846Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.794851Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:28.794855Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T08:53:28.794859Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:28.794983Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.794994Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.794998Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:28.795002Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:28.795006Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:53:28.795015Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:53:28.795598Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:28.795611Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2024-11-21T08:53:28.795616Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:53:28.795737Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.795850Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:53:28.796012Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:28.796069Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:53:28.796375Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:53:28.796454Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:53:28.796495Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:28.796542Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T08:53:28.796873Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:53:28.796912Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:28.797123Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:28.797131Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:28.797153Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:28.797354Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:28.797387Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:28.797392Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:28.797403Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:28.797493Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:53:28.797500Z node 89 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:53:28.797850Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:53:28.797859Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:53:28.797874Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:53:28.797879Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:53:28.798228Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:28.798249Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409547 TestWaitNotification wait txId: 1004 2024-11-21T08:53:28.798318Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:53:28.798325Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:53:28.798385Z node 89 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:53:28.798400Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:53:28.798405Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [89:536:2491] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:53:28.798472Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:28.798504Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 41us result status StatusPathDoesNotExist 2024-11-21T08:53:28.798533Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:53:28.798578Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:28.798593Z node 89 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 17us result status StatusSuccess 2024-11-21T08:53:28.798647Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:27.095620Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653048627458570:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.095767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.094094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653048132887468:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.094204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002df3/r3tmp/tmpyoWWT9/pdisk_1.dat 2024-11-21T08:53:27.121053Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.126303Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.146906Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23016, node 1 2024-11-21T08:53:27.160557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002df3/r3tmp/yandexcT6CRG.tmp 2024-11-21T08:53:27.160571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002df3/r3tmp/yandexcT6CRG.tmp 2024-11-21T08:53:27.160624Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002df3/r3tmp/yandexcT6CRG.tmp 2024-11-21T08:53:27.160660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:27.164257Z INFO: TTestServer started on Port 5776 GrpcPort 23016 TClient is connected to server localhost:5776 PQClient connected to 
localhost:23016 === TenantModeEnabled() = 1 === Init PQ - start server on port 23016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:27.192949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.192984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.194532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.221246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.221275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.222326Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:27.222604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.230554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:27.230621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.230689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:27.230754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:27.230767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.231350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.231378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:27.231433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.231448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:27.231451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:27.231455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2024-11-21T08:53:27.231850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.231860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:27.231862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T08:53:27.232001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.232010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T08:53:27.232013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.232154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.232162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.232166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.232170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.232823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:27.233221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T08:53:27.233257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:27.233759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179207283, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.233788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439653048132887862 RawX2: 4294969638 } } Step: 1732179207283 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:27.233798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.233855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976720657:0 128 -> 240 2024-11-21T08:53:27.233865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.233897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:27.233911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:27.234222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:27.234230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:27.234265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:27.234271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653048132887910:2377], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T08:53:27.234276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.234279Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T08:53:27.234287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T08:53:27.234293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.234296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T08:53:27.234298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.234300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T08:53:27.234302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T08:53:27.234310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:27.234317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T08:53:27.234318Z node 1 :FLAT_TX_SCHEMESHARD D ... get = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:28.890540Z :INFO: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] [null] Closing session to cluster: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500003 " } 2024-11-21T08:53:28.890578Z :NOTICE: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:28.890588Z :DEBUG: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] [null] Abort session to cluster Got new read session event: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500003 " } 2024-11-21T08:53:28.890599Z :INFO: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:28.890608Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:53:28.890614Z :INFO: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:28.890621Z :NOTICE: [/Root] [/Root] [fe9a19ee-56b08eb-bca4af5d-7750d40c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:28.890657Z :INFO: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Starting read session 2024-11-21T08:53:28.890661Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Starting session to cluster null (localhost:18391) 2024-11-21T08:53:28.890679Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:28.890681Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:28.890683Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T08:53:28.890866Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] Successfully connected. Initializing session 2024-11-21T08:53:28.890987Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.890995Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2024-11-21T08:53:28.891111Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2024-11-21T08:53:28.891144Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 read init: from# ipv6:[::1]:34360, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2024-11-21T08:53:28.891177Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 auth for : consumer_aba 2024-11-21T08:53:28.891318Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 Handle describe topics response 2024-11-21T08:53:28.891338Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 auth is DEAD 2024-11-21T08:53:28.891340Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 auth ok: topics# 1, initDone# 0 2024-11-21T08:53:28.891581Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 register session: topic# /Root/account1/write_topic 2024-11-21T08:53:28.891657Z :INFO: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] Server session id: consumer_aba_3_2_1291437551473698528_v1 2024-11-21T08:53:28.891715Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:28.891810Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T08:53:28.891873Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 got read request: guid# 4fcb29f6-53cbc0ed-1f2543bf-75ac5820 2024-11-21T08:53:28.891851Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7439653050057616741:2359] connected; active server actors: 1 2024-11-21T08:53:28.892026Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: 
[72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7439653050057616741:2359] session consumer_aba_3_2_1291437551473698528_v1 2024-11-21T08:53:28.892047Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2024-11-21T08:53:28.892072Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2024-11-21T08:53:28.892089Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_1291437551473698528_v1" (Sender=[3:7439653050057616738:2359], Pipe=[3:7439653050057616741:2359], Partitions=[], ActiveFamilyCount=0) 2024-11-21T08:53:28.892093Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2024-11-21T08:53:28.892109Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T08:53:28.892118Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_1291437551473698528_v1" (Sender=[3:7439653050057616738:2359], Pipe=[3:7439653050057616741:2359], Partitions=[], ActiveFamilyCount=0) 2024-11-21T08:53:28.892133Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_1291437551473698528_v1" sender [3:7439653050057616738:2359] lock partition 0 for ReadingSession "consumer_aba_3_2_1291437551473698528_v1" (Sender=[3:7439653050057616738:2359], Pipe=[3:7439653050057616741:2359], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T08:53:28.892153Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T08:53:28.892161Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000049s 2024-11-21T08:53:28.892343Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_1291437551473698528_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7439653050057616741 RawX2: 4503612512274743 } Path: "/Root/account1/write_topic" } 2024-11-21T08:53:28.892373Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2024-11-21T08:53:28.892464Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1 2024-11-21T08:53:28.892492Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_1291437551473698528_v1:1 with generation 1 2024-11-21T08:53:28.894470Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1732179208789 CreateTimestampMS: 1732179208789 SizeLag: 165 WriteTimestampEstimateMS: 1732179208789 } Cookie: 18446744073709551615 } 2024-11-21T08:53:28.894490Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2024-11-21T08:53:28.894509Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2024-11-21T08:53:28.894778Z :INFO: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:28.894791Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2024-11-21T08:53:28.894799Z :INFO: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:28.894819Z :NOTICE: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:53:28.894826Z :DEBUG: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] [null] Abort session to cluster 2024-11-21T08:53:28.894948Z :NOTICE: [/Root] [/Root] [98883c7a-c4003a60-583dc8db-a843e7fe] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:28.895091Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 grpc read done: success# 0, data# { } 2024-11-21T08:53:28.895101Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 grpc read failed 2024-11-21T08:53:28.895106Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 grpc closed 2024-11-21T08:53:28.895118Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1291437551473698528_v1 is DEAD 2024-11-21T08:53:28.895198Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_1291437551473698528_v1 2024-11-21T08:53:28.895471Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7439653050057616741:2359] disconnected; active server actors: 1 2024-11-21T08:53:28.895488Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7439653050057616741:2359] client consumer_aba disconnected session consumer_aba_3_2_1291437551473698528_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2024-11-21T08:53:19.673517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:19.673572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:19.673584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0038b4/r3tmp/tmp05zlxZ/pdisk_1.dat 2024-11-21T08:53:19.752958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11815, node 1 2024-11-21T08:53:19.847728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:19.847748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:19.847751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:19.847820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:19.852782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:19.929108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:19.929145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:19.940954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3526 2024-11-21T08:53:20.345187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:21.144180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.144223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.177915Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:21.178911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.236990Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:21.247898Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:21.247928Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:21.255177Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:21.255364Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:21.255385Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:21.255391Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:21.255397Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:21.255403Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:21.255409Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:21.255416Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:21.255534Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:21.433460Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.433490Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.434817Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:21.437013Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:21.437135Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:21.438063Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:53:21.442740Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:21.442758Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:21.442769Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:53:21.444660Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.444693Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.446164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:21.447960Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:21.447998Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:21.451357Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:21.463625Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.485937Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:21.600071Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:21.767226Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:53:22.497511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:23.116011Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:23.231058Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:53:23.231085Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:23.231101Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:23.231326Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2903] 2024-11-21T08:53:23.231371Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2903], schemeshard id = 72075186224037899 2024-11-21T08:53:24.026757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2619:3193], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:24.026816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:24.030466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T08:53:24.089270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:24.089349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:24.089398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:24.089439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:24.089459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:24.089478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:24.089504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:24.089524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:24.089544Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:24.089564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:24.089584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:24.089604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2771:3041];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:24.098755Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2777:3042];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:24.098796Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:2777: ... hed;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:24.143468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:24.143487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:24.143493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:24.143505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:24.143511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:24.143528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:24.143533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:24.143545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:53:24.143551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:24.145787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:24.145813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:24.145826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:24.145832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:24.145852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:24.145859Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:24.145869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:24.145877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:24.145888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:24.145894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:24.145906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:24.145913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:24.145967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:24.145975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:24.146017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:24.146025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:24.146039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:24.146046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:24.146062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:24.146068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:24.146080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2024-11-21T08:53:24.146085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:25.573768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3417:3292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:25.573819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:25.576368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2024-11-21T08:53:26.686182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3575:3339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.686224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.688864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 waiting actualization: 0/0.000019s 2024-11-21T08:53:28.986881Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3906:3829] 2024-11-21T08:53:28.987585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3902:3427] , Record { OperationId: "\000\000\000\000\031\304_\244\226$^\375\024\367t\206" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2024-11-21T08:53:28.987602Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=_$^t 2024-11-21T08:53:28.987609Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=_$^t , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. OperationId: 00000006e4byj9c92yzmafex46' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:26.321154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653044274421966:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.321277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.331230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653040769367711:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e27/r3tmp/tmpmIl5ji/pdisk_1.dat 2024-11-21T08:53:26.378033Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.378867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.386143Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.411373Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:26.420079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.420109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.421351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6738, node 1 2024-11-21T08:53:26.427344Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002e27/r3tmp/yandexwHt4pD.tmp 2024-11-21T08:53:26.427358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002e27/r3tmp/yandexwHt4pD.tmp 2024-11-21T08:53:26.427434Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002e27/r3tmp/yandexwHt4pD.tmp 2024-11-21T08:53:26.427475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.431150Z INFO: TTestServer started on Port 13442 GrpcPort 6738 TClient is connected to server localhost:13442 PQClient connected to localhost:6738 === TenantModeEnabled() = 1 === Init PQ - start server on port 6738 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:26.478147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.478179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.480575Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.480976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.483604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:26.483654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.483713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:26.483790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:26.483802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.484321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.484343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:26.484382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.484389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:26.484391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:26.484394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:26.484936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.484951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:26.484954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:26.485436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.485446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.485451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.485456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.486188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:26.486597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T08:53:26.486636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:26.487285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179206534, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:26.487327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439653044274422376 RawX2: 4294969648 } } Step: 1732179206534 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:26.487344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.487401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:26.487413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:26.487453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:26.487467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:26.487846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:26.487857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:26.487896Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:26.487907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653044274422394:2367], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T08:53:26.487916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.487925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:26.487934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:26.487941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.487945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T08:53:26.487948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:53:26.487952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:26.487955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:53:26.487965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:26.487974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:53:26.487976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:53:26.488421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:26.488443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg ... 
d: 10] was 4 2024-11-21T08:53:28.049986Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 ===Make write stream 2024-11-21T08:53:28.050528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2024-11-21T08:53:28.050585Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:28.050598Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T08:53:28.050735Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2024-11-21T08:53:28.050767Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:49302 2024-11-21T08:53:28.050779Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49302 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T08:53:28.050784Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:28.051100Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T08:53:28.051150Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:28.051162Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:28.051165Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:28.051178Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653053199567277:2338] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:28.051182Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:28.051466Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 4, Generation: 1 2024-11-21T08:53:28.051612Z node 4 :PERSQUEUE INFO: new Cookie test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2024-11-21T08:53:28.051900Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 ===Assert streaming op1 ===Assert streaming op2 2024-11-21T08:53:28.052370Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:53:28.052476Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:53:28.052712Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:53:28.054045Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::IEventHandle ===ModifyAcl BEFORE MODIFY PERMISSIONS 2024-11-21T08:53:28.055768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976720666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:49312" , at schemeshard: 72057594046644480 2024-11-21T08:53:28.055823Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.055857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T08:53:28.055864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.055907Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:28.055917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T08:53:28.055940Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720666:0 progress is 1/1 2024-11-21T08:53:28.055948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720666 ready parts: 1/1 2024-11-21T08:53:28.055965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T08:53:28.055984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720666, ready parts: 1/1, is published: false 2024-11-21T08:53:28.055996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2024-11-21T08:53:28.056002Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720666 ready parts: 1/1 2024-11-21T08:53:28.056006Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720666:0 2024-11-21T08:53:28.056011Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720666, publications: 1, subscribers: 0 2024-11-21T08:53:28.056018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2024-11-21T08:53:28.056744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720666, response: Status: StatusSuccess TxId: 281474976720666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:28.056788Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: -():test_user_0@builtin:- 2024-11-21T08:53:28.056832Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:28.056840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T08:53:28.056880Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:28.056883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439653048904599152:2365], at schemeshard: 72057594046644480, txId: 281474976720666, path id: 10 2024-11-21T08:53:28.057045Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976720666 2024-11-21T08:53:28.057067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976720666 2024-11-21T08:53:28.057070Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720666 2024-11-21T08:53:28.057073Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2024-11-21T08:53:28.057078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T08:53:28.057104Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720666, subscribers: 0 ===Wait for session created with token with removed ACE to die2024-11-21T08:53:28.057560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720666 2024-11-21T08:53:28.924865Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653053199567333:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:28.924981Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDYzYjc2MDAtYTdiOTdiOTgtNGY5NDA5MDctMjE0NTFkNDQ=, ActorId: [3:7439653053199567326:2343], ActorState: ExecuteState, TraceId: 01jd6ytapsb1xepsh3bdw38a5c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:28.925265Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:29.052182Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:29.052584Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 describe result for acl check 2024-11-21T08:53:29.052637Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2024-11-21T08:53:29.052875Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|b6b60a7a-b509796e-8886ce1a-f314d601_0 is DEAD 2024-11-21T08:53:29.052978Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:27.173009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653045770346085:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.173054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.175377Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653047357001245:2242];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002de8/r3tmp/tmpmU8AyL/pdisk_1.dat 2024-11-21T08:53:27.196475Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.197835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.200613Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.223986Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5405, node 1 2024-11-21T08:53:27.235967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002de8/r3tmp/yandex6CvfuE.tmp 2024-11-21T08:53:27.235981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002de8/r3tmp/yandex6CvfuE.tmp 2024-11-21T08:53:27.236049Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002de8/r3tmp/yandex6CvfuE.tmp 2024-11-21T08:53:27.236105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:27.241684Z INFO: TTestServer started on Port 12167 GrpcPort 5405 TClient is connected to server localhost:12167 PQClient connected to localhost:5405 === TenantModeEnabled() = 1 === Init PQ - start server on port 5405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:27.272880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.272920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.274581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.296815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.296843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.298354Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:27.298638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.308106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:27.308235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.308306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:27.308355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:27.308366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.309264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.309297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:27.309356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.309372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:27.309379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:27.309383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T08:53:27.309932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.309940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:27.309943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T08:53:27.310041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.310051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T08:53:27.310055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.310311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.310320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.310324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.310328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.310893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:27.311326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T08:53:27.311390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:27.311911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179207360, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.311961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439653045770346420 RawX2: 4294969644 } } Step: 1732179207360 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:27.311971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.312033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T08:53:27.312046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T08:53:27.312080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:27.312096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:27.312470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:27.312482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:27.312525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:27.312533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653045770346457:2378], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T08:53:27.312541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.312550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T08:53:27.312562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T08:53:27.312569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.312573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T08:53:27.312576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T08:53:27.312579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T08:53:27.312581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T08:53:27.312591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:27.312600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T08:53:27.312602Z node 1 :FLAT_TX_SCHEMESHARD DEB ... 29.491732Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:29.491814Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:29.491835Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7439653057357542478:2369] destroyed 2024-11-21T08:53:29.491848Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:53:29.492327Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: try to update token 2024-11-21T08:53:29.492498Z :INFO: [] MessageGroupId [123] SessionId [] Write session: Do CDS request 2024-11-21T08:53:29.492503Z :INFO: [] MessageGroupId [123] SessionId [] Start write session. 
Will connect to endpoint: localhost:5531 2024-11-21T08:53:29.493534Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: send init request: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2024-11-21T08:53:29.493599Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:29.493614Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 4 2024-11-21T08:53:29.493760Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2024-11-21T08:53:29.493786Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 4 topic: "/Root/PQ/account/topic" message_group_id: "123" from ipv6:[::1]:34768 2024-11-21T08:53:29.493795Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=4 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:34768 proto=v1 topic=/Root/PQ/account/topic durationSec=0 2024-11-21T08:53:29.493805Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:29.494066Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: describe result for acl check 2024-11-21T08:53:29.494108Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:53:29.494115Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:29.494116Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:29.494130Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653057357542494:2375] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:53:29.494144Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:53:29.494271Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2024-11-21T08:53:29.494287Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:29.494298Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7439653057357542497:2375], now have 1 active actors on pipe 2024-11-21T08:53:29.494307Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:29.494313Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T08:53:29.494344Z node 3 :PERSQUEUE INFO: new Cookie 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2024-11-21T08:53:29.494385Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:53:29.494409Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:29.494451Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:29.494458Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T08:53:29.494474Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:29.494498Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 2024-11-21T08:53:29.494703Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179209494 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:29.494728Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0" topic: "PQ/account/topic" 2024-11-21T08:53:29.494822Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write 1 messages with Id from 1 to 1 2024-11-21T08:53:29.494842Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: try to update token 2024-11-21T08:53:29.494847Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T08:53:29.494903Z :INFO: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T08:53:29.494993Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:53:29.495103Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:53:29.495151Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:29.495166Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T08:53:29.495201Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:53:29.495216Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:53:29.495244Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:29.495252Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T08:53:29.495265Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2024-11-21T08:53:29.495296Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2024-11-21T08:53:29.495319Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2024-11-21T08:53:29.495361Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2024-11-21T08:53:29.495390Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2024-11-21T08:53:29.495447Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1732179209495 2024-11-21T08:53:29.495483Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:53:29.496424Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 2024-11-21T08:53:29.496440Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:53:29.496456Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T08:53:29.496468Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:53:29.496492Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:53:29.496714Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { } 2024-11-21T08:53:29.496724Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: acknoledged message 1 2024-11-21T08:53:29.594983Z :INFO: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session will now close 2024-11-21T08:53:29.595005Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: aborting 2024-11-21T08:53:29.595222Z :INFO: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:53:29.595231Z :DEBUG: [] MessageGroupId [123] SessionId [123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0] Write session: destroy 2024-11-21T08:53:29.595526Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 grpc read done: success: 0 data: 2024-11-21T08:53:29.595547Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 grpc read failed 2024-11-21T08:53:29.595555Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 grpc closed 2024-11-21T08:53:29.595558Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|4b5c2640-399dc374-9b6a04f7-1360e8d5_0 is DEAD 2024-11-21T08:53:29.595898Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:29.595950Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:29.595968Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7439653057357542497:2375] destroyed 2024-11-21T08:53:29.595988Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 
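Editor's note: the PQ_WRITE_PROXY/PERSQUEUE trace above shows a complete topic write-session lifecycle — session init for owner "123" on 'PQ/account/topic' (MaxSeqNo: 2), a single write with SeqNo 3 that is quota-checked, packed into a head blob at offset 2 and acknowledged (sequence_numbers: 3, already_written: false, i.e. not deduplicated), then a graceful close and DropOwner. For orientation only, below is a minimal client-side sketch that would produce roughly this sequence. It is not part of the test log: the endpoint/database values and the ydb-python-sdk topic API used here (driver.topic_client.writer, TopicWriterMessage) are assumptions by the editor, not taken from this run.

```python
# Illustrative sketch only: approximate client-side equivalent of the traced
# write session (producer "123", topic "PQ/account/topic", one message, close).
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed values
driver.wait(timeout=5)  # wait for discovery before using the topic client

# producer_id corresponds to the owner/MessageGroupId "123" in the log; the
# server deduplicates writes per producer by sequence number.
with driver.topic_client.writer("PQ/account/topic", producer_id="123") as writer:
    writer.write(ydb.TopicWriterMessage(data=b"payload"))
    writer.flush()  # block until the write is acknowledged and an offset is assigned

driver.stop()
```

In the trace, the server-assigned offset (2) and the echoed sequence number (3) in the acknowledgement are what such a flush would wait for before the session is closed.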
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:21.966270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:21.966298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:21.966303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:21.966308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:21.966320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:21.966324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:21.966334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:21.966421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:21.978972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:21.978996Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:21.981305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:21.981445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:21.981471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:21.983863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:21.983932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:21.984024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:21.984186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T08:53:21.984897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:21.985121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:21.985130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:21.985139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:21.985146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:21.985151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:21.985181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:21.986325Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:22.000136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:22.000245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.000310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:22.000371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:22.000378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.001184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:22.001206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:22.001249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.001257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:22.001260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:22.001263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:22.001688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.001700Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:22.001703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:22.002066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.002075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.002079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.002084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.002519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:22.002833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:22.002875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:22.003028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:22.003046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:22.003051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.003090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:22.003094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.003122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:22.003131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:22.003449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:22.003458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:22.003492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:22.003496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:22.003563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.003569Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:22.003577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:22.003580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.003584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:22.003588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.003591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:22.003593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:22.003602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:22.003607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:22.003609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T08:53:29.927036Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:53:29.927087Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:53:29.927093Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1005:0 ProgressState 2024-11-21T08:53:29.927100Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1005 ready parts: 1/1 2024-11-21T08:53:29.927123Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1005 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:29.927210Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.927219Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.927223Z node 32 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:29.927227Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T08:53:29.927232Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 3 2024-11-21T08:53:29.927355Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.927365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.927369Z node 32 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:29.927373Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:53:29.927381Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:29.927390Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:53:29.927893Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T08:53:29.927919Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000006 2024-11-21T08:53:29.927999Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:29.928018Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 137438955626 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:29.928025Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T08:53:29.928041Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:29.928054Z node 32 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2024-11-21T08:53:29.928077Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:29.928085Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:29.928199Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.928510Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T08:53:29.928805Z node 32 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:29.928811Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:29.928832Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:29.928853Z node 32 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:29.928857Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T08:53:29.928862Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:53:29.928910Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:53:29.928916Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T08:53:29.928927Z node 32 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:53:29.928930Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:29.928939Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T08:53:29.928944Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:29.928950Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:53:29.928953Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:53:29.928963Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:29.928969Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T08:53:29.928973Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T08:53:29.928976Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:53:29.929020Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.929028Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.929032Z node 32 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:29.929037Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:53:29.929041Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:29.929079Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:29.929084Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:53:29.929092Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:29.929109Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.929116Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.929120Z node 32 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:29.929124Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:53:29.929127Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:29.929135Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T08:53:29.929852Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:29.929873Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:29.929886Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:53:29.929932Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:53:29.929940Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:53:29.930008Z node 32 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:53:29.930025Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:53:29.930029Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [32:405:2397] TestWaitNotification: OK eventTxId 1005 >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |88.4%| [TA] 
{RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> IndexBuildTestReboots::BaseCaseWithDataColumns [GOOD] |88.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:22.145103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:22.145135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:22.145142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:22.145147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:22.145161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:22.145166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:22.145175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:22.145277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:22.157832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:22.157854Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:22.160535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:22.160658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:22.160696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:22.163784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:22.163878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T08:53:22.164005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:22.164238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:22.165066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:22.165407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:22.165441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:22.165458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:22.165466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:22.165473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:22.165523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:22.167179Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:22.187830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:22.187944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.188050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:22.188134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:22.188149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.189098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:22.189129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:22.189201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.189213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T08:53:22.189216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:22.189221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:22.189731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.189750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:22.189756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:22.190195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.190211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.190218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.190227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.190874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:22.191536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:22.191603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:22.191842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:22.191875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:22.191884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.191943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:22.191952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:22.191997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:22.192012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:22.192496Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:22.192511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:22.192563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:22.192574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:22.192673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:22.192681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:22.192693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:22.192697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.192703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:22.192708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:22.192714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:22.192718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:22.192731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:22.192739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:22.192743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
4, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.416123Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:30.416128Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:53:30.416132Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:30.416245Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.416257Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.416261Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:30.416265Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:53:30.416270Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:30.416280Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:53:30.416920Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T08:53:30.416946Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:53:30.417114Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:30.417135Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:30.417143Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T08:53:30.417163Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:30.417177Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:53:30.417200Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:30.417209Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:30.417475Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.417547Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:53:30.417860Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:30.417866Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:30.417884Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:53:30.417900Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:30.417903Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:53:30.417906Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:53:30.417941Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:30.417946Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:53:30.417955Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:53:30.417957Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:30.417960Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:53:30.417964Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:30.417967Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:53:30.417969Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:53:30.417977Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:30.417980Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T08:53:30.417983Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:53:30.417988Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:53:30.418034Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.418039Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.418042Z node 
33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:30.418045Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:30.418048Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:30.418076Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:30.418080Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:30.418085Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:30.418102Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.418107Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.418109Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:30.418111Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:53:30.418113Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:30.418118Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:53:30.418791Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:30.418820Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:30.418826Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:53:30.418855Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:53:30.418859Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:53:30.418905Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:53:30.418917Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:53:30.418922Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [33:350:2342] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:53:30.418969Z node 33 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:30.418989Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 26us result status StatusPathDoesNotExist 2024-11-21T08:53:30.419012Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots [GOOD] |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |88.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard >> BasicStatistics::Serverless >> TExternalDataSourceTestReboots::ParallelCreateDrop [GOOD] >> TopicService::RelativePath [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] 
sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:19.631844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:19.631867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:19.631872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:19.631877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:19.631889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:19.631893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:19.631902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:19.631975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:19.641036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:19.641063Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:19.643497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:19.643606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:19.643634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:19.646537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:19.646656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:19.646770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.646992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:19.647717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.647988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:19.648010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.648020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:19.648025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:19.648030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:19.648066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:19.649314Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:19.662972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:19.663059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.663121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:19.663173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:19.663179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.664004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.664049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:19.664103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.664130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:19.664135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:19.664140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:19.664590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.664602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:19.664607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:19.665079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.665095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:53:19.665102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.665109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.665773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:19.666294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:19.666356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:19.666577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.666606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:19.666613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.666670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:19.666678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.666720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:19.666733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:19.667158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:19.667169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:19.667216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.667222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:19.667314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.667323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:19.667336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T08:53:19.667341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.667347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:19.667352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.667357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:19.667361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:19.667373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:19.667380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:19.667384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 2024-11-21T08:53:31.614527Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:53:31.614530Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T08:53:31.614534Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:31.614543Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T08:53:31.614548Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [48:299:2291] 2024-11-21T08:53:31.614856Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:31.614867Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalDataSource TPropose, operationId: 1003:0ProgressState 2024-11-21T08:53:31.614874Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T08:53:31.614900Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:31.615017Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:53:31.615621Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:53:31.615659Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:53:31.615664Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [48:300:2292] 2024-11-21T08:53:31.615699Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T08:53:31.615721Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 
FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:53:31.615778Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:31.615796Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 206158432361 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:31.615802Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalDataSource TPropose, operationId: 1003:0HandleReply TEvOperationPlan: step# 5000004 2024-11-21T08:53:31.615819Z node 48 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:53:31.615835Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:31.615841Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:53:31.616223Z node 48 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:31.616230Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:53:31.616251Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:31.616265Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:31.616278Z node 48 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:31.616282Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:53:31.616287Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:53:31.616290Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:53:31.616335Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:31.616341Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:53:31.616351Z node 48 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:53:31.616354Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:31.616359Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:53:31.616364Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:31.616368Z node 48 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:53:31.616372Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:53:31.616382Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:31.616386Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T08:53:31.616389Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T08:53:31.616393Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2024-11-21T08:53:31.616516Z node 48 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.616527Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.616530Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:31.616534Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:53:31.616537Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:31.616814Z node 48 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.616826Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.616830Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:31.616835Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:53:31.616839Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:31.616850Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:53:31.616854Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [48:299:2291] 2024-11-21T08:53:31.617352Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.617679Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:31.617697Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got 
EvNotifyTxCompletionResult 2024-11-21T08:53:31.617701Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [48:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:53:31.617773Z node 48 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirExternalDataSource/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:31.617801Z node 48 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirExternalDataSource/MyExternalDataSource" took 31us result status StatusSuccess 2024-11-21T08:53:31.617855Z node 48 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirExternalDataSource/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:19.557800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:19.557829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:19.557835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:19.557841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:19.557856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:19.557860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:19.557871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:19.557954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:19.571341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:19.571369Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:19.574099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:19.574210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:19.574246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:19.577556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:19.577655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:19.577770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.577989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:19.578726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.579031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:19.579041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.579055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:19.579062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:19.579068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:19.579116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE 
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:19.580681Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:19.600652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:19.600744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.600822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:19.600900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:19.600910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.601803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.601832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:19.601888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.601900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:19.601905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:19.601910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:19.602295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.602305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:19.602310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:19.602625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.602634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.602641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.602648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.603362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:19.603789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:19.603843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:19.604064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:19.604091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:19.604100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.604162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:19.604169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:19.604234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:19.604250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:19.604659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:19.604668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:19.604713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:19.604719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:19.604808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:19.604816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:19.604828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:19.604833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.604839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:19.604845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:19.604850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 1:0 2024-11-21T08:53:19.604855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:19.604866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:19.604873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:19.604877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... eBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.859430Z node 49 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:31.859433Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:53:31.859438Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:31.859498Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.859504Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.859507Z node 49 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:31.859509Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:53:31.859511Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:31.859516Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:53:31.859770Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T08:53:31.859789Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000004 2024-11-21T08:53:31.859986Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860001Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 210453399658 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860007Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000004 
2024-11-21T08:53:31.860021Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:31.860031Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T08:53:31.860050Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:31.860055Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:31.860150Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.860163Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:53:31.860449Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860453Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:31.860471Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:53:31.860488Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860491Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:53:31.860493Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T08:53:31.860526Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860531Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:53:31.860539Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:53:31.860541Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:31.860545Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:53:31.860548Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:31.860551Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:53:31.860553Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:53:31.860561Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:31.860565Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T08:53:31.860567Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:53:31.860570Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 
18446744073709551615 2024-11-21T08:53:31.860611Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.860617Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.860619Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:31.860622Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:31.860624Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:31.860644Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:31.860649Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:31.860657Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:31.860697Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.860705Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.860711Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:31.860715Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:53:31.860718Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:31.860726Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:53:31.861321Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:31.861339Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:31.861345Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:53:31.861379Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:53:31.861383Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:53:31.861445Z node 49 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:53:31.861458Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:53:31.861461Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [49:354:2346] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:53:31.861510Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:31.861532Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 30us result status StatusPathDoesNotExist 2024-11-21T08:53:31.861561Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless >> ColumnStatistics::CountMinSketchServerlessStatistics >> TopicService::AccessRights |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceAndDropWithReboots [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE 
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:24.485517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:24.485540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:24.485543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:24.485547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:24.485556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:24.485559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:24.485566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:24.485647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:24.495388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:24.495412Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:24.497682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:24.497774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:24.497808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:24.500258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:24.500334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:24.500436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.500626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:24.501242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.501518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:24.501527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.501541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:24.501548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:24.501553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:24.501599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:24.503233Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:24.520317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:24.520390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.520441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:24.520502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:24.520511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.521162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.521183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:24.521216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.521226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:24.521230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:24.521235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:24.521652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.521663Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:24.521668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:24.522013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.522022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.522027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.522033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.522456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:24.522829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:24.522872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:24.523013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.523030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:24.523035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.523077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:24.523083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.523114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:24.523125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:24.523539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:24.523549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:24.523588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.523592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:24.523658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.523663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:24.523671Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:24.523674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.523678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:24.523682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.523685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:24.523687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:24.523697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:24.523700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:24.523703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.696975Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:32.696980Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T08:53:32.696983Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:32.697061Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.697069Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.697073Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:32.697076Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:53:32.697080Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:32.697090Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:53:32.697742Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T08:53:32.697770Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000006 2024-11-21T08:53:32.698029Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:53:32.698054Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:32.698063Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T08:53:32.698083Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:32.698100Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2024-11-21T08:53:32.698127Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:32.698136Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:32.698259Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.698307Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T08:53:32.698624Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:32.698630Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:32.698652Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:32.698672Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:32.698676Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T08:53:32.698681Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:53:32.698726Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:53:32.698732Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T08:53:32.698744Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:53:32.698748Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:32.698753Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T08:53:32.698760Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:53:32.698765Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:53:32.698769Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:53:32.698778Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:32.698783Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T08:53:32.698787Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T08:53:32.698791Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:53:32.698843Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.698851Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.698855Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:32.698859Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:53:32.698862Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:32.698896Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:32.698901Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:53:32.698909Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:32.698943Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.698951Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.698955Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:53:32.698960Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:53:32.698963Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:32.698971Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T08:53:32.699480Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:53:32.699683Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
2024-11-21T08:53:32.699694Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:53:32.699733Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:53:32.699740Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:53:32.699800Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:53:32.699813Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:53:32.699817Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [33:405:2397] TestWaitNotification: OK eventTxId 1005 2024-11-21T08:53:32.699867Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:32.699887Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 29us result status StatusPathDoesNotExist 2024-11-21T08:53:32.699915Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: 
[1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:24.642231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:24.642262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:24.642267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:24.642273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:24.642287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:24.642292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:24.642302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:24.642399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:24.650639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:24.650661Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:24.652537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:24.652625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:24.652653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:24.654713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:24.654791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:24.654890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.655084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:24.655637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.655877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:24.655885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.655895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:24.655900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:24.655904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:24.655937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:24.657023Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:24.669351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:24.669456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.669519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:24.669589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:24.669595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:24.670362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:24.670373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:24.670377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:24.670632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:24.670839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.670849Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.670854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.671258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:24.671604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:24.671651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:24.671815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:24.671834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:24.671842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.671892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:24.671898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:24.671929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:24.671940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:24.672259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:24.672267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:24.672302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:24.672305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:24.672384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:24.672390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:24.672401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:24.672406Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.672411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:24.672416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:24.672422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:24.672424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:24.672432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:24.672436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:24.672439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.826615Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:32.826620Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T08:53:32.826624Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:32.826717Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.826728Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.826732Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:32.826736Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:53:32.826740Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:32.826752Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:53:32.827440Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T08:53:32.827476Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 2024-11-21T08:53:32.827842Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.827901Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count 
in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:32.827924Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:32.827934Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T08:53:32.827957Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:32.827973Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T08:53:32.828002Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:32.828010Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:32.828137Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:53:32.828543Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:32.828553Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:32.828580Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:32.828604Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:32.828608Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:53:32.828612Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:53:32.828666Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:53:32.828673Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:53:32.828686Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:53:32.828690Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:32.828696Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:53:32.828705Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:53:32.828711Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:53:32.828714Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:53:32.828726Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:32.828731Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T08:53:32.828735Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T08:53:32.828738Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:53:32.828797Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.828807Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.828811Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:32.828815Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:53:32.828819Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:32.828866Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:32.828871Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:53:32.828881Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:32.828917Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.828925Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.828928Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:53:32.828931Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:53:32.828935Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:32.828943Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:53:32.829748Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:53:32.829774Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:32.829787Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:53:32.829845Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:53:32.829853Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:53:32.829933Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:53:32.829953Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:53:32.829958Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [33:406:2398] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:53:32.830035Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:32.830065Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 40us result status StatusPathDoesNotExist 2024-11-21T08:53:32.830101Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> KqpDataIntegrityTrails::Select |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> IndexBuildTestReboots::BaseCase [GOOD] >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 [GOOD] |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> 
YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> KqpDataIntegrityTrails::Select [GOOD] |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:25.787437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:25.787460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:25.787465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:25.787470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:25.787482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:25.787486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:25.787495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:25.787564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:25.798873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:25.798893Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:25.802207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:25.802295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:25.802318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:25.805184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:25.805259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:25.805363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured 
yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:25.805591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:25.806216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:25.806445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:25.806454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:25.806465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:25.806471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:25.806476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:25.806515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:25.807679Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:25.822897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:25.822953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.822990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:25.823032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:25.823037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.823534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:25.823553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:25.823582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.823590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:25.823594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2024-11-21T08:53:25.823598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:25.823929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.823937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:25.823941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:25.824284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.824291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.824296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:25.824301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:25.824828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:25.825431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:25.825475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:25.825628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:25.825651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:25.825657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:25.825703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:25.825709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:25.825734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:25.825746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:25.826171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:25.826180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:25.826216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:25.826220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:25.826290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:25.826296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:25.826306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:25.826310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:25.826315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:25.826320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:25.826325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:25.826329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:25.826338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:25.826343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:25.826347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
4, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.904283Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:33.904288Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:53:33.904292Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:33.904391Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.904402Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.904406Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:33.904410Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:53:33.904414Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:33.904422Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:53:33.904991Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T08:53:33.905023Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:53:33.905183Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:33.905205Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:33.905214Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T08:53:33.905234Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:33.905249Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T08:53:33.905275Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:33.905284Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:33.905570Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.905657Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:53:33.906051Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:33.906059Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:33.906087Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:53:33.906108Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:33.906112Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:53:33.906117Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:53:33.906164Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:53:33.906171Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:53:33.906183Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:53:33.906187Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:33.906193Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:53:33.906198Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:53:33.906203Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:53:33.906206Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:53:33.906218Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:33.906224Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T08:53:33.906228Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:53:33.906235Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:53:33.906297Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.906307Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.906312Z node 
33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:33.906316Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:33.906320Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:53:33.906360Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:33.906366Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:53:33.906374Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:33.906403Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.906410Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.906414Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:53:33.906418Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:53:33.906422Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:33.906429Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:53:33.907278Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:53:33.907304Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:33.907314Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:53:33.907356Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:53:33.907365Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:53:33.907431Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:53:33.907449Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:53:33.907456Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [33:350:2342] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:53:33.907522Z node 33 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:33.907549Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 36us result status StatusPathDoesNotExist 2024-11-21T08:53:33.907581Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |88.5%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 16814, MsgBus: 15117 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f19/r3tmp/tmpjifdnz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16814, node 1 TClient is connected to server localhost:15117 TClient is connected to server localhost:15117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessions::SessionsServerLimit |88.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> YdbSdkSessions::TestSessionPool [GOOD] >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::OneTierExternalTtl >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumn |88.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2024-11-21T08:53:34.690026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653077523891817:2090];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.690084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cd0/r3tmp/tmprsZJ02/pdisk_1.dat 2024-11-21T08:53:34.746987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11624, node 1 2024-11-21T08:53:34.762009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.762023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.762025Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.762063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:23633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:34.789857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.789886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.791753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:34.824955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.826002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.826022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.826744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:34.826807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:34.826816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:34.827273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:34.827284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:34.827328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:34.827688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.828936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179214878, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.828967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:34.829093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:34.829759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:34.829807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:34.829826Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:34.829836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:34.829856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:34.829867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:34.830428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:34.830448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:34.830452Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:34.830469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:34.997598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653077523892704:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.997602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653077523892718:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.997648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.998140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.998178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.998182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.998201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.998203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:53:34.998212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.998219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:34.998254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T08:53:34.998308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.998312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:35.000624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.000676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.000760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.000775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T08:53:35.000816Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.000822Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.000829Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.001456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.001462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.001465Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:53:35.001499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.001502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.001503Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:53:35.001513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.001514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.001515Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:53:35.001523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.001523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.001524Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:53:35.001532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.001534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.001534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T08:53:35.001904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215060, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013295Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215060, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:53:35.013350Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215060, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T08:53:35.013368Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215060, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T08:53:35.013401Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732179215060 2024-11-21T08:53:35.013407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T08:53:35.013902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.013973Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T08:53:35.013982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T08:53:35.014006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T08:53:35.014009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T08:53:35.014016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:53:35.014019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T08:53:35.014025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T08:53:35.014029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T08:53:35.014033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:53:35.014040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T08:53:35.014043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T08:53:35.014045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T08:53:35.014049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T08:53:35.014370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.014379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.014382Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:53:35.014422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.014434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.014435Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:53:35.014451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.014455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.014456Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:35.014470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.014472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.014473Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:35.014487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.014489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.014491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T08:53:35.014496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T08:53:35.015197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653077523892720:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:35.114275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.114314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.115065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> YdbSdkSessions::TestMultipleSessions [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> TDSProxyGetTest::TestBlock42GetIntervalsWipedAllOk [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureNone >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool >> TDSProxyPatchTest::SecuredErrorOnPut_ErasureNone [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_Erasure4Plus2Block >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2024-11-21T08:53:34.699457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653077168629876:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.699630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cc4/r3tmp/tmpo9jTvg/pdisk_1.dat 2024-11-21T08:53:34.769339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1636, node 1 2024-11-21T08:53:34.783133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.783148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.783150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.783192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2024-11-21T08:53:34.799102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.799135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.800751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:34.840653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.841778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.841801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.843626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:34.843691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:34.843696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:34.844307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:34.844332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:34.844980Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.845985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179214892, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.846028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:34.846133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:34.846757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:34.846847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:34.846883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:34.846909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:34.846929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:34.846957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T08:53:34.847522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:34.847547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:34.847557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:34.847575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T08:53:34.847718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.001237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081463598099:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.001262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.027660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.027788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:53:35.027909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.027917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.028708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T08:53:35.028779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.028844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.028868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:53:35.028938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:35.029108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.029119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.029123Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:53:35.029158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.029160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.029162Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:53:35.030695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:53:35.030726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T08:53:35.031151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:53:35.083072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:53:35.083085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:53:35.083107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T08:53:35.083537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:53:35.084286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215130, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.084301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179215130 2024-11-21T08:53:35.084329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T08:53:35.084779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.084853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.084871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:53:35.085145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.085161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.085165Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerI ... 08:53:35.098953Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.098991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:53:35.099369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.099382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.099387Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:53:35.099431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.099440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.099442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:53:35.099458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.099468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.099469Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:53:35.099484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.099491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.099493Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T08:53:35.099506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.099516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.099518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T08:53:35.100228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215144, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100244Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215144, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T08:53:35.100369Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215144, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T08:53:35.100399Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215144, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T08:53:35.100425Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732179215144 2024-11-21T08:53:35.100437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T08:53:35.100826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.100951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T08:53:35.100962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T08:53:35.100995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T08:53:35.101006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T08:53:35.101017Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T08:53:35.101026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T08:53:35.101051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T08:53:35.101061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T08:53:35.101066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:53:35.101081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T08:53:35.101085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T08:53:35.101087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T08:53:35.101091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T08:53:35.101283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.101301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.101306Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:53:35.101350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.101358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.101359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:35.101376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.101384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.101386Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:35.101401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.101409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.101410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T08:53:35.101438Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.101442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.101443Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T08:53:35.101449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:53:35.102030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653081463598277:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:35.178386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.178432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.179404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.190119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytgqr38azt8ny1t531frb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWUyMjdhYzgtYWQ5OTY0NC02YzUxZmM4NC03MWE2NWM4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:35.297998Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytgy1a5ebpya7qq6kmv2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTIzNDQ4ZGEtOTI2OTMwOWQtODAwYjc4YTItYjhlNTFhNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2024-11-21T08:53:34.808809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653077345983608:2180];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.808894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ca5/r3tmp/tmpKLku13/pdisk_1.dat 2024-11-21T08:53:34.874015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7808, node 1 2024-11-21T08:53:34.893867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.893885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.893887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.893929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:34.911406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.911438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.913671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:34.956796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.957616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.957623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.958137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:34.958177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:34.958180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:34.958551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:34.958561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:34.958826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:34.959007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.959777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215004, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.959798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:34.959877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:34.960280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:34.960321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:34.960331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:34.960340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:34.960348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:34.960358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:34.960734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:34.960750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:34.960754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:34.960766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:35.105683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081640951705:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.105718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.126616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.126744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T08:53:35.126881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.126892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.127714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T08:53:35.127769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.127821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.127848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T08:53:35.127921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:35.128089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.128101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.128105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:53:35.128149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.128157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.128158Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T08:53:35.130051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:53:35.130080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T08:53:35.130560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T08:53:35.183623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T08:53:35.183636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:53:35.183668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T08:53:35.184337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T08:53:35.185566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215228, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.185587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732179215228 2024-11-21T08:53:35.185628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T08:53:35.186176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.186266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.186293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T08:53:35.186675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.186694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.186698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [O ... 08:53:35.199633Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.199654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:53:35.199816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.199825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.199829Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:53:35.199859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.199866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.199868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:53:35.199882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.199889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.199891Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:53:35.199904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.199911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.199913Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T08:53:35.199925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.199931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.199933Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T08:53:35.201050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215249, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201063Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215249, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T08:53:35.201119Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215249, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T08:53:35.201148Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215249, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T08:53:35.201170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732179215249 2024-11-21T08:53:35.201181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T08:53:35.201554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.201697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T08:53:35.201712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T08:53:35.201751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T08:53:35.201764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T08:53:35.201774Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T08:53:35.201780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T08:53:35.201791Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T08:53:35.201802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T08:53:35.201813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:53:35.201822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T08:53:35.201830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T08:53:35.201832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T08:53:35.201837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T08:53:35.202050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.202062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.202066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:53:35.202100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.202108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.202110Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:35.202123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.202131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.202133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:35.202148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.202156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.202158Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T08:53:35.202170Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:35.202173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:35.202175Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T08:53:35.202180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:53:35.202651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653081640951882:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:35.256146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.256200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.257310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.268893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytgtx1jkapxxth3pyf1np, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWEzNDQwNjctNTdkYTRiMS05NmVkYTYyZS0yZGIzODg2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:35.376151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yth0fah6nmjqnfx7rvcer, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY5YjI4MDEtM2Y1NzYzNGMtNGE4ZTdjYjItZTNkOGI2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> DataShardReadTableSnapshots::ReadTableSplitBefore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2024-11-21T08:53:35.030516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653081253016017:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:35.030535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c7b/r3tmp/tmprF3Dgc/pdisk_1.dat 2024-11-21T08:53:35.080860Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1343, node 1 2024-11-21T08:53:35.097717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:35.097739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:35.097742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:35.097779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2320 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:35.130639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:35.130676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:35.132252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:35.157014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.158170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.158192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.158770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.158818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.158826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:35.159273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.159285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:35.159336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.159622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.160496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215207, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.160523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:35.160634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:35.161042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.161090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.161105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:35.161119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:35.161133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:35.161149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:35.161597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:35.161623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:35.161632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.161646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:35.341665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253016947:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.341668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253016960:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.341685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.342149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253016991:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.342167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.342269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.342334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253016995:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.342352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.342357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.342380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.342387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:53:35.342401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.342413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.342475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T08:53:35.342543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.342554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:35.342757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.342783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.343206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253017012:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.343224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653081253017016:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.343230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.343405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.343463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusMultipleModifications, reason: Check failed: path ... ltipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.344775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215389, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345145Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215389, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:53:35.345196Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215389, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T08:53:35.345223Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179215389, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T08:53:35.345255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732179215389 2024-11-21T08:53:35.345265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T08:53:35.345728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.345839Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T08:53:35.345856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T08:53:35.345893Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T08:53:35.345904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T08:53:35.345916Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:53:35.345926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T08:53:35.345937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T08:53:35.345953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T08:53:35.345958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:53:35.345966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T08:53:35.345968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T08:53:35.345971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T08:53:35.345978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 3 2024-11-21T08:53:35.346234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.346255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.346258Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:53:35.346293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.346304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.346306Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:53:35.346320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.346322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.346324Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:35.346338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.346340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.346341Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:35.346353Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:35.346355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:35.346356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T08:53:35.346361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 3 2024-11-21T08:53:35.346775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653081253016962:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:35.346787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653081253017018:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:35.346792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653081253016997:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:35.414987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.415018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.432502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715662:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.432544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.433351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.434242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715663:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.434261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.434660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: metadata@system, status: 
StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TDSProxyPatchTest::NaiveErrorOnGetItem_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors >> IndexBuildTestReboots::IndexPartitioning [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::BaseCaseWithDataColumns [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:04.178915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:04.178945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:04.178951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:04.178956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:04.178973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:04.178977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:04.178988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:04.179112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:04.191292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:04.191322Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] 
sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:04.194193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:04.194316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:04.194349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:04.201559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:04.201724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:04.201856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:04.202095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:04.203043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:04.203399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:04.203417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:04.203432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:04.203441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:04.203448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:04.203505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:04.212764Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:04.235488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:04.235595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.235670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:04.235718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:04.235727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.236649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:04.236681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:04.236735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.236747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:04.236751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:04.236756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:04.237244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.237258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:04.237264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:04.237635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.237647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.237653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:04.237661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:04.238246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:04.238641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:04.238697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:04.238905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:04.238934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:04.238941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:04.238996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:04.239002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:04.239032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:04.239045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:04.239437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:04.239450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:04.239496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:04.239502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:04.239587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:04.239593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:04.239605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:04.239609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:04.239634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:04.239640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:04.239646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:04.239650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:04.239662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:04.239668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:04.239672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
sion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "value" DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:30.740933Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:30.740981Z node 276 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 53us result status StatusSuccess 2024-11-21T08:53:30.741155Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey 
{ SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "value" DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:30.741265Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:30.741318Z node 276 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1/indexImplTable" took 56us result status StatusSuccess 2024-11-21T08:53:30.741482Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_2_1_0_VdiskErrors [GOOD] Test command err: 2024-11-21T08:53:36.459631Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T08:53:36.459702Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T08:53:36.459707Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T08:53:36.459712Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:36.459714Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:53:36.459717Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:53:36.459719Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:53:36.462390Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T08:53:36.462445Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:36.462451Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# 
BPG32 2024-11-21T08:53:36.462496Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T08:53:36.462504Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T08:53:36.462508Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T08:53:36.462546Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T08:53:36.462582Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T08:53:36.462587Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T08:53:36.462590Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T08:53:36.462601Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T08:53:36.462618Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:53:36.462634Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T08:53:36.462645Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2024-11-21T08:53:34.452323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653075026401294:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.452571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d09/r3tmp/tmpIUl3R5/pdisk_1.dat 2024-11-21T08:53:34.511271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61919, node 1 2024-11-21T08:53:34.515614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.515630Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.515632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.515662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:34.552443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.552476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.554179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.581745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.582941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.582967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.583667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:34.583737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:34.583747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:53:34.584285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:34.584318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:34.584325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:34.584870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.585880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179214633, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.585912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:34.586009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:34.586476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:34.586532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:34.586550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:34.586561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:34.586576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:34.586588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:34.587004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:34.587025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:34.587029Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:34.587045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:35.489319Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653079597155983:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:35.489366Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d09/r3tmp/tmpfdvN7m/pdisk_1.dat 2024-11-21T08:53:35.502955Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25834, node 4 2024-11-21T08:53:35.515933Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:35.515945Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T08:53:35.515947Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:35.515992Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8884 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:35.589710Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:35.589741Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:35.591279Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:35.594588Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.594690Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.594700Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.595264Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.595299Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.595305Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:53:35.595715Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.595721Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T08:53:35.596103Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.596184Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.597236Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215641, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.597254Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:35.597336Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:35.597806Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.597861Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.597879Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:35.597895Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:35.597911Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:35.597922Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:35.598079Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:35.598095Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:35.598100Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.598113Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2024-11-21T08:53:35.103406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653083199931637:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:35.103423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c77/r3tmp/tmp2F95Ry/pdisk_1.dat 2024-11-21T08:53:35.156250Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26468, node 1 2024-11-21T08:53:35.170872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:35.170887Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2024-11-21T08:53:35.170890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:35.170930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:35.196043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.196951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.196968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.197817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.197885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.197896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:53:35.198440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.198448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:53:35.198480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.198893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.199972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215249, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.200001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:53:35.200086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:53:35.200670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.200727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.200739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:53:35.200753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:53:35.200779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:53:35.200797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:53:35.201204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:53:35.201215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:53:35.201219Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.201237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:53:35.203486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:35.203509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:35.204953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:35.411839Z node 1 :KQP_PROXY WARN: TraceId: "01jd6yth1k9qsqpnzce9xfd9rd", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:35.413345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083199932565:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.413354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083199932573:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.413361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.414024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.414077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.414087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.414101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.414109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T08:53:35.414120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.414133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.414184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2024-11-21T08:53:35.414256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.414266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.415235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2024-11-21T08:53:35.415492Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415513Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415528Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T08:53:35.415832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2024-11-21T08:53:35.415849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:53:35.415852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:53:35.415885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:53:35.415891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:53:35.415892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:53:35.415902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644 ... 480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.486377Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.486379Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:53:36.486400Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.486409Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.486410Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:53:36.486424Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.486433Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.486435Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:53:36.486449Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.486459Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.486460Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T08:53:36.487843Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179216537, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:36.487861Z node 4 
:FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179216537, at schemeshard: 72057594046644480 2024-11-21T08:53:36.487897Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:53:36.487923Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179216537, at schemeshard: 72057594046644480 2024-11-21T08:53:36.487933Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T08:53:36.487950Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179216537, at schemeshard: 72057594046644480 2024-11-21T08:53:36.487962Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T08:53:36.487977Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732179216537 2024-11-21T08:53:36.487988Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T08:53:36.488498Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:36.488618Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:36.488632Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T08:53:36.488643Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T08:53:36.488673Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T08:53:36.488677Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T08:53:36.488685Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:53:36.488694Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T08:53:36.488701Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T08:53:36.488704Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T08:53:36.488710Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:53:36.488718Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T08:53:36.488721Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T08:53:36.488723Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T08:53:36.488728Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T08:53:36.489070Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.489081Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 
281474976715658 2024-11-21T08:53:36.489085Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:53:36.489133Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.489136Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.489137Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:53:36.489151Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.489153Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.489155Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:36.489166Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.489169Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.489171Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:36.489184Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:53:36.489186Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:53:36.489187Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T08:53:36.489193Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T08:53:36.489776Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653084317155753:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:36.542408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.542459Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.543352Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.552414Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj584sch4vygxdpxymr0", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.553433Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj59bkk5ss9db0t7h7dp", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.554057Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5a55wsfpm6x5pghjgp", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.554641Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5a2x1t16sdhx7c8hpy", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.555063Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5b96wgm20g8x81bc45", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.555441Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5b3e36en7mkepn20fz", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.555901Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5bf055k8mhkq617442", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.556332Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5c1gw575p5gmh4tdh8", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.556753Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5cbrxq7t0c150f1fc0", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.557102Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5d3m8w9szmzahf2x37", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.557464Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5d4ayw099dycmqddhs", Active sessions limit exceeded, maximum allowed: 2 2024-11-21T08:53:36.557888Z node 4 :KQP_PROXY WARN: TraceId: "01jd6ytj5d4vg5rmp6gcymyq38", Active sessions limit exceeded, maximum allowed: 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2024-11-21T08:53:34.851251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653078846941986:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.851463Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cac/r3tmp/tmpKnKgOd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23144, node 1 2024-11-21T08:53:34.951632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.951661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.953063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.959698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.959713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.959715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.959746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:53:34.959751Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:34.959761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:35.005514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.006548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.006564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.008889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.008936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.008946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:53:35.009446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.009456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 2024-11-21T08:53:35.009933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.010293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.011562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215060, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.011590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:35.011661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:35.012104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.012148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.012163Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:35.012172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:35.012186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:35.012197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:35.012638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:53:35.012652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:35.012656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.012668Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:35.153317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910226:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910214:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910252:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910258:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.153854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.153900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.153910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.153921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.153929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:53:35.153938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.153951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.153992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T08:53:35.154052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.154061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:35.154249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:35.154275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:35.154536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910278:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.154562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653083141910282:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.154574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.154863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:35.154916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: ... 53:36.292111Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T08:53:36.292574Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:36.292681Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:36.292716Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T08:53:36.292730Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T08:53:36.292766Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T08:53:36.292775Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T08:53:36.292786Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T08:53:36.292795Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T08:53:36.292805Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T08:53:36.292813Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T08:53:36.292818Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:53:36.292832Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T08:53:36.292843Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T08:53:36.292850Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T08:53:36.292857Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 2 2024-11-21T08:53:36.293183Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:36.293202Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:36.293207Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:53:36.293251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:36.293263Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, 
txId: 281474976715659 2024-11-21T08:53:36.293265Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:53:36.293281Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:36.293284Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:36.293285Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:53:36.293298Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:36.293300Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:36.293302Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T08:53:36.293314Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:53:36.293318Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:53:36.293324Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T08:53:36.293330Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 2 2024-11-21T08:53:36.293750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653084240005185:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:36.293765Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653084240005241:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:36.347169Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.347215Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.349356Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.356011Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715662:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.356059Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.357838Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NGQ3MjM2MGUtYjFlODQ0YTAtZjdjZmVlZTQtMTdlZDIxYTA=, ActorId: [4:7439653084240005490:2460], ActorState: ExecuteState, TraceId: 01jd6ythz366jkja77c5v4wehh, Create QueryResponse for error on request, msg: 2024-11-21T08:53:36.358357Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.399649Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7439653084240005570:2512] TxId: 281474976715663. Ctx: { TraceId: 01jd6ytj0ec53je7s3ctc1tp5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWJkZWNhOWMtZTg5NTc5ZTUtNzQwOWE2M2EtNTVjODZkZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:53:36.400847Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NWJkZWNhOWMtZTg5NTc5ZTUtNzQwOWE2M2EtNTVjODZkZjQ=, ActorId: [4:7439653084240005568:2512], ActorState: ExecuteState, TraceId: 01jd6ytj0ec53je7s3ctc1tp5r, Create QueryResponse for error on request, msg: 2024-11-21T08:53:36.401470Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ytj0ec53je7s3ctc1tp5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWJkZWNhOWMtZTg5NTc5ZTUtNzQwOWE2M2EtNTVjODZkZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:36.460301Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTZjYTg3ZTAtYTZhOWI5MWEtYjM4NmQ5YmEtNzZlOTZmMTQ=, ActorId: [4:7439653084240005690:2598], ActorState: ExecuteState, TraceId: 01jd6ytj2a9yhmaw6d0nmhp9f4, Create QueryResponse for error on request, msg: 2024-11-21T08:53:36.461573Z node 4 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=4&id=ZTZjYTg3ZTAtYTZhOWI5MWEtYjM4NmQ5YmEtNzZlOTZmMTQ=, ActorId: [4:7439653084240005690:2598], ActorState: ReadyState, Internal error, message: TKqpSessionActor in state ReadyState received unexpected event NKikimr::NGRpcService::TEvClientLost(0x108c0001) sender: [4:8320808721877066593:7169396] 2024-11-21T08:53:36.529890Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDVhYjE1MjEtOWY3YWNkNjAtOTA1OWU1NDAtMmM0ODVlYWQ=, ActorId: [4:7439653084240005834:2707], ActorState: ExecuteState, TraceId: 01jd6ytj4g8ceqrxe95x9dfwys, Create QueryResponse for error on request, msg: 2024-11-21T08:53:36.529981Z node 4 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=4&id=NDVhYjE1MjEtOWY3YWNkNjAtOTA1OWU1NDAtMmM0ODVlYWQ=, ActorId: [4:7439653084240005834:2707], ActorState: ReadyState, Internal error, message: TKqpSessionActor in state ReadyState received unexpected event NKikimr::NGRpcService::TEvClientLost(0x108c0001) sender: [4:8320808721877066593:7169396] 2024-11-21T08:53:36.540151Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6ytj4vdja3ff6aj8cf305k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=M2RiZWQzNjktMWIzOGVhYjEtNmVhYWU0ZDYtODJlOTY1MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:36.541857Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2RiZWQzNjktMWIzOGVhYjEtNmVhYWU0ZDYtODJlOTY1MDE=, ActorId: [4:7439653084240005855:2723], ActorState: ExecuteState, TraceId: 01jd6ytj4vdja3ff6aj8cf305k, Create QueryResponse for error on request, msg: 2024-11-21T08:53:36.542019Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ytj4vdja3ff6aj8cf305k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=M2RiZWQzNjktMWIzOGVhYjEtNmVhYWU0ZDYtODJlOTY1MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> DataShardReadTableSnapshots::ReadTableSnapshot >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T08:53:27.288010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653046886320305:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.288031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.293932Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653047542504511:2248];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dc8/r3tmp/tmp1cIyzq/pdisk_1.dat 2024-11-21T08:53:27.313501Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.313800Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.314546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.337674Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20455, node 1 2024-11-21T08:53:27.352975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002dc8/r3tmp/yandex2BTchu.tmp 2024-11-21T08:53:27.352988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002dc8/r3tmp/yandex2BTchu.tmp 2024-11-21T08:53:27.356929Z INFO: TTestServer started on Port 29080 GrpcPort 20455 TClient is connected to server localhost:29080 PQClient connected to localhost:20455 === TenantModeEnabled() = 1 === Init PQ - start server on port 20455 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:53:27.366951Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002dc8/r3tmp/yandex2BTchu.tmp 2024-11-21T08:53:27.367110Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:27.388317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.388357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.389885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.389975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:53:27.390015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:53:27.390102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:27.390106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:27.390535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:27.390542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T08:53:27.390544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:53:27.390902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:27.390916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:53:27.391112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.391116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T08:53:27.391119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:53:27.391409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.391412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.391416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:53:27.391420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:53:27.392148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:27.392636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T08:53:27.392695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:27.393428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179207437, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:27.393477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439653046886320849 RawX2: 4294969642 } } Step: 1732179207437 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:53:27.393491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:53:27.393544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:53:27.393554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:53:27.393592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:53:27.393610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:53:27.394119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:27.394131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:53:27.394179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:27.394189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653046886320885:2378], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T08:53:27.394198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.394203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:53:27.394216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:53:27.394225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:53:27.394229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T08:53:27.394233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:53:27.394236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:53:27.394238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T08:53:27.394251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:53:27.394255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:53:27.394258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:53:27.394768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:53:27.394793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation ... 
K, TabletId: 72075186224037899, NodeId 1, Generation: 1 2024-11-21T08:53:36.153330Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:36.153338Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server connected, pipe [1:7439653085541028928:2559], now have 1 active actors on pipe 2024-11-21T08:53:36.153348Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:36.153356Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T08:53:36.153394Z node 1 :PERSQUEUE INFO: new Cookie 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 generated for partition 0 topic 'PQ/account3/folder1/folder2/topic' owner 1236 2024-11-21T08:53:36.153449Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:53:36.153499Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:36.153553Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:36.153561Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T08:53:36.153593Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:36.153646Z node 1 :PQ_WRITE_PROXY INFO: session inited cookie: 22 partition: 0 MaxSeqNo: 0 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 2024-11-21T08:53:36.153852Z :INFO: [] MessageGroupId [1236] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179216153 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:36.153873Z :INFO: [] MessageGroupId [1236] SessionId [] Write session established. Init response: session_id: "1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0" topic: "PQ/account3/folder1/folder2/topic" 2024-11-21T08:53:36.154108Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write 1 messages with Id from 1 to 1 2024-11-21T08:53:36.154281Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: try to update token 2024-11-21T08:53:36.154291Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T08:53:36.154973Z :INFO: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T08:53:36.155601Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:53:36.155807Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:53:36.155869Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:36.155880Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T08:53:36.155928Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:53:36.155962Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:53:36.156002Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:53:36.156009Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T08:53:36.156082Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2024-11-21T08:53:36.156135Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2024-11-21T08:53:36.156178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2024-11-21T08:53:36.156185Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2024-11-21T08:53:36.156243Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2024-11-21T08:53:36.732871Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653085541028944:2565], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:36.733459Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYxOGQ3ZTAtZjEyYzQwMTMtZGY4NDJlNDAtZTVkOTI2MWE=, ActorId: [1:7439653085541028942:2564], ActorState: ExecuteState, TraceId: 01jd6ytjat765yfmyq091qatf5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:36.733687Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:37.024501Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2024-11-21T08:53:37.024584Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2024-11-21T08:53:37.024604Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2024-11-21T08:53:37.024614Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2024-11-21T08:53:37.024873Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2024-11-21T08:53:37.025002Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1732179217024 2024-11-21T08:53:37.025200Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:53:37.027601Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 2024-11-21T08:53:37.027621Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:53:37.027641Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2024-11-21T08:53:37.027650Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:53:37.027656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2024-11-21T08:53:37.027662Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:53:37.027667Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2024-11-21T08:53:37.027671Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:53:37.027701Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:53:37.028075Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 868 throttled_on_partition_duration_ms: 868 } 2024-11-21T08:53:37.028094Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: acknoledged message 1 2024-11-21T08:53:37.055731Z :INFO: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session will now close 2024-11-21T08:53:37.055767Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: aborting 2024-11-21T08:53:37.056043Z :INFO: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:53:37.056059Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0] Write session: destroy 2024-11-21T08:53:37.056275Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 grpc read done: success: 0 data: 2024-11-21T08:53:37.056300Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 grpc read failed 2024-11-21T08:53:37.056310Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 grpc closed 2024-11-21T08:53:37.056316Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|312a8a06-4bcf381f-f804c497-ec1c2b4c_0 is DEAD DURATION 2.938972s 2024-11-21T08:53:37.056647Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:37.056729Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:37.056755Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7439653085541028928:2559] destroyed 2024-11-21T08:53:37.056788Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> HttpRequest::Analyze [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2024-11-21T08:53:34.910211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653076813548217:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.910335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c88/r3tmp/tmpww2UT8/pdisk_1.dat 2024-11-21T08:53:34.964985Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23869, node 1 2024-11-21T08:53:34.977992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.978008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.978011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.978054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:35.010242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:35.010272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:35.011960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:35.042999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.044105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.044130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.044869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.044933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.044943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:53:35.045300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.045311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:35.045448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.045731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.046546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215095, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.046570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:35.046646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:35.046997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.047048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.047063Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:35.047080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:35.047092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:35.047102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:35.047586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:53:35.047612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:35.047616Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.047630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:36.346456Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653085090657294:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:36.346642Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c88/r3tmp/tmpqGbUVy/pdisk_1.dat 2024-11-21T08:53:36.361284Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20969, node 4 2024-11-21T08:53:36.377804Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:36.377817Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:36.377818Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:36.377858Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:36.446895Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.446929Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.448649Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.450208Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.450317Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:36.450329Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.450766Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:36.450837Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:36.450849Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:53:36.451221Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:36.451236Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:36.451416Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:36.451686Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.452730Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179216502, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:36.452745Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:36.452806Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:36.453186Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:36.453252Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:36.453274Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:36.453288Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:36.453298Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:36.453312Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:36.453449Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:53:36.453463Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:36.453468Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:36.453486Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> TopicService::AccessRights [GOOD] >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2024-11-21T08:53:30.640098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:30.640143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:30.640151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00386a/r3tmp/tmp50yXd1/pdisk_1.dat 2024-11-21T08:53:30.721198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10630, node 1 2024-11-21T08:53:30.816535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:30.816553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:30.816556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:30.816649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:30.822750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:30.899959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:30.899992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:30.912067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3229 2024-11-21T08:53:31.317762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:32.104872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:32.104909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:32.139258Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:32.140534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:32.200226Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:32.209585Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:32.209611Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:32.217015Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:32.217185Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:32.217208Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:32.217214Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:32.217221Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:32.217227Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:32.217233Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:32.217241Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:32.217357Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:32.394314Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:32.394346Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:32.396138Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:32.398445Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:32.398590Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:32.399468Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:32.405098Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:32.405121Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:32.405133Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:32.406875Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:32.406902Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:32.408159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:32.410022Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:32.410063Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:32.414199Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:32.427036Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:32.449826Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:32.573142Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:32.733303Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:33.477615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:33.477662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:33.481375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:33.526133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:33.526194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:33.526224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:33.526252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:33.526268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:33.526282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:33.526294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:33.526310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:33.526325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:33.526338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:33.526358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:33.526377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:33.533409Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:33.533467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:33.533514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:33.533536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:33.533556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:33.533578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... d;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:33.586433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:33.586448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:33.586455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:33.586467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:33.586474Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:33.586492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:33.586497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:33.586509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:53:33.586514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:33.590668Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:33.590694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:33.590709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:33.590716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:33.590738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:33.590745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:33.590756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:33.590763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:33.590776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:33.590782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:33.590790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:33.590798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:33.590846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:33.590854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:33.590873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:33.590879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:33.590891Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:33.590899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:53:33.590917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:53:33.590924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:53:33.590938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:53:33.590943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:53:34.809877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2943:3130], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.809932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.811130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T08:53:35.479270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3095:3174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.479318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.482022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000015s 2024-11-21T08:53:37.393569Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3427:3628] 2024-11-21T08:53:37.394223Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3424:3263] , Record { OperationId: "\000\000\000\000\023\361n\3352\004jw\225\201\003\310" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } } 2024-11-21T08:53:37.394237Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=n2jw 2024-11-21T08:53:37.394243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=n2jw , PathId [OwnerId: 72075186224037889, LocalPathId: 4] Answer: 'Analyze sent. OperationId: 00000004zhdvek413aeyar20y8' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |88.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |88.6%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TopicService::ThereAreGapsInTheOffsetRanges >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> TColumnShardTestSchema::ExternalTTL_Types ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] Test command err: 2024-11-21T08:53:35.723613Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T08:53:35.737669Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:35.739538Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:35.739630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:53:35.741350Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:53:35.741369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:53:35.741434Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T08:53:35.742335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:35.742402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:35.742434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:35.742456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:35.742471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:35.742485Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:35.742499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:35.742519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:35.742536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:35.742551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.742568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:35.742600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:35.746104Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:35.746175Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T08:53:35.746190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:53:35.746198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T08:53:35.747370Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T08:53:35.747439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:53:35.747447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:53:35.747475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:35.747519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:35.747530Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:35.747535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:53:35.747544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:53:35.747554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:35.747560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:35.747565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:53:35.747581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:35.747587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:35.747593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:35.747597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:53:35.747606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:53:35.747612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:35.747619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:35.747625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:53:35.747634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:35.747640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:35.747643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:53:35.747651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T08:53:35.747657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:35.747660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:53:35.747689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2024-11-21T08:53:35.747697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T08:53:35.747705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T08:53:35.747714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T08:53:35.747731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:35.747738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:35.747741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T08:53:35.747762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:35.747769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.747773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.747785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:35.747792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024 ... 
:367:2378], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2024-11-21T08:53:38.289950Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2024-11-21T08:53:38.290056Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2024-11-21T08:53:38.290816Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2024-11-21T08:53:38.290847Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9575952;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T08:53:38.290855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=19545fe0-a7e611ef-bf337870-1933a847;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T08:53:38.290957Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:368:2379], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2024-11-21T08:53:38.290982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T08:53:38.290990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T08:53:38.290997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=4; 2024-11-21T08:53:38.291004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2024-11-21T08:53:38.291012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T08:53:38.291028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T08:53:38.291040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T08:53:38.291151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:53:38.291175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T08:53:38.291188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2024-11-21T08:53:38.291201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2024-11-21T08:53:38.291229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] send ScanData to [1:364:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T08:53:38.291239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291277Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:53:38.291283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:53:38.291292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] finished for tablet 9437184 2024-11-21T08:53:38.291308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] send ScanData to [1:364:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:53:38.291368Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:369:2380] and sent to [1:364:2375] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.024}],"full":{"a":1732179218266502,"name":"_full_task","f":1732179218266502,"d_finished":0,"c":0,"l":1732179218291317,"d":24815},"events":[{"name":"bootstrap","f":1732179218266639,"d_finished":347,"c":1,"l":1732179218266986,"d":347},{"a":1732179218291275,"name":"ack","f":1732179218291146,"d_finished":113,"c":1,"l":1732179218291259,"d":155},{"a":1732179218291274,"name":"processing","f":1732179218267101,"d_finished":442,"c":5,"l":1732179218291259,"d":485},{"name":"ProduceResults","f":1732179218266843,"d_finished":225,"c":8,"l":1732179218291291,"d":225},{"a":1732179218291291,"name":"Finish","f":1732179218291291,"d_finished":0,"c":0,"l":1732179218291317,"d":26},{"name":"task_result","f":1732179218267104,"d_finished":309,"c":4,"l":1732179218291054,"d":309}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T08:53:38.291381Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:53:38.266303Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T08:53:38.291386Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:53:38.291404Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.022705s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.021438s;size=0.0063152;details={columns=1;};};]};; 2024-11-21T08:53:38.291410Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> TColumnShardTestSchema::HotTiersWithStat >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] Test command err: 2024-11-21T08:53:37.428052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:37.428588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:37.428616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042dd/r3tmp/tmpJy9PF9/pdisk_1.dat 2024-11-21T08:53:37.534907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:37.552989Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:37.595596Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:37.595895Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:53:37.595960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:37.595981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:37.606527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:37.730006Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:53:37.730046Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:53:37.730098Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:53:37.769772Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:53:37.770116Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:53:37.770132Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:53:37.770194Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:53:37.770244Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:53:37.770261Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:53:37.770345Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:53:37.770809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:37.771163Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:53:37.771185Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:53:37.785658Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:37.785963Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:37.786081Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:37.786149Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:37.794360Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:37.794595Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:37.794629Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:37.794798Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:37.794808Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:37.794816Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:37.794870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:37.798838Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:37.798950Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:37.798982Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:37.798988Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:37.798996Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:37.799002Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:37.799175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:37.799183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:37.799343Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:37.799365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:37.799377Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:37.799381Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:37.799388Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:37.799396Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:37.799402Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:37.799408Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:53:37.799413Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:37.799417Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:37.799421Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:37.799426Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:37.799444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:53:37.799449Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:53:37.799474Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:37.799522Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:53:37.799532Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:37.799549Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:37.799557Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:53:37.799562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:53:37.799567Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:53:37.799571Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:37.799620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:53:37.799624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:53:37.799627Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:37.799631Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:37.799644Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:53:37.799648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:37.799651Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:53:37.799654Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:53:37.799658Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:53:37.799932Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:53:37.799947Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:37.810351Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:37.810393Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:37.810401Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:37.810417Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... ponse 2024-11-21T08:53:38.706859Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:984:2786], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.706864Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.706870Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T08:53:38.706873Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.706877Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for WaitForStreamClearance 2024-11-21T08:53:38.706881Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit WaitForStreamClearance 2024-11-21T08:53:38.706884Z node 1 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037892 2024-11-21T08:53:38.706888Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T08:53:38.706892Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit WaitForStreamClearance 2024-11-21T08:53:38.706895Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit ReadTableScan 2024-11-21T08:53:38.706898Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2024-11-21T08:53:38.706921Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Continue 2024-11-21T08:53:38.706925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.706928Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2024-11-21T08:53:38.706932Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2024-11-21T08:53:38.706935Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T08:53:38.706943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T08:53:38.707015Z node 1 :TX_PROXY TRACE: 
StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T08:53:38.707021Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2024-11-21T08:53:38.707027Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2024-11-21T08:53:38.707041Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T08:53:38.707071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [1:1084:2865], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:38.707076Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:38.707106Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037892, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:38.707123Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: RESPONSE_DATA TxId: 281474976715665 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2024-11-21T08:53:38.707128Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream data from ShardId# 72075186224037892 2024-11-21T08:53:38.707132Z node 1 :TX_PROXY TRACE: [ReadTable [1:930:2740] TxId# 281474976715662] Sending TEvStreamDataAck to [1:1084:2865] ShardId# 72075186224037892 2024-11-21T08:53:38.707147Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037892, TxId: 281474976715665, PendingAcks: 0 2024-11-21T08:53:38.707156Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T08:53:38.707160Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2024-11-21T08:53:38.707206Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [1:929:2740], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:38.707211Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:38.707214Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2024-11-21T08:53:38.707220Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T08:53:38.707227Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T08:53:38.707246Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [1:1084:2865], Recipient [1:930:2740]: 
NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T08:53:38.707250Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRelease from ShardId# 72075186224037892 2024-11-21T08:53:38.707253Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Released quota 1 reserved messages from ShardId# 72075186224037892 2024-11-21T08:53:38.707259Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2024-11-21T08:53:38.707263Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037892 2024-11-21T08:53:38.707283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:984:2786], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.707289Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.707294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T08:53:38.707299Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.707304Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for ReadTableScan 2024-11-21T08:53:38.707308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2024-11-21T08:53:38.707311Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037892 error: , IsFatalError: 0 2024-11-21T08:53:38.707316Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T08:53:38.707319Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit ReadTableScan 2024-11-21T08:53:38.707323Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit FinishPropose 2024-11-21T08:53:38.707326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit FinishPropose 2024-11-21T08:53:38.707331Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is DelayComplete 2024-11-21T08:53:38.707334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit FinishPropose 2024-11-21T08:53:38.707337Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit CompletedOperations 2024-11-21T08:53:38.707341Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit CompletedOperations 2024-11-21T08:53:38.707346Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T08:53:38.707349Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit CompletedOperations 2024-11-21T08:53:38.707352Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037892 has finished 2024-11-21T08:53:38.707355Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:38.707357Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2024-11-21T08:53:38.707360Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached 
operations 2024-11-21T08:53:38.707363Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T08:53:38.707370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T08:53:38.707374Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037892 on unit FinishPropose 2024-11-21T08:53:38.707378Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037892 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:53:38.707387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T08:53:38.707422Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:984:2786], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: COMPLETE TxId: 281474976715665 Step: 0 OrderId: 281474976715665 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 41 } } 2024-11-21T08:53:38.707425Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream complete from ShardId# 72075186224037892 2024-11-21T08:53:38.707438Z node 1 :TX_PROXY INFO: [ReadTable [1:930:2740] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.010979s execute time: 0.380145s total time: 0.391124s 2024-11-21T08:53:38.707524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:846:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T08:53:38.707571Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:848:2678]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T08:53:38.707595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:982:2784]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T08:53:38.707611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:984:2786]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2024-11-21T08:53:36.256634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:36.257246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:36.257284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f9/r3tmp/tmpeurmns/pdisk_1.dat 2024-11-21T08:53:36.358263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.376317Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:36.418991Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:36.419322Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:53:36.419376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.419395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.430128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.534067Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:53:36.534097Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:53:36.534136Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:53:36.542939Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:53:36.543228Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:53:36.543245Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:53:36.543302Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:53:36.543354Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:53:36.543371Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:53:36.543459Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:53:36.543955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.544300Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:53:36.544317Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:53:36.558719Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:36.558968Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:36.559068Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:36.559120Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:36.566683Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:36.566893Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:36.566919Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:36.567076Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:36.567086Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:36.567093Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:36.567145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:36.570573Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:36.570671Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:36.570706Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:36.570713Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:36.570717Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:36.570722Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:36.570879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:36.570886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:36.571045Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:36.571068Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:36.571082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:36.571088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:36.571095Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:36.571103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:36.571110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:36.571118Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:53:36.571123Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:36.571128Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:36.571133Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:36.571138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:36.571156Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:53:36.571161Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:53:36.571187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:36.571251Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:53:36.571263Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:36.571281Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:36.571290Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:53:36.571294Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:53:36.571300Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:53:36.571304Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:36.571356Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:53:36.571360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:53:36.571366Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:36.571369Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:36.571382Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:53:36.571385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:36.571388Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:53:36.571390Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:53:36.571396Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:53:36.571643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:53:36.571652Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:36.581998Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:36.582033Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:36.582043Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:36.582054Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... :281474976715659] at 72075186224037888 is Executed 2024-11-21T08:53:38.647932Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T08:53:38.647934Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T08:53:38.647937Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:53:38.647945Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:707:2590] for [0:281474976715659] at 72075186224037888 2024-11-21T08:53:38.647948Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2024-11-21T08:53:38.647955Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:38.647977Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:630:2536], Recipient [2:707:2590]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2024-11-21T08:53:38.647981Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2024-11-21T08:53:38.647985Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:630:2536] ShardId# 72075186224037888 2024-11-21T08:53:38.648004Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:707:2590], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2024-11-21T08:53:38.648008Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T08:53:38.648042Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:707:2590], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2024-11-21T08:53:38.648046Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T08:53:38.648055Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:630:2536], Recipient 
[2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.648058Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.648064Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:38.648070Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.648074Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2024-11-21T08:53:38.648077Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T08:53:38.648081Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2024-11-21T08:53:38.648083Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2024-11-21T08:53:38.648087Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T08:53:38.648089Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T08:53:38.648091Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2024-11-21T08:53:38.648129Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2024-11-21T08:53:38.648132Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.648135Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:53:38.648138Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:38.648140Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:38.648145Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:38.648230Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:741:2611], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:38.648235Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:38.648245Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:741:2611], Recipient [2:707:2590]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2024-11-21T08:53:38.648248Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2024-11-21T08:53:38.648295Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:706:2590], Recipient [2:707:2590]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:38.648300Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:38.648303Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2024-11-21T08:53:38.648309Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2024-11-21T08:53:38.648341Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2024-11-21T08:53:38.648348Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2024-11-21T08:53:38.648377Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T08:53:38.648382Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2024-11-21T08:53:38.648394Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:741:2611], Recipient [2:707:2590]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2024-11-21T08:53:38.648396Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2024-11-21T08:53:38.648400Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2024-11-21T08:53:38.648416Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.648419Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.648422Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:38.648427Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:38.648430Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2024-11-21T08:53:38.648433Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2024-11-21T08:53:38.648437Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2024-11-21T08:53:38.648444Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2024-11-21T08:53:38.648446Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2024-11-21T08:53:38.648449Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:38.648451Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.648457Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2024-11-21T08:53:38.648459Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:38.648461Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:53:38.648463Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:53:38.648472Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2024-11-21T08:53:38.648474Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2024-11-21T08:53:38.648477Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2024-11-21T08:53:38.648479Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:38.648481Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:53:38.648483Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:38.648485Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:38.648489Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:38.648492Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.648497Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2024-11-21T08:53:38.648503Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2024-11-21T08:53:38.648514Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:38.648554Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:630:2536], Recipient [2:707:2590]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 79 } } 2024-11-21T08:53:38.648560Z node 2 :TX_PROXY DEBUG: [ReadTable [2:707:2590] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2024-11-21T08:53:38.648573Z node 2 :TX_PROXY ERROR: [ReadTable [2:707:2590] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2024-11-21T08:53:38.648619Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:707:2590], Recipient [2:630:2536]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 |88.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> CdcStreamChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::UpsertSingleRow |88.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |88.6%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> AsyncIndexChangeCollector::DeleteNothing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2024-11-21T08:53:36.692703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:36.693129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:36.693149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f6/r3tmp/tmptXighh/pdisk_1.dat 2024-11-21T08:53:36.791350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.808696Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:36.851096Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:36.851371Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:53:36.851409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.851422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.861936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.965236Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:53:36.965262Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:53:36.965301Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:53:36.972170Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:53:36.972469Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:53:36.972491Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:53:36.972556Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:53:36.972612Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:53:36.972630Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:53:36.972719Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:53:36.973182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.973506Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:53:36.973521Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:53:36.987895Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:36.988102Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:36.988185Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:36.988251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:36.996873Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:36.997074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:36.997100Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:36.997263Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:36.997272Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:36.997279Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:36.997332Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:37.001055Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:37.001152Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:37.001186Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:37.001193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:37.001198Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:37.001204Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:37.001354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:37.001362Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:37.001530Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:37.001554Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:37.001567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:37.001573Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:37.001580Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:37.001588Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:37.001594Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:37.001602Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:53:37.001607Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:37.001611Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:37.001617Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:37.001622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:37.001641Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:53:37.001646Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:53:37.001671Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:37.001727Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:53:37.001738Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:37.001757Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:37.001767Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:53:37.001771Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:53:37.001778Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:53:37.001782Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:37.001831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:53:37.001836Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:53:37.001841Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:37.001845Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:37.001855Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:53:37.001858Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:37.001862Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:53:37.001866Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:53:37.001871Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:53:37.002126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:53:37.002133Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:37.012442Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:37.012472Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:37.012478Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:37.012488Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 5186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:39.338995Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2024-11-21T08:53:39.338997Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2024-11-21T08:53:39.338999Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2024-11-21T08:53:39.339002Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2024-11-21T08:53:39.339057Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1307:3035], Recipient [2:1034:2818]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2024-11-21T08:53:39.339062Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2024-11-21T08:53:39.339066Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2024-11-21T08:53:39.339080Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1307:3035], Recipient [2:1210:2958]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:39.339083Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:39.339090Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2024-11-21T08:53:39.339124Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:39.339140Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1307:3035], Recipient [2:1034:2818]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2024-11-21T08:53:39.339142Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2024-11-21T08:53:39.339144Z node 2 :TX_PROXY TRACE: [ReadTable [2:1034:2818] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1307:3035] ShardId# 72075186224037896 2024-11-21T08:53:39.339152Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2024-11-21T08:53:39.339156Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1307:3035], Recipient [2:1034:2818]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2024-11-21T08:53:39.339158Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2024-11-21T08:53:39.339200Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1033:2818], Recipient [2:1034:2818]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:39.339206Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:39.339209Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2024-11-21T08:53:39.339219Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2024-11-21T08:53:39.339225Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2024-11-21T08:53:39.339240Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2024-11-21T08:53:39.339244Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2024-11-21T08:53:39.339255Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1307:3035], Recipient [2:1034:2818]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2024-11-21T08:53:39.339258Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2024-11-21T08:53:39.339261Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2024-11-21T08:53:39.339278Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1210:2958], Recipient [2:1210:2958]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:39.339280Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:39.339284Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2024-11-21T08:53:39.339286Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:39.339290Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2024-11-21T08:53:39.339292Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2024-11-21T08:53:39.339295Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2024-11-21T08:53:39.339298Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2024-11-21T08:53:39.339300Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2024-11-21T08:53:39.339303Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2024-11-21T08:53:39.339305Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2024-11-21T08:53:39.339308Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2024-11-21T08:53:39.339310Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2024-11-21T08:53:39.339312Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2024-11-21T08:53:39.339314Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2024-11-21T08:53:39.339318Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2024-11-21T08:53:39.339320Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2024-11-21T08:53:39.339322Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2024-11-21T08:53:39.339324Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:39.339326Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2024-11-21T08:53:39.339328Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2024-11-21T08:53:39.339331Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2024-11-21T08:53:39.339336Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2024-11-21T08:53:39.339339Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2024-11-21T08:53:39.339344Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:53:39.339352Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2024-11-21T08:53:39.339381Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1210:2958], Recipient [2:1034:2818]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 28 } } 2024-11-21T08:53:39.339385Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1034:2818] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2024-11-21T08:53:39.339397Z node 2 :TX_PROXY INFO: [ReadTable [2:1034:2818] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.012416s execute time: 0.387323s total time: 0.399739s 2024-11-21T08:53:39.339482Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:842:2674]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2024-11-21T08:53:39.339522Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:949:2754]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2024-11-21T08:53:39.339575Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:952:2756]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2024-11-21T08:53:39.339633Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1310:3038], Recipient [2:1101:2876]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:39.339636Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:39.339640Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1308:3036], serverId# [2:1310:3038], sessionId# [0:0:0] 2024-11-21T08:53:39.339646Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:1207:2956]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2024-11-21T08:53:39.339688Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:1101:2876]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2024-11-21T08:53:39.339713Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:1210:2958]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2024-11-21T08:53:39.339732Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1311:3039], Recipient [2:1103:2878]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:39.339735Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:39.339737Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1309:3037], serverId# [2:1311:3039], sessionId# [0:0:0] 2024-11-21T08:53:39.339751Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1034:2818], Recipient [2:1103:2878]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> CdcStreamChangeCollector::UpsertManyRows >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::BaseCase [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:09.158198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:09.158226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:09.158232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:09.158236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:09.158249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:09.158253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:09.158264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:09.158355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:09.170461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:09.170485Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:09.172981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:09.173076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:09.173107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:09.175303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:09.175382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:09.175484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.175641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:09.176273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.176547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:09.176559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.176572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:09.176579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:09.176586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T08:52:09.176633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:09.177691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:09.193656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:09.193738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.193793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:09.193832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:09.193839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.194465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.194490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:09.194529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.194539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:09.194543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:09.194548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:09.194951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.194964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:09.194969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:09.195278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.195287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.195293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.195299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.195914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:09.196254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:09.196299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:09.196483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:09.196508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:09.196514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.196574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:09.196581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:09.196605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:09.196616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:09.197005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:09.197015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:09.197043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:09.197047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:09.197105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:09.197111Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:09.197120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:09.197124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.197129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready 
parts: 1/1, is published: false 2024-11-21T08:52:09.197134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:09.197138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:09.197142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:09.197151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:09.197156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:09.197160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:33.879934Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:33.879959Z node 276 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 28us result status StatusSuccess 2024-11-21T08:53:33.880076Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true 
CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:33.880158Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:33.880195Z node 276 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1/indexImplTable" took 44us result status StatusSuccess 2024-11-21T08:53:33.880320Z node 276 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> CdcStreamChangeCollector::InsertSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2024-11-21T08:53:26.342117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653043078101250:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.342464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.353762Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653043743688146:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002df5/r3tmp/tmp3N2ew1/pdisk_1.dat 2024-11-21T08:53:26.379189Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.380470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.386153Z node 1 :PQ_READ_PROXY DEBUG: Direct read 
cache created 2024-11-21T08:53:26.412717Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6085, node 1 2024-11-21T08:53:26.426610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002df5/r3tmp/yandexrJtZxV.tmp 2024-11-21T08:53:26.426625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002df5/r3tmp/yandexrJtZxV.tmp 2024-11-21T08:53:26.426690Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002df5/r3tmp/yandexrJtZxV.tmp 2024-11-21T08:53:26.426742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.434013Z INFO: TTestServer started on Port 25519 GrpcPort 6085 2024-11-21T08:53:26.441691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.441719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.443218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25519 PQClient connected to localhost:6085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:26.480765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.480800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.481338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.483062Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.483317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:53:26.524680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:26.723631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653043078102086:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.723663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.723679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653043078102104:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.724395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:26.729858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653043078102107:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:53:26.760258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.816339Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653043078102312:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:26.816425Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTJlMTAzZGItOTE4NTFkYjYtNTVmODExZmUtZjFmZjc0NDY=, ActorId: [1:7439653043078102075:2300], ActorState: ExecuteState, TraceId: 01jd6yt8j13523nz8wc9ppt2nx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:26.817048Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:26.819519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.839395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:26.890816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yt8pc45mhzn4ctjth9hsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjNjhiYzYtZWZhNTc4MDAtMTQ4OGU5ZWQtMjMxNjEwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653043078102625:3048] 2024-11-21T08:53:31.341826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653043078101250:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:31.341859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:31.354156Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653043743688146:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:31.354213Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:53:31.990577Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653043078101351:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:31.990663Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653043078101351:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2024-11-21T08:53:31.990683Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653043078101351:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439653043078101782:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179206541 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:53:31.990699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439653043078101351:2149], cacheItem# { Subscriber: { Subscriber: [1:7439653043078101782:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179206541 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 14 IsSync: true Partial: 0 } 2024-11-21T08:53:31.990752Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439653064552939507:3297], recipient# [1:7439653064552939506:3296], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: ... ion 2(assignId:3) wait data in partition inited, cookie 1 from offset0 2024-11-21T08:53:39.771820Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2' requestId: 2024-11-21T08:53:39.771828Z node 4 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:53:39.771831Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] got client message batch for topic 'account2/topic2' partition 1 2024-11-21T08:53:39.771861Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] read cookie 32 Topic 'account2/topic2' partition 1 user userx offset 0 count 12 size 12587172 endOffset 10 max time lag 0ms effective offset 6 2024-11-21T08:53:39.771868Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] read cookie 32 added 0 blobs, size 0 count 0 last offset 6 2024-11-21T08:53:39.772607Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] Reading cookie 32. All data is from uncompacted head. 
2024-11-21T08:53:39.772623Z node 4 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:53:39.772693Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic2' partition: 1 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:39.774368Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4) initDone 1 event { CmdReadResult { MaxOffset: 10 BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 4195724 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T08:53:39.774393Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4) wait data in partition inited, cookie 1 from offset10 2024-11-21T08:53:39.774401Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 after read state TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4) EndOffset 10 ReadOffset 10 ReadGuid 419a5a8c-3a205f12-8a068d6d-cc603eed has messages 0 2024-11-21T08:53:39.774449Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 read done: guid# 419a5a8c-3a205f12-8a068d6d-cc603eed, partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4), size# 52 2024-11-21T08:53:39.774467Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 empty read result, start new reading: guid# 419a5a8c-3a205f12-8a068d6d-cc603eed 2024-11-21T08:53:39.774473Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 Process answer. Aval parts: 0 >>>>> Iteration: 30 Closing session. Got 0 messages 2024-11-21T08:53:39.776338Z :INFO: [/Root] [/Root] [dc660fcf-be6651f5-a3369823-307d6b08] Closing read session. Close timeout: 1.000000s 2024-11-21T08:53:39.776370Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:2:0:0 null:account2/topic2:3:1:0:0 null:account2/topic2:2:5:0:0 null:account2/topic2:1:4:0:0 null:account2/topic2:0:3:0:0 2024-11-21T08:53:39.776383Z :INFO: [/Root] [/Root] [dc660fcf-be6651f5-a3369823-307d6b08] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } >>>>> Iteration: 30 Session closed 2024-11-21T08:53:39.776648Z :INFO: [/Root] [/Root] [dc660fcf-be6651f5-a3369823-307d6b08] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:39.776655Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:2:0:0 null:account2/topic2:3:1:0:0 null:account2/topic2:2:5:0:0 null:account2/topic2:1:4:0:0 null:account2/topic2:0:3:0:0 2024-11-21T08:53:39.776658Z :INFO: [/Root] [/Root] [dc660fcf-be6651f5-a3369823-307d6b08] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:39.776690Z :NOTICE: [/Root] [/Root] [dc660fcf-be6651f5-a3369823-307d6b08] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:39.776830Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 grpc read done: success# 0, data# { } 2024-11-21T08:53:39.776851Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 grpc read failed 2024-11-21T08:53:39.776858Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 grpc closed 2024-11-21T08:53:39.776880Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_2586915970328806162_v1 is DEAD 2024-11-21T08:53:39.776986Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.776997Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Destroy direct read session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777004Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7439653099703573135:2759] destroyed 2024-11-21T08:53:39.777010Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.777013Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777016Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439653099703573128:2756] destroyed 2024-11-21T08:53:39.777019Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.777024Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777028Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439653099703573125:2755] destroyed 2024-11-21T08:53:39.777041Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777048Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777050Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777098Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.777114Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439653099703573129:2757] destroyed 2024-11-21T08:53:39.777132Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.777134Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777137Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439653099703573131:2758] destroyed 2024-11-21T08:53:39.777148Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777152Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.777361Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7439653099703573123:2752] disconnected; active server actors: 1 2024-11-21T08:53:39.777377Z node 4 :PERSQUEUE_READ_BALANCER 
NOTICE: [72075186224037905][topic2] pipe [3:7439653099703573123:2752] client userx disconnected session userx_3_31_2586915970328806162_v1 2024-11-21T08:53:39.779403Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0] Write session: close. Timeout = 0 ms 2024-11-21T08:53:39.779411Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0] Write session will now close 2024-11-21T08:53:39.779419Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0] Write session: aborting 2024-11-21T08:53:39.779590Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:53:39.779595Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0] Write session: destroy 2024-11-21T08:53:39.779691Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0 grpc read done: success: 0 data: 2024-11-21T08:53:39.779702Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0 grpc read failed 2024-11-21T08:53:39.779719Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0 grpc closed 2024-11-21T08:53:39.779722Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|4781b7b1-ec6f979-c4b5ccdf-d2cd8eb2_0 is DEAD 2024-11-21T08:53:39.779942Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037902 (partition=1) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:39.780064Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.780098Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439653095408604849:2496] destroyed 2024-11-21T08:53:39.780107Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] TPartition::DropOwner. 
2024-11-21T08:53:39.932301Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439653069638798131:2137], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:39.932364Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653069638798131:2137], cacheItem# { Subscriber: { Subscriber: [3:7439653073933766798:3052] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:53:39.932410Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653099703573157:4684], recipient# [3:7439653099703573156:2760], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2024-11-21T08:53:27.261890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653045352883460:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.261926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.265565Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653046455398068:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:27.265982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:27.289739Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dc9/r3tmp/tmpOfLFTP/pdisk_1.dat 2024-11-21T08:53:27.297973Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:27.319967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15004, node 1 2024-11-21T08:53:27.337664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002dc9/r3tmp/yandex9WiM1Y.tmp 2024-11-21T08:53:27.337680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002dc9/r3tmp/yandex9WiM1Y.tmp 2024-11-21T08:53:27.342143Z INFO: TTestServer started on Port 
30900 GrpcPort 15004 TClient is connected to server localhost:30900 PQClient connected to localhost:15004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:27.362293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.362334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:27.363726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:27.363732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:27.366968Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002dc9/r3tmp/yandex9WiM1Y.tmp 2024-11-21T08:53:27.367186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:27.367395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:27.376721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:27.395451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:27.395474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:27.396917Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:27.397237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:53:27.584916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653045352884473:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.584948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.585051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653045352884500:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.585729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:27.586300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653045352884527:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.586331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:27.590306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653045352884502:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:53:27.616043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.665962Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653045352884657:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:27.668577Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGM3NmEzOTAtYWZjOTkzNDEtNjE5YjYzYjYtM2MxM2NhMDI=, ActorId: [1:7439653045352884470:2303], ActorState: ExecuteState, TraceId: 01jd6yt9d04w2aj7swrc2cy0c5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:27.669246Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:27.704090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:27.724784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:27.765083Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd6yt9j3f90nafpx5pfahc6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ3MmNmYi02MGNlNGUyMC02OWU0OTliZS1jNGRjMTU4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653045352884950:3029] 2024-11-21T08:53:32.262014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653045352883460:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:32.262042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:32.265564Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653046455398068:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:32.265607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:53:32.820286Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653045352883785:2192], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:32.820371Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653045352883785:2192], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2024-11-21T08:53:32.820389Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653045352883785:2192], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439653045352884120:2420] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179207409 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:53:32.820406Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439653045352883785:2192], cacheItem# { Subscriber: { Subscriber: [1:7439653045352884120:2420] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179207409 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId ... tHash: "" } Result { Offset: 2 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 3 WriteTimestampMS: 1732179219692 CreateTimestampMS: 1732179219689 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 4 WriteTimestampMS: 1732179219692 CreateTimestampMS: 1732179219689 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 58 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T08:53:39.830992Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) wait data in partition inited, cookie 1 from offset4 2024-11-21T08:53:39.831001Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 after read state TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) EndOffset 4 ReadOffset 4 ReadGuid edec47b1-fe35be2c-aa3e1e8f-31058682 has messages 1 2024-11-21T08:53:39.831026Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 read done: guid# edec47b1-fe35be2c-aa3e1e8f-31058682, partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5), size# 416 2024-11-21T08:53:39.831038Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 response to read: guid# edec47b1-fe35be2c-aa3e1e8f-31058682 2024-11-21T08:53:39.831116Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 Process answer. 
Aval parts: 0 2024-11-21T08:53:39.831298Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:53:39.831379Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2024-11-21T08:53:39.831405Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T08:53:39.831429Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T08:53:39.831439Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: 2024-11-21T08:53:39.831447Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 got read request: guid# f2f6fcb6-d81bc23e-f29e5993-85a17eec DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 0 SeqNo: 1 MessageGroupId: "123" CreateTime: 2024-11-21T08:53:39.689000Z WriteTime: 2024-11-21T08:53:39.691000Z Ip: "ipv6:[::1]:57206" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:57206" } } } } 2024-11-21T08:53:39.831466Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 1} (1-1) 2024-11-21T08:53:39.831469Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 1 SeqNo: 2 MessageGroupId: "123" CreateTime: 2024-11-21T08:53:39.689000Z WriteTime: 2024-11-21T08:53:39.691000Z Ip: "ipv6:[::1]:57206" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:57206" } } } } 2024-11-21T08:53:39.831478Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (2-2) 2024-11-21T08:53:39.831481Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "123" CreateTime: 2024-11-21T08:53:39.689000Z WriteTime: 2024-11-21T08:53:39.692000Z Ip: "ipv6:[::1]:57206" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:57206" } } } } 2024-11-21T08:53:39.831489Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (3-3) 2024-11-21T08:53:39.831494Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". 
Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 3 SeqNo: 4 MessageGroupId: "123" CreateTime: 2024-11-21T08:53:39.689000Z WriteTime: 2024-11-21T08:53:39.692000Z Ip: "ipv6:[::1]:57206" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:57206" } } } } 2024-11-21T08:53:39.831511Z :INFO: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:39.831521Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:1:0:0 null:account2/topic2:3:2:0:0 null:account2/topic2:2:5:0:0 null:account2/topic2:1:4:0:0 null:account2/topic2:0:3:3:0 2024-11-21T08:53:39.831526Z :INFO: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 92 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:53:39.831545Z :NOTICE: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:53:39.831555Z :DEBUG: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] [null] Abort session to cluster 2024-11-21T08:53:39.831721Z :NOTICE: [/Root] [/Root] [46c4880e-a65283a0-8d435b58-b789e86e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:39.831877Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 grpc read done: success# 0, data# { } 2024-11-21T08:53:39.831885Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 grpc read failed 2024-11-21T08:53:39.831889Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 grpc closed 2024-11-21T08:53:39.831905Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_16070983673987555437_v1 is DEAD 2024-11-21T08:53:39.831997Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.832008Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Destroy direct read session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832014Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7439653098937119319:2526] destroyed 2024-11-21T08:53:39.832019Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.832022Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832027Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439653098937119316:2523] destroyed 2024-11-21T08:53:39.832031Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.832038Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832041Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439653098937119311:2522] destroyed 2024-11-21T08:53:39.832058Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832065Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832067Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832071Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.832088Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832115Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439653098937119318:2525] destroyed 2024-11-21T08:53:39.832120Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:39.832122Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439653098937119317:2524] destroyed 2024-11-21T08:53:39.832135Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832144Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.832154Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7439653098937119309:2519] disconnected; active server actors: 1 2024-11-21T08:53:39.832158Z node 4 :PERSQUEUE_READ_BALANCER 
NOTICE: [72075186224037905][topic2] pipe [3:7439653098937119309:2519] client user1 disconnected session user1_3_2_16070983673987555437_v1 2024-11-21T08:53:39.972326Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439653073167312598:2148], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:39.972381Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653073167312598:2148], cacheItem# { Subscriber: { Subscriber: [3:7439653073167313297:2650] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:53:39.972403Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653098937119345:4288], recipient# [3:7439653098937119344:2528], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2024-11-21T08:53:37.670666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:37.671194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:37.671222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042d8/r3tmp/tmpkO2nh6/pdisk_1.dat 2024-11-21T08:53:37.794261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:37.814686Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:37.860456Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:37.860746Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:53:37.860785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:37.860799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:37.871412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:37.974662Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:53:37.974684Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:53:37.974716Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:53:37.980866Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:53:37.981077Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:53:37.981088Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:53:37.981130Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:53:37.981170Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:53:37.981183Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:53:37.981240Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:53:37.981604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:37.981799Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:53:37.981809Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:53:37.994882Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:37.995029Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:37.995088Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:37.995125Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:38.000695Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:38.000825Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:38.000842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:38.000952Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:38.000958Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:38.000963Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:38.000997Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:38.003491Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:38.003551Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:38.003572Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:38.003577Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:38.003583Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:38.003588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:38.003714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.003723Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.003856Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:38.003869Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:38.003879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:38.003883Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:38.003889Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:38.003895Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:38.003900Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:38.003907Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:53:38.003910Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:38.003913Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:38.003917Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:38.003921Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:38.003936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:53:38.003939Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:53:38.003959Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:38.004012Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:53:38.004023Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:38.004042Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:38.004050Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:53:38.004054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:53:38.004060Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:53:38.004064Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:38.004112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:53:38.004115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:53:38.004120Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:38.004122Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.004131Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:53:38.004134Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:38.004137Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:53:38.004140Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:53:38.004146Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:53:38.004399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:53:38.004409Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:38.014776Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:38.014830Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:38.014839Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.014855Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:40.330690Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2024-11-21T08:53:40.330694Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2024-11-21T08:53:40.330699Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2024-11-21T08:53:40.330704Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2024-11-21T08:53:40.330709Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2024-11-21T08:53:40.330713Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2024-11-21T08:53:40.330717Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2024-11-21T08:53:40.330740Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2024-11-21T08:53:40.330744Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:40.330748Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T08:53:40.330751Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:53:40.330755Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:53:40.330760Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:40.330830Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:959:2767], Recipient [2:928:2738]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2024-11-21T08:53:40.330835Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2024-11-21T08:53:40.330839Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:959:2767] 2024-11-21T08:53:40.330852Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2024-11-21T08:53:40.330878Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:959:2767], Recipient [2:847:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:40.330882Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:40.330916Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:40.330938Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:959:2767], Recipient [2:928:2738]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2024-11-21T08:53:40.330943Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2024-11-21T08:53:40.330947Z node 2 :TX_PROXY TRACE: [ReadTable [2:928:2738] TxId# 281474976715662] Sending TEvStreamDataAck to [2:959:2767] ShardId# 72075186224037890 2024-11-21T08:53:40.330953Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2024-11-21T08:53:40.330963Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:959:2767], Recipient [2:928:2738]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2024-11-21T08:53:40.330967Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2024-11-21T08:53:40.331008Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:927:2738], Recipient [2:928:2738]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:40.331014Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:40.331017Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:959:2767] 2024-11-21T08:53:40.331029Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2024-11-21T08:53:40.331036Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:40.331052Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:959:2767], Recipient [2:928:2738]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2024-11-21T08:53:40.331056Z node 2 :TX_PROXY DEBUG: [ReadTable [2:928:2738] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2024-11-21T08:53:40.331059Z node 2 :TX_PROXY TRACE: [ReadTable [2:928:2738] TxId# 281474976715662] Sending TEvStreamDataAck to [2:959:2767] ShardId# 72075186224037890 2024-11-21T08:53:40.331073Z node 2 :TX_PROXY INFO: [ReadTable [2:928:2738] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.011216s execute time: 0.187948s total time: 0.199164s 2024-11-21T08:53:40.331112Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2024-11-21T08:53:40.331118Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2024-11-21T08:53:40.331163Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T08:53:40.331168Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2024-11-21T08:53:40.331226Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:928:2738], Recipient [2:842:2674]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T08:53:40.331292Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:847:2676], Recipient [2:847:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:40.331296Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:40.331301Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:53:40.331305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:40.331310Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2024-11-21T08:53:40.331314Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2024-11-21T08:53:40.331318Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2024-11-21T08:53:40.331323Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2024-11-21T08:53:40.331327Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2024-11-21T08:53:40.331331Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2024-11-21T08:53:40.331337Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2024-11-21T08:53:40.331343Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2024-11-21T08:53:40.331346Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2024-11-21T08:53:40.331349Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:53:40.331353Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:53:40.331358Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2024-11-21T08:53:40.331362Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:53:40.331365Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2024-11-21T08:53:40.331368Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.331372Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T08:53:40.331375Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:53:40.331378Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:53:40.331384Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:40.331388Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2024-11-21T08:53:40.331392Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:53:40.331401Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:53:40.331435Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:928:2738], Recipient [2:847:2676]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2024-11-21T08:53:40.331440Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2024-11-21T08:53:40.331444Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2024-11-21T08:53:40.331450Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2024-11-21T08:53:40.331474Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:928:2738], Recipient [2:847:2676]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2024-11-21T08:53:40.331478Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2024-11-21T08:53:40.331489Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:928:2738], Recipient [2:847:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> 
AsyncIndexChangeCollector::InsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2024-11-21T08:53:37.871627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:37.872103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:37.872126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042cb/r3tmp/tmpJkjQCz/pdisk_1.dat 2024-11-21T08:53:37.973805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:37.993903Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:38.036675Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:38.037067Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:53:38.037130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:38.037151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:38.047796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:38.151234Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T08:53:38.151264Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:53:38.151304Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T08:53:38.158985Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:53:38.159203Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:53:38.159214Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:53:38.159254Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:53:38.159289Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:53:38.159301Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T08:53:38.159361Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:53:38.159707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:38.159919Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:53:38.159927Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T08:53:38.173818Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:38.174012Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:38.174093Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:38.174185Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:38.180094Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:38.180273Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:38.180296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:38.180446Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:38.180456Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:38.180463Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:38.180505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:38.183323Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:38.183402Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:38.183424Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:38.183428Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:38.183431Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:38.183435Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:38.183535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.183540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:38.183641Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:38.183654Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:38.183664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:38.183667Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:38.183672Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:38.183677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:38.183681Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:38.183687Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:53:38.183690Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:53:38.183693Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:53:38.183696Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:38.183700Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:38.183712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:53:38.183716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:53:38.183732Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:38.183765Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:53:38.183772Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:38.183783Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:38.183789Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:53:38.183792Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:53:38.183795Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:53:38.183798Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:38.183835Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:53:38.183837Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:53:38.183840Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:53:38.183842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.183847Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:53:38.183850Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:53:38.183852Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:53:38.183854Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:53:38.183857Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:53:38.184026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:53:38.184031Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:38.194300Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:38.194330Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:53:38.194336Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:53:38.194347Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 186224037890 has no attached operations 2024-11-21T08:53:40.445444Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:53:40.445448Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:40.445493Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2024-11-21T08:53:40.445497Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2024-11-21T08:53:40.445501Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2024-11-21T08:53:40.445509Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:956:2763], Recipient [2:863:2690]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:40.445513Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T08:53:40.445538Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2024-11-21T08:53:40.445590Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:40.445612Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2024-11-21T08:53:40.445618Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2024-11-21T08:53:40.445622Z node 2 :TX_PROXY TRACE: [ReadTable [2:822:2658] TxId# 281474976715661] Sending TEvStreamDataAck to [2:956:2763] ShardId# 72075186224037890 
2024-11-21T08:53:40.445635Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2024-11-21T08:53:40.445639Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2024-11-21T08:53:40.445650Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2024-11-21T08:53:40.445699Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:821:2658], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:40.445704Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:40.445707Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2024-11-21T08:53:40.445712Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2024-11-21T08:53:40.445720Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T08:53:40.445733Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2024-11-21T08:53:40.445737Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2024-11-21T08:53:40.445740Z node 2 :TX_PROXY TRACE: [ReadTable [2:822:2658] TxId# 281474976715661] Sending TEvStreamDataAck to [2:956:2763] ShardId# 72075186224037890 2024-11-21T08:53:40.445746Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2024-11-21T08:53:40.445754Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2024-11-21T08:53:40.445757Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2024-11-21T08:53:40.445788Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:821:2658], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T08:53:40.445791Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T08:53:40.445796Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2024-11-21T08:53:40.445803Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2024-11-21T08:53:40.445809Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2024-11-21T08:53:40.445829Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:956:2763], Recipient [2:822:2658]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2024-11-21T08:53:40.445832Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2024-11-21T08:53:40.445835Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2024-11-21T08:53:40.445841Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T08:53:40.445845Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2024-11-21T08:53:40.445868Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:863:2690], Recipient [2:863:2690]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:40.445872Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:53:40.445878Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:53:40.445882Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:53:40.445887Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2024-11-21T08:53:40.445891Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2024-11-21T08:53:40.445895Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2024-11-21T08:53:40.445900Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2024-11-21T08:53:40.445903Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2024-11-21T08:53:40.445906Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2024-11-21T08:53:40.445910Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2024-11-21T08:53:40.445915Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2024-11-21T08:53:40.445918Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2024-11-21T08:53:40.445921Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T08:53:40.445924Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2024-11-21T08:53:40.445929Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2024-11-21T08:53:40.445932Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T08:53:40.445936Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2024-11-21T08:53:40.445939Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.445942Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T08:53:40.445946Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T08:53:40.445949Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T08:53:40.445956Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:40.445960Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2024-11-21T08:53:40.445964Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:53:40.445973Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:53:40.446007Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:863:2690], Recipient [2:822:2658]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 48 } } 2024-11-21T08:53:40.446011Z node 2 :TX_PROXY DEBUG: [ReadTable [2:822:2658] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2024-11-21T08:53:40.446025Z node 2 :TX_PROXY INFO: [ReadTable [2:822:2658] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.010972s execute time: 0.211681s total time: 0.222653s 2024-11-21T08:53:40.446092Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:822:2658], Recipient [2:630:2536]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2024-11-21T08:53:40.446130Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:822:2658], Recipient [2:859:2688]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2024-11-21T08:53:40.446193Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:822:2658], Recipient [2:863:2690]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages |88.6%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.6%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TPDiskTest::DeviceHaltTooLong [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TPDiskTest::ChangePDiskKey >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing |88.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ObjectStorageListingTest::FilterListing [GOOD] >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.922580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.922607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.922612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.922617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.922630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.922635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.922644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.922718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.933677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.933701Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.936879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.937655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.937695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.939561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.939784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.939880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.939952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.940979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.941247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.941259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.941295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.941303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless 
db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.941309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.941324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.942637Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.962399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.962485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.962549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.962628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.962634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.963424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.963458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.963521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.963534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.963540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.963545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.963986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.963997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.964002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.964518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.964530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.964537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.964545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.965246Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.965850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.965926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.966162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.966205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.966224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.966313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.966323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.966363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.966380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.966967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.966976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.967021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.967026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.967113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.967122Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.967134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.967138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.967144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.967149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.967154Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.967158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.967170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.967176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.967181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.967522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.967537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.967541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.967546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.967550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.967563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 09546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1, LoadedSize 102, Spent{0.000s wa 0.000s cnt 1}, HistogramKeys 1 2024-11-21T08:53:41.839705Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1229:3171], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2024-11-21T08:53:41.839715Z node 3 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186233409546, for tableId 2 2024-11-21T08:53:41.840914Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:294:2284], Recipient [3:305:2293]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T08:53:41.841555Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.147000Z 2024-11-21T08:53:41.841574Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 6 2024-11-21T08:53:41.841583Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:123:2149]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:41.841647Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:305:2293], Recipient [3:123:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2024-11-21T08:53:41.841656Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2024-11-21T08:53:41.841673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2024-11-21T08:53:41.841686Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 
72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 0.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2024-11-21T08:53:41.842196Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:294:2284], Recipient [3:305:2293]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T08:53:41.853073Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.147000Z 2024-11-21T08:53:42.130335Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:53:42.130364Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:53:42.130382Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:53:42.130405Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2024-11-21T08:53:42.130432Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:42.130436Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:42.130512Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:305:2293], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4228 Memory: 123880 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:53:42.130517Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:53:42.130528Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.4228 2024-11-21T08:53:42.130541Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:53:42.130546Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:53:42.161198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2024-11-21T08:53:42.161249Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:19.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2024-11-21T08:53:42.161285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2024-11-21T08:53:42.161324Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:42.161332Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:42.161335Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2024-11-21T08:53:42.161351Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T08:53:42.161355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2024-11-21T08:53:42.161390Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:123:2149], Recipient [3:305:2293]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2024-11-21T08:53:42.161450Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:123:2149], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2024-11-21T08:53:42.161536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2024-11-21T08:53:42.161556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2024-11-21T08:53:42.161566Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2024-11-21T08:53:42.161584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2024-11-21T08:53:42.161621Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:53:42.161816Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.147000Z 2024-11-21T08:53:42.161823Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2024-11-21T08:53:42.161827Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 
1001, finished edge# 6, front# 7 2024-11-21T08:53:42.162788Z node 3 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1, LoadedSize 102, Spent{0.001s wa 0.001s cnt 1}, HistogramKeys 1 2024-11-21T08:53:42.163001Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1257:3197], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2024-11-21T08:53:42.163015Z node 3 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186233409546, for tableId 2 2024-11-21T08:53:42.163668Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:294:2284], Recipient [3:305:2293]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T08:53:42.164799Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.148000Z 2024-11-21T08:53:42.164821Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2024-11-21T08:53:42.164831Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:123:2149]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:42.164993Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:305:2293], Recipient [3:123:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2024-11-21T08:53:42.165003Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2024-11-21T08:53:42.165021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2024-11-21T08:53:42.165036Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2024-11-21T08:53:42.165469Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:294:2284], Recipient [3:305:2293]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T08:53:42.175703Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:42.175728Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:42.175745Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:53:42.186544Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.148000Z >> TPDiskTest::ChangePDiskKey [GOOD] >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> ReadOnlyVDisk::TestWrites >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |88.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2024-11-21T08:53:41.801529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:41.801979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:41.802003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e8c/r3tmp/tmpzQmiyK/pdisk_1.dat 2024-11-21T08:53:41.899375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:41.919406Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:41.962367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:41.962411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:41.973114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:42.077132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:42.092085Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:42.092145Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:42.098349Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:42.098384Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:42.098513Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:42.098531Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:42.098536Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:42.098571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:42.101223Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:42.101287Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:42.101308Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:42.101312Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:42.101315Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:42.101319Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:42.101544Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:42.101561Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:42.101571Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T08:53:42.101578Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:42.101582Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:42.101590Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:42.101594Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:42.101628Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:42.101689Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:42.101707Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:42.101958Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:42.112310Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:42.112358Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:42.286854Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:53:42.287791Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:42.287811Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:42.287932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:42.287943Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:42.287954Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:42.288038Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:42.288076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:42.288266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:42.288284Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:42.288703Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:42.288843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:42.289198Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:42.289207Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:42.289326Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:42.289334Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:42.289343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:42.289577Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:42.289587Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:42.289593Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:42.289613Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:42.289624Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:42.289636Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:42.290293Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:42.290713Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:42.290744Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:42.290752Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:42.292749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:42.292774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:42.292784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:42.293721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:42.294665Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:42.481957Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:42.482448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:42.556027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ytqrm6d2w8r7y7xafrrqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM2ZGQxOTUtYWIwYTA4YTQtNzQzOWE0NzItZTRkNGY4YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:42.557158Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T08:53:42.557264Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:42.568225Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:42.568332Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:42.569654Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T08:53:42.569698Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T08:53:42.569758Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2024-11-21T08:53:42.569784Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T08:53:42.570058Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] 2024-11-21T08:53:42.570081Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T08:53:42.570103Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2024-11-21T08:53:42.570125Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> TExternalDataSourceTest::CreateExternalDataSource >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:53:10.947591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:10.947621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:10.947626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:10.947632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:10.947647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:10.947651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:10.947659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:10.947743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:10.959116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:10.959143Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:53:10.961536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:10.961669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:10.961705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:10.965044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:10.965119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:10.965226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:10.965477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:10.966171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:10.966435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:10.966446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T08:53:10.966459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:10.966467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:10.966472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:10.966511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:53:10.967815Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:53:10.982036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:10.982104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.982161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:10.982198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:10.982204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.982893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:10.982917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:10.982963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.982980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:10.982983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:10.982987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:10.983377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.983389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:10.983393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:10.983756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.983766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:10.983786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:10.983793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:10.984374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:10.984932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:10.984992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:10.985199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:10.985225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:10.985233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:10.985288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:10.985295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:10.985334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:10.985346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:10.985826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:10.985838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:10.985882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:10.985888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:10.985976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:53:10.985984Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:10.985996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:10.986001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:10.986007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:10.986012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:10.986017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:10.986021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:10.986033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:10.986039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:10.986043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:53:42.838767Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:42.838773Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T08:53:42.838779Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:53:42.838784Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:53:42.838789Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:53:42.838801Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:53:42.838808Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T08:53:42.838815Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T08:53:42.838819Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T08:53:42.838966Z node 126 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:42.838977Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:42.838980Z node 126 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:42.838983Z node 126 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:53:42.838986Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:53:42.839096Z node 126 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:42.839103Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:53:42.839106Z node 126 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:53:42.839108Z node 126 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:53:42.839110Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:53:42.839116Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:53:42.839780Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:53:42.839801Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2024-11-21T08:53:42.840334Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/Dir@" OperationType: ESchemeOpMkDir MkDir { Name: "Dir@" } } TxId: 1007 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:42.840775Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/Dir@/Dir@\', error: symbol \'@\' is not allowed in the path part \'Dir@\', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935" TxId: 1007 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:42.840800Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/Dir@/Dir@', error: symbol '@' is not allowed in the path part 'Dir@', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935, operation: CREATE DIRECTORY, path: /MyRoot/Dir@/Dir@ TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1006 2024-11-21T08:53:42.840848Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:53:42.840852Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:53:42.840904Z node 126 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:53:42.840920Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:53:42.840925Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [126:549:2508] TestWaitNotification: OK eventTxId 1006 2024-11-21T08:53:42.840985Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:42.841004Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2024-11-21T08:53:42.841075Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dir!" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Dir0:" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:42.841116Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir@" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:42.841127Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir@" took 14us result status StatusSuccess 2024-11-21T08:53:42.841162Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir@" PathDescription { Self { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: 
"Dir!" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1006 CreateStep: 5000006 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:42.841198Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir!" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:42.841205Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir!" took 8us result status StatusSuccess 2024-11-21T08:53:42.841223Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir!" PathDescription { Self { Name: "Dir!" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> ReadOnlyVDisk::TestWrites [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::IndexPartitioning [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] 
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:08.263054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:08.263080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:08.263086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:08.263091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:08.263109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:08.263113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:08.263124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:08.263247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:08.275645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:08.275675Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:08.278526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:08.278646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:08.278680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:08.281595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:08.281704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:08.281827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:08.282021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:08.282766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:08.283100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:08.283116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:08.283132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:08.283140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:08.283146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:08.283200Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:52:08.284817Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:08.304651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:08.304750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.304827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:08.304874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:08.304884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.305867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:08.305899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:08.305958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.305970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:08.305974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:08.305980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:08.306452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.306466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:08.306471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:08.306811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.306824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.306830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:08.306838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2024-11-21T08:52:08.307451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:08.307830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:08.307892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:08.308110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:08.308136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:08.308143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:08.308224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:08.308232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:08.308265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:08.308278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:08.308673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:08.308683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:08.308730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:08.308736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:08.308828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:08.308835Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:08.308847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:08.308851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:08.308885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2024-11-21T08:52:08.308891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:08.308896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:08.308900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:08.308912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:08.308918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:08.308922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 4 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:36.460674Z node 320 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:36.460705Z node 320 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 33us result status StatusSuccess 2024-11-21T08:53:36.460836Z node 320 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 4 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } 
PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:36.460890Z node 320 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:36.460933Z node 320 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 45us result status StatusSuccess 2024-11-21T08:53:36.461068Z node 320 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" 
SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 7396873006516677113 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T08:53:43.239030Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T08:53:43.240864Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T08:53:43.243041Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:43.243615Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2024-11-21T08:53:43.244950Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2024-11-21T08:53:43.245262Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2024-11-21T08:53:43.245825Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2024-11-21T08:53:43.246141Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} 
SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2024-11-21T08:53:43.367401Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.367432Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.367460Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.367622Z 1 00h03m30.111536s :BS_PROXY_PUT ERROR: [d2ec2a42a990debd] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2024-11-21T08:53:43.367900Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.367925Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.368084Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2024-11-21T08:53:43.368478Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.368619Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.368721Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2024-11-21T08:53:43.368892Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.369172Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.369257Z 2 00h03m30.111536s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2024-11-21T08:53:43.369383Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.369395Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.369557Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2024-11-21T08:53:43.369860Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.369873Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.370148Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2024-11-21T08:53:43.370354Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.370374Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.370380Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2024-11-21T08:53:43.370645Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.370680Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.370692Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2024-11-21T08:53:43.371045Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.371066Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.371075Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 
2024-11-21T08:53:43.371385Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:43.371396Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.371407Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] S ... 2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 
=== Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2024-11-21T08:53:43.622236Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.622285Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2024-11-21T08:53:43.623228Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.623575Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2024-11-21T08:53:43.624368Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2024-11-21T08:53:43.624910Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.624922Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2024-11-21T08:53:43.625404Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.625434Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2024-11-21T08:53:43.626028Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.626040Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2024-11-21T08:53:43.626529Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:43.626585Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2024-11-21T08:53:43.627164Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.627180Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2024-11-21T08:53:43.627634Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:43.627654Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:43.202942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:43.202972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.202977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:43.202982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:43.202990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:43.202993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:43.203002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.203084Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2024-11-21T08:53:43.213593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:43.213621Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:43.216618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:43.217411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:43.217482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:43.218874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:43.219053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:43.219159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.219251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:43.220064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.220372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.220385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.220431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:43.220439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.220445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:43.220460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.222575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:43.236425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:43.236523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.236590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:43.236643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:43.236651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.237564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.237587Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:43.237649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.237659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:43.237665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:43.237670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:43.238071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.238081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:43.238084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:43.238373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.238381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.238384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.238391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.238809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:43.239102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:43.239147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:43.239291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.239310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:43.239320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.239360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:43.239364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.239391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.239400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:43.239747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.239753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.239789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.239792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:43.239866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.239871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:43.239881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:43.239884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.239888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:43.239892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.239895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:43.239897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:43.239906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.239910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:43.239912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:43.240126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.240136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.240139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:43.240143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:43.240146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.240155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
43.469457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.469461Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:43.469465Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:53:43.469470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.469559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.469568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.469572Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:43.469576Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:53:43.469579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:43.469587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T08:53:43.469956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:53:43.469983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T08:53:43.470218Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.470239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:43.470248Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2024-11-21T08:53:43.470265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:43.470279Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T08:53:43.470306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.470316Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:43.470625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:53:43.470647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:53:43.470725Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.470730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.470748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:43.470766Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.470771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:53:43.470775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:53:43.470815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.470821Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:53:43.470832Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:53:43.470836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:53:43.470842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T08:53:43.470847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:53:43.470852Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:53:43.470855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:53:43.470865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:43.470870Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T08:53:43.470874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:53:43.470877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T08:53:43.470936Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.470946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T08:53:43.470950Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:43.470954Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:53:43.470958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:43.470995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:43.471001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:53:43.471008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.471033Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.471040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:43.471044Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:43.471047Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:53:43.471051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.471058Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T08:53:43.471679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:53:43.471698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:43.471710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:53:43.471752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:53:43.471758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:53:43.471817Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:53:43.471832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:53:43.471837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:327:2319] TestWaitNotification: OK eventTxId 102 
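The read-back exchange at the top of this section pairs every TEvGet with a TEvGetResult whose Size equals the RequestedSize, and the blob keys appear to follow the printed order tablet:generation:step:channel:cookie:blobSize:partId (an assumption inferred from the sizes embedded in the keys, not something the log states). A minimal Python sketch for cross-checking such output offline; the regular expression and the assumed field position are fitted only to the lines shown here:

```python
import re

# Matches the payload of a TEvGetResult line as printed in this log, e.g.
# "{[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}"
GET_RE = re.compile(r"\{\[(?P<blob>[\d:]+)\] OK Size# (?P<size>\d+) RequestedSize# (?P<req>\d+)\}")

def mismatched_reads(log_text: str):
    """Yield blob ids whose returned size disagrees with the requested size
    or with the size field assumed to sit at position 5 of the blob id."""
    for m in GET_RE.finditer(log_text):
        blob = m.group("blob")
        size, req = int(m.group("size")), int(m.group("req"))
        blob_size = int(blob.split(":")[5])  # assumed BlobSize position in the key
        if size != req or size != blob_size:
            yield blob, size, req

sample = ("TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 "
          "{[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}")
print(list(mismatched_reads(sample)))  # [] -> every read returned what was requested
```

Run over the full section, an empty result confirms the "Read all 31 blob(s)" pass without eyeballing each line.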
2024-11-21T08:53:43.471902Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:43.471923Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 29us result status StatusPathDoesNotExist 2024-11-21T08:53:43.471964Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::ReadOnlyMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:43.727352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:43.727374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.727379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:43.727384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:43.727389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:43.727393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:43.727401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.727474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
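Each TTxDescribeScheme completion above prints the path, the time it took and the resulting status (for example: describe path "/MyRoot/MyExternalDataSource" took 29us result status StatusPathDoesNotExist). A small illustrative helper, assuming nothing beyond that exact print format, that collects these into tuples so the per-path outcomes can be compared across tests:

```python
import re

# '... describe path "/MyRoot/MyExternalDataSource" took 29us result status StatusPathDoesNotExist'
DESCRIBE_RE = re.compile(
    r'describe path "(?P<path>[^"]+)" took (?P<us>\d+)us result status (?P<status>\w+)'
)

def describe_results(log_text: str):
    """Yield (path, latency_us, status) for every DescribeScheme completion line."""
    for m in DESCRIBE_RE.finditer(log_text):
        yield m.group("path"), int(m.group("us")), m.group("status")

sample = ('Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" '
          'took 29us result status StatusPathDoesNotExist')
print(list(describe_results(sample)))
# [('/MyRoot/MyExternalDataSource', 29, 'StatusPathDoesNotExist')]
```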
2024-11-21T08:53:43.738924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:43.738949Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:43.742197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:43.743087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:43.743135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:43.746210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:43.746463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:43.746553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.746645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:43.747504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.747740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.747748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.747783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:43.747788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.747792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:43.747803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.749158Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:43.764123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:43.764228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.764288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:43.764338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:43.764344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.765335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.765362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:43.765429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.765438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:43.765441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:43.765445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:43.765876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.765889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:43.765894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:43.766284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.766306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.766311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.766317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.766765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:43.767170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:43.767238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:43.767447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.767473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:43.767487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.767540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:43.767547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.767582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
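The schemeshard log above walks each operation part through numbered states ("Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240"). The numbers themselves are internal to the schemeshard and are not decoded here; under that caveat, the sketch below merely reconstructs the transition chain per operation id so a reader can check at a glance that every part reached its final transition:

```python
import re
from collections import defaultdict

STATE_RE = re.compile(r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)")

def state_chains(log_text: str) -> dict:
    """Map each operation id ("txId:partId") to its ordered list of transitions."""
    chains = defaultdict(list)
    for m in STATE_RE.finditer(log_text):
        chains[m.group("op")].append((int(m.group("src")), int(m.group("dst"))))
    return dict(chains)

sample = ("INFO: Change state for txid 1:0 2 -> 3 ... Change state for txid 1:0 3 -> 128 "
          "... Change state for txid 1:0 128 -> 240")
print(state_chains(sample))  # {'1:0': [(2, 3), (3, 128), (128, 240)]}
```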
2024-11-21T08:53:43.767596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:43.768118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.768128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.768176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.768180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:43.768293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.768300Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:43.768313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:43.768317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.768323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:43.768329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.768334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:43.768338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:43.768351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.768356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:43.768358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:43.768638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.768653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.768658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:43.768663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:43.768668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.768681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
OperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:43.773253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T08:53:43.773280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T08:53:43.773314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.773325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:43.773729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.773736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.773773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:43.773788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.773791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:53:43.773795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:53:43.773873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.773878Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:53:43.773888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:53:43.773891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:53:43.773895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:53:43.773899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:53:43.773902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:53:43.773905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:53:43.773913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:43.773916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:53:43.773919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T08:53:43.773921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:53:43.773993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.774000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.774003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:43.774006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T08:53:43.774008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.774082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.774088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.774091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:43.774093Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:53:43.774095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:43.774100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:53:43.774730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:53:43.775154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:53:43.775218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:53:43.775225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:53:43.775309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:53:43.775333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:53:43.775339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:296:2288] TestWaitNotification: OK eventTxId 101 2024-11-21T08:53:43.775415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:43.775452Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/UniqueName" took 47us result status StatusSuccess 2024-11-21T08:53:43.775555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T08:53:43.776359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:43.776415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2024-11-21T08:53:43.776431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2024-11-21T08:53:43.776462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94, at schemeshard: 72057594046678944 2024-11-21T08:53:43.777030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 
2024-11-21T08:53:43.777061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:53:43.777117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:53:43.777123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:53:43.777185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:53:43.777202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:53:43.777207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:304:2296] TestWaitNotification: OK eventTxId 102 >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TopicService::ThereAreGapsInTheOffsetRanges [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:43.681329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:43.681356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.681361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:43.681365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:43.681371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:43.681375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:43.681384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:43.681497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:43.690586Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:43.690613Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:43.693827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:43.694502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:43.694553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:43.696428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:43.696649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:43.696760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.696846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:43.697791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.698065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.698075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.698119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:43.698125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.698131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:43.698144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.699231Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:43.714366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:43.714474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.714545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:43.714595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:43.714604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.716626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.716660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:43.716732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.716743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:43.716747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:43.716752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:43.717270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.717281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:43.717286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:43.717650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.717660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.717666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.717673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.718217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:43.718614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:43.718670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:43.718863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:43.718887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:43.718901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.718957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:43.718964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:43.718998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.719011Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:43.719422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:43.719429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:43.719475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:43.719480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:43.719576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:43.719582Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:43.719592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:43.719596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.719602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:43.719607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:43.719611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:43.719615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:43.719626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.719631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:43.719634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:43.719893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.719905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:43.719909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:43.719913Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:43.719917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:43.719930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
-21T08:53:43.933380Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:53:43.933382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:53:43.933386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:53:43.933389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:53:43.933392Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:53:43.933394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:53:43.933402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:43.933406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:53:43.933408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:53:43.933409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:53:43.933491Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.933497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.933500Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:43.933502Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:53:43.933504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:43.933593Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.933600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:43.933602Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:43.933605Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:53:43.933607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:43.933613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:53:43.934029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
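The txId 1 trace above shows the schemeshard driving the sub-operation through one internal progression: state code 2 -> 3 -> 128 -> 240 (printed as TCreateParts -> NSubDomainState::TConfigureParts -> TPropose -> TDone), a propose to coordinator 72057594046316545, a plan step from FAKE_COORDINATOR, and finally publication of the affected paths to the scheme board, with the operation treated as complete only once a TEvUpdateAck arrives for every publication. A minimal C++ sketch of that observed progression follows; the numeric codes and state names are read directly off the log messages and are assumptions about this trace, not the actual YDB definitions.

    #include <cstdint>
    #include <iostream>
    #include <stdexcept>

    // Sub-operation states as printed in the trace above:
    //   "Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240".
    enum class ESubOpState : uint32_t {
        CreateParts    = 2,   // TCreateParts ProgressState ("no shards to create" here)
        ConfigureParts = 3,   // NSubDomainState::TConfigureParts
        Propose        = 128, // TPropose: waits for the coordinator plan step
        Done           = 240, // TDone: publish to scheme board, then wait for TEvUpdateAck
    };

    // Advance exactly the way the log shows the operation moving.
    ESubOpState Next(ESubOpState s) {
        switch (s) {
            case ESubOpState::CreateParts:    return ESubOpState::ConfigureParts;
            case ESubOpState::ConfigureParts: return ESubOpState::Propose;
            case ESubOpState::Propose:        return ESubOpState::Done;
            case ESubOpState::Done:           throw std::logic_error("operation already done");
        }
        throw std::logic_error("unknown state");
    }

    int main() {
        ESubOpState s = ESubOpState::CreateParts;
        while (s != ESubOpState::Done) {
            s = Next(s);
            std::cout << static_cast<uint32_t>(s) << "\n"; // prints 3, 128, 240
        }
    }
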
2024-11-21T08:53:43.934048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:53:43.934081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:53:43.934085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:53:43.934122Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:53:43.934132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:53:43.934136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:301:2293] TestWaitNotification: OK eventTxId 101 2024-11-21T08:53:43.934179Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:43.934200Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 27us result status StatusSuccess 2024-11-21T08:53:43.934248Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T08:53:43.934683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:43.934707Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 
102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2024-11-21T08:53:43.934714Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2024-11-21T08:53:43.934729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, at schemeshard: 72057594046678944 2024-11-21T08:53:43.935028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T08:53:43.935048Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:53:43.935075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:53:43.935078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:53:43.935107Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:53:43.935116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:53:43.935119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:309:2301] TestWaitNotification: OK eventTxId 102 2024-11-21T08:53:43.935161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:43.935174Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 15us result status StatusSuccess 2024-11-21T08:53:43.935207Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::DropTableTwice >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.197347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.197374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.197379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.197384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.197390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:44.197394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.197403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.197501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.209404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.209440Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2024-11-21T08:53:44.212414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:44.213238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:44.213289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:44.214950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:44.215158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:44.215268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.215377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:44.216536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.216849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.216862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.216910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:44.216918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.216925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:44.216940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.218591Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:44.236038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.236130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.236199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:44.236279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.236287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:44.237214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:44.237228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:44.237232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:44.237645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:44.237979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.237993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.238001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.238661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.239046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:44.239095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:44.239286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.239308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.239324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.239375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:44.239381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.239414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.239428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:44.239792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.239799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.239842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.239848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:44.239940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.239946Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:44.239958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:44.239963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.239969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:44.239976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.239981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:44.239986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:44.239997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.240002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:44.240007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:44.240328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.240343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.240349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:44.240354Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:44.240359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.240372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
3:44.258119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.258124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:44.258128Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T08:53:44.258132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.258224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.258233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.258237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:44.258241Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:53:44.258245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:44.258252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T08:53:44.258663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T08:53:44.258692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T08:53:44.258784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.258802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.258809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T08:53:44.258824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:44.258837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T08:53:44.258859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.258867Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:44.258954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:44.259199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T08:53:44.259431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.259437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.259455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:44.259472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.259477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T08:53:44.259481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T08:53:44.259507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.259513Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:53:44.259524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T08:53:44.259528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:53:44.259534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T08:53:44.259539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:53:44.259544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:53:44.259548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:53:44.259561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:44.259566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T08:53:44.259570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:53:44.259574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T08:53:44.259626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.259635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 104 2024-11-21T08:53:44.259639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:44.259643Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:53:44.259647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:53:44.259693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:53:44.259698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:53:44.259705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.259730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.259737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:44.259741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:44.259745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:53:44.259749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.259756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T08:53:44.260411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:44.260441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:53:44.260449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T08:53:44.260495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T08:53:44.260504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T08:53:44.260607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:53:44.260624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:53:44.260629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:388:2380] TestWaitNotification: OK eventTxId 104 
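In the txId 104 drop trace above, the removed path [OwnerId: 72057594046678944, LocalPathId: 2] is published with Version: 18446744073709551615 before the describe below returns StatusPathDoesNotExist. That value is the maximum of an unsigned 64-bit integer, which this trace suggests serves as a "path deleted" marker in scheme-board publications; that reading is an inference from the log, not taken from the YDB sources. A short check:

    #include <cstdint>
    #include <iostream>
    #include <limits>

    int main() {
        // 18446744073709551615 == 2^64 - 1, the version published for the dropped path above.
        constexpr uint64_t kDroppedPathVersion = std::numeric_limits<uint64_t>::max();
        static_assert(kDroppedPathVersion == 18446744073709551615ULL,
                      "matches the Version value printed in the trace");
        std::cout << kDroppedPathVersion << "\n";
        return 0;
    }
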
2024-11-21T08:53:44.260700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.260721Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 30us result status StatusPathDoesNotExist 2024-11-21T08:53:44.260750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2024-11-21T08:53:40.023908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.024509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.024549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003113/r3tmp/tmpXQQx8l/pdisk_1.dat 2024-11-21T08:53:40.139264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.158966Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:40.159915Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T08:53:40.201746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:40.201776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:40.212283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:40.316251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.330776Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:40.330833Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.338560Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.338602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.338758Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:40.338770Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:40.338777Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:40.338819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.342277Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:40.342344Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.342370Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:40.342375Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.342379Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:40.342385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.342623Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:40.342653Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:40.342678Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:40.342690Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.342700Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.342711Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:40.342718Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.342769Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.342845Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:40.342869Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:40.343341Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.353679Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.353725Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.532897Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:53:40.533743Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:40.533768Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.533896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.533905Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.533915Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:40.533979Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.534009Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.534165Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.534184Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:40.534557Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:40.534675Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.535024Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:40.535037Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T08:53:40.535161Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:40.535168Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:40.535177Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.535407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.535419Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.535424Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:40.535440Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:40.535450Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:40.535459Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.536066Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.537321Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:40.537370Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:40.537377Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:40.539128Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.539169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:40.539181Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:40.539185Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:40.560272Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.843817Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:40.843847Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.843899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.843909Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.843921Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:40.843983Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
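Every entry in these test-command dumps shares one shape: an ISO-8601 timestamp ending in Z, "node <id>", ":<COMPONENT> <SEVERITY>:", then the message, with several entries often packed onto a single physical line. When triaging a run it is usually enough to pull out the WARN and ERROR entries. A small filtering sketch follows; the regular expression is inferred from the lines above and is an assumption about the format rather than an official log grammar, and entries split across wrapped lines are simply skipped.

    #include <iostream>
    #include <regex>
    #include <string>

    int main() {
        // Matches e.g.:
        //   2024-11-21T08:53:40.158966Z node 1 :IMPORT WARN: Table profiles were not loaded
        // Groups: 1 timestamp, 2 node id, 3 component, 4 severity, 5 message (lazy,
        // stopping at the next timestamp so that packed lines yield several entries).
        const std::regex kEntry(
            R"((\d{4}-\d{2}-\d{2}T[0-9:.]+Z) node (\d+) :(\S+) (\w+): (.*?)(?=\d{4}-\d{2}-\d{2}T[0-9:.]+Z node \d+ :|$))");

        std::string line;
        while (std::getline(std::cin, line)) {
            for (auto it = std::sregex_iterator(line.begin(), line.end(), kEntry);
                 it != std::sregex_iterator(); ++it) {
                const std::smatch& m = *it;
                const std::string severity = m[4];
                if (severity == "WARN" || severity == "ERROR") {
                    std::cout << m[1] << " node " << m[2] << " :" << m[3] << ' '
                              << severity << ": " << m[5] << "\n";
                }
            }
        }
        return 0;
    }
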
2024-11-21T08:53:40.844018Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.844048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.844253Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.855461Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:40.855486Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:40.855501Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.855508Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.855520Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.855549Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... dinators count is 1 buckets per mediator 2 2024-11-21T08:53:43.560781Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:43.561085Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:43.561095Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:43.561228Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:43.561235Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:43.561243Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:43.561382Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:43.561391Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:43.561396Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:43.561411Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:43.561437Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:43.561448Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:43.561616Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:43.561902Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:43.561991Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:43.561998Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:43.563410Z node 3 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:43.563445Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:43.563456Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:43.563460Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:43.584452Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:43.866874Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:43.866912Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:43.866978Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:43.866991Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:43.867002Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:43.867068Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T08:53:43.867111Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:43.867144Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:43.867355Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:43.878924Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:43.878956Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:43.878974Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:43.878983Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:43.878997Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:43.879033Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:43.879053Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T08:53:43.879073Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:43.879710Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T08:53:43.879728Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:43.881518Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:855:2684], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:43.881544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:866:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:43.881556Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:43.882626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:43.883909Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.090652Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.091255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:869:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:44.143524Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytsa99q0b5tv3yw50c7gq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTc0YmUxNmQtZTVkN2VkOTktYmQwOWQ4NGItYzAzZGFiMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.143681Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:953:2748], serverId# [3:954:2749], sessionId# [0:0:0] 2024-11-21T08:53:44.143733Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:44.144015Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179224143985 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:44.154482Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:44.154545Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:44.154554Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.164818Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytsjv0spnm5hf6nfae0tv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDlkM2M0OTgtODllZjZiMDctNWVmNmQ4ZDItNzc3Y2I1OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.164988Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:44.165214Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179224165180 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:44.175744Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:44.175828Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:44.175840Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.189195Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ytskh8t2d3h4zs1nt7vth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmI1MmM5MTItNzRhZTJlNDAtMTc3ZGUzZS03MDUzYjVlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:53:44.189344Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:44.189649Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732179224189624 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:44.200103Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:44.200189Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:44.200201Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.200943Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:992:2780], serverId# [3:993:2781], sessionId# [0:0:0] 2024-11-21T08:53:44.202040Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:994:2782], serverId# [3:995:2783], sessionId# [0:0:0] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.229283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.229313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.229319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.229323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.229330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:44.229333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.229342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.229444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.241042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.241065Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2024-11-21T08:53:44.243890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:44.244681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:44.244728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:44.246117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:44.246309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:44.246422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.246506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:44.247466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.247720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.247729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.247769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:44.247777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.247783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:44.247796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.249216Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:44.262185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.262275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.262333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:44.262387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.262395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:44.263167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:44.263180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:44.263184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:44.263616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:44.263971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.263983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.263988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.264454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.264836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:44.264883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:44.265030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.265049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.265060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.265099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:44.265104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.265128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.265138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:44.265600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.265611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.265650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.265655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:44.265733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.265739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:44.265748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:44.265751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.265755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:44.265758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.265762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:44.265764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:44.265773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.265777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:44.265779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:44.266028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.266038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.266041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:44.266045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:44.266049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.266058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.275936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:44.275940Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:53:44.275945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.276000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.276008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.276011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:44.276014Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:53:44.276018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:44.276025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T08:53:44.277319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:53:44.277405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T08:53:44.277565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.277590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.277599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2024-11-21T08:53:44.277643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T08:53:44.277685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.277698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:44.277850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:53:44.277875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:53:44.278337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.278345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.278378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:44.278391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:53:44.278403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.278408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:53:44.278413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:53:44.278416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:53:44.278491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.278498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:53:44.278512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:53:44.278515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:53:44.278521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T08:53:44.278526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:53:44.278531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:53:44.278535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:53:44.278550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:44.278555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T08:53:44.278559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:53:44.278561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:53:44.278691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.278702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.278706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:44.278710Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:53:44.278714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.278994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.279008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:53:44.279012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:53:44.279016Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:53:44.279021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:44.279033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T08:53:44.279475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:53:44.279701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:53:44.279752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:53:44.279758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:53:44.279825Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:53:44.279842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:53:44.279847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:329:2321] TestWaitNotification: OK eventTxId 102 2024-11-21T08:53:44.279907Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.279937Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 42us result status StatusSuccess 2024-11-21T08:53:44.279994Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.311782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.311808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.311813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.311819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.311825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:44.311829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.311838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.311918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.322762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.322788Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:44.325973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:44.326785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:44.326832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:53:44.328163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:44.328531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:44.328641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.328726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:44.329621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.329887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.329899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.329936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:44.329944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.329950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:44.329965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.331227Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:44.347816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.347908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.347978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:44.348024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.348032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.348833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.348859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:44.348918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.348929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:44.348933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:44.348938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:44.349292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.349303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.349308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:44.349612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.349621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.349627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.349633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.350193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.350600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:44.350652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:44.350820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.350843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.350861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.350912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:44.350918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.350945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.350957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:44.351360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.351368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.351406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.351411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:44.351487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.351493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:44.351505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:44.351509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.351514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:44.351519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.351523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:44.351527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:44.351538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.351543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:44.351547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:44.351824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.351839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.351844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:44.351848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:44.351852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.351867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6: External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2024-11-21T08:53:44.361035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.361076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2024-11-21T08:53:44.361086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T08:53:44.361121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2024-11-21T08:53:44.361588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.361612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2024-11-21T08:53:44.362149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.362193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, 
feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2024-11-21T08:53:44.362205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T08:53:44.362228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2024-11-21T08:53:44.362656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.362678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2024-11-21T08:53:44.363190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.363228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2024-11-21T08:53:44.363241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T08:53:44.363262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2024-11-21T08:53:44.363651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.363671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource 
TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2024-11-21T08:53:44.364106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.364133Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2024-11-21T08:53:44.364142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2024-11-21T08:53:44.364155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, at schemeshard: 72057594046678944 2024-11-21T08:53:44.364571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.364596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.306389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.306411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.306414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.306418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.306422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T08:53:44.306424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.306429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.306495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.315333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.315354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:44.318035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:44.318785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:44.318828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:44.320256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:44.320480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:44.320581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.320673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:44.321672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.321912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.321920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.321952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:44.321957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.321962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:44.321975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.323059Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:44.336015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.336092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.336146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:44.336186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.336191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.336917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.336939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:44.336985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.336991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:44.336994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:44.336998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:44.337318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.337329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.337334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:44.337743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.337757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.337762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.337788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.338245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.338676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:44.338728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:44.338888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.338909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.338919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:53:44.338965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:44.338972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.339001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.339010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:44.339386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.339392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.339429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.339432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:44.339515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.339521Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:44.339530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:44.339533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.339536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:44.339540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.339543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:44.339546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:44.339554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.339558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:44.339560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:44.339788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.339798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.339801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:44.339804Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:44.339807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.339815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 46678944 2024-11-21T08:53:44.443553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2024-11-21T08:53:44.443567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.443646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:44.443657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:44.444625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2024-11-21T08:53:44.444664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2024-11-21T08:53:44.444712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.444716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.444773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:44.444792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.444799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2024-11-21T08:53:44.444805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2024-11-21T08:53:44.444816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.444822Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.444832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2024-11-21T08:53:44.444858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.445069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.445080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, 
cookie: 128 2024-11-21T08:53:44.445084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2024-11-21T08:53:44.445089Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:53:44.445094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:53:44.445246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.445256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.445259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2024-11-21T08:53:44.445263Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:53:44.445267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:44.445276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2024-11-21T08:53:44.445937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2024-11-21T08:53:44.445981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:53:44.446066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2024-11-21T08:53:44.446361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2024-11-21T08:53:44.446411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.446449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.446457Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T08:53:44.446484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2024-11-21T08:53:44.446508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:44.446513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 4] was 1 FAKE_COORDINATOR: Erasing txId 128 2024-11-21T08:53:44.447004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.447014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.447046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:44.447060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.447065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2024-11-21T08:53:44.447070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2024-11-21T08:53:44.447126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.447133Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2024-11-21T08:53:44.447144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2024-11-21T08:53:44.447149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2024-11-21T08:53:44.447154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2024-11-21T08:53:44.447160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2024-11-21T08:53:44.447164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2024-11-21T08:53:44.447166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2024-11-21T08:53:44.447177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:44.447181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2024-11-21T08:53:44.447184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:53:44.447199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2024-11-21T08:53:44.447276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.447282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.447288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2024-11-21T08:53:44.447291Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:53:44.447293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:53:44.447376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.447383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T08:53:44.447386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2024-11-21T08:53:44.447388Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:53:44.447390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:53:44.447396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2024-11-21T08:53:44.447919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2024-11-21T08:53:44.447938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 >> TopicService::OnePartitionAndNoGapsInTheOffsets >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2024-11-21T08:53:39.927249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:39.927678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:39.927699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003111/r3tmp/tmp7m3SEs/pdisk_1.dat 2024-11-21T08:53:40.029275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.047999Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:40.090741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:40.090783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:40.101374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:40.205472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.219912Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:53:40.219968Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.226377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.226452Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.226616Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:40.226634Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:40.226641Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:40.226687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.229694Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:40.229750Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.229771Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:53:40.229776Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.229779Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:40.229783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.230080Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:53:40.230106Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.230920Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:40.230942Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:53:40.231023Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:53:40.231057Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.231062Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.231068Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:40.231072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.231106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.231154Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:40.231169Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:40.231354Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.231369Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.231447Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:53:40.231452Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:53:40.231456Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:53:40.231482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.231488Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:53:40.231495Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.231504Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:53:40.231507Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:40.231509Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:53:40.231512Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.231595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:53:40.231600Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:53:40.231612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.231614Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.231617Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:53:40.231620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:40.231650Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:53:40.231662Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:40.231682Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:40.231688Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:53:40.231770Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.231776Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:40.242069Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.242108Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.242236Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:40.242246Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.416084Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:53:40.416128Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:53:40.416925Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:40.416950Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.417030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.417039Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.417050Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:40.417129Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.417166Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.417286Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:40.417292Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.417310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.417323Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:40.417736Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:40.417851Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T08:53:40.418081Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.418089Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.418095Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:40.418133Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.418155Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.418218Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:40.418224Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.418262Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.418271Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... in PlanQueue unit at 72075186224037889 2024-11-21T08:53:44.183376Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:44.183410Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:44.183520Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:44.183539Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:44.183688Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:44.183789Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:44.184136Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:44.184148Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.184169Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:44.184173Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.184251Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:44.184258Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:44.184265Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:44.184311Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:44.184335Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:44.184606Z node 
4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:44.184624Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T08:53:44.184695Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:44.184761Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:44.184979Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T08:53:44.184993Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T08:53:44.185005Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:44.185431Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:44.185448Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:44.185455Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T08:53:44.185475Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.185486Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.185501Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.185661Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:44.185671Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:44.185678Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:44.185875Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:44.185885Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.186056Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:44.186065Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:44.186069Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:44.186080Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.186088Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.186098Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.186377Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.186391Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain 
[OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.186768Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T08:53:44.186775Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:44.186807Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.186838Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.186960Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:44.186967Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:44.188260Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:748:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.188279Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.188286Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.188998Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:44.189649Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.189663Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.376302Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.376360Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.376822Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:762:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:44.442757Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ytskw5fzc14bejykagh0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2Y1Y2Q5NTAtY2JhM2Y2ZjMtZmMyYmYwNTAtNGQ2NTAyNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.443801Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:916:2716], serverId# [4:917:2717], sessionId# [0:0:0] 2024-11-21T08:53:44.443948Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:53:44.444502Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytskw5fzc14bejykagh0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2Y1Y2Q5NTAtY2JhM2Y2ZjMtZmMyYmYwNTAtNGQ2NTAyNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.445067Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytskw5fzc14bejykagh0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2Y1Y2Q5NTAtY2JhM2Y2ZjMtZmMyYmYwNTAtNGQ2NTAyNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.445204Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:44.445526Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179224445498 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.455981Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:44.456036Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T08:53:44.456065Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:44.456076Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.456478Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2024-11-21T08:53:44.456490Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.457650Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:933:2726], serverId# [4:934:2727], sessionId# [0:0:0] 2024-11-21T08:53:44.458461Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:935:2728], serverId# [4:936:2729], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2024-11-21T08:53:40.140020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.140509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.140533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00310a/r3tmp/tmpdocRZ1/pdisk_1.dat 2024-11-21T08:53:40.241786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.257717Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:40.300045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:40.300103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:40.310707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:40.414823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.430454Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:53:40.430518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.439250Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.439308Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.439483Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:40.439500Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:40.439507Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:40.439550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.443471Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:40.443540Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.443566Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:53:40.443572Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.443578Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:40.443584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.443998Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:53:40.444035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.445394Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:40.445446Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:53:40.445566Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:53:40.445619Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.445627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.445637Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:40.445644Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.445685Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.445747Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:40.445767Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:40.446014Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.446038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.446172Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:53:40.446180Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:53:40.446187Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:53:40.446225Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.446232Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:53:40.446247Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.446260Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:53:40.446265Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:40.446270Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:53:40.446275Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.446401Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:53:40.446409Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:53:40.446428Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.446434Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.446439Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:53:40.446444Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:40.446497Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:53:40.446517Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:40.446559Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:40.446569Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:53:40.446692Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.446702Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:40.457071Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.457126Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.457322Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:40.457332Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.631872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:53:40.631942Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:53:40.632781Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:40.632802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.632877Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.632887Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.632898Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:40.632980Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.633019Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.633146Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:40.633153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.633170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.633183Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:40.633606Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:40.633719Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T08:53:40.633916Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.633922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.633927Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:40.633962Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.633981Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.634039Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:40.634045Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.634079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.634087Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... ate, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:44.152702Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:44.152707Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:44.152711Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:44.152723Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.152730Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.152739Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.152948Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:44.152957Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T08:53:44.152961Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T08:53:44.152971Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.152977Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.152986Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:53:44.153719Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.153755Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T08:53:44.153762Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:44.153894Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 
72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.153968Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.154008Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:44.154014Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:44.154094Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2024-11-21T08:53:44.154100Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2024-11-21T08:53:44.155733Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.155758Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:808:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.155767Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.156638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:44.157540Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.157559Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.157568Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:44.351349Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.351389Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.351404Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:44.351942Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:44.423974Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ytsjv13cq6zer52f12b3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDg0YzkyZDUtZTdkYzk5LWM4MzVhZWUwLWI1ZTczYWJi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.425146Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1006:2770], serverId# [4:1007:2771], sessionId# [0:0:0] 2024-11-21T08:53:44.425308Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:53:44.425906Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytsjv13cq6zer52f12b3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDg0YzkyZDUtZTdkYzk5LWM4MzVhZWUwLWI1ZTczYWJi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.426573Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytsjv13cq6zer52f12b3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDg0YzkyZDUtZTdkYzk5LWM4MzVhZWUwLWI1ZTczYWJi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.426713Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:44.427079Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179224427036 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.427120Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179224427036 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.437545Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:44.437598Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T08:53:44.437635Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:44.437646Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.438049Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2024-11-21T08:53:44.438063Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.449661Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jd6ytsvq1j5qtq1fkn2f8zwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OGQ5MTlmM2EtMzU0NDg1MDktYjJjMzYwOTEtNDY4ODRkYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.449814Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:44.450139Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732179224450105 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.450176Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1732179224450105 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.450187Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1732179224450105 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.450198Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1732179224450105 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.460593Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:44.460676Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:44.460688Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.461787Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1059:2813], serverId# [4:1060:2814], sessionId# [0:0:0] 2024-11-21T08:53:44.462690Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1061:2815], serverId# [4:1062:2816], sessionId# [0:0:0] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> 
YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> ReadOnlyVDisk::TestSync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2024-11-21T08:53:40.259877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.260332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.260352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003103/r3tmp/tmpc2K1bu/pdisk_1.dat 2024-11-21T08:53:40.366845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.385483Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:40.427876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:40.427906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:40.438505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:40.542616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.558911Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:53:40.558991Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.565312Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.565383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.565574Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:40.565590Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:40.565595Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:40.565630Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.568762Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:40.568824Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.568846Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:53:40.568852Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.568858Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:40.568863Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.569308Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:53:40.569385Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.570897Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:40.570936Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:53:40.571073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:53:40.571135Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.571145Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.571154Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:40.571161Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.571205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.571270Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:40.571288Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:40.571481Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.571497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.571602Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:53:40.571609Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:53:40.571613Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:53:40.571640Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.571646Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:53:40.571658Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.571666Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:53:40.571669Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:40.571674Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:53:40.571677Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.571782Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:53:40.571791Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:53:40.571808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.571813Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.571819Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:53:40.571824Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:40.571877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:53:40.571894Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:40.571926Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:40.571936Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:53:40.572064Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.572076Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:40.582377Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.582442Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.582629Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:40.582641Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:40.757563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:53:40.757630Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:53:40.758407Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:40.758423Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.758482Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.758491Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.758502Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:40.758585Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.758618Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.758743Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:40.758748Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.758765Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:40.758780Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:40.759173Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:40.759275Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
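Aside: the datashard entries above all share one shape — an ISO timestamp, "node N", a component tag, a severity, and a free-form message — so the scheme-transaction lifecycle of a single tablet (TTxProposeTransactionBase::Execute/Complete, the planned step, TTxProgressTransaction, "Trying to CREATE TABLE") can be pulled out of the raw text mechanically. Below is a minimal illustrative Python sketch for doing that; the regular expression, the severity list, and the key-phrase list are assumptions derived only from the message strings visible in this log, not part of ya or the YDB test framework, and the file name in the usage comment is hypothetical.

    import re

    # Entry shape seen in this log:
    #   "<ISO timestamp>Z node <N> :<COMPONENT> <SEVERITY>: <message>"
    # Severities listed are only the ones observed here (assumption).
    ENTRY_RE = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (?P<node>\d+) "
        r":(?P<component>\w+) (?P<severity>DEBUG|INFO|NOTICE|WARN|ERROR): (?P<msg>.*?)"
        r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :|\Z)",
        re.S,
    )

    # Key phrases taken verbatim from the messages above (assumption: this list
    # is only what this particular log happens to show, not an exhaustive set).
    LIFECYCLE_KEYS = (
        "TTxProposeTransactionBase::Execute",
        "TTxProposeTransactionBase::Complete",
        "Planned transaction",
        "TTxProgressTransaction::Execute",
        "TTxProgressTransaction::Complete",
        "Trying to CREATE TABLE",
    )

    def lifecycle(log_text: str, tablet: str):
        """Return (timestamp, message) pairs tracing one tablet's scheme-tx lifecycle."""
        events = []
        for m in ENTRY_RE.finditer(log_text):
            msg = " ".join(m.group("msg").split())  # collapse wrapped whitespace
            if tablet in msg and any(key in msg for key in LIFECYCLE_KEYS):
                events.append((m.group("ts"), msg))
        return events

    # Example (file name is hypothetical):
    # for ts, msg in lifecycle(open("test_output.log").read(), "72075186224037889"):
    #     print(ts, msg)
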
2024-11-21T08:53:40.759462Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.759468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:40.759474Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:40.759510Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:40.759529Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:40.759588Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:40.759593Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:40.759628Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.759635Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... n request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:44.350514Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:44.350743Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:44.350750Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.350762Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:44.350765Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.350793Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:44.350797Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:44.350801Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:44.350827Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:44.350839Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:44.350994Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:44.351002Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T08:53:44.351046Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:44.351085Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:44.351265Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 
72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T08:53:44.351274Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T08:53:44.351281Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:44.351522Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:44.351533Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:44.351537Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T08:53:44.351548Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.351555Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.351562Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.351635Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:44.351640Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:44.351645Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:44.351731Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:44.351735Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.351818Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:44.351822Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:44.351824Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:44.351831Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:44.351835Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:44.351839Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:44.351991Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.352002Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.352468Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T08:53:44.352479Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:44.352508Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.352527Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:44.352594Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:44.352598Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:44.353705Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:748:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.353727Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.353735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:44.354423Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:44.355069Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.355084Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.550701Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:44.550760Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:44.551247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:762:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:44.595887Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ytss13pxf3d9fg2ht8qc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2M5ZDQzMWMtNWIzZDY0Ny04N2MyZjI0Yi1kNjJiZTZkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:44.596066Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:904:2708], serverId# [4:905:2709], sessionId# [0:0:0] 2024-11-21T08:53:44.596129Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:44.596479Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179224596445 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.606945Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:44.607023Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:44.607034Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.619595Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytt10ccx232ymv9447r2y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTlmODA5MTItMmVmYzc5NzYtNWI4MWEwOGQtNTBkZTg3MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:53:44.619762Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:44.620088Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179224620056 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.620124Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732179224620056 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:44.633069Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:44.633149Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:44.633162Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:44.634322Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:939:2739], serverId# [4:940:2740], sessionId# [0:0:0] 2024-11-21T08:53:44.635369Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:941:2741], serverId# [4:942:2742], sessionId# [0:0:0] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> ReadOnlyVDisk::TestDiscover >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.687402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.687428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.687433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.687438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.687445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2024-11-21T08:53:44.687448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.687457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.687537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.698457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.698481Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:44.701441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:44.702290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:44.702339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:44.703718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:44.703869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:44.703963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.704050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:44.704837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.705122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.705133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.705176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:44.705183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.705189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:44.705203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.706405Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:44.719618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:44.719718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.719784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:44.719828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:44.719836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.720837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.720868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:44.720935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.720947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:44.720951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:44.720956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:44.721391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.721404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:44.721408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:44.721776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.721785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.721791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.721798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.722317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:44.722742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:44.722799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:44.722977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:44.722994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.723007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.723057Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:44.723062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:44.723107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.723119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:44.723503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:44.723510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:44.723547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:44.723550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:44.723634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:44.723639Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:44.723648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:44.723651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.723655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:44.723658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:44.723661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:44.723664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:44.723672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:44.723676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:44.723678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:44.723943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.723954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:44.723957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:44.723960Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:44.723962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:44.723973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 5 TestWaitNotification wait txId: 126 2024-11-21T08:53:44.985234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2024-11-21T08:53:44.985237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2024-11-21T08:53:44.985334Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985358Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2024-11-21T08:53:44.985376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:336:2328] 2024-11-21T08:53:44.985398Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2024-11-21T08:53:44.985408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:336:2328] 2024-11-21T08:53:44.985449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2024-11-21T08:53:44.985453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:336:2328] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2024-11-21T08:53:44.985529Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985577Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 63us result status StatusSuccess 2024-11-21T08:53:44.985700Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985817Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985841Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 27us result status StatusSuccess 2024-11-21T08:53:44.985876Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985950Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.985966Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 17us result status StatusSuccess 2024-11-21T08:53:44.986015Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true 
CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.986076Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.986090Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 16us result status StatusSuccess 2024-11-21T08:53:44.986124Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:44.986169Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:44.986183Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 16us result status StatusSuccess 
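Aside: each TTxDescribeScheme pair above reports its own latency and status inline ("took 63us result status StatusSuccess"), so the describe calls in a run can be summarized directly from the text. A small illustrative sketch follows, assuming only the plain-text format visible in this log; the helper and the file name in the usage comment are hypothetical, not part of any YDB tooling.

    import re
    from collections import Counter

    # Matches the inline result line visible above:
    #   Tablet <id> describe path "<path>" took <N>us result status <Status>
    DESCRIBE_RE = re.compile(
        r'Tablet (\d+) describe path "([^"]+)" took (\d+)us result status (\w+)'
    )

    def summarize_describes(log_text: str):
        """Count describe statuses and list the slowest describe calls in the log."""
        statuses = Counter()
        timings = []  # (microseconds, path)
        for _tablet, path, us, status in DESCRIBE_RE.findall(log_text):
            statuses[status] += 1
            timings.append((int(us), path))
        timings.sort(reverse=True)
        return statuses, timings[:5]

    # Example (file name is hypothetical):
    # statuses, slowest = summarize_describes(open("test_output.log").read())
    # print(statuses, slowest)
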
2024-11-21T08:53:44.986212Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:45.242784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:45.242812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:45.242818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:45.242823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:45.242829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:45.242834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:45.242843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:45.242948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:45.254984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:45.255012Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:45.258538Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:45.259406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:45.259463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:45.261471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:45.261714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:45.261834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.261947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:45.263221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.263541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:45.263555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.263608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:45.263617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:45.263625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:45.263645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.265341Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:45.283955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:45.284053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.284116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:45.284168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:45.284175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.284999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:45.285073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:53:45.285082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:45.285087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:45.285091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:45.285530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:45.285914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.285935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.286517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:45.287120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:45.287192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:45.287366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.287404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:45.287419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.287476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:45.287482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.287514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:45.287530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T08:53:45.288038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:45.288044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:45.288079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.288083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:45.288157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.288163Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:45.288173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:45.288176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.288180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:45.288184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.288187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:45.288190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:45.288200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:45.288232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:45.288237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:45.288512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:45.288524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:45.288527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:45.288531Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:45.288534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:45.288545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:53:45.289068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:53:45.289149Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T08:53:45.289372Z node 1 :TX_PROXY 
DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T08:53:45.290308Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T08:53:45.290883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:45.290932Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2024-11-21T08:53:45.290945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T08:53:45.290951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T08:53:45.291102Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:53:45.291930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:45.291968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2024-11-21T08:53:45.292124Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:53:45.292177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:53:45.292185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:53:45.292270Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:53:45.292289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:53:45.292294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 101 2024-11-21T08:53:45.292359Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:45.292388Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 39us result status StatusPathDoesNotExist 2024-11-21T08:53:45.292442Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TPersQueueTest::SetupWriteSession [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:44.987497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:44.987527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.987533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:44.987538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:44.987545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:44.987549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:44.987558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:44.987651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:44.999309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:44.999335Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:45.002898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:45.003785Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:45.003840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:45.005756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:45.006008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:45.006129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.006250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:45.007455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.007816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:45.007830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.007883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:45.007892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:45.007900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:45.007922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.009510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:45.027931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:45.028033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.028105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:45.028156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:45.028165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.029151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.029184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:45.029253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.029266Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:45.029271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:45.029276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:45.029731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.029744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:45.029749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:45.030128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.030140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.030146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.030154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.030790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:45.031171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:45.031234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:45.031452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:45.031479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:45.031494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.031552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:45.031560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:45.031595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:45.031610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:45.031990Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:45.032000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:45.032050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:45.032057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:45.032165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:45.032173Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:45.032187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:45.032192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.032198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:45.032224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:45.032230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:45.032235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:45.032248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:45.032254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:45.032258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:45.032743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:45.032765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:45.032772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:45.032777Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:45.032783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:45.032797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
89Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:53:45.283446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:53:45.283452Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:53:45.283456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:53:45.283465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:45.283469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:53:45.283474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2024-11-21T08:53:45.283478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:53:45.283481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T08:53:45.283484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:53:45.283621Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:45.283629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:45.283633Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:45.283637Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:53:45.283641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:53:45.283740Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:45.283747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:45.283750Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:45.283754Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:53:45.283757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:45.283957Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T08:53:45.283967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:53:45.283971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:53:45.283975Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:53:45.283979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:53:45.283987Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:53:45.284276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:53:45.284508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:53:45.284805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:53:45.284874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:53:45.284880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:53:45.284952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:53:45.284971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:53:45.284976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:331:2323] TestWaitNotification: OK eventTxId 101 2024-11-21T08:53:45.285042Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:45.285071Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 41us result status StatusSuccess 2024-11-21T08:53:45.285155Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 
0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T08:53:45.285890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:45.285923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2024-11-21T08:53:45.285936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2024-11-21T08:53:45.286426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:45.286452Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:53:45.286504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:53:45.286509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:53:45.286566Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:53:45.286580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:53:45.286584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:339:2331] TestWaitNotification: OK eventTxId 103 2024-11-21T08:53:45.286655Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:45.286680Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 32us result status StatusSuccess 2024-11-21T08:53:45.286745Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: 
"ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2024-11-21T08:53:40.852189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.852721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.852743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030f4/r3tmp/tmpDmaVYQ/pdisk_1.dat 2024-11-21T08:53:40.958130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.974574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:41.017315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:41.017357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:41.027832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:41.136517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:41.153399Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:53:41.153497Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:41.161724Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:41.161789Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:41.161941Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:41.161955Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:41.161962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:41.162008Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:41.165562Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:41.165642Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:41.165672Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:53:41.165678Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:41.165682Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:41.165687Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.166159Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:53:41.166201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:41.167500Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:41.167534Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:53:41.167650Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:53:41.167704Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.167710Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.167719Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:41.167724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.167765Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:41.167816Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:41.167832Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:41.168031Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:41.168052Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:41.168164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:53:41.168171Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:53:41.168177Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:53:41.168230Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:41.168238Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:53:41.168253Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:41.168263Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:53:41.168268Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:41.168272Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:53:41.168279Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.168410Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:53:41.168421Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:53:41.168438Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.168442Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.168447Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:53:41.168451Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:41.168509Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:53:41.168531Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:41.168569Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:41.168581Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:53:41.168703Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:41.168714Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:41.179070Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:41.179116Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.179300Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:41.179311Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.353846Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:53:41.353894Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:53:41.354434Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:41.354442Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.354478Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.354484Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.354493Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:41.354545Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.354573Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.354659Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.354662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.354674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.354683Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:41.354941Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:41.355008Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T08:53:41.355130Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.355133Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.355137Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.355160Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.355172Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.355211Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:41.355214Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.355235Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.355240Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... 45.150720Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T08:53:45.150780Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:45.150827Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:45.151023Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T08:53:45.151031Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2024-11-21T08:53:45.151069Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:45.151111Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:45.151628Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T08:53:45.151638Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T08:53:45.151646Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:45.151917Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:45.151926Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:45.151931Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T08:53:45.151951Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.151961Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:45.151970Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2024-11-21T08:53:45.152005Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:45.152009Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:45.152014Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.152163Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:45.152171Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.152179Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 1000 txid# 281474976715657} 2024-11-21T08:53:45.152183Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2024-11-21T08:53:45.152188Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:45.152403Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2024-11-21T08:53:45.152409Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:53:45.152697Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.152709Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:45.152717Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:45.152732Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.152736Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:45.152740Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:45.152752Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.152756Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:45.152762Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.152917Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T08:53:45.152922Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T08:53:45.152924Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T08:53:45.152930Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.152934Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:45.152940Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T08:53:45.153440Z node 4 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:45.153471Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T08:53:45.153479Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T08:53:45.153608Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:45.153675Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:45.153705Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:45.153708Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:45.153813Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2024-11-21T08:53:45.153817Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2024-11-21T08:53:45.155212Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.155233Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:808:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.155239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.155934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:45.156670Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.156686Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:45.156694Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:45.345171Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.345221Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:45.345240Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T08:53:45.345875Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:45.394218Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yttj38egw5xddq12x9m89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGJiMTNlNGItOWQ3ZDhiNzMtMWJiMTg2NjAtYzM1NzI4YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:45.394448Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:994:2762], serverId# [4:995:2763], sessionId# [0:0:0] 2024-11-21T08:53:45.394542Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:45.394998Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179225394948 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:45.395046Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179225394948 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T08:53:45.405635Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:45.405728Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T08:53:45.405741Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:45.407090Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1001:2768], serverId# [4:1002:2769], sessionId# [0:0:0] 2024-11-21T08:53:45.408256Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1003:2770], serverId# [4:1004:2771], sessionId# [0:0:0] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> ReadOnlyVDisk::TestDiscover [GOOD] >> ReadOnlyVDisk::TestGarbageCollect >> ReadOnlyVDisk::TestStorageLoad |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2024-11-21T08:53:40.835701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] 
ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.836319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.836356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030f9/r3tmp/tmpUvkWAW/pdisk_1.dat 2024-11-21T08:53:40.949872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.965785Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:41.008043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:41.008075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:41.018650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:41.123177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:41.138857Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T08:53:41.138927Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:41.146891Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:41.146947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:41.147117Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:41.147131Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:41.147138Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:41.147182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:41.150496Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:41.150550Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:41.150572Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T08:53:41.150577Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:41.150582Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:41.150587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.150902Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T08:53:41.150931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:41.151948Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:41.151973Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T08:53:41.152075Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T08:53:41.152115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.152121Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.152131Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:41.152137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.152179Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:41.152253Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:41.152271Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:41.152484Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:41.152503Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:41.152631Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T08:53:41.152638Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T08:53:41.152645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T08:53:41.152680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:41.152686Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T08:53:41.152697Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:41.152708Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T08:53:41.152712Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T08:53:41.152715Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T08:53:41.152719Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.152829Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T08:53:41.152836Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T08:53:41.152850Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.152854Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.152858Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T08:53:41.152862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:53:41.152902Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T08:53:41.152917Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T08:53:41.152941Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:41.152949Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T08:53:41.153047Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:41.153055Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T08:53:41.163323Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:41.163380Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.163533Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T08:53:41.163540Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.337866Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T08:53:41.337939Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T08:53:41.338751Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T08:53:41.338777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.338857Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.338867Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.338878Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T08:53:41.338966Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.339007Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.339151Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.339157Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.339174Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:53:41.339190Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:41.339622Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:41.339739Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T08:53:41.339937Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.339944Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.339951Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.339987Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.340007Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.340071Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T08:53:41.340077Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:53:41.340114Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.340122Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... 98 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:45.029227Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.029327Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:45.029337Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:45.029348Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:45.029466Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:45.029506Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:45.029698Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:45.029717Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:45.029845Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:45.029949Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:45.030371Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:45.030382Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.030512Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:45.030519Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:45.030529Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.030742Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.030752Z node 4 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 72075186224037888 2024-11-21T08:53:45.030759Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:45.030778Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.030789Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:45.030801Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.031016Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.031374Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:45.031400Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:45.031407Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:45.033272Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:45.033318Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:45.033334Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:45.033339Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:45.058885Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:45.346989Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:45.347015Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.347050Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:45.347057Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:45.347064Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:45.347109Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T08:53:45.347135Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:45.347165Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:45.347294Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:45.358490Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:45.358523Z node 4 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:45.358541Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.358548Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.358560Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.358600Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.358616Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T08:53:45.358631Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.359249Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T08:53:45.359264Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:45.360683Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.360704Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.360714Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:45.361408Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:45.362324Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.572697Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.573330Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:45.635802Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yttrgdywhmr46fydt7r8a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWJhYTRjNmQtNjcxN2M1NzktN2Q2ODdmMzktMzc2YjdkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:45.636024Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:954:2749], serverId# [4:955:2750], sessionId# [0:0:0] 2024-11-21T08:53:45.636109Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:45.645798Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179225645746 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:45.656590Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:45.656662Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:45.656672Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.682574Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytv1sacd4mqpkkpebddpm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzI0MTcwMS0xODk5YWUzLWU4ZWYyMmQxLTczYTJjYmM5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:53:45.682775Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:45.683112Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179225683078 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:45.696636Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:45.696710Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:45.696721Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.697411Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:978:2769], serverId# [4:979:2770], sessionId# [0:0:0] 2024-11-21T08:53:45.698242Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:980:2771], serverId# [4:981:2772], sessionId# [0:0:0] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 13486846175099617073 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T08:53:45.639190Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5286:695] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:45.680091Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5286:695] 2024-11-21T08:53:45.680523Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5293:702] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting 
VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2024-11-21T08:53:45.716827Z 3 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5300:709] 2024-11-21T08:53:45.717138Z 1 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5286:695] 2024-11-21T08:53:45.717260Z 2 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5293:702] 2024-11-21T08:53:45.717296Z 1 00h02m30.160512s :BS_PROXY_PUT ERROR: [32536f831b7742ac] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} 
SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] Test command err: 2024-11-21T08:46:45.503484Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:46:45.513319Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 15616004283304079329 MagicNextLogChunkReference: 15228063338745007941 MagicLogChunk: 
11780369395351329522 MagicDataChunk: 10012051947308659440 MagicSysLogChunk: 7875211918477792512 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732178805428659 (2024-11-21T08:46:45.428659Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:46:45.524633Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T08:46:45.533376Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T08:46:45.533628Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:46:45.533808Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.542314Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T08:46:45.544313Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T08:46:45.551891Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T08:46:45.568333Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 3 PDiskId# 1 2024-11-21T08:46:45.576455Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T08:46:45.584299Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2024-11-21T08:46:45.592347Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 6 PDiskId# 1 2024-11-21T08:46:45.600530Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 8 vDiskId# [5:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 7 PDiskId# 1 2024-11-21T08:46:45.608522Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 9 vDiskId# [6:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# 
[0:0:0] ownerRound# 8 PDiskId# 1 2024-11-21T08:46:45.613997Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 10 vDiskId# [7:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 9 PDiskId# 1 2024-11-21T08:46:45.620510Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 11 vDiskId# [8:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 10 PDiskId# 1 2024-11-21T08:46:45.628509Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 12 vDiskId# [9:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 11 PDiskId# 1 2024-11-21T08:46:45.633960Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 13 vDiskId# [a:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 12 PDiskId# 1 2024-11-21T08:46:45.640509Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 14 vDiskId# [b:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 13 PDiskId# 1 2024-11-21T08:46:45.648505Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 15 vDiskId# [c:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 14 PDiskId# 1 2024-11-21T08:46:45.652601Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 16 vDiskId# [d:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 15 PDiskId# 1 2024-11-21T08:46:45.660538Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 17 vDiskId# [e:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 16 PDiskId# 1 2024-11-21T08:46:45.668506Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 18 vDiskId# [f:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 17 PDiskId# 1 2024-11-21T08:46:45.672652Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 19 vDiskId# [10:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 18 PDiskId# 1 2024-11-21T08:46:45.680506Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 20 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 19 PDiskId# 1 2024-11-21T08:46:45.688497Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 21 vDiskId# [12:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 20 PDiskId# 1 2024-11-21T08:46:45.692744Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 22 vDiskId# [13:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 21 PDiskId# 1 2024-11-21T08:46:45.703257Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 23 vDiskId# [14:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 22 PDiskId# 1 2024-11-21T08:46:45.712479Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 24 vDiskId# [15:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 23 PDiskId# 1 2024-11-21T08:46:45.728475Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 25 vDiskId# [16:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 24 PDiskId# 1 2024-11-21T08:46:45.738664Z node 1 
:BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 26 vDiskId# [17:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 25 PDiskId# 1 2024-11-21T08:46:45.748473Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 27 vDiskId# [18:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 26 PDiskId# 1 2024-11-21T08:46:45.754003Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 28 vDiskId# [19:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 27 PDiskId# 1 2024-11-21T08:46:45.764194Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 29 vDiskId# [1a:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 28 PDiskId# 1 2024-11-21T08:46:45.773525Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 30 vDiskId# [1b:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 29 PDiskId# 1 2024-11-21T08:46:45.783749Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 31 vDiskId# [1c:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 30 PDiskId# 1 2024-11-21T08:46:45.792462Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 32 vDiskId# [1d:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 31 PDiskId# 1 2024-11-21T08:46:45.800474Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 33 vDiskId# [1e:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 32 PDiskId# 1 2024-11-21T08:46:45.812497Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 34 vDiskId# [1f:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 33 PDiskId# 1 2024-11-21T08:46:45.820494Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 35 vDiskId# [20:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 34 PDiskId# 1 2024-11-21T08:46:45.828530Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 36 vDiskId# [21:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 35 PDiskId# 1 2024-11-21T08:46:45.835046Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 37 vDiskId# [22:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 36 PDiskId# 1 2024-11-21T08:46:45.850189Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 38 vDiskId# [23:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 37 PDiskId# 1 2024-11-21T08:46:45.850734Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 39 vDiskId# [24:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 38 PDiskId# 1 2024-11-21T08:46:45.851159Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 40 vDiskId# [25:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 39 PDiskId# 1 2024-11-21T08:46:45.851461Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 41 vDiskId# [26:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 40 PDiskId# 1 2024-11-21T08:46:45.851713Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New 
owner is created ownerId# 42 vDiskId# [27:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 41 PDiskId# 1 2024-11-21T08:46:45.851972Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 43 vDiskId# [28:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 42 PDiskId# 1 2024-11-21T08:46:45.852223Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 44 vDiskId# [29:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 43 PDiskId# 1 2024-11-21T08:46:45.852490Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 45 vDiskId# [2a:_:0:0:0] FirstNonceToKeep# 1836099 CutLogId# [0:0:0] ownerRound# 44 PDiskId# 1 2024-11-21T08:46:45.852709Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 46 vDiskId# [2b:_:0:0:0] FirstNonceToKee ... ReadPart failed due to unknown reason 2024-11-21T08:53:43.023794Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560112430 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023812Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560113198 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023825Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560113966 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023834Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560114734 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023840Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560115502 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023847Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560116270 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023859Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560117038 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023866Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560117806 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023877Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560118574 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023888Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560119342 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023895Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560120110 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023906Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560120878 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023919Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560121646 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023931Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560122414 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023945Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560123182 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023956Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560123950 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023962Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560124718 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023973Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560125486 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023979Z node 
1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560126254 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.023991Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560127022 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024002Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560127790 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024008Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560128558 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024014Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560129326 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024020Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560130094 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024028Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560130862 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024039Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560131630 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024046Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560132398 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024057Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560133166 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024063Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560133934 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024077Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560134702 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024088Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560135470 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024094Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560136238 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024100Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560137006 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024107Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560137774 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024118Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560138542 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024130Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560139310 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024139Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560140078 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024146Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560140846 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024156Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560141614 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024162Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560142382 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024168Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560143150 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024173Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560143918 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024179Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560144686 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024185Z node 1421 :BS_PDISK WARN: PDiskId# 
1 ReqId# 2560145454 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024196Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560146222 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024203Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560146990 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024249Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560147758 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024255Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560148526 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024268Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560149294 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024279Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560150062 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024286Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560150830 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024296Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560151598 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024303Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560152366 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024313Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560153134 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024319Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560153902 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024330Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560154670 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024344Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560155438 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024369Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560156206 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024386Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560156974 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024393Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560157742 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024398Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560158510 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024404Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560159278 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.024409Z node 1421 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560160046 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T08:53:43.026407Z node 1421 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T08:53:43.026550Z node 1421 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13550594332785493594 MagicNextLogChunkReference: 15839852839115193798 MagicLogChunk: 12862089863442164261 MagicDataChunk: 5901028568638422526 MagicSysLogChunk: 13086236370271306483 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732179222981497 (2024-11-21T08:53:42.981497Z) FormatFlags: 
{ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T08:53:43.027804Z node 1421 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1733160 NonceLog# 1115523 NonceData# 1560644} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [10248:4294967295:0:0:0]} PDiskId# 1 2024-11-21T08:53:43.028501Z node 1421 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T08:53:43.028603Z node 1421 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 47 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 192512} PDiskId# 1 2024-11-21T08:53:43.028625Z node 1421 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 192512} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T08:53:43.032138Z node 1421 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-204 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:53:43.032321Z node 1421 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-204 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T08:53:43.032392Z node 1421 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 all chunk reads are received all chunk writes are received all log writes are received |88.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |88.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |88.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] |88.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 12095137626041734482 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T08:53:46.243813Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T08:53:46.245752Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T08:53:46.247672Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:46.248099Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2024-11-21T08:53:46.249894Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2024-11-21T08:53:46.250427Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2024-11-21T08:53:46.250862Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2024-11-21T08:53:46.251181Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in 
read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2024-11-21T08:53:46.496305Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.496338Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.496370Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.496524Z 1 00h05m30.211024s :BS_PROXY_PUT ERROR: [455bc86ff3f7140b] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2024-11-21T08:53:46.496868Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.497144Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.497410Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in 
read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2024-11-21T08:53:46.497733Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.497858Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.498001Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2024-11-21T08:53:46.498178Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.498434Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.498527Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2024-11-21T08:53:46.498686Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.498697Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.498831Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2024-11-21T08:53:46.499063Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.499075Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.499334Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2024-11-21T08:53:46.499605Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.499634Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.499642Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2024-11-21T08:53:46.500014Z 1 
00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.500049Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T08:53:46.500064Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2024-11-21T08:53:46.500544Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.500581Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.500596Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2024-11-21T08:53:46.500981Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T08:53:46.500997Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T08:53:46.501017Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2024-11-21T08:53:46.502158Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:695] 2024-11-21T08:53:46.502190Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:702] 2024-11-21T08:53:46.502199Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:709] 2024-11-21T08:53:46.502285Z 1 00h05m30.211024s :BS_PROXY_GET ERROR: [930a122cf4940c0f] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED"} Marker# BPG29 2024-11-21T08:53:46.502306Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:702] 2024-11-21T08:53:46.502316Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:709] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED"} |88.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> CdcStreamChangeCollector::NewImage [GOOD] >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> ReadOnlyVDisk::TestReads [GOOD] |88.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> ReadOnlyVDisk::TestSync 
[GOOD] >> CdcStreamChangeCollector::OldImage [GOOD] >> ObjectStorageListingTest::ListingNoFilter |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2024-11-21T08:53:34.934768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653077094263815:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.935073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c80/r3tmp/tmpKKajcA/pdisk_1.dat 2024-11-21T08:53:34.989873Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10886, node 1 2024-11-21T08:53:35.002541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:35.002556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:35.002558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:35.002598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:35.027139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.027930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:35.027944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.028699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:35.028761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:35.028768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T08:53:35.029142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:35.029152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:53:35.029207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:53:35.029484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.030215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179215074, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:35.030237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:53:35.030289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:53:35.030754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:35.030797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:35.030812Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:53:35.030822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:53:35.030835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:53:35.030853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:53:35.031281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:53:35.031297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:53:35.031301Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:35.031313Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:53:35.035044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:35.035069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:35.036547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:35.959212Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653082642562332:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:35.959334Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c80/r3tmp/tmp698v03/pdisk_1.dat 2024-11-21T08:53:35.976649Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11958, node 4 2024-11-21T08:53:35.990847Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:35.990865Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:35.990867Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:35.990898Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:36.059654Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.059686Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.061335Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.064751Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.064861Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:36.064872Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.065239Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:36.065290Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:36.065299Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:53:36.065544Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:36.065553Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:36.065809Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:36.065855Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.066645Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179216110, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:36.066658Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:36.066721Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:36.067099Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:36.067139Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:36.067153Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:36.067167Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:36.067179Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:36.067194Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:36.067351Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7205759404 ... 
lt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.331773Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715739, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.332666Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715740:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.332691Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715740:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.333175Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715740, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.337057Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715742:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.337094Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715742:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.337248Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715743:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.337265Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715743:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: 
EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.337314Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715741:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.337349Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715741:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.337403Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715744:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.337432Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715744:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.337482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715745:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.337497Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715745:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.337855Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715742, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.337901Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715743, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.337927Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715741, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.337951Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715744, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.337974Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715745, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.341966Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715746:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.341996Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715746:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.342779Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715746, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:36.347171Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715747:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:36.347233Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715747:1, propose status:StatusAlreadyExists, reason: Check failed: path: 
'/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:36.349366Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715747, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:40.959815Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653082642562332:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:40.959851Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 2812934196784635713 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: 
TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 9875217152221018208 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2024-11-21T08:53:45.464288Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# 
[1:8781:934] 2024-11-21T08:53:45.464411Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8788:941] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T08:53:45.834525Z 3 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:8795:948] 2024-11-21T08:53:45.834575Z 2 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8788:941] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T08:53:46.435792Z 5 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8809:962] 2024-11-21T08:53:46.435816Z 4 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:8802:955] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:46.751014Z 6 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8816:969] 2024-11-21T08:53:46.751042Z 5 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8809:962] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for 
position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2024-11-21T08:53:47.056426Z 7 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8823:976] 2024-11-21T08:53:47.056459Z 6 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8816:969] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2024-11-21T08:53:47.346168Z 7 00h26m01.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8823:976] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 143890651272573231 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 
RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T08:53:46.382822Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.382916Z 2 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T08:53:46.382947Z 8 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T08:53:46.382969Z 7 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T08:53:46.383012Z 6 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T08:53:46.383130Z 5 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T08:53:46.383321Z 3 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 2} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2024-11-21T08:53:46.385725Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T08:53:46.517822Z 1 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.517938Z 8 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:5] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T08:53:46.517967Z 7 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:4] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T08:53:46.517993Z 6 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:3] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T08:53:46.518026Z 5 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:2] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T08:53:46.518049Z 4 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:1] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T08:53:46.518101Z 2 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.518319Z 3 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: 
Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:6] barrier# {Soft# {Gen# 1 Step# 3} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2024-11-21T08:53:46.580318Z 1 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.580364Z 2 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:46.620999Z 1 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.621121Z 8 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:46.621209Z 7 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:46.621255Z 6 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:46.621307Z 5 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:46.621353Z 4 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:46.621450Z 2 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.621723Z 3 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.621764Z 1 00h05m00.310512s :BS_PROXY_PUT ERROR: [3fb5cb883d4c316b] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} 2024-11-21T08:53:46.678106Z 1 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.678157Z 2 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.678168Z 3 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2024-11-21T08:53:46.774661Z 1 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.774711Z 2 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 
2024-11-21T08:53:46.774720Z 3 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.774728Z 4 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2024-11-21T08:53:46.809205Z 1 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.809252Z 2 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.809261Z 3 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.809270Z 4 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.809277Z 5 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2024-11-21T08:53:46.839579Z 1 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.839630Z 2 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.839641Z 3 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.839649Z 4 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.839656Z 5 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:46.839663Z 6 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2024-11-21T08:53:46.863675Z 1 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T08:53:46.863722Z 2 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.863732Z 3 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.863741Z 4 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.863748Z 5 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:46.863757Z 6 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:46.863765Z 7 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2024-11-21T08:53:46.891297Z 2 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T08:53:46.891321Z 3 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in 
read-only Sender# [1:5299:708] 2024-11-21T08:53:46.891330Z 4 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.891338Z 5 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:46.891346Z 6 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:46.891354Z 7 00h10m20.579540s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2024-11-21T08:53:46.924334Z 3 00h11m00.611536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T08:53:46.924358Z 4 00h11m00.611536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.924367Z 5 00h11m00.611536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:46.924375Z 6 00h11m00.611536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:46.924383Z 7 00h11m00.611536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2024-11-21T08:53:46.961511Z 4 00h11m40.661536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T08:53:46.961530Z 5 00h11m40.661536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:46.961536Z 6 00h11m40.661536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:46.961542Z 7 00h11m40.661536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2024-11-21T08:53:47.003136Z 5 00h12m20.711536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T08:53:47.003160Z 6 00h12m20.711536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:47.003168Z 7 00h12m20.711536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2024-11-21T08:53:47.112510Z 6 00h14m00.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T08:53:47.112534Z 7 00h14m00.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2024-11-21T08:53:47.166955Z 7 00h14m40.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T08:53:47.284905Z 1 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:47.284963Z 8 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:47.284977Z 7 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:47.285019Z 6 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:47.285051Z 5 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T08:53:47.285099Z 4 00h16m30.911024s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2024-11-21T08:53:40.903583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.904119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.904140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030ec/r3tmp/tmpiSUJ2z/pdisk_1.dat 2024-11-21T08:53:41.004148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:41.022071Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:41.022863Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T08:53:41.064841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:41.064884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:41.075413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:41.181179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:41.195981Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:41.196065Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:41.203379Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:41.203417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:41.203565Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:41.203578Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:41.203583Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:41.203626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:41.206492Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:41.206565Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:41.206589Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:41.206595Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:41.206599Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:41.206606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.206891Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:41.206908Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:41.206919Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:41.206925Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.206930Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.206938Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:41.206944Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.206983Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:41.207046Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:41.207066Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:41.207381Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:41.217705Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:41.217765Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.391874Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:53:41.392677Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.392696Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.392820Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.392829Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.392839Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.392914Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.392949Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.393090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.393103Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:41.393465Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:41.393572Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.393836Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:41.393844Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T08:53:41.393937Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:41.393944Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:41.393954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.394116Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.394123Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:41.394127Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:41.394142Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:41.394150Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:41.394159Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.394658Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:41.394972Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:41.395022Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:41.395029Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:41.396431Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:41.396473Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:41.396486Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:41.396491Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:41.417449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:41.694508Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.694535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.694567Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.694573Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.694582Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.694630Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T08:53:41.694655Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.694673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.694813Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.705703Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:41.705745Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:41.705761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.705767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.705776Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.705797Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... 2024-11-21T08:53:45.930768Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:45.930873Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:45.931280Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:45.931290Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.931414Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:45.931421Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:45.931430Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.931637Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:45.931647Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:45.931653Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:45.931672Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:45.931684Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:45.931695Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:45.931968Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:45.932385Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:45.932425Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2024-11-21T08:53:45.932434Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:45.933909Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:45.933942Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:45.933953Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:45.933981Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:45.954895Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:46.235944Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:46.235977Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.236015Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:46.236025Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:46.236036Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:46.236102Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T08:53:46.236138Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:46.236177Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:46.236391Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:46.247770Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:46.247823Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:46.247844Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:46.247852Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:46.247865Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.247894Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:46.247924Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T08:53:46.247943Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.248606Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T08:53:46.248623Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:46.250274Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:46.250298Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:46.250309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:46.251333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:46.252486Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:46.461980Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:46.462672Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:46.545291Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytvm9eg9d2mh52nn8xgxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmE0MGU4NDYtZDA0NDA5ZGItN2ZiN2RiNjYtZGE3NGZhOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:46.546331Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2024-11-21T08:53:46.546494Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:53:46.547150Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytvm9eg9d2mh52nn8xgxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmE0MGU4NDYtZDA0NDA5ZGItN2ZiN2RiNjYtZGE3NGZhOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:46.547710Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ytvm9eg9d2mh52nn8xgxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmE0MGU4NDYtZDA0NDA5ZGItN2ZiN2RiNjYtZGE3NGZhOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:46.547822Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:46.548070Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179226548038 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:46.560724Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:46.560781Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T08:53:46.560815Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:46.560824Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.561197Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T08:53:46.561211Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.576161Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ytvy25ye61e0vqgdcjm0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzZmOTM1MC0zZjEwYjhlZC1jOTk3NjZlMi04ZjY1NjIwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:53:46.576556Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:46.577070Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179226577011 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:46.587704Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:46.587771Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:46.587779Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.588628Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2024-11-21T08:53:46.589858Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage |88.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> TPQTest::TestDirectReadHappyWay |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |88.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] >> TPQTest::TestDirectReadHappyWay [GOOD] |88.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TPartitionTests::SetOffset >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |88.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TPQTest::TestDescribeBalancer >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::UpdateConfig_1 >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPQTest::TestSeveralOwners >> SystemView::CollectPreparedQueries >> TPartitionTests::SetOffset [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPQTest::TestDescribeBalancer [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TPartitionTests::IncorrectRange >> TPartitionTests::IncorrectRange [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> 
TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPQTest::TestCheckACL >> TPartitionTests::ShadowPartitionCounters >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] |88.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TMeteringSink::UsedStorageV1 [GOOD] >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestCheckACL [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> TPQTest::TestAlreadyWrittenWithoutDeduplication ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] Test command err: 2024-11-21T08:53:51.134565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:53:51.135435Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:53:51.135491Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T08:53:51.135501Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:53:51.135504Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T08:53:51.135507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:53:51.135513Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.135518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T08:53:51.135522Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:51.137894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:51.137906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T08:53:51.137917Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:53:51.139308Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 
FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:51.139988Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:53:51.140009Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.140394Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.140428Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.140440Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T08:53:51.140515Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:53:51.140586Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T08:53:51.140767Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T08:53:51.140774Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T08:53:51.140783Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:51.140892Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T08:53:51.140899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T08:53:51.140931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.140956Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:51.141025Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T08:53:51.141053Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T08:53:51.141185Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 2024-11-21T08:53:51.141190Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T08:53:51.141196Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:51.141278Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T08:53:51.141282Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T08:53:51.141297Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.141314Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:51.141365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:53:51.141407Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:51.142018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:51.142045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:51.142106Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:51.142117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T08:53:51.142438Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:51.142446Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T08:53:51.142632Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2024-11-21T08:53:51.142653Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T08:53:51.142659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T08:53:51.142664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T08:53:51.142703Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T08:53:51.142727Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:51.143394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] 
Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:51.143404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T08:53:51.143408Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T08:53:51.144044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T08:53:51.144056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 678 ... : 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:52.084454Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T08:53:52.084510Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:53:52.084544Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:239:2234] 2024-11-21T08:53:52.084671Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T08:53:52.084675Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:239:2234] 2024-11-21T08:53:52.084680Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:52.084733Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2024-11-21T08:53:52.084737Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2024-11-21T08:53:52.084756Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:52.084783Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:52.084840Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.085267Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:52.085318Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:52.085322Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:246:2239], now have 1 active actors on pipe 2024-11-21T08:53:52.085535Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:52.085544Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:251:2243], now have 1 active actors on pipe 2024-11-21T08:53:52.085578Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } ReceivingShards: 33333 ReceivingShards: 33334 Immediate: false } 2024-11-21T08:53:52.085583Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T08:53:52.085592Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T08:53:52.085595Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T08:53:52.085601Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T08:53:52.085621Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:52.085632Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.086234Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:52.086242Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T08:53:52.086245Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T08:53:52.086279Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 21474838668 } } Step: 100 2024-11-21T08:53:52.086283Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T08:53:52.086286Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T08:53:52.086289Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T08:53:52.086308Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:52.086316Z node 5 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.086830Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:52.086838Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2024-11-21T08:53:52.086841Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2024-11-21T08:53:52.086843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T08:53:52.086846Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T08:53:52.086852Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T08:53:52.086860Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T08:53:52.086883Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2024-11-21T08:53:52.086887Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T08:53:52.086891Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T08:53:52.086894Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T08:53:52.086898Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T08:53:52.086903Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T08:53:52.086929Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:52.086939Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.088120Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:52.088130Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T08:53:52.088135Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T08:53:52.088142Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 2 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
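The trace above walks TxId 67890 through the tablet's transaction states (UNKNOWN → PREPARING → PREPARED → PLANNING → PLANNED → CALCULATING → CALCULATED → WAIT_RS), and each advance is preceded by a "save tx" / WRITE_TX_COOKIE write that is acknowledged via TEvKeyValue::TEvResponse before the next step runs. A minimal sketch of that persist-then-advance pattern is shown below; the class, the dict used as a stand-in KV store, and the transition table are illustrative reconstructions from this log, not YDB's actual implementation.

```python
from enum import Enum, auto

class TxState(Enum):
    UNKNOWN = auto()
    PREPARING = auto()
    PREPARED = auto()
    PLANNING = auto()
    PLANNED = auto()
    CALCULATING = auto()
    CALCULATED = auto()
    WAIT_RS = auto()
    EXECUTING = auto()
    EXECUTED = auto()
    WAIT_RS_ACKS = auto()

# Transition order as observed in the trace above.
NEXT = {
    TxState.UNKNOWN: TxState.PREPARING,
    TxState.PREPARING: TxState.PREPARED,
    TxState.PREPARED: TxState.PLANNING,
    TxState.PLANNING: TxState.PLANNED,
    TxState.PLANNED: TxState.CALCULATING,
    TxState.CALCULATING: TxState.CALCULATED,
    TxState.CALCULATED: TxState.WAIT_RS,
    TxState.WAIT_RS: TxState.EXECUTING,
    TxState.EXECUTING: TxState.EXECUTED,
    TxState.EXECUTED: TxState.WAIT_RS_ACKS,
}

class TxRecord:
    def __init__(self, tx_id: int, kv_store: dict):
        self.tx_id = tx_id
        self.state = TxState.UNKNOWN
        self.kv = kv_store  # stand-in for the tablet's key-value storage

    def advance(self) -> TxState:
        """Persist the record with its new state first, then make it current."""
        new_state = NEXT[self.state]
        # Analogue of "save tx ..." + WRITE_TX_COOKIE: durable before acted on.
        self.kv[f"tx_{self.tx_id}"] = new_state.name
        self.state = new_state
        return new_state

kv = {}
tx = TxRecord(67890, kv)
while tx.state is not TxState.WAIT_RS_ACKS:
    print(f"TxId {tx.tx_id}, NewState {tx.advance().name}")
```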
2024-11-21T08:53:52.088146Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33334 2024-11-21T08:53:52.088165Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33333 2024-11-21T08:53:52.088172Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T08:53:52.088178Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T08:53:52.088181Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T08:53:52.088187Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T08:53:52.088193Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2024-11-21T08:53:52.088233Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.088803Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T08:53:52.088811Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33334 2024-11-21T08:53:52.088981Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T08:53:52.088986Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33333 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.089294Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:52.089322Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T08:53:52.089328Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T08:53:52.089331Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T08:53:52.089337Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T08:53:52.089342Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T08:53:52.089346Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T08:53:52.089373Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:52.089385Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.090022Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:52.090032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T08:53:52.090036Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T08:53:52.090042Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T08:53:52.090046Z node 5 
:PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 2024-11-21T08:53:52.090049Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T08:53:52.090053Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> Decommit3dc::Test [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2024-11-21T08:53:47.984101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:47.984513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:47.984535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e38/r3tmp/tmpwtWNQu/pdisk_1.dat 2024-11-21T08:53:48.085158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:48.102805Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:48.145571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:48.145610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:48.156203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:48.260176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:48.274984Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:48.275054Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:48.283621Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:48.283669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:48.283842Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:48.283867Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:48.283876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:48.283927Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:48.287773Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:48.287869Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:48.287904Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:48.287910Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:48.287915Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:48.287922Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:48.288240Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:48.288272Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:48.288290Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T08:53:48.288300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:48.288307Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:48.288316Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:48.288322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:48.288366Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:48.288433Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:48.288454Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:48.288787Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:48.299125Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:48.299170Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:48.473913Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:53:48.474891Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:48.474918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:48.475098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:48.475127Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:48.475141Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:48.475228Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:48.475271Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:48.475465Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:48.475485Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:48.475923Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:48.476073Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:48.476502Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:48.476516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:48.476657Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:48.476665Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:48.476674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:48.476932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:48.476944Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:48.476951Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:48.476971Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:48.476982Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:48.476994Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:48.477741Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:48.478191Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:48.478225Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:48.478233Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:48.480289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:48.480321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:48.480333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:48.481281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:48.482437Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:48.669635Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:48.670042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:53:48.734688Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6ytxsz83tksfjbtsjmgg86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk0ZTVmYzgtZjc1ODVkZTAtYTljNmViYzAtYzJhM2U0ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:48.735835Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T08:53:48.735943Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:48.746983Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:48.747051Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:48.748330Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T08:53:48.748387Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T08:53:48.748447Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2024-11-21T08:53:48.748488Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2024-11-21T08:53:26.036695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653043975284162:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:26.036774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.047042Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653043037389140:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e77/r3tmp/tmptklCIn/pdisk_1.dat 2024-11-21T08:53:26.077786Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.079181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:26.091576Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:26.119970Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23266, node 1 2024-11-21T08:53:26.136266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.136298Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.137734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:26.143484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/002e77/r3tmp/yandexOkMuSd.tmp 2024-11-21T08:53:26.143504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/002e77/r3tmp/yandexOkMuSd.tmp 2024-11-21T08:53:26.143580Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/002e77/r3tmp/yandexOkMuSd.tmp 2024-11-21T08:53:26.143630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:26.149351Z INFO: TTestServer started on Port 17037 GrpcPort 23266 TClient is connected to server localhost:17037 PQClient connected to localhost:23266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:26.182952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:26.195018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:26.195047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:26.197224Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:26.197617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:53:26.225641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 2024-11-21T08:53:26.235839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:26.387994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653043975284902:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.388050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.388419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653043975284937:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.389341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:26.389639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653043975284968:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.389655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:26.409289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653043975284939:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:53:26.428505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.491803Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653043975285132:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:26.492443Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmM5NzQwMjctZTIxZDg3NzItODMxZjNmZGMtZmQ0YWU0NzQ=, ActorId: [1:7439653043975284898:2299], ActorState: ExecuteState, TraceId: 01jd6yt87dfxcpprcq0e8j6m86, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:26.493002Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:26.505213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:26.596601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:26.653120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd6yt8f3022xtf7jdcynbgke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxNmQxZDgtNjEyZWZjOS0yNGUxZmM0Ni1iODRmYWI4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653043975285437:3019] 2024-11-21T08:53:31.036854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653043975284162:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:31.036887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:31.042465Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653043037389140:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:31.042493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:53:32.704402Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653043975284193:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:32.704492Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653043975284193:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2024-11-21T08:53:32.704512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653043975284193:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439653043975284623:2427] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179206233 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:53:32.704532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439653043975284193:2149], cacheItem# { Subscriber: { Subscriber: [1:7439653043975284623:2427] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179206233 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion ... onsumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":396,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} ===Request counters with query: /counters/counters=pqproxy/subsystem=userAgents/json counters: {"sensors":[{"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":396,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} 2024-11-21T08:53:51.875642Z :INFO: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] Closing read session. Close timeout: 0.000000s 2024-11-21T08:53:51.875655Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:account2/topic2:4:1:0:0 -:account2/topic2:3:2:0:0 -:account2/topic2:2:4:0:0 -:account2/topic2:1:5:0:0 -:account2/topic2:0:3:3:0 2024-11-21T08:53:51.875659Z :INFO: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 6 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 40 BytesInflightUncompressed: 30 BytesInflightCompressed: 0 BytesInflightTotal: 30 MessagesInflight: 3 } 2024-11-21T08:53:51.875677Z :NOTICE: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:53:51.875684Z :DEBUG: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] [] Returning serverBytesSize = 0 to budget 2024-11-21T08:53:51.875702Z :DEBUG: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] [] Abort session to cluster 2024-11-21T08:53:51.875890Z :NOTICE: [/Root] [/Root] [9368ed20-ef273071-c50d4e04-31f55e1f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:53:51.875970Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_2506343980358897025_v1 grpc read done: success# 0, data# { } 2024-11-21T08:53:51.875977Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_2506343980358897025_v1 grpc read failed 2024-11-21T08:53:51.875980Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_2506343980358897025_v1 grpc closed 2024-11-21T08:53:51.876002Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_2506343980358897025_v1 is DEAD 2024-11-21T08:53:51.876086Z :INFO: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session: close. Timeout 0.000000s 2024-11-21T08:53:51.876089Z :INFO: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session will now close 2024-11-21T08:53:51.876093Z :DEBUG: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session: aborting 2024-11-21T08:53:51.876074Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876083Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876088Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7439653148481636675:2478] destroyed 2024-11-21T08:53:51.876091Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876093Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876096Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7439653148481636668:2475] destroyed 2024-11-21T08:53:51.876098Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876099Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876101Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7439653148481636665:2474] destroyed 2024-11-21T08:53:51.876109Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876111Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876113Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876144Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876155Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876164Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7439653148481636673:2476] destroyed 2024-11-21T08:53:51.876167Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876168Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session 
some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876219Z :DEBUG: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T08:53:51.876225Z :INFO: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2024-11-21T08:53:51.876234Z :DEBUG: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session is aborting and will not restart 2024-11-21T08:53:51.876180Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7439653148481636674:2477] destroyed 2024-11-21T08:53:51.876191Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876195Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876250Z :DEBUG: [/Root] SessionId [123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0] MessageGroupId [123] Write session: destroy 2024-11-21T08:53:51.876288Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [7:7439653148481636659:2471] disconnected; active server actors: 1 2024-11-21T08:53:51.876294Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [7:7439653148481636659:2471] client some@random@consumer disconnected session some@random@consumer_7_1_2506343980358897025_v1 2024-11-21T08:53:51.876313Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0 grpc read done: success: 0 data: 2024-11-21T08:53:51.876322Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0 grpc read failed 2024-11-21T08:53:51.876325Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0 grpc closed 2024-11-21T08:53:51.876326Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|dcd5264c-9bb55c7e-6b78be55-7fabd7fa_0 is DEAD 2024-11-21T08:53:51.876480Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:53:51.876524Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:53:51.876538Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7439653148481636649:2467] destroyed 2024-11-21T08:53:51.876546Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
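The userAgents counters requested earlier in this trace come back as a JSON document of sensors, each carrying a labels map, a value, and a kind; the two RATE sensors in this run are BytesReadByUserAgent = 396 and BytesWrittenByUserAgent = 460. Below is a small sketch of reading such a payload; the literal is trimmed from the reply above, and the helper function is hypothetical, not part of the test harness.

```python
import json

# Shape taken from the /counters/counters=pqproxy/subsystem=userAgents/json reply above.
payload = """
{"sensors":[
  {"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer",
             "sdk_build_info":"ydb-cpp-sdk/2.6.2","protocol":"topic",
             "user_agent":"test-client/v0.1","host":""},
   "value":396,"kind":"RATE"},
  {"labels":{"topic":"/Root/account2/topic2","sensor":"BytesWrittenByUserAgent",
             "sdk_build_info":"ydb-cpp-sdk/2.6.2","protocol":"topic",
             "user_agent":"test-client/v0.1","host":""},
   "value":460,"kind":"RATE"}]}
"""

def sensor_value(doc: dict, name: str) -> int:
    """Return the value of the first sensor whose 'sensor' label matches name."""
    for sensor in doc["sensors"]:
        if sensor["labels"].get("sensor") == name:
            return sensor["value"]
    raise KeyError(name)

doc = json.loads(payload)
assert sensor_value(doc, "BytesReadByUserAgent") == 396
assert sensor_value(doc, "BytesWrittenByUserAgent") == 460
```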
2024-11-21T08:53:52.152341Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653127006797741:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:52.152391Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7439653127006797741:2124], cacheItem# { Subscriber: { Subscriber: [7:7439653131301766364:3067] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:53:52.152419Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439653152776604001:3905], recipient# [7:7439653152776604000:2480], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:52.154838Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653127006797741:2124], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:53:52.154870Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7439653127006797741:2124], cacheItem# { Subscriber: { Subscriber: [7:7439653131301766364:3067] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:53:52.154892Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439653152776604005:3908], recipient# [7:7439653152776604004:2481], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> SystemView::CollectPreparedQueries [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TPQTest::TestReserveBytes >> IndexBuildTest::WithFollowers >> Defragmentation::DoesItWork >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> IndexBuildTest::BaseCase >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault >> SystemView::CollectScanQueries >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> TTopicYqlTest::DropTopicYql >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::Batching >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> Defragmentation::DoesItWork [GOOD] >> Defragmentation::DefragCompactionRace >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> IndexBuildTest::BaseCase [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:52:24.492713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:24.492730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:24.492733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:24.492736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:24.492747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:24.492749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:24.492755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:24.492814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:24.501672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:24.501692Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:52:24.503938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:24.504043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:24.504076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:24.506902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:24.506987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:24.507092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.507259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:24.507894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.508179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:24.508191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.508233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:24.508241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:24.508247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:24.508296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T08:52:24.509728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:52:24.523623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:24.523693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.523750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:24.523802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:24.523807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.524454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.524482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:24.524541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.524551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:24.524555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:24.524560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:24.525013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.525027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:24.525032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:24.525381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.525390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.525408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.525414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.526032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:24.526581Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:24.526640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:24.526871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:24.526902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:52:24.526909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.526958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:24.526963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:24.526988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:24.526998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:24.527394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:24.527401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:24.527439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:24.527442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:24.527533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:24.527539Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:24.527546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:24.527548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.527552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:24.527555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:24.527557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:24.527560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:24.527567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:24.527571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:24.527574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ndToSchemeshard, txId 1006 2024-11-21T08:53:50.944531Z node 187 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:53:50.944547Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:53:50.944552Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [187:920:2831] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T08:53:50.944641Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:53:50.944651Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:53:50.944658Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:53:50.944665Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:53:50.944675Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 2024-11-21T08:53:50.944766Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:50.944799Z node 187 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 44us result status StatusPathDoesNotExist 2024-11-21T08:53:50.944836Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:53:50.944895Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:50.944912Z node 187 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 
describe path "/MyRoot/DirA" took 18us result status StatusSuccess 2024-11-21T08:53:50.944968Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:50.945180Z node 187 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:927:2058] recipient: [187:99:2134] Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:930:2058] recipient: [187:15:2062] Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:931:2058] recipient: [187:929:2838] Leader for TabletID 72057594046678944 is [187:932:2839] sender: [187:933:2058] recipient: [187:929:2838] 2024-11-21T08:53:50.954811Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:50.954845Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:50.954851Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:50.954856Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:50.954863Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:50.954867Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:50.954876Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:50.954952Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:50.956283Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:50.956719Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:50.956768Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:50.956820Z node 187 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:50.956826Z node 187 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T08:53:50.956876Z node 187 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:50.957010Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957031Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: DirA, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:53:50.957044Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957057Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957126Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957161Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957178Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957195Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957213Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957229Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957258Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957298Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957313Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957367Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957377Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957412Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957444Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957458Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957486Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957497Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957526Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957557Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957574Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 
72057594046678944 2024-11-21T08:53:50.957580Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.957587Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:50.959016Z node 187 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:50.959028Z node 187 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:50.959090Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:50.959096Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:50.959100Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:50.959140Z node 187 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Leader for TabletID 72057594046678944 is [187:932:2839] sender: [187:990:2058] recipient: [187:15:2062] Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::CollectScanQueries [GOOD] >> TPartitionTests::Batching [GOOD] >> IndexBuildTest::RejectsCreate >> IndexBuildTest::CancellationNotEnoughRetries >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> TPartitionTests::ShadowPartitionCountersRestore >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> IndexBuildTest::WithFollowers [GOOD] >> TSourceIdTests::ExpensiveCleanup >> IndexBuildTest::CancelBuild >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPQTabletTests::ProposeTx_Unknown_WriteId >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> IndexBuildTest::CheckLimitWithDroppedIndex >> VectorIndexBuildTest::BaseCase >> TPartitionTests::AfterRestart_1 [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> IndexBuildTest::ShadowDataEdgeCases >> IndexBuildTest::Lock >> Defragmentation::DefragCompactionRace [GOOD] >> IndexBuildTest::CancelBuild [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] Test command err: 2024-11-21T08:53:34.597072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653076254182902:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:34.597307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cd5/r3tmp/tmpR0WqfM/pdisk_1.dat 2024-11-21T08:53:34.648411Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7603, node 1 2024-11-21T08:53:34.661723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:34.661750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:34.661752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:34.661785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:34.697308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.697347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.699110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.725385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.726418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.726439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.727038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:53:34.727102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:53:34.727113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:53:34.727583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:53:34.727594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:53:34.727655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:53:34.728240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.729511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179214773, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.729541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:53:34.729623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:53:34.730256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:53:34.730316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:53:34.730331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:53:34.730369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:53:34.730388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:53:34.730406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:53:34.730899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:53:34.730923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:53:34.730930Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:53:34.730949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:53:34.897921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183995:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183996:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183997:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183998:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183965:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183993:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.897989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653076254183994:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.898124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:34.899172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.899239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.899261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.899284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:34.899327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T08:53:34.899400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:53:34.899415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:34.899670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:34.899808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:34.899859Z node 1 :FLAT_TX_SCHEMESH ... 
=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 477 2024-11-21T08:53:45.732517Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 482 2024-11-21T08:53:45.732521Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 483 2024-11-21T08:53:45.732524Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 484 2024-11-21T08:53:45.732528Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2024-11-21T08:53:45.732531Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 486 2024-11-21T08:53:45.732536Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 488 2024-11-21T08:53:45.732540Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 491 2024-11-21T08:53:45.732544Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2024-11-21T08:53:45.732547Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2024-11-21T08:53:45.732551Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2024-11-21T08:53:45.732554Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2024-11-21T08:53:45.732559Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 497 2024-11-21T08:53:45.732563Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 498 2024-11-21T08:53:45.732567Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 502 2024-11-21T08:53:45.732571Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGEzN2FiZDAtOWFkNjA4ZWItM2U2NDM0MzgtYmJlNTFjYmE=, ActorId: [4:7439653122760745582:2306], ActorState: ExecuteState, TraceId: 01jd6ytv3f096zd3adx1e5qyzp, Reply query error, msg: Pending previous query completion proxyRequestId: 508 2024-11-21T08:53:45.732577Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 476 2024-11-21T08:53:45.732582Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 487 2024-11-21T08:53:45.732588Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2024-11-21T08:53:45.732592Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 490 2024-11-21T08:53:45.732596Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 495 2024-11-21T08:53:45.732600Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 499 2024-11-21T08:53:45.732604Z node 4 :KQP_SESSION WARN: 
SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2024-11-21T08:53:45.732607Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 501 2024-11-21T08:53:45.732611Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2024-11-21T08:53:45.732616Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 504 2024-11-21T08:53:45.732619Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2024-11-21T08:53:45.732622Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2024-11-21T08:53:45.732627Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2024-11-21T08:53:45.732631Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2024-11-21T08:53:45.734101Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2024-11-21T08:53:45.734111Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGJiZjM0Mi04YTZjZDlhYy02N2M0ZTk5Zi0xNGJlNGQ4OQ==, ActorId: [4:7439653122760745580:2304], ActorState: ExecuteState, TraceId: 01jd6ytv3g934762fa0few54nn, Reply query error, msg: Pending previous query completion proxyRequestId: 511 2024-11-21T08:53:45.770659Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:53:45.770704Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: 
'/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T08:53:45.771515Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:53:50.320881Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653122760744683:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:50.320936Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> Defragmentation::CorruptedReadHandling >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> TPartitionTests::AfterRestart_2 >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> IndexBuildTest::DropIndex >> TPartitionTests::AfterRestart_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] Test command err: 2024-11-21T08:53:50.158626Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:50.158651Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.161814Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.161991Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" 
Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:50.254496Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:50.431486Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:50.431512Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.434591Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.435372Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:177:2192] 2024-11-21T08:53:50.435423Z node 2 :PERSQUEUE INFO: new Cookie owner1|bfdd72bb-743b522c-f3bc997a-6c7beeab_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 9 Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got write info response. 
Body keys: 1, head: 10, src id info: 1 2024-11-21T08:53:53.273446Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:53.273475Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:53.279214Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [3:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:53.279459Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:177:2192] 2024-11-21T08:53:53.279502Z node 3 :PERSQUEUE INFO: new Cookie owner1|3c41e0a3-19266469-97bbb484-22203e7a_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 9 Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got write info response. 
Body keys: 1, head: 10, src id info: 1 2024-11-21T08:53:56.111413Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:56.111433Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:56.113586Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:57.490643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:57.490663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:57.490666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:57.490669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:57.490681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:57.490684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:57.490690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:57.490741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:57.498299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:57.498312Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:57.500431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:57.501007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:57.501036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:57.502158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:57.502308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:57.502392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.502443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:57.503250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.503480Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:57.503488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.503517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:57.503521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:57.503525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:57.503534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.504448Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:57.515534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:57.515596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.515645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:57.515690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:57.515698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:57.516371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:57.516384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:57.516388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:57.516680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:57.516958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:53:57.516964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.516967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.516972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.517341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:57.517647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:57.517693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:57.517834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.517850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:57.517854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.517889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:57.517893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.517917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:57.517928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:57.518195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:57.518199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:57.518227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.518232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:57.518302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.518307Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:57.518316Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:57.518319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.518323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:57.518326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.518329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:57.518331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:57.518338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:57.518342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:57.518345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:57.518560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:57.518569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:57.518572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:57.518575Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:57.518578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:57.518586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
r: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:59.011476Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding/indexImplLevelTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:59.011514Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding/indexImplLevelTable" took 41us result status StatusSuccess 2024-11-21T08:53:59.011624Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { 
GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 12345 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 54321 } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\00090\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409556 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0001\324\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409557 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409558 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:59.011710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:53:59.011731Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding/indexImplPostingTable" took 23us result status StatusSuccess 2024-11-21T08:53:59.011803Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "covered" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 12345 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 54321 } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\00090\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0001\324\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T08:52:43.579475Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732179163579467 2024-11-21T08:52:43.707187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652856108238869:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:43.707244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:43.710040Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652858863427517:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:43.710369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:43.739481Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:43.742745Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004269/r3tmp/tmpzRNQCL/pdisk_1.dat 2024-11-21T08:52:43.770616Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14986, node 1 2024-11-21T08:52:43.786910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004269/r3tmp/yandexJ9KgT4.tmp 2024-11-21T08:52:43.786926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004269/r3tmp/yandexJ9KgT4.tmp 2024-11-21T08:52:43.787004Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004269/r3tmp/yandexJ9KgT4.tmp 2024-11-21T08:52:43.787058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:43.791589Z 
INFO: TTestServer started on Port 8088 GrpcPort 14986 TClient is connected to server localhost:8088 PQClient connected to localhost:14986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:43.807572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:43.807615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:43.809446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:43.846343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:43.846370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:43.847318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:43.848006Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:43.848518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:52:44.044961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652863158395105:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.045014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.045196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652863158395117:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.046620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652860403207056:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.046648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.046734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652860403207090:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:44.046986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:52:44.051863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652863158395119:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:52:44.052126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652860403207092:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:52:44.088939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.124446Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652863158395199:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:44.124775Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjU1Y2MxNzYtODA4OTk0M2EtY2I5NjZjZDQtY2ExYzMyOTI=, ActorId: [2:7439652863158395103:2277], ActorState: ExecuteState, TraceId: 01jd6yrywc41fe7gyj3v5sfyz3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:44.125308Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:44.129630Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652860403207270:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:44.129953Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTI2NjExOTQtOGU2YzE4MGEtMjI2YTFhMWUtYTA4NTBhY2M=, ActorId: [1:7439652860403207050:2298], ActorState: ExecuteState, TraceId: 01jd6yrywddk9dwcx5ankqqcp0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:44.130196Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:44.171669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:44.205313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:14986", true, true, 1000); 2024-11-21T08:52:44.253749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6yrz2a856qy25atndhahx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcyMzUwYjgtYTBkMDVmZi00MzliYjE2MS02NmZiYjc1NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652860403207570:2957] 2024-11-21T08:52:48.707320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652856108238869:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.707372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:48.710348Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652858863427517:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.710384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:52:49.291332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose it ... 
sionId: ydb://session/3?node_id=16&id=ZGIzMjhlN2MtYTE2OGNiMDMtNjRjOGNjNDctY2M3ZjVkYzQ=, ActorId: [16:7439653159348879883:2458], ActorState: ExecuteState, TraceId: 01jd6yv2x6dyzrsbsagak7gecc, Create QueryResponse for error on request, msg: 2024-11-21T08:53:53.888660Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yv2xy2xmrp1yb9pnm8t8r" } } YdbStatus: UNAVAILABLE ConsumedRu: 15 } 2024-11-21T08:53:54.420384Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2024-11-21T08:53:54.421502Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:12001" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2024-11-21T08:53:54.421513Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Start write session. Will connect to endpoint: localhost:12001 2024-11-21T08:53:54.421798Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:53:54.421870Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:53:54.421884Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T08:53:54.421998Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:53:54.422032Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:58436 2024-11-21T08:53:54.422039Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:58436 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:53:54.422041Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:53:54.422445Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T08:53:54.422471Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:53:54.422477Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:53:54.422478Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE 
`/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:53:54.422482Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439653161506004475:2540] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:53:54.422874Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439653161506004475:2540] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:53:54.584917Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715700. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:53:54.584969Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653161506004488:2542] TxId: 281474976715700. Ctx: { TraceId: 01jd6yv3kpchdrpacd0h20bj48, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzJkOWYzZWQtMjkzZDBmYzctNjY3MzVjZGUtMzRjYzEzMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:53:54.585092Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YzJkOWYzZWQtMjkzZDBmYzctNjY3MzVjZGUtMzRjYzEzMzA=, ActorId: [15:7439653161506004476:2542], ActorState: ExecuteState, TraceId: 01jd6yv3kpchdrpacd0h20bj48, Create QueryResponse for error on request, msg: 2024-11-21T08:53:54.585485Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439653161506004475:2540] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YzJkOWYzZWQtMjkzZDBmYzctNjY3MzVjZGUtMzRjYzEzMzA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv3kq7f4hx1022jqc1xpj" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T08:53:54.585511Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YzJkOWYzZWQtMjkzZDBmYzctNjY3MzVjZGUtMzRjYzEzMzA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv3kq7f4hx1022jqc1xpj" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T08:53:54.585727Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T08:53:54.585894Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YzJkOWYzZWQtMjkzZDBmYzctNjY3MzVjZGUtMzRjYzEzMzA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv3kq7f4hx1022jqc1xpj" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T08:53:54.585900Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session will restart in 2.000000s 2024-11-21T08:53:54.585922Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: Do CDS request 2024-11-21T08:53:54.585929Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Do schedule cds request after 2000 ms 2024-11-21T08:53:54.963789Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715702. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:53:54.963842Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653161506004547:2544] TxId: 281474976715702. Ctx: { TraceId: 01jd6yv3yw54bag2qy8jx02e34, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YjA0NjNkMzgtYjhkMzQ0ZmItODhmNDUwZWMtZWYxNTk4NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:53:54.963964Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YjA0NjNkMzgtYjhkMzQ0ZmItODhmNDUwZWMtZWYxNTk4NmE=, ActorId: [15:7439653161506004534:2544], ActorState: ExecuteState, TraceId: 01jd6yv3yw54bag2qy8jx02e34, Create QueryResponse for error on request, msg: 2024-11-21T08:53:54.964287Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd6yv3zkfrnfes19zr5tzzgs" } } YdbStatus: UNAVAILABLE ConsumedRu: 14 } 2024-11-21T08:53:55.078026Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720685. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:53:55.078074Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439653163643847293:2469] TxId: 281474976720685. Ctx: { TraceId: 01jd6yv429eb53zczjx6fp0hbt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YTBlMWMzZDctMzBkOGZjYjItNTM0MzhlOTItNzIxNDgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:53:55.078190Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YTBlMWMzZDctMzBkOGZjYjItNTM0MzhlOTItNzIxNDgxYWM=, ActorId: [16:7439653163643847280:2469], ActorState: ExecuteState, TraceId: 01jd6yv429eb53zczjx6fp0hbt, Create QueryResponse for error on request, msg: 2024-11-21T08:53:55.078485Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yv435chw5bpf2jthfy3n2" } } YdbStatus: UNAVAILABLE ConsumedRu: 18 } 2024-11-21T08:53:55.422055Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: close. Timeout = 0 ms 2024-11-21T08:53:55.422067Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session will now close 2024-11-21T08:53:55.422077Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: aborting 2024-11-21T08:53:55.422261Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T08:53:55.422265Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d50b5a29-8b3d437f-77c58de8-5d1926f7_0] Write session: destroy 2024-11-21T08:53:55.563829Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715704. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:53:55.563881Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653165800971945:2553] TxId: 281474976715704. Ctx: { TraceId: 01jd6yv4jb6sywzvwxjeekg7qs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=N2M0NzBjZWEtZWRjODFhYzctYThlYmEzZDYtYjQwNjg4Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:53:55.563974Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=N2M0NzBjZWEtZWRjODFhYzctYThlYmEzZDYtYjQwNjg4Y2U=, ActorId: [15:7439653165800971942:2553], ActorState: ExecuteState, TraceId: 01jd6yv4jb6sywzvwxjeekg7qs, Create QueryResponse for error on request, msg: 2024-11-21T08:53:55.564201Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yv4jb6sywzvwxjfqtes8p" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TTopicYqlTest::DropTopicYql [GOOD] >> Defragmentation::CorruptedReadHandling [GOOD] |88.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |88.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |88.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> Defragmentation::GappedReadHandling >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::Lock [GOOD] >> TReplicaTest::Update >> TReplicaTest::Commit >> TReplicaTest::UpdateWithoutHandshake >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::Update [GOOD] >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::AckNotifications [GOOD] |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.8%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |88.8%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |88.8%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] Test command err: 2024-11-21T08:53:38.877714Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T08:53:38.896419Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:53:38.899185Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:53:38.899355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:53:38.902633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:53:38.902677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:53:38.902757Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T08:53:38.903614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:38.903676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:38.903727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:38.903751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:38.903769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:38.903786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:38.903801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:38.903823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:38.903846Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:38.903865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:38.903883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:38.903916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:38.908098Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:53:38.908172Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T08:53:38.908187Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:53:38.908195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T08:53:38.909681Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T08:53:38.909776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:53:38.909785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:53:38.909822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:38.909878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:38.909892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:38.909897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:53:38.909907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:53:38.909917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:38.909924Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:38.909928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:53:38.909945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:38.909953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:38.909960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:38.909964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:53:38.909973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:53:38.909979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:38.909989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:38.909995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:53:38.910006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:38.910011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:38.910013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:53:38.910019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:38.910024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:38.910027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:53:38.910058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2024-11-21T08:53:38.910067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 
2024-11-21T08:53:38.910074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=6; 2024-11-21T08:53:38.910083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T08:53:38.910098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:38.910104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:38.910106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T08:53:38.910123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:38.910129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:38.910131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T08:53:38.910140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:53:38.910144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024 ... 
=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=16; 2024-11-21T08:53:49.146472Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T08:53:49.146502Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146511Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T08:53:49.146517Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T08:53:49.146565Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T08:53:49.146570Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T08:53:49.146576Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=16; 2024-11-21T08:53:49.146582Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2024-11-21T08:53:49.146589Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T08:53:49.146597Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146600Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T08:53:49.146605Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T08:53:49.146648Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:53:49.146668Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146672Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T08:53:49.146682Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T08:53:49.146692Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T08:53:49.146724Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2024-11-21T08:53:49.146735Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146745Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146753Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146773Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:53:49.146781Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146787Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T08:53:49.146792Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2024-11-21T08:53:49.146805Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:53:49.146880Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.025}],"full":{"a":1732179229121208,"name":"_full_task","f":1732179229121208,"d_finished":0,"c":0,"l":1732179229146813,"d":25605},"events":[{"name":"bootstrap","f":1732179229121358,"d_finished":514,"c":1,"l":1732179229121872,"d":514},{"a":1732179229146771,"name":"ack","f":1732179229146644,"d_finished":112,"c":1,"l":1732179229146756,"d":154},{"a":1732179229146769,"name":"processing","f":1732179229121969,"d_finished":450,"c":5,"l":1732179229146756,"d":494},{"name":"ProduceResults","f":1732179229121655,"d_finished":242,"c":8,"l":1732179229146790,"d":242},{"a":1732179229146790,"name":"Finish","f":1732179229146790,"d_finished":0,"c":0,"l":1732179229146813,"d":23},{"name":"task_result","f":1732179229121973,"d_finished":319,"c":4,"l":1732179229146615,"d":319}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T08:53:49.146897Z node 4 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:53:49.120948Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T08:53:49.146903Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:53:49.146921Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.024558s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.022917s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T08:53:49.146928Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2024-11-21T08:53:55.038049Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:55.038074Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:55.041213Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitConfigStep 2024-11-21T08:53:55.041259Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T08:53:55.041322Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:177:2192] 2024-11-21T08:53:55.041453Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:55.041469Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitMetaStep 2024-11-21T08:53:55.041479Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInfoRangeStep 2024-11-21T08:53:55.041572Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. 
Step TInitDataRangeStep 2024-11-21T08:53:55.041592Z node 1 :PERSQUEUE DEBUG: Got data topic Root/PQ/rt3.dc1--account--topic partition 1 offset 0 count 10 size 0 so 0 eo 10 d0000000001_00000000000000000000_00000_0000000010_00000 2024-11-21T08:53:55.041598Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataStep 2024-11-21T08:53:55.041601Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Completed. 2024-11-21T08:53:55.041604Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:177:2192] 2024-11-21T08:53:55.041609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 10 Head Offset 10 PartNo 0 PackedSize 0 count 0 nextOffset 10 batches 0 SYNC INIT DATA KEY: d0000000001_00000000000000000000_00000_0000000010_00000 size 0 2024-11-21T08:53:55.041641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:53:55.041645Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 send read request for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:53:55.041665Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T08:53:55.041677Z node 1 :PERSQUEUE INFO: new Cookie owner1|bde97709-9971c8a4-cd57fcdd-3b92450f_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:55.041715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 offset 3 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 3 2024-11-21T08:53:55.041725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 added 1 blobs, size 0 count 7 last offset 4 2024-11-21T08:53:55.041730Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Reading cookie 0. Send blob request. Send disk status response with cookie: 0 2024-11-21T08:53:55.041756Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:55.041767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2024-11-21T08:53:55.041814Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2024-11-21T08:53:55.041825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2024-11-21T08:53:55.041838Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2024-11-21T08:53:55.041866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T08:53:55.041897Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 10 endOffset 10 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:55.072366Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T08:53:55.072397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T08:53:55.072413Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:55.367121Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2024-11-21T08:53:55.387402Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2024-11-21T08:53:55.387440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 2 2024-11-21T08:53:55.387470Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2024-11-21T08:53:55.387498Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2024-11-21T08:53:55.387508Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2024-11-21T08:53:55.387529Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2024-11-21T08:53:55.387559Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2024-11-21T08:53:55.407870Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T08:53:55.407900Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T08:53:55.407914Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk processed_blobs=41800 quoted_time=9.980000s Iteration 0 Iteration 1 Iteration 2 Iteration 3 Iteration 4 Iteration 5 Iteration 6 Iteration 7 Iteration 8 Iteration 9 Iteration 10 Iteration 11 Iteration 12 Iteration 13 Iteration 14 Iteration 15 Iteration 16 Iteration 17 Iteration 18 Iteration 19 Iteration 20 Iteration 21 Iteration 22 Iteration 23 Iteration 24 Iteration 25 Iteration 26 Iteration 27 Iteration 28 Iteration 29 Iteration 30 Iteration 31 Iteration 32 Iteration 33 Iteration 34 Iteration 35 Iteration 36 Iteration 37 Iteration 38 Iteration 39 Iteration 40 Iteration 41 Iteration 42 Iteration 43 Iteration 44 Iteration 45 Iteration 46 Iteration 47 Iteration 48 Iteration 49 Iteration 50 Iteration 51 Iteration 52 Iteration 53 Iteration 54 Iteration 55 Iteration 56 Iteration 57 Iteration 58 Iteration 59 Iteration 60 Iteration 61 Iteration 62 Iteration 63 Iteration 64 Iteration 65 Iteration 66 Iteration 67 Iteration 68 Iteration 69 Iteration 70 Iteration 71 Iteration 72 Iteration 73 Iteration 74 Iteration 75 Iteration 76 Iteration 77 Iteration 78 Iteration 79 Iteration 80 Iteration 81 Iteration 82 Iteration 83 Iteration 84 Iteration 85 Iteration 86 Iteration 87 Iteration 88 Iteration 89 Iteration 90 Iteration 91 Iteration 92 Iteration 93 Iteration 94 Iteration 95 Iteration 96 Iteration 97 Iteration 98 Iteration 99 Iteration 100 Iteration 101 Iteration 102 Iteration 103 Iteration 104 Iteration 105 Iteration 106 Iteration 107 Iteration 108 Iteration 109 Iteration 110 Iteration 111 
Iteration 112 Iteration 113 Iteration 114 Iteration 115 Iteration 116 Iteration 117 Iteration 118 Iteration 119 Iteration 120 Iteration 121 Iteration 122 Iteration 123 Iteration 124 Iteration 125 Iteration 126 Iteration 127 Iteration 128 Iteration 129 Iteration 130 Iteration 131 Iteration 132 Iteration 133 Iteration 134 Iteration 135 Iteration 136 Iteration 137 Iteration 138 Iteration 139 Iteration 140 Iteration 141 Iteration 142 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 
365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 
Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 
872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2024-11-21T08:54:04.846406Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:04.846426Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:04.846477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:04.846481Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:04.846998Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:04.847020Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T08:54:04.847031Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:04.847046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:6:2053] 2024-11-21T08:54:04.847061Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# path 2024-11-21T08:54:04.847066Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T08:54:04.847070Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:04.847076Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:6:2053] 2024-11-21T08:54:04.847079Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:05.050675Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:05.050695Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:05.050720Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.050725Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:05.050734Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:05.050745Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2024-11-21T08:54:04.995183Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:04.995205Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject update from unknown populator: sender# [1:6:2053], owner# 1, generation# 1 2024-11-21T08:54:04.995222Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T08:54:04.995226Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T08:54:04.995241Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:04.995257Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:6:2053] 2024-11-21T08:54:04.995271Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# path 2024-11-21T08:54:04.995277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T08:54:04.995279Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:04.995283Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:04.995288Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:6:2053] 2024-11-21T08:54:04.995291Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:05.199823Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:05.199844Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:05.199874Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.199878Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:5:2052] Reject update from stale populator: sender# [2:6:2053], owner# 1, generation# 0, pending generation# 1 2024-11-21T08:54:05.199889Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T08:54:05.199893Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 2024-11-21T08:54:05.199905Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:05.199918Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T08:54:05.199923Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# path 2024-11-21T08:54:05.199929Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T08:54:05.199931Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:05.199935Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:05.199940Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:6:2053] 2024-11-21T08:54:05.199943Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# [OwnerId: 1, LocalPathId: 1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2024-11-21T08:54:04.862234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:04.862254Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:04.862267Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:04.862271Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T08:54:04.862276Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:6:2053] 2024-11-21T08:54:04.862278Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T08:54:05.066945Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle 
NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T08:54:05.066966Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 2024-11-21T08:54:05.066989Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T08:54:05.067004Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:05.067008Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:05.067040Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.067044Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:05.067672Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:05.067713Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7:2054] 2024-11-21T08:54:05.067723Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:05.067726Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:05.067728Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:05.067733Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:7:2054] 2024-11-21T08:54:05.271767Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:05.271788Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:05.271818Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.271822Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:05.271832Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:05.271848Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T08:54:05.271865Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T08:54:05.271873Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.271875Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:05.271878Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:05.271917Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:05.271919Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T08:54:05.271921Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:05.271928Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T08:54:05.271931Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T08:54:05.271934Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:05.271943Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:7:2054] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:03.203878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:03.203897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.203901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:03.203904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:03.203913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:03.203916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:03.203923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.203987Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:03.211516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:03.211534Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:03.213578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:03.214144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:03.214166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:03.215081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:03.215211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:03.215293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.215344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:03.216043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.216283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.216290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.216317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:03.216322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.216326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:03.216336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.217164Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:03.227785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:03.227849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.227893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:03.227935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:03.227940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.228455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2024-11-21T08:54:03.228479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:03.228529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.228539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:03.228544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:03.228550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:03.228920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.228930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:03.228935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:03.229233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.229241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.229247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.229255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.229766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:03.230076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:03.230119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:03.230266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.230286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:03.230292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.230337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:03.230342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.230368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.230378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:03.230713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.230720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.230759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.230764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:03.230841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.230848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:03.230860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:03.230864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.230870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:03.230876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.230881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:03.230884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:03.230895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:03.230901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:03.230908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:03.231190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.231205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.231210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:03.231215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:03.231219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.231231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 9 2024-11-21T08:54:04.141445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141450Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:54:04.141460Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141466Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2024-11-21T08:54:04.141529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141533Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2024-11-21T08:54:04.141540Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2024-11-21T08:54:04.141543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2024-11-21T08:54:04.141548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2024-11-21T08:54:04.141581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141584Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2024-11-21T08:54:04.141588Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:54:04.141593Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2024-11-21T08:54:04.141624Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141634Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.141637Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T08:54:04.141639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:54:04.141685Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141693Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.141696Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2024-11-21T08:54:04.141698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T08:54:04.141800Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141809Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.141812Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:54:04.141814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:04.141971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.141982Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.142012Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142019Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.142038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:54:04.142042Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:04.142086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:54:04.142102Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 
2024-11-21T08:54:04.142104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2024-11-21T08:54:04.142107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2024-11-21T08:54:04.142167Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142174Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:04.142177Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2024-11-21T08:54:04.142179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2024-11-21T08:54:04.142185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2024-11-21T08:54:04.142462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T08:54:04.142468Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:04.142492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T08:54:04.142502Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2024-11-21T08:54:04.142504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2024-11-21T08:54:04.142507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2024-11-21T08:54:04.142517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:421:2376] message: TxId: 105 2024-11-21T08:54:04.142520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2024-11-21T08:54:04.142523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:54:04.142526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:54:04.142536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T08:54:04.142539Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2024-11-21T08:54:04.142541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2024-11-21T08:54:04.142544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T08:54:04.142546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2024-11-21T08:54:04.142547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2024-11-21T08:54:04.142551Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T08:54:04.142612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.142877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:04.143052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:54:04.143058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:923:2853] TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:03.815431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:03.815453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.815456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:03.815459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:03.815469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:03.815472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:03.815478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.815543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:03.822907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:03.822926Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:03.824973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:03.825518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:03.825542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:03.826610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:03.826766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:03.826850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.826893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:03.827556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.827749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.827755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.827777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:03.827782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.827786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:03.827794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.828681Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:03.839592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:03.839661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.839716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:03.839760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:03.839765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.840326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.840345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:03.840391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.840397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:03.840401Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:03.840404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:03.840791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.840810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:03.840814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:03.841171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.841180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.841184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.841189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.841616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:03.841932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:03.841975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:03.842108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.842126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:03.842132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.842173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:03.842179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.842203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.842212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:03.842540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.842546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.842576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.842579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:03.842648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.842653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:03.842662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:03.842668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.842672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:03.842675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.842678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:03.842680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:03.842688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:03.842693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:03.842696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:03.842920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.842931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.842933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:03.842937Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:03.842939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.842947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:04.494296Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:04.494314Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 19us result status StatusSuccess 2024-11-21T08:54:04.494396Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:04.494429Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:04.494456Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 28us result status StatusSuccess 2024-11-21T08:54:04.494546Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2024-11-21T08:53:40.540099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:40.540698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:40.540734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030ff/r3tmp/tmpe9i5jR/pdisk_1.dat 2024-11-21T08:53:40.657624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.676715Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:40.677473Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T08:53:40.719274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:40.719302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:40.729850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:40.839425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:40.853963Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:53:40.854027Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:40.861532Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:40.861564Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:53:40.861743Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:53:40.861761Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:53:40.861768Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:53:40.861814Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:53:40.865146Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:53:40.865204Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:53:40.865226Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:53:40.865231Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:40.865236Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:53:40.865242Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:40.865477Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:53:40.865499Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:53:40.865512Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:53:40.865519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:40.865527Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:40.865535Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:53:40.865541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:40.865577Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:40.865639Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:53:40.865656Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:53:40.865947Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:40.876238Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:40.876282Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:53:41.051291Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:53:41.052197Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.052246Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.052379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.052389Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.052400Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.052491Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:53:41.052530Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.052772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.052791Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:53:41.053201Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:41.053334Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.053725Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:41.053734Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T08:53:41.053865Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:41.053872Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:41.053881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.054115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.054126Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:41.054132Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:41.054151Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:41.054163Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:41.054175Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.054809Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:41.055206Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:41.055247Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:53:41.055254Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:41.056921Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:41.056959Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:41.056973Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:41.056977Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:41.078068Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:41.362317Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:41.362343Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.362380Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.362387Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:41.362395Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:41.362440Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T08:53:41.362465Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:41.362482Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:41.362635Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:41.373552Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:41.373578Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:41.373590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.373594Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:41.373604Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:41.373628Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... 2024-11-21T08:53:46.826768Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:53:46.826850Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:46.827160Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:53:46.827171Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.827317Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:53:46.827324Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:53:46.827332Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:46.827514Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:46.827522Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:53:46.827526Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:53:46.827544Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:46.827552Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:53:46.827560Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:46.827727Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:46.827979Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:53:46.827998Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2024-11-21T08:53:46.828002Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:46.829696Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:46.829739Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:53:46.829753Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T08:53:46.829758Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T08:53:46.850802Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:47.145063Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:53:47.145095Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.145133Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:47.145143Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:53:47.145154Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T08:53:47.145215Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T08:53:47.145247Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:53:47.145285Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:53:47.145495Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:47.156813Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T08:53:47.156844Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T08:53:47.156860Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:47.156867Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:53:47.156880Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.156950Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:53:47.156965Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T08:53:47.156982Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.158363Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T08:53:47.158384Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:53:47.159893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:47.159913Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:47.159921Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:47.160822Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:47.161828Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:47.368931Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:53:47.369559Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:47.447741Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6ytwgqe8a0thx8d1tmcw19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTcyYTEyYzgtYWRmNzc4YTMtOTA0MjhlYjAtNWYyOWJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:47.449067Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2024-11-21T08:53:47.449229Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:53:47.449970Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6ytwgqe8a0thx8d1tmcw19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTcyYTEyYzgtYWRmNzc4YTMtOTA0MjhlYjAtNWYyOWJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:47.450600Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6ytwgqe8a0thx8d1tmcw19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTcyYTEyYzgtYWRmNzc4YTMtOTA0MjhlYjAtNWYyOWJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:47.450742Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:47.451102Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732179227451061 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:47.461613Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:47.461666Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T08:53:47.461696Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:47.461724Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.462134Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T08:53:47.462149Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.474820Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6ytwt7bqfzpfppg3gs9svv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWUzOTQwMzktNGQ1ZDI5MTctNTkxNDA1MWMtYjdkYTgzNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:53:47.475025Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:53:47.475429Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732179227475387 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:53:47.485932Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:53:47.485996Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T08:53:47.486004Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:53:47.486666Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2024-11-21T08:53:47.487498Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.887879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.887906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.887911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.887916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.887933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.887939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.887947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.888028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.896335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.896354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.898548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.899361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T08:53:00.899391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.900520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.900697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.900781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.900833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.901706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.901970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.901979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.902017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.902023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.902030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.902044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.903112Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.918520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.918608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.918671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.918748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.918755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.919451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.919471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.919521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.919530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.919534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.919538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.919828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.919835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.919838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.920088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.920094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.920100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.920106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.920683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.921006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.921052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.921211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.921229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.921245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.921295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.921301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.921331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.921342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.921668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T08:53:00.921674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.921713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.921717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.921794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.921800Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.921810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.921814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.921820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.921825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.921830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.921834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.921842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.921848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.921853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.922112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.922122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.922126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.922131Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.922135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.922145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
024-11-21T08:53:53.016384Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T08:53:53.016401Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T08:53:53.057540Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:318:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:53:53.057576Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2024-11-21T08:53:53.057618Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2024-11-21T08:53:53.057662Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2024-11-21T08:53:53.057782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:318:2303], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 13 Memory: 123880 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 42 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:53:53.057790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:53:53.057802Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0013 2024-11-21T08:53:53.057815Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:53:53.057821Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:53:53.078394Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:318:2303]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:53.078417Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:53.078445Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 
2024-11-21T08:53:53.078464Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:53.078470Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2024-11-21T08:53:53.078474Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2024-11-21T08:53:53.078477Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2024-11-21T08:53:53.088687Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:321:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:53:53.088715Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409547 2024-11-21T08:53:53.088749Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409547, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:53:53.088788Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2024-11-21T08:53:53.088888Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:321:2304], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 14 Memory: 119000 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 42 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:53:53.088897Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:53:53.088910Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0014 2024-11-21T08:53:53.088920Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:53:53.099086Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:321:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:53.099107Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:53.099130Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2024-11-21T08:53:53.099146Z node 3 :TX_DATASHARD 
DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:53.099152Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2024-11-21T08:53:53.099156Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2024-11-21T08:53:53.099161Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2024-11-21T08:53:53.130114Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:53.130141Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:53.130146Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T08:53:53.130168Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2024-11-21T08:53:53.130172Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2024-11-21T08:53:53.130205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2024-11-21T08:53:53.130223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2024-11-21T08:53:53.130230Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2024-11-21T08:53:53.130252Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2024-11-21T08:53:53.130277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T08:53:53.130281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2024-11-21T08:53:53.130284Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T08:53:53.130288Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2024-11-21T08:53:53.130305Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:53:53.140501Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:53.140528Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:53:53.140534Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:53:53.161366Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 
269877761, Sender [3:1329:3253], Recipient [3:318:2303]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:53.161391Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:53.161402Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1328:3252], serverId# [3:1329:3253], sessionId# [0:0:0] 2024-11-21T08:53:53.161477Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1327:3251], Recipient [3:318:2303]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2024-11-21T08:53:53.161742Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1332:3256], Recipient [3:321:2304]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:53.161751Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:53.161758Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1331:3255], serverId# [3:1332:3256], sessionId# [0:0:0] 2024-11-21T08:53:53.161786Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1330:3254], Recipient [3:321:2304]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:54.576145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:54.576171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:54.576176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:54.576181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:54.576192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:54.576195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:54.576202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:54.576303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:54.585282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:54.585303Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:54.587491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:54.588059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:54.588086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T08:53:54.589021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:54.589158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:54.589250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:54.589312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:54.589933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:54.590168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:54.590176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:54.590205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:54.590210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:54.590215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:54.590224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.591015Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:54.601678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:54.601739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.601785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:54.601832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:54.601838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.602353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:54.602372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:54.602414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.602421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:54.602424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:54.602428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:54.602811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.602826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:54.602830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:54.603154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.603161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.603166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:54.603171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:54.603570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:54.603834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:54.603872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:54.603991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:54.604009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:54.604014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:54.604049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:54.604053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:54.604075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:54.604083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:54.604382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:54.604388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:54.604418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:54.604421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:54.604487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:54.604491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:54.604500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:54.604502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:54.604506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:54.604510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:54.604513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:54.604516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:54.604523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:54.604527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:54.604530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:54.604748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:54.604758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:54.604761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:54.604764Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:54.604769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:54.604778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
04, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:54.820826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:54.820829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T08:53:54.820832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T08:53:54.820834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T08:53:54.820838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 4 2024-11-21T08:53:54.820909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T08:53:54.820913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:2 ProgressState at tablet: 72057594046678944 2024-11-21T08:53:54.820923Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T08:53:54.820926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:2, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:53:54.820929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:2 129 -> 240 2024-11-21T08:53:54.820965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:1, at schemeshard: 72057594046678944 2024-11-21T08:53:54.820968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:1 ProgressState 2024-11-21T08:53:54.820973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2024-11-21T08:53:54.820975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2024-11-21T08:53:54.820978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2024-11-21T08:53:54.821012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:54.821025Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:53:54.821028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:53:54.821099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821104Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:54.821109Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T08:53:54.821111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:53:54.821296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:54.821347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:53:54.821357Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:53:54.821359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:53:54.821366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T08:53:54.821460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T08:53:54.821465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:54.821502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:54.821514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2024-11-21T08:53:54.821517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:53:54.821519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T08:53:54.821526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 104 2024-11-21T08:53:54.821529Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:53:54.821532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:53:54.821535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:53:54.821546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:53:54.821549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T08:53:54.821551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T08:53:54.821554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:53:54.821556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T08:53:54.821558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T08:53:54.821561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:53:54.821820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:54.821992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:53:54.822147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:53:54.822153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:685:2646] TestWaitNotification: OK eventTxId 104 2024-11-21T08:53:54.822228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:53:54.822253Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 30us result status StatusSuccess 2024-11-21T08:53:54.822321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false 
IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:55.165693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:55.165715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:55.165719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:55.165722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:55.165734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:55.165736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:55.165744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:55.165827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:55.175436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:55.175460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:55.178016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:55.178714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:55.178745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T08:53:55.180004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:55.180249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:55.180358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:55.180432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:55.181279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:55.181557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:55.181565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:55.181592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:55.181597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:55.181600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:55.181612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.182588Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:55.194938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:55.195012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.195069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:55.195118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:55.195124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.195829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:55.195852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:55.195903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.195911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:55.195914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:55.195918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:55.196228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.196236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:55.196239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:55.196513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.196520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.196524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:55.196529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:55.196914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:55.197191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:55.197234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:55.197373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:55.197390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:55.197396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:55.197448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:55.197454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:55.197475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:55.197484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:55.197775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:55.197780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:55.197810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:55.197814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:55.197881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:55.197886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:55.197896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:55.197898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:55.197902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:55.197906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:55.197909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:55.197911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:55.197919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:55.197923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:55.197925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:55.198129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:55.198138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:55.198140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:55.198143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:55.198162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:55.198170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hed: true 2024-11-21T08:54:02.650043Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T08:54:02.650093Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.650106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:02.650111Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T08:54:02.650114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T08:54:02.650389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.650396Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T08:54:02.650403Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T08:54:02.650405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T08:54:02.650408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T08:54:02.650416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2024-11-21T08:54:02.650420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T08:54:02.650423Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T08:54:02.650425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T08:54:02.650432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T08:54:02.650687Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T08:54:02.650696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T08:54:02.650702Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T08:54:02.650710Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:02.650936Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:54:02.650946Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:02.650952Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2024-11-21T08:54:02.651171Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:54:02.651179Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:02.651181Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T08:54:02.651196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:54:02.651201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter 
[2:1234:3088] TestWaitNotification: OK eventTxId 102 2024-11-21T08:54:02.651442Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T08:54:02.651490Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2024-11-21T08:54:02.651601Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:02.651659Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 62us result status StatusSuccess 2024-11-21T08:54:02.651732Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:02.651837Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:02.651850Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 14us result status StatusPathDoesNotExist 2024-11-21T08:54:02.651871Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicaTest::Subscribe >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2024-11-21T08:53:50.229969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653145359384970:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:50.230147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:50.233008Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653143856496828:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:50.233159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:50.264520Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/tmp1hUhmU/pdisk_1.dat 2024-11-21T08:53:50.270370Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:50.290572Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14129, node 1 2024-11-21T08:53:50.306044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexczbvlr.tmp 2024-11-21T08:53:50.306061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexczbvlr.tmp 2024-11-21T08:53:50.306134Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexczbvlr.tmp 
2024-11-21T08:53:50.306198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:50.311036Z INFO: TTestServer started on Port 19583 GrpcPort 14129 TClient is connected to server localhost:19583 PQClient connected to localhost:14129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:53:50.330030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:50.330061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:50.331623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:50.367232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:50.367262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:50.372763Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:50.373095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:50.373186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:53:50.419919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:50.509237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653145359385937:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.509266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653145359385913:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.509286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.509238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653143856497187:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.509263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653143856497169:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.509271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.510405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:50.517507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653145359385950:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:53:50.517545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653143856497192:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:53:50.549668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:50.574534Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653143856497233:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:50.574659Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmQ2YjYzMmMtNGQ2YjJjMjItNDJlNjAwNjctYzJkNGI4N2U=, ActorId: [2:7439653143856497161:2280], ActorState: ExecuteState, TraceId: 01jd6ytzsc66mhs1jasq6aapkq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:50.575212Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:50.609233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:53:50.619338Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653145359386268:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:50.619417Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzExOTMwYmMtNmEwMDFkNmUtZTYzZjIwOGUtMzBmOTZiYjc=, ActorId: [1:7439653145359385906:2295], ActorState: ExecuteState, TraceId: 01jd6ytzscars2znh6jpd7ysdy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:50.619643Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:50.626430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:50.659469Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd6ytzxgdsxzw7x1557r1572, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y0ZTllNWYtN2E2MjRkNjQtNzk3NmM0NTQtNzRiYTQzYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653145359386488:3057] 2024-11-21T08:53:55.230226Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653145359384970:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:55.230263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:55.233269Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653143856496828:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:55.233292Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initia ... DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439653171129191474:2580], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T08:53:56.243319Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653171129191483:2586], TxId: 281474976720699, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDExOTQ0ODQtN2IzNjE2YjctYWI5ODg3NGYtNjU1YzdiNTk=. TraceId : 01jd6yv5c87evj0tekqnr0pz4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439653171129191473:2586], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T08:53:56.548191Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653170295909052:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:56.548346Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:56.548924Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653170555805201:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:56.548999Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/tmp5jgB6R/pdisk_1.dat 2024-11-21T08:53:56.551797Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:56.552617Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:56.561545Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9719, node 3 2024-11-21T08:53:56.568587Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexQekuQo.tmp 2024-11-21T08:53:56.568601Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexQekuQo.tmp 2024-11-21T08:53:56.568653Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0014cc/r3tmp/yandexQekuQo.tmp 2024-11-21T08:53:56.568692Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:56.571168Z INFO: TTestServer started on Port 21007 GrpcPort 9719 TClient is connected to server localhost:21007 PQClient connected to localhost:9719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:56.648603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:56.648638Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:56.650055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:56.650393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:56.650407Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:56.650644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:56.651284Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T08:53:56.651505Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:53:56.656338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:56.778802Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653170295910027:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.778820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653170295910016:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.778834Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.779422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:56.782797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:56.782989Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653170295910030:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:53:56.785165Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7439653170555805573:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:56.785240Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTdmMTIxZi03Zjg2MjM5Ni1iMWRlMmVhNC1mMThkNjc5Ng==, ActorId: [4:7439653170555805547:2280], ActorState: ExecuteState, TraceId: 01jd6yv5xef8nsxp87ms3btadp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:56.785466Z node 4 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:56.841636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:53:56.858197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:56.864006Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653170295910445:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:56.864061Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTgxMDAzMzUtYTA3MTViNWUtZjI1ZDA1MS1hZDE2NzNhZQ==, ActorId: [3:7439653170295910013:2300], ActorState: ExecuteState, TraceId: 01jd6yv5xa53fvvycmy8b3znna, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:56.864247Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:56.881992Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yv6041gvde1kmk04v56hh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2UzZjgzNTctNTQ1ZjVkYTEtZmE4NTdiMDItMTcxNjdhZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [3:7439653170295910548:3027] 2024-11-21T08:54:01.548285Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439653170295909052:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:01.548312Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:54:01.549314Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653170555805201:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:01.549349Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2024-11-21T08:54:01.990050Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653191770747550:3315] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2024-11-21T08:54:01.990061Z node 3 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [3:7439653191770747550:3315] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2024-11-21T08:53:51.001305Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.001326Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.003896Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.004072Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:51.235173Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.235191Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.237753Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.237929Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:51.488654Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.488680Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.491474Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.491767Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] 2024-11-21T08:53:51.491816Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T08:53:51.491831Z node 3 :PERSQUEUE INFO: new Cookie owner1|75ca6fd3-95e2b9e7-8a409c8d-44f150b0_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.777407Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 0 2024-11-21T08:53:51.927003Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:51.927023Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:53:51.929494Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:51.929834Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR 
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup 
to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 6 Wait batch completion Wait kv request Wait tx committed for tx 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } |88.8%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:02.847476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:02.847506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:02.847511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:02.847515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:02.847529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:02.847533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:02.847542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:02.847619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:02.858260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:02.858285Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:02.861468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:02.862229Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:02.862265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:02.863425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:02.863578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:02.863674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.863732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:02.864507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.864761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:02.864769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.864803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:02.864809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:02.864813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:02.864823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.865909Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:02.880015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:02.880090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.880162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:02.880230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:02.880238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.880923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.880948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:02.881002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.881010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:02.881014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:02.881018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:02.881386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.881396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:02.881400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:02.881718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.881726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.881731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.881737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.882225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:02.882561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:02.882608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:02.882772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.882794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:02.882802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.882849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:02.882856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.882881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:02.882892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:02.883232Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:02.883239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:02.883276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.883281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:02.883359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.883365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:02.883375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:02.883378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.883383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:02.883388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.883392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:02.883396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:02.883405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:02.883410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:02.883413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:02.883657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:02.883669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:02.883672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:02.883676Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:02.883680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:02.883692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
to tablet: 72057594046316545 cookie: 0:107 msg type: 269090816 2024-11-21T08:54:03.823229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 107 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2024-11-21T08:54:03.823329Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.823342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:03.823347Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T08:54:03.823356Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2024-11-21T08:54:03.824068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.824080Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:54:03.824085Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2024-11-21T08:54:03.824088Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T08:54:03.824288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 130 } } 2024-11-21T08:54:03.824295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2024-11-21T08:54:03.824305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 130 } } 2024-11-21T08:54:03.824313Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 130 } } FAKE_COORDINATOR: Erasing txId 107 2024-11-21T08:54:03.824612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 
Generation: 2 2024-11-21T08:54:03.824627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2024-11-21T08:54:03.824643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T08:54:03.824647Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:54:03.824740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.824745Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:54:03.824750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:54:03.824753Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T08:54:03.824760Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:54:03.824779Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T08:54:03.824794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.824815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:54:03.825137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825327Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.825356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:54:03.825372Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 107, path id: 1 2024-11-21T08:54:03.825378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T08:54:03.825454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825459Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 
72057594046678944 2024-11-21T08:54:03.825469Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:54:03.825475Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T08:54:03.825574Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:54:03.825582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:54:03.825584Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T08:54:03.825587Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:54:03.825590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:03.825860Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:54:03.825879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:54:03.825882Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T08:54:03.825888Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:54:03.825892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:54:03.825903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T08:54:03.826258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.826266Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:03.826314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:54:03.826332Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T08:54:03.826335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T08:54:03.826338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T08:54:03.826349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to 
actorId: [2:383:2348] message: TxId: 107 2024-11-21T08:54:03.826354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T08:54:03.826358Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T08:54:03.826362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T08:54:03.826378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:54:03.826527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:54:03.826861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:54:03.826934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:54:03.826940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:571:2534] TestWaitNotification: OK eventTxId 107 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2024-11-21T08:53:49.240828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:53:49.241688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:53:49.241748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T08:53:49.241758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:53:49.241761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T08:53:49.241765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:53:49.241770Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:49.241775Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T08:53:49.241778Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:49.244254Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.244269Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T08:53:49.244281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:53:49.245710Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:49.246405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:53:49.246421Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:49.246747Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:49.246773Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:49.246781Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T08:53:49.246837Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:53:49.246894Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T08:53:49.247011Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T08:53:49.247016Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T08:53:49.247022Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:49.247113Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T08:53:49.247118Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T08:53:49.247144Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:49.247162Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:49.247215Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T08:53:49.247231Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T08:53:49.247312Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 2024-11-21T08:53:49.247315Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T08:53:49.247319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:49.247379Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T08:53:49.247384Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T08:53:49.247397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:49.247413Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:49.247451Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:53:49.247499Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:49.247945Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:49.247964Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:49.248008Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.248013Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T08:53:49.248285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.248293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T08:53:49.248426Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2024-11-21T08:53:49.248431Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T08:53:49.248444Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T08:53:49.248448Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T08:53:49.248454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T08:53:49.248480Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T08:53:49.248494Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:49.249003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:49.249011Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T08:53:49.249014Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T08:53:49.249459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T08:53:49.249468Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T08:53:49.249471Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T08:53:49.249474Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T08:53:49.249493Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] 
save tx TxId: 67890 State: PLANNED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T08:53:49.249505Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_P ... RSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T08:53:50.226578Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T08:53:50.226605Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:50.226618Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.230836Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.231266Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.231770Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2024-11-21T08:53:50.231782Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890, ExecStep 0, ExecTxId 0 2024-11-21T08:53:50.231803Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2024-11-21T08:53:50.231830Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Load tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:50.231839Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=1, PlannedTxs.size=1 2024-11-21T08:53:50.231843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2024-11-21T08:53:50.231903Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:50.231906Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.231926Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. 
Step TInitConfigStep 2024-11-21T08:53:50.231967Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:53:50.231997Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:308:2294] 2024-11-21T08:53:50.232106Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:50.232274Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitMetaStep 2024-11-21T08:53:50.232303Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInfoRangeStep 2024-11-21T08:53:50.232371Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataRangeStep 2024-11-21T08:53:50.232390Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataStep 2024-11-21T08:53:50.232394Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T08:53:50.232398Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2024-11-21T08:53:50.232402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:50.232419Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:50.232445Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2024-11-21T08:53:50.232449Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T08:53:50.232451Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T08:53:50.232456Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T08:53:50.232464Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T08:53:50.232496Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:53:50.232511Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2024-11-21T08:53:50.232514Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T08:53:50.232517Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T08:53:50.232519Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T08:53:50.232521Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T08:53:50.232524Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T08:53:50.232545Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:50.232554Z node 5 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.233204Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:50.233214Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T08:53:50.233217Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T08:53:50.233223Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2024-11-21T08:53:50.233226Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2024-11-21T08:53:50.233241Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:50.233529Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T08:53:50.233537Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:50.233849Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:50.233855Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:334:2313], now have 1 active actors on pipe 2024-11-21T08:53:50.233873Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T08:53:50.233880Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T08:53:50.233884Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T08:53:50.233887Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2024-11-21T08:53:50.233889Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T08:53:50.233893Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T08:53:50.233896Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T08:53:50.233901Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2024-11-21T08:53:50.233907Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T08:53:50.233913Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2024-11-21T08:53:50.233935Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.234284Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:50.234301Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T08:53:50.234306Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T08:53:50.234310Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T08:53:50.234315Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T08:53:50.234320Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T08:53:50.234324Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T08:53:50.234352Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T08:53:50.234362Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:50.234921Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:53:50.234930Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T08:53:50.234933Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T08:53:50.234941Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T08:53:50.234946Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T08:53:50.234951Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2024-11-21T08:53:50.234955Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T08:53:50.234958Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.785109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.785130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.785133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.785136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.785150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.785152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.785158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.785213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.792940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.792966Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.795244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.795736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.795758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.797617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.797900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.798006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.798080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.799248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.799603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.799616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.799659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.799667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.799673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.799690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.801242Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.818480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.818576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:53:00.818643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.818717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.818725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.819613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.819638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.819697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.819707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.819712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.819717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.820136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.820146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.820151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.820540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.820552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.820558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.820565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.821221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.821627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.821679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.821861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:53:00.821884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.821905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.821962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.821969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.822001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.822014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.822444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.822452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.822499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.822504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.822592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.822599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.822612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.822616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.822623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.822628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.822633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.822638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.822649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.822655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.822659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.822967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.822979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.822984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.822988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.822993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.823008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 1-21T08:53:57.529383Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:353:2332], Recipient [3:353:2332]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:57.529386Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:57.549775Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:353:2332]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2024-11-21T08:53:57.549803Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2024-11-21T08:53:57.549823Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:353:2332]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2024-11-21T08:53:57.549826Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2024-11-21T08:53:57.549830Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T08:53:57.549848Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T08:53:57.549862Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T08:53:57.641954Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:760:2650]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:53:57.641990Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409552 2024-11-21T08:53:57.642022Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409552, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:53:57.642059Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2024-11-21T08:53:57.642153Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:760:2650], Recipient [3:884:2747]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 11 Memory: 119000 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 120 TableOwnerId: 72075186233409549 FollowerId: 0 
2024-11-21T08:53:57.642162Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:53:57.642174Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0011 2024-11-21T08:53:57.642184Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:53:57.642191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:53:57.652313Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:760:2650]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:57.652339Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:57.652360Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2024-11-21T08:53:57.652377Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:57.652384Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2024-11-21T08:53:57.652389Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2024-11-21T08:53:57.652392Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2024-11-21T08:53:57.662571Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:763:2651]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:57.662597Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:53:57.662618Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2024-11-21T08:53:57.662634Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:53:57.662643Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2024-11-21T08:53:57.662648Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2024-11-21T08:53:57.662650Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2024-11-21T08:53:57.662679Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:763:2651]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:53:57.662686Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409553 2024-11-21T08:53:57.662699Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409553, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T08:53:57.662714Z node 3 
:TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2024-11-21T08:53:57.662789Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:763:2651], Recipient [3:884:2747]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7 Memory: 119000 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 120 TableOwnerId: 72075186233409549 FollowerId: 0 2024-11-21T08:53:57.662795Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:53:57.662808Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0007 2024-11-21T08:53:57.662818Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:53:57.673274Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:884:2747]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:53:57.673302Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:53:57.673315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:884:2747], Recipient [3:884:2747]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:57.673320Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:53:57.683464Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:884:2747]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2024-11-21T08:53:57.683486Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2024-11-21T08:53:57.683549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:884:2747]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2024-11-21T08:53:57.683556Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2024-11-21T08:53:57.683560Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 
2024-11-21T08:53:57.683576Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2024-11-21T08:53:57.683589Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2024-11-21T08:53:57.683613Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2045:3869], Recipient [3:884:2747]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2024-11-21T08:53:57.683616Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2024-11-21T08:53:57.703985Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2048:3872], Recipient [3:760:2650]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:57.704006Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:57.704013Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2047:3871], serverId# [3:2048:3872], sessionId# [0:0:0] 2024-11-21T08:53:57.704045Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2046:3870], Recipient [3:760:2650]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2024-11-21T08:53:57.704115Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2051:3875], Recipient [3:763:2651]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:57.704118Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:57.704121Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2050:3874], serverId# [3:2051:3875], sessionId# [0:0:0] 2024-11-21T08:53:57.704129Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2049:3873], Recipient [3:763:2651]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_2 [GOOD] Test command err: 2024-11-21T08:53:51.230243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653150019278360:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:51.230403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:51.232533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653148235396219:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:51.232691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001474/r3tmp/tmpkWqmGL/pdisk_1.dat 2024-11-21T08:53:51.254410Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:51.256386Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:51.271652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15340, node 1 2024-11-21T08:53:51.282555Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/001474/r3tmp/yandexEKB0xQ.tmp 2024-11-21T08:53:51.282565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/001474/r3tmp/yandexEKB0xQ.tmp 2024-11-21T08:53:51.282620Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/001474/r3tmp/yandexEKB0xQ.tmp 2024-11-21T08:53:51.282650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:51.285755Z INFO: TTestServer started on Port 62223 GrpcPort 15340 TClient is connected to server localhost:62223 PQClient connected to localhost:15340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:51.330561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:51.330584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:51.332088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:51.351198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:51.351223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:51.351911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:51.352128Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:51.352345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:53:51.364312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:51.480510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653148235396575:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.480545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653148235396550:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.480554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.480837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653150019279349:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.480855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653150019279341:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.480883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:51.481355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:51.483802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720662, at schemeshard: 72057594046644480 2024-11-21T08:53:51.484201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653150019279355:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:53:51.484272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653148235396579:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T08:53:51.510132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:51.567811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T08:53:51.570859Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653150019279581:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:51.570930Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdkYjc1ZDEtMjVkMTA1Y2QtNGNlNmZmNDctODkyNDJjN2Y=, ActorId: [1:7439653150019279338:2300], ActorState: ExecuteState, TraceId: 01jd6yv0qr81skjp78nkx5hry0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:51.571388Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:51.573541Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653148235396623:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:51.573611Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjcwOGJjMWMtZTEwZTZlZjktZmFhY2M2YWYtMmY1MTEzMzc=, ActorId: [2:7439653148235396548:2280], ActorState: ExecuteState, TraceId: 01jd6yv0qqc5wfwftbxzk9sn92, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:51.573800Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:51.631681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:51.659708Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd6yv0wx1x72s0fkb66nca0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkwMzVlMmMtYmEwY2E5MTYtZWJjMjU2NzItYjMxYTk2M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653150019279885:3056] 2024-11-21T08:53:56.230767Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653150019278360:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:56.230797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:56.232824Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653148235396219:2053];send_to=[0:7307199536658146131:7762515]; ... 
8353:2049], Recipient [1:7439653175789084826:3759]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T08:53:57.004246Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:53:57.004673Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [1:7439653150019278577:2256], Recipient [1:7439653175789084826:3759]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=1&id=ZmJiY2EyZDgtYTE1ZjRiZjQtMTQ1NWI5MDAtNDM5OWM3YmI=" NodeId: 1 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T08:53:57.004685Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:53:57.015844Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [1:7439653150019278577:2256], Recipient [1:7439653175789084826:3759]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=1&id=ZmJiY2EyZDgtYTE1ZjRiZjQtMTQ1NWI5MDAtNDM5OWM3YmI=" PreparedQuery: "76c5a405-41ef01fc-e710ed57-56c87e7" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd6yv64p5gpe9gn9x6x6ks0g" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732179236989 } items { uint64_value: 1732179236989 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 6 2024-11-21T08:53:57.015874Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2024-11-21T08:53:57.015878Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2024-11-21T08:53:57.015899Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [1:7439653175789084848:3759], Recipient [1:7439653171494116889:3352]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:53:57.015906Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [1:7439653175789084826:3759], Recipient [1:7439653171494116889:3352]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2024-11-21T08:53:57.015920Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [1:7439653171494116889:3352], Recipient [1:7439653175789084826:3759]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T08:53:57.015928Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2024-11-21T08:53:57.015958Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [1:7439653175789084826:3759], Recipient [1:7439653171494116889:3352]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2024-11-21T08:53:57.025499Z 
node 1 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [1:7439653150019278577:2256], Recipient [1:7439653175789084826:3759]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=1&id=ZmJiY2EyZDgtYTE1ZjRiZjQtMTQ1NWI5MDAtNDM5OWM3YmI=" PreparedQuery: "ad3abea9-3c505549-24a5d967-8c43b86f" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 5 2024-11-21T08:53:57.025512Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:53:57.025521Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2024-11-21T08:53:57.025527Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439653175789084826:3759] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2024-11-21T08:53:57.037566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720700. Ctx: { TraceId: 01jd6yv653cz5as2tcx59q8pe0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlNGZmODEtYjA5YzUzMGItOTg1MDkxM2EtNzY1ZTgzYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:57.242059Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653175789084926:2604] TxId: 281474976720701. Ctx: { TraceId: 01jd6yv6bhbrj73sq01xfw22kj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgwZWQzODYtMzZkMzI3ODctMjFhOTJmNDEtNjg0ZGUyMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T08:53:57.242060Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653175789084927:2599] TxId: 281474976720702. Ctx: { TraceId: 01jd6yv6bf33k82hz1bs2k5xs5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWEyZDk4YmItYmJhNzJjYzctNTg5YTQyNzYtNmY3N2QzYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T08:53:57.242925Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653175789084938:2599], TxId: 281474976720702, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZWEyZDk4YmItYmJhNzJjYzctNTg5YTQyNzYtNmY3N2QzYTY=. TraceId : 01jd6yv6bf33k82hz1bs2k5xs5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439653175789084927:2599], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T08:53:57.242931Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653175789084937:2604], TxId: 281474976720701, task: 2. Ctx: { TraceId : 01jd6yv6bhbrj73sq01xfw22kj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjgwZWQzODYtMzZkMzI3ODctMjFhOTJmNDEtNjg0ZGUyMDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439653175789084926:2604], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T08:53:57.542673Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:57.542691Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:57.545763Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] 2024-11-21T08:53:57.546012Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T08:53:57.780560Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:57.780579Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:57.782834Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:57.783048Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: 
"m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2024-11-21T08:53:58.017679Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:58.017697Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:58.019807Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:58.020030Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 2231396604200652162 2024-11-21T08:53:46.969067Z 1 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:2] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T08:53:46.969230Z 5 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:6] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T08:53:46.969244Z 4 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:5] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T08:53:46.969253Z 2 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:3] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T08:53:46.969268Z 8 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:1] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T08:53:46.969280Z 3 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:2507136:4] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2024-11-21T08:53:47.148913Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.149519Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.150367Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.151582Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.151608Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.155028Z 1 00h02m38.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.157910Z 1 00h02m38.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: 
Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.167919Z 1 00h02m38.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.170935Z 1 00h02m38.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.176514Z 1 00h02m38.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.179848Z 1 00h02m38.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.196118Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.196176Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.198688Z 1 00h02m39.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.218207Z 1 00h02m39.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.223387Z 1 00h02m39.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.228764Z 1 00h02m39.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.240586Z 1 00h02m39.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.251451Z 1 00h02m40.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.255084Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.255132Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.258316Z 1 00h02m40.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.273822Z 1 00h02m40.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.281192Z 1 00h02m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.284888Z 1 00h02m40.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.288360Z 1 00h02m40.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.291858Z 1 00h02m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.295301Z 1 00h02m40.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.309316Z 1 00h02m40.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.315020Z 1 00h02m41.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.318258Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.318315Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.324162Z 1 00h02m41.310512s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.326838Z 1 00h02m41.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.351343Z 1 00h02m41.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.355526Z 1 00h02m41.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.361558Z 1 00h02m41.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.367553Z 1 00h02m42.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.376830Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.376890Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.384470Z 1 00h02m42.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.388610Z 1 00h02m42.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.393951Z 1 00h02m42.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.401162Z 1 00h02m42.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.412201Z 1 00h02m42.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.415928Z 1 00h02m43.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.418862Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.418903Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.432560Z 1 00h02m43.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.435418Z 1 00h02m43.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.442539Z 1 00h02m43.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.449097Z 1 00h02m43.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.455713Z 1 00h02m43.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.459375Z 1 00h02m43.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.462414Z 1 00h02m43.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.469573Z 1 00h02m43.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.475929Z 1 00h02m44.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.480090Z 1 00h02m44.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 
2024-11-21T08:53:47.489761Z 1 00h02m44.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.494556Z 1 00h02m44.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.504474Z 1 00h02m44.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.507846Z 1 00h02m44.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.517238Z 1 00h02m44.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.520042Z 1 00h02m44.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.529146Z 1 00h02m45.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.534879Z 1 00h02m45.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.537600Z 1 00h02m45.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.541695Z 1 00h02m45.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.543421Z 1 00h02m45.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.550848Z 1 00h02m45.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.558882Z 1 00h02m45.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.564958Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.565000Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.565009Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T08:53:47.565915Z 5 00h02m46.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:3:11:0:11:1019658:3] barrier# {Soft# {Gen# 3 Step# 8} Hard# {Gen# 3 Step# 4294967295}} Setting VDisk read-only to 1 for ... 
ailable in read-only Sender# [1:5294:704] 2024-11-21T08:53:48.428892Z 3 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5301:711] 2024-11-21T08:53:48.428898Z 4 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5308:718] 2024-11-21T08:53:48.428904Z 5 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5315:725] 2024-11-21T08:53:48.428911Z 6 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5322:732] 2024-11-21T08:53:48.428917Z 7 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5329:739] 2024-11-21T08:53:48.428922Z 8 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.428980Z 1 00h10m24.561024s :BS_LOAD_TEST ERROR: TabletId# 1 Generation# 4 recieved not OK, msg# TEvBlockResult {Status# ERROR ErrorReason# "Status# ERROR From# [82000000:1:0:2:0] NodeId# 3 QuorumTracker# {Erroneous# 00000111 Successful# 00000000}"} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2024-11-21T08:53:48.782588Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.783427Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.788170Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.790816Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.790898Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.795887Z 8 00h20m55.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.801859Z 8 00h20m55.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.817398Z 8 00h20m55.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.829717Z 8 00h20m55.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.838621Z 8 00h20m55.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.843004Z 8 00h20m55.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.862264Z 8 00h20m56.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.862340Z 8 00h20m56.052048s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.866695Z 8 00h20m56.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.874194Z 8 00h20m56.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.878421Z 8 00h20m56.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.887058Z 8 00h20m56.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.891095Z 8 00h20m56.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.898901Z 8 00h20m56.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.914853Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.915158Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.920366Z 8 00h20m57.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.927595Z 8 00h20m57.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.932076Z 8 00h20m57.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.941768Z 8 00h20m57.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.951219Z 8 00h20m57.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.958079Z 8 00h20m57.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.962633Z 8 00h20m57.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.975665Z 8 00h20m57.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.985570Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:48.985610Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.016241Z 8 00h20m58.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.019264Z 8 00h20m58.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.026795Z 8 00h20m58.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.029844Z 8 00h20m58.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.046665Z 8 00h20m58.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.050047Z 8 00h20m58.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.059296Z 8 00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.059368Z 8 
00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.079376Z 8 00h20m59.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.082142Z 8 00h20m59.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.093170Z 8 00h20m59.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.101155Z 8 00h20m59.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.110534Z 8 00h20m59.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.116846Z 8 00h20m59.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.122177Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.122206Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.133110Z 8 00h21m00.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.135950Z 8 00h21m00.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.154312Z 8 00h21m00.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.158806Z 8 00h21m00.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.167825Z 8 00h21m00.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.172446Z 8 00h21m00.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.190428Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.190459Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.194783Z 8 00h21m01.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.197309Z 8 00h21m01.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.199992Z 8 00h21m01.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.209272Z 8 00h21m01.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.221533Z 8 00h21m01.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.239268Z 8 00h21m01.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.242659Z 8 00h21m01.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.249550Z 8 00h21m02.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.252885Z 8 00h21m02.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# 
[1:5336:746] 2024-11-21T08:53:49.256088Z 8 00h21m02.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.261148Z 8 00h21m02.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.267608Z 8 00h21m02.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.273364Z 8 00h21m02.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.275605Z 8 00h21m02.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.294581Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.294648Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.294758Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T08:53:49.296986Z 5 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:1401444:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:57.547523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:57.547540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:57.547548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:57.547553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:57.547563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:57.547566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:57.547572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:57.547634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:57.554994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:57.555009Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:57.556948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:57.557533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:57.557555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:53:57.558465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:57.558593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:57.558671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.558719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:57.559361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.559578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:57.559586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.559611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:57.559616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:57.559620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:57.559629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.560563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:57.571890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:57.571943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.571983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:57.572023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:57.572027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.572588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.572610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:57.572655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.572664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:57.572670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:57.572675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:57.573072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.573082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:57.573087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:57.573475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.573487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.573494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.573501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.574068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:57.574351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:57.574387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:57.574501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:57.574517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:57.574521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.574563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:57.574567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:57.574587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:57.574595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:57.574876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:57.574881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:57.574909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:57.574912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:57.574972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:57.574976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:57.574986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:57.574989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.574993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:57.574996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:57.574999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:57.575002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:57.575008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:57.575012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:57.575015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:57.575223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:57.575231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:57.575234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:57.575237Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:57.575240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:57.575248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
46744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 29 } } 2024-11-21T08:53:58.034563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T08:53:58.034583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 29 } } 2024-11-21T08:53:58.034593Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 29 } } 2024-11-21T08:53:58.034598Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:53:58.034621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.034625Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2024-11-21T08:53:58.034953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.034983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.034987Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:58.034996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2024-11-21T08:53:58.035032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:58.035237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2024-11-21T08:53:58.035261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2024-11-21T08:53:58.035355Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:53:58.035370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:58.035375Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2024-11-21T08:53:58.035416Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2024-11-21T08:53:58.035435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:58.036023Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:58.036031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:53:58.036078Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:58.036081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 109, path id: 4 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2024-11-21T08:53:58.036183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.036189Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:53:58.036378Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2024-11-21T08:53:58.036388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2024-11-21T08:53:58.036391Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2024-11-21T08:53:58.036394Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T08:53:58.036398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:53:58.036407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2024-11-21T08:53:58.037135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2024-11-21T08:53:58.037315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 161 } } 2024-11-21T08:53:58.037322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, 
TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T08:53:58.037336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 161 } } 2024-11-21T08:53:58.037347Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 161 } } 2024-11-21T08:53:58.037433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T08:53:58.037436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T08:53:58.037444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T08:53:58.037447Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:53:58.037452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T08:53:58.037460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037462Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2024-11-21T08:53:58.037794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:53:58.037867Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2024-11-21T08:53:58.037874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2024-11-21T08:53:58.037877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2024-11-21T08:53:58.037880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2024-11-21T08:53:58.037887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 109 2024-11-21T08:53:58.037891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2024-11-21T08:53:58.037895Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2024-11-21T08:53:58.037897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2024-11-21T08:53:58.037909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:53:58.038112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T08:53:58.038118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:765:2714] TestWaitNotification: OK eventTxId 109 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T08:52:48.232481Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732179168232462 2024-11-21T08:52:48.358480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652878578065476:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.358504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:48.361747Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652881362495179:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:48.361764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004308/r3tmp/tmpnzK1fy/pdisk_1.dat 2024-11-21T08:52:48.384560Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:48.385756Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:48.406987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12878, node 1 2024-11-21T08:52:48.421439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004308/r3tmp/yandexCUDLYC.tmp 2024-11-21T08:52:48.421456Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004308/r3tmp/yandexCUDLYC.tmp 2024-11-21T08:52:48.421517Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004308/r3tmp/yandexCUDLYC.tmp 2024-11-21T08:52:48.421564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:48.424879Z INFO: TTestServer started on Port 27185 GrpcPort 12878 TClient is connected to server localhost:27185 PQClient connected to localhost:12878 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:48.458973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:48.459014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:48.460590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:48.484098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:48.484130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:48.485076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.485251Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:48.485549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:52:48.699543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652878578066373:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.699589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.699682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652878578066385:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.700533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:52:48.705995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652878578066387:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:52:48.734505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.734678Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652881362495490:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:48.734765Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGY2ZmVkMTEtYzYwNzFiZTgtMzdjMDM3ZmItZDgyN2VkZjM=, ActorId: [2:7439652881362495447:2277], ActorState: ExecuteState, TraceId: 01jd6ys3dyc8xw1a6x0j446ksv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:48.735199Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:48.789219Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652878578066556:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:48.789333Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVmZTRmMy1kYzI4ZGMyNy00YTU4MmQyNS1kMWMzMDgwOA==, ActorId: [1:7439652878578066355:2299], ActorState: ExecuteState, TraceId: 01jd6ys3dr1rcj10rm27wa3cmj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:48.789701Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:48.802275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.873864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:12878", true, true, 1000); 2024-11-21T08:52:48.954697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6ys3nb90phyth0k8bx2fna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhYWU1NzMtODRmOTM3NmYtOGYwZDI3NWQtMWQxY2VkNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652878578066864:2930] 2024-11-21T08:52:53.358976Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652878578065476:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:53.359018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:53.362048Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652881362495179:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:53.362077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:52:54.987212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720681:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:12878 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:52:55.000369Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:12878 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" ... d [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:54:00.350504Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:54:00.350520Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T08:54:00.350654Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:54:00.350680Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:40248 2024-11-21T08:54:00.350687Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:40248 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:54:00.350690Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:54:00.351012Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T08:54:00.351041Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:54:00.351043Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:54:00.351044Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:54:00.351048Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439653188542638990:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:54:00.351412Z node 15 :PQ_PARTITION_CHOOSER DEBUG: 
TPartitionChooser [15:7439653188542638990:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:54:00.511503Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720702. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:54:00.511549Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653188542639003:2548] TxId: 281474976720702. Ctx: { TraceId: 01jd6yv9czd0qaknyc4gy7gv5z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NDFmMWFmZjMtNmUwMWI3N2YtODkwNmE2NjktNjcyMmRjMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:54:00.511648Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NDFmMWFmZjMtNmUwMWI3N2YtODkwNmE2NjktNjcyMmRjMjA=, ActorId: [15:7439653188542638991:2548], ActorState: ExecuteState, TraceId: 01jd6yv9czd0qaknyc4gy7gv5z, Create QueryResponse for error on request, msg: 2024-11-21T08:54:00.511922Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439653188542638990:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFmMWFmZjMtNmUwMWI3N2YtODkwNmE2NjktNjcyMmRjMjA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv9czd0qaknyc4kk7046s" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T08:54:00.511952Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFmMWFmZjMtNmUwMWI3N2YtODkwNmE2NjktNjcyMmRjMjA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv9czd0qaknyc4kk7046s" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T08:54:00.512115Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T08:54:00.512317Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFmMWFmZjMtNmUwMWI3N2YtODkwNmE2NjktNjcyMmRjMjA=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yv9czd0qaknyc4kk7046s" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T08:54:00.512327Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session will restart in 2.000000s 2024-11-21T08:54:00.512348Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: Do CDS request 2024-11-21T08:54:00.512353Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Do schedule cds request after 2000 ms 2024-11-21T08:54:00.637471Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715682. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:00.637510Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439653189812494743:2465] TxId: 281474976715682. Ctx: { TraceId: 01jd6yv9g810ydwt04mk2bg5p3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ODYzZTE0NWItYjQ5NmNhZjEtY2Q3NzIyMTItMjJmY2E4ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:00.637608Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=ODYzZTE0NWItYjQ5NmNhZjEtY2Q3NzIyMTItMjJmY2E4ODI=, ActorId: [16:7439653189812494730:2465], ActorState: ExecuteState, TraceId: 01jd6yv9g810ydwt04mk2bg5p3, Create QueryResponse for error on request, msg: 2024-11-21T08:54:00.637833Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yv9h0bct54ny8myyephq5" } } YdbStatus: UNAVAILABLE ConsumedRu: 15 } 2024-11-21T08:54:00.889382Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720704. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:00.889433Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653188542639072:2552] TxId: 281474976720704. Ctx: { TraceId: 01jd6yv9r4411krtd901fzbecs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZGYxOWM1NS02NDE3MjFhMy04NWY2MjgzNy04ZTNiNTBhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:00.889527Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ZGYxOWM1NS02NDE3MjFhMy04NWY2MjgzNy04ZTNiNTBhZg==, ActorId: [15:7439653188542639059:2552], ActorState: ExecuteState, TraceId: 01jd6yv9r4411krtd901fzbecs, Create QueryResponse for error on request, msg: 2024-11-21T08:54:00.889820Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yv9rt8ngknbtjenkbjmde" } } YdbStatus: UNAVAILABLE ConsumedRu: 14 } 2024-11-21T08:54:01.350756Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: close. Timeout = 0 ms 2024-11-21T08:54:01.350769Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session will now close 2024-11-21T08:54:01.350776Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: aborting 2024-11-21T08:54:01.350924Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T08:54:01.350928Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|41d97550-b4f3384a-621cdf4c-2c6b4155_0] Write session: destroy 2024-11-21T08:54:01.413801Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715684. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:01.413862Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439653194107462126:2474] TxId: 281474976715684. Ctx: { TraceId: 01jd6yva96043qehptewgjepyx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=N2NhZWI3ZmUtMTZkZTY0YjUtNWM5ZDMxM2EtZDgwMmNjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:01.413998Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=N2NhZWI3ZmUtMTZkZTY0YjUtNWM5ZDMxM2EtZDgwMmNjNDE=, ActorId: [16:7439653194107462123:2474], ActorState: ExecuteState, TraceId: 01jd6yva96043qehptewgjepyx, Create QueryResponse for error on request, msg: 2024-11-21T08:54:01.414376Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yva96043qehpteyn4hkpv" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T08:54:01.510868Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720706. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:01.510905Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439653192837606460:2559] TxId: 281474976720706. Ctx: { TraceId: 01jd6yvac3130qmnna31kdms4a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZWMzZTI5MS01OTJlZTYyMC1jZWRiYjY5NS1hYWU1OGJkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:54:01.510993Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ZWMzZTI5MS01OTJlZTYyMC1jZWRiYjY5NS1hYWU1OGJkYg==, ActorId: [15:7439653192837606457:2559], ActorState: ExecuteState, TraceId: 01jd6yvac3130qmnna31kdms4a, Create QueryResponse for error on request, msg: 2024-11-21T08:54:01.511232Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yvac3130qmnna34ze6z0e" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScanQueries [GOOD] Test command err: 2024-11-21T08:50:49.464293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652368422647928:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:49.464374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00358f/r3tmp/tmp1r5duS/pdisk_1.dat 2024-11-21T08:50:49.568396Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:49.569741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:49.569763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:49.577902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5155, node 1 2024-11-21T08:50:49.638960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:50:49.638974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:50:49.638976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:50:49.639015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:50:49.708864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:50:49.721481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:50:49.932710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:50:49.951085Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652369344518628:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:50:49.961980Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/PQ/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2024-11-21T08:50:49.978064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:49.978084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:49.982558Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:50:49.982626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:50:49.982638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:50:49.986314Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:50:49.986634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:49.986863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:50:50.076404Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7439652368422647793:2057] 2024-11-21T08:50:50.076630Z node 1 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [1:7439652368422647793:2057], database# /Root, no sysview processor 2024-11-21T08:50:50.111665Z node 2 :SYSTEM_VIEWS INFO: [72075186224037895] OnActivateExecutor 2024-11-21T08:50:50.111680Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Execute 2024-11-21T08:50:50.115556Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:50:50.115575Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T08:50:50.135420Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7439652369344518478:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T08:50:50.135687Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Complete 2024-11-21T08:50:50.135694Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Execute 2024-11-21T08:50:50.135766Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T08:50:50.135771Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval metrics: query count# 0 2024-11-21T08:50:50.135776Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval query tops: total query count# 0 2024-11-21T08:50:50.135780Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T08:50:50.135784Z node 2 :SYSTEM_VIEWS DEBUG: 
[72075186224037895] Loading results: table# 6, result count# 0 2024-11-21T08:50:50.135792Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 7, result count# 0 2024-11-21T08:50:50.135795Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 8, result count# 0 2024-11-21T08:50:50.135798Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 9, result count# 0 2024-11-21T08:50:50.135801Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 10, result count# 0 2024-11-21T08:50:50.135803Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 11, result count# 0 2024-11-21T08:50:50.135806Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 12, result count# 0 2024-11-21T08:50:50.135808Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 13, result count# 0 2024-11-21T08:50:50.135811Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 14, result count# 0 2024-11-21T08:50:50.135813Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 15, result count# 0 2024-11-21T08:50:50.135817Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 16, partCount count# 0 2024-11-21T08:50:50.135819Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 19, partCount count# 0 2024-11-21T08:50:50.135823Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 17, result count# 0 2024-11-21T08:50:50.135826Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 18, result count# 0 2024-11-21T08:50:50.135844Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2024-11-21T08:50:50.000000Z 2024-11-21T08:50:50.152330Z node 2 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [2:7439652369344518478:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/PQ 2024-11-21T08:50:50.157900Z node 2 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [2:7439652369344518478:2055], database# /Root/PQ, no sysview processor 2024-11-21T08:50:50.161558Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Complete 2024-11-21T08:50:50.165553Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Execute: database# /Root/PQ 2024-11-21T08:50:50.167593Z node 2 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037895 2024-11-21T08:50:50.176267Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Complete 2024-11-21T08:50:50.188863Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [3:7439652370119127790:2055] iteration 0 2024-11-21T08:50:50.316261Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [2:7439652369344518428:2061] 2024-11-21T08:50:50.429133Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [2:7439652369344518478:2055] 2024-11-21T08:50:50.429651Z node 2 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [2:7439652369344518478:2055], database# /Root/PQ, processor id# 72075186224037895 2024-11-21T08:50:50.515939Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7439652368422647694:2070] 2024-11-21T08:50:50.544496Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7439652370119127741:2061] 
2024-11-21T08:50:50.552294Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2024-11-21T08:50:50.552321Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2024-11-21T08:50:50.000000Z, query count# 0 2024-11-21T08:50:50.552327Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T08:50:50.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552330Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T08:50:50.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552333Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T08:50:50.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552336Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T08:50:50.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552339Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T09:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552342Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T09:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552345Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# 2024-11-21T09:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.552349Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T09:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T08:50:50.555477Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Complete 2024-11-21T08:50:50.606553Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7439652368422647793:2057] 2024-11-21T08:50:50.609460Z node 2 :SYSTEM_VIEWS DEBUG: Handle ... le: (empty maybe) 2024-11-21T08:53:52.556434Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:52.556476Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:52.580168Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:53:52.621610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:52.986023Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653153910981672:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:52.986056Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:52.994035Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653153910981684:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:53.004236Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:53.005693Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653153910981686:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:55.686870Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yv26d2n5e8gyajkgmsywt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWMxMjM2ZDYtYTRkNGIwYy0yYWQ4N2NjYy1kMTA3ODJlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:56.157466Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yv5934y43f8r54xjm0vqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2ZhZWIyMzItNDc0MGFkZGItNmQwNzc0YjktODYxYjc0NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:56.383900Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yv5d31ettw5j9cp1efe51, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzI2MDAxMTctNzJkMzUwZmYtZTk1MDk0NDUtNWM0NjQ4M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:56.438620Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439653171090851044:2339], owner: [7:7439653171090851040:2337], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:53:56.438794Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439653171090851044:2339], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T08:53:56.438893Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439653171090851044:2339], row count: 2, finished: 1 2024-11-21T08:53:56.438905Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439653171090851044:2339], owner: [7:7439653171090851040:2337], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:53:56.444717Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179236357, txId: 281474976715663] shutting down 2024-11-21T08:53:56.748174Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439653170578598558:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:56.748252Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00358f/r3tmp/tmpPz8oGP/pdisk_1.dat 2024-11-21T08:53:56.756056Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19504, node 8 2024-11-21T08:53:56.770829Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:56.770847Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:56.770849Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:56.770893Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:56.848697Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:56.848738Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:56.849785Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:56.850434Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:56.852361Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:53:56.979244Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653170578599227:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.979245Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653170578599238:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.979271Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:56.979772Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:53:56.981057Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439653170578599241:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:53:57.076144Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yv63jam55w9rk7jhsknbp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZTE1ODdmYzItOTk3NGNjOGItMTNmMTJkNzAtYjdiZWY1YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:57.086276Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yv66p7q7n387405w01hq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZTU3N2Q5NDQtY2M4YWI3ZjEtYmU3NWJjOWQtNGFmMTFhNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:57.095287Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179237131, txId: 281474976715662] shutting down 2024-11-21T08:53:57.106895Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yv6784jzbft1sknky7jpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZTgwNTYwY2ItZDUwY2QyY2UtNjM1ZWI4NzAtYzMxNjU5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:53:57.107316Z node 8 :SYSTEM_VIEWS INFO: Scan started, actor: [8:7439653174873566702:2333], owner: [8:7439653174873566698:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:53:57.107427Z node 8 :SYSTEM_VIEWS INFO: Scan prepared, actor: [8:7439653174873566702:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T08:53:57.107491Z node 8 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [8:7439653174873566702:2333], row count: 2, finished: 1 2024-11-21T08:53:57.107500Z node 8 :SYSTEM_VIEWS INFO: Scan finished, actor: [8:7439653174873566702:2333], owner: [8:7439653174873566698:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T08:53:57.107947Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179237106, txId: 281474976715664] shutting down |88.8%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion |88.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicaTest::SyncVersion [GOOD] |88.9%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::Unsubscribe >> TReplicaTest::Merge |88.9%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2024-11-21T08:54:06.393196Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:06.393217Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:06.393252Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:06.393257Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:06.393792Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:06.393814Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T08:54:06.393824Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:06.393841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:06.393843Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:06.393846Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:06.600023Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T08:54:06.600045Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 2024-11-21T08:54:06.600060Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:06.808696Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:06.808724Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:06.808763Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 76 2024-11-21T08:54:06.808771Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:06.808786Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2024-11-21T08:54:06.808806Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:6:2053] 
2024-11-21T08:54:06.808822Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:06.808839Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:6:2053], cookie# 1 >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> TReplicaTest::Handshake >> TReplicaTest::CommitWithoutHandshake |88.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |88.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |88.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TPQTestInternal::TestPartitionedBigTest >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> PQCountersLabeled::Partition >> TPQTest::TestWriteSplit >> TPQTest::TestPQPartialRead >> TReplicaTest::Handshake [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaTest::DoubleUnsubscribe >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |88.9%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> AnalyzeColumnshard::AnalyzeMultiOperationId >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> AnalyzeDatashard::AnalyzeTwoTables |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicaCombinationTest::MigratedPathRecreation >> TReplicaTest::IdempotencyUpdatesVariant2 >> TPQTestInternal::TestBatchPacking [GOOD] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TPQTestInternal::TestKeyRange [GOOD] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TraverseColumnShard::TraverseColumnTable >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] |88.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000016s Read by index: 0.000007s Iterate: 0.000005s Size: 8256 Create chunk: 0.000034s Read by index: 0.000014s Iterate: 0.000011s Size: 8532 Create chunk: 0.000047s Read by index: 0.000008s Iterate: 0.000003s Size: 7769 Create chunk: 0.000020s Read by index: 0.000005s Iterate: 0.000005s Size: 2853 Create chunk: 0.000017s Read by index: 0.000029s Iterate: 0.000008s Size: 2419 Create chunk: 0.000022s Read by index: 0.000023s Iterate: 0.000011s Size: 2929 Create chunk: 0.000016s Read by index: 0.000018s Iterate: 0.000008s Size: 2472 Create chunk: 0.000026s Read by index: 0.000024s Iterate: 0.000010s |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TReplicaTest::DoubleDelete [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::Delete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2024-11-21T08:54:07.224066Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.224082Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.224093Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2024-11-21T08:54:07.224097Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T08:54:07.224108Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.224116Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T08:54:07.224119Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.224147Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.224151Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.224686Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, 
Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.224752Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2024-11-21T08:54:07.224757Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:7:2054], path# path 2024-11-21T08:54:07.224765Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.224768Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.224770Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.429213Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestAsIntWide [GOOD] >> Defragmentation::GappedReadHandling [GOOD] >> Discover::TestForceBlockedGenerationTEvDiscoverRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2024-11-21T08:54:07.217303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.217324Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.217339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.217344Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.217355Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2024-11-21T08:54:07.217358Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.217363Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2024-11-21T08:54:07.217367Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.217450Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 103 2024-11-21T08:54:07.217457Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.218297Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.218330Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], 
cookie# 0, event size# 103 2024-11-21T08:54:07.218335Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.218341Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.218359Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T08:54:07.218371Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T08:54:07.223672Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:10:2057] 2024-11-21T08:54:07.223686Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.223699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:10:2057] 2024-11-21T08:54:07.223704Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.223712Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:11:2058] 2024-11-21T08:54:07.223718Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Successful handshake: owner# 900, generation# 1 2024-11-21T08:54:07.223723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:11:2058] 2024-11-21T08:54:07.223727Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Commit generation: owner# 900, generation# 1 2024-11-21T08:54:07.223747Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:10:2057], cookie# 0, event size# 103 2024-11-21T08:54:07.223753Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.223762Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.223773Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2024-11-21T08:54:07.223776Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 
2024-11-21T08:54:07.223783Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2024-11-21T08:54:07.223788Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.223799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:12:2059] 2024-11-21T08:54:07.223806Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:12:2059], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2024-11-21T08:54:07.223848Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:14:2061] 2024-11-21T08:54:07.223852Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.223857Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:14:2061] 2024-11-21T08:54:07.223861Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.223868Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2024-11-21T08:54:07.223872Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.223877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2024-11-21T08:54:07.223882Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.223889Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:14:2061], cookie# 0, event size# 103 2024-11-21T08:54:07.223893Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.223898Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.223908Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2024-11-21T08:54:07.223911Z node 1 :SCHEME_BOARD_REPLICA NOTICE: 
[1:13:2060] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.223916Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T08:54:07.223924Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:16:2063] 2024-11-21T08:54:07.223928Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Subscribe: subscriber# [1:16:2063], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T08:54:07.223971Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:18:2065] 2024-11-21T08:54:07.223975Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.223980Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:18:2065] 2024-11-21T08:54:07.223984Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.223990Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:19:2066] 2024-11-21T08:54:07.223993Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Successful handshake: owner# 900, generation# 1 2024-11-21T08:54:07.223998Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:19:2066] 2024-11-21T08:54:07.224002Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Commit generation: owner# 900, generation# 1 2024-11-21T08:54:07.224009Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:18:2065], cookie# 0, event size# 103 2024-11-21T08:54:07.224012Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.224017Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:17:2064] Upsert description: path# /Root/Tenant, pathId# [Ow ... 
ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2024-11-21T08:54:07.439183Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:393:2440] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:396:2443] 2024-11-21T08:54:07.439185Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:393:2440] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T08:54:07.439188Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:393:2440] Subscribe: subscriber# [2:396:2443], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T08:54:07.439308Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:398:2445] 2024-11-21T08:54:07.439310Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Successful handshake: owner# 910, generation# 1 2024-11-21T08:54:07.439314Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:398:2445] 2024-11-21T08:54:07.439315Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Commit generation: owner# 910, generation# 1 2024-11-21T08:54:07.439319Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2024-11-21T08:54:07.439321Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Successful handshake: owner# 910, generation# 1 2024-11-21T08:54:07.439324Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2024-11-21T08:54:07.439326Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Commit generation: owner# 910, generation# 1 2024-11-21T08:54:07.439330Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:398:2445], cookie# 0, event size# 64 2024-11-21T08:54:07.439332Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T08:54:07.439334Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2024-11-21T08:54:07.439338Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 130 2024-11-21T08:54:07.439341Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2024-11-21T08:54:07.439343Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2024-11-21T08:54:07.439347Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:400:2447] 2024-11-21T08:54:07.439349Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T08:54:07.439351Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Subscribe: subscriber# [2:400:2447], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T08:54:07.439464Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:402:2449] 2024-11-21T08:54:07.439466Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Successful handshake: owner# 910, generation# 1 2024-11-21T08:54:07.439469Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:402:2449] 2024-11-21T08:54:07.439471Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Commit generation: owner# 910, generation# 1 2024-11-21T08:54:07.439474Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2024-11-21T08:54:07.439476Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Successful handshake: owner# 910, generation# 1 2024-11-21T08:54:07.439479Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2024-11-21T08:54:07.439481Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Commit generation: owner# 910, generation# 1 2024-11-21T08:54:07.439485Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:402:2449], cookie# 0, event size# 64 2024-11-21T08:54:07.439487Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T08:54:07.439489Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:401:2448] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2024-11-21T08:54:07.439492Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2024-11-21T08:54:07.439494Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T08:54:07.439498Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:404:2451] 2024-11-21T08:54:07.439500Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:401:2448] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T08:54:07.439503Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:401:2448] Subscribe: subscriber# [2:404:2451], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T08:54:07.633598Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.633615Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T08:54:07.633626Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.633629Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T08:54:07.633634Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:7:2054] 2024-11-21T08:54:07.633636Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 900, generation# 1 2024-11-21T08:54:07.633640Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:7:2054] 2024-11-21T08:54:07.633642Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 900, generation# 1 2024-11-21T08:54:07.633666Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 118 2024-11-21T08:54:07.633672Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2024-11-21T08:54:07.633682Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T08:54:07.633693Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 117 2024-11-21T08:54:07.633696Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2024-11-21T08:54:07.633703Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2024-11-21T08:54:07.633708Z node 3 :SCHEME_BOARD_REPLICA 
INFO: [3:5:2052] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2024-11-21T08:54:07.633713Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T08:54:07.633725Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T08:54:07.633737Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2024-11-21T08:54:07.235052Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2024-11-21T08:54:07.235069Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T08:54:07.235083Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.235096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T08:54:07.235099Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.235103Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.235109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.235112Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.235139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.235142Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.235640Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.235682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# 
[1:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.235685Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.235687Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.439037Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:07.439050Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.439060Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T08:54:07.439063Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.439071Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.439086Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.439089Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.439096Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.439113Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.439115Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.439117Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.439123Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2024-11-21T08:54:07.439139Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.439144Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.439146Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.439149Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.439152Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.439154Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.439157Z node 2 :SCHEME_BOARD_REPLICA INFO: 
[2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.439163Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:8:2055] 2024-11-21T08:54:07.439166Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.644755Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:7:2054] 2024-11-21T08:54:07.644777Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T08:54:07.644792Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 1, capabilities# 2024-11-21T08:54:07.644815Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.644821Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.644831Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.644835Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T08:54:07.644846Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2024-11-21T08:54:07.211830Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:6:2053] 2024-11-21T08:54:07.211853Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T08:54:07.211867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.211873Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject handshake from stale populator: sender# [1:6:2053], owner# 1, generation# 1, pending generation# 2 2024-11-21T08:54:07.426760Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:07.426774Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.426786Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T08:54:07.426789Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.426802Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.426835Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, 
event size# 72 2024-11-21T08:54:07.426839Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.427470Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.427511Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.427514Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.427516Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.427522Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.427525Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.427527Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.427531Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.427533Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.427537Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.427547Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:8:2055] 2024-11-21T08:54:07.427551Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.633494Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.633515Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.633543Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.633548Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.633557Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized 
size 30} 2024-11-21T08:54:07.633566Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.633569Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.633572Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.633575Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.633579Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.633581Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2024-11-21T08:54:07.633583Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2024-11-21T08:54:07.633586Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.633588Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.633591Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.633594Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.633596Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T08:54:07.633598Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2024-11-21T08:54:07.288178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.288199Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.492581Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T08:54:07.492596Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.492641Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.492645Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.493272Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, 
LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.493303Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T08:54:07.493322Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.493336Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T08:54:07.493350Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# path 2024-11-21T08:54:07.493354Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T08:54:07.698004Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.698024Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.698040Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T08:54:07.698044Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T08:54:07.698061Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698085Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.698089Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.698097Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.698134Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.698137Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.698139Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T08:54:07.698147Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T08:54:07.698150Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698155Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.698157Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2024-11-21T08:54:07.288192Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.288224Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject commit from unknown populator: sender# [1:6:2053], owner# 1, generation# 1 2024-11-21T08:54:07.288232Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T08:54:07.288235Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.492505Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:6:2053] 2024-11-21T08:54:07.492525Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 0 2024-11-21T08:54:07.492534Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2024-11-21T08:54:07.492536Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.492543Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2024-11-21T08:54:07.492546Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T08:54:07.492550Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:6:2053] 2024-11-21T08:54:07.492554Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:5:2052] Reject commit from stale populator: sender# [2:6:2053], owner# 1, generation# 0, pending generation# 1 2024-11-21T08:54:07.492557Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:6:2053] 2024-11-21T08:54:07.492559Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T08:54:07.698005Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T08:54:07.698024Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T08:54:07.698066Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T08:54:07.698071Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2024-11-21T08:54:07.698688Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T08:54:07.698716Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T08:54:07.698723Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698736Z node 3 :SCHEME_BOARD_REPLICA 
DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T08:54:07.698739Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698749Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T08:54:07.698752Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2024-11-21T08:54:07.698754Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2024-11-21T08:54:07.698774Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2024-11-21T08:54:07.698776Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698783Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2024-11-21T08:54:07.698786Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:10:2057], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T08:54:07.698791Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2024-11-21T08:54:07.698794Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# |88.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> Discover::TestForceBlockedGenerationTEvDiscoverRequest [GOOD] >> DiskTimeAvailable::Scaling [GOOD] >> DonorMode::BlobReplicationFromDonorDisk >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |88.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |88.9%| [TA] $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> TraverseColumnShard::TraverseServerlessColumnTable >> AnalyzeDatashard::AnalyzeOneTable >> TraverseDatashard::TraverseOneTableServerless |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate |88.9%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AnalyzeDatashard::DropTableNavigateError >> AnalyzeColumnshard::AnalyzeDeadline >> DonorMode::BlobReplicationFromDonorDisk [GOOD] >> DonorMode::BaseReadingTest [GOOD] >> DsProxyLwTrace::TestGetDSProxyVDiskRequestDuration |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> DsProxyLwTrace::TestGetDSProxyVDiskRequestDuration [GOOD] >> ExtraBlockChecks::Basic >> ExtraBlockChecks::Basic [GOOD] >> GarbageCollection::EmptyGcCmd [GOOD] >> Get::TestBlockedEvGetRequest >> Get::TestBlockedEvGetRequest [GOOD] >> Get::TestForceBlockTabletDataWithIndexRestoreGetRequest |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> Get::TestForceBlockTabletDataWithIndexRestoreGetRequest [GOOD] >> GroupLayoutSanitizer::Test3dc |88.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> TCdcStreamTests::MeteringDedicated [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] >> TCdcStreamTests::ChangeOwner >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> DataShardReadIterator::ShouldReadRangeCellVec >> DataShardReadIterator::ShouldReadKeyCellVec >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2024-11-21T08:52:49.320261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652885579097615:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.324259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652884923989540:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.324576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ef6/r3tmp/tmplLIGzp/pdisk_1.dat 2024-11-21T08:52:49.356714Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:49.359824Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:49.361087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:49.382933Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3689, node 1 2024-11-21T08:52:49.401920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003ef6/r3tmp/yandexxodh8p.tmp 2024-11-21T08:52:49.401950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/jptk/003ef6/r3tmp/yandexxodh8p.tmp 2024-11-21T08:52:49.402025Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003ef6/r3tmp/yandexxodh8p.tmp 2024-11-21T08:52:49.402058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:49.406748Z INFO: TTestServer started on Port 9670 GrpcPort 3689 2024-11-21T08:52:49.414515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.414548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9670 2024-11-21T08:52:49.416145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:3689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:49.454084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.454112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.454802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.455079Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:49.455405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:52:49.463909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:52:49.620949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885579098440:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.620949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885579098467:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.620979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.621338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885579098495:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.621354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.621733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:52:49.625789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652885579098469:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:52:49.657523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.686692Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652885579098661:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:49.686786Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjk2NWZlNGItNjllMmEyYjItNDY4NTY0ZWYtZmFkZWNkYmY=, ActorId: [1:7439652885579098437:2300], ActorState: ExecuteState, TraceId: 01jd6ys4am4fs9pt146c696f86, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:49.687373Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:49.717091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.783776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:52:49.816291Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ys4g4ekzmnzxdqfgymf7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzkxZTQ5M2YtYjRkMmU5MTItM2I5N2YzNjktOTVkMDI1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652885579098977:3037] 2024-11-21T08:52:54.319756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652885579097615:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:54.319796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:52:54.324654Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439652884923989540:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:54.324686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--topic1 2024-11-21T08:52:54.847185Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652885579097708:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:52:54.847222Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439652907053935867:3292], recipient# [1:7439652907053935866:2419], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: RootUnknown Kind: KindUnknown DomainInfo }] } Create topic: /Root/PQ/rt3.dc1--topic1 AddTopic: /Root/PQ/rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = /Root/PQ/rt3.dc1/topic1, dc = unknown 2024-11-21T08:52:54.847967Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T08:52:54.848154Z node 1 :KQP_PROXY DEBUG: TraceId: "01jd6ys9e06618dt4h36q74rhk", Request has 5.009058s seconds to be completed 2024-11-21T08:52:54.848161Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439652885579097708:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: ... 
BytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T08:54:09.645369Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:09.645595Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. 
Step TInitConfigStep 2024-11-21T08:54:09.645611Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7439653205260311245:2198] txId 281474976715679 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T08:54:09.645619Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. 
Step TInitConfigStep 2024-11-21T08:54:09.645634Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037893] Config applied version 0 actor [25:7439653205260311245:2198] txId 281474976715679 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T08:54:09.645666Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. Step TInitInternalFieldsStep 2024-11-21T08:54:09.645704Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] bootstrapping 1 [26:7439653225992677121:2372] 2024-11-21T08:54:09.645709Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:54:09.645800Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 0, State: StateInit] bootstrapping 0 [25:7439653226735149567:2451] 2024-11-21T08:54:09.646141Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. Completed. 
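The PERSQUEUE records above drive each partition through a fixed sequence of named initialization steps (Step TInitConfigStep, Step TInitInternalFieldsStep, then "Completed." and "init complete ... generation N"). The following is only an illustrative C++ sketch of such a step-driven initializer, written to mirror the step names printed in this log; the type, function names, and step bodies are assumptions for illustration and are not YDB's implementation.

// ---- illustrative sketch, not part of the captured log ----
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical step descriptor; the real tablet code differs.
struct TInitStep {
    std::string Name;            // name as it would be printed, e.g. "TInitConfigStep"
    std::function<bool()> Run;   // returns true when the step finished successfully
};

int main() {
    const std::string topic = "rt3.dc1--legacy--topic1";
    const int partition = 1;

    std::vector<TInitStep> steps = {
        {"TInitConfigStep",         [] { /* load the partition config */ return true; }},
        {"TInitInternalFieldsStep", [] { /* set up per-partition state */ return true; }},
    };

    for (const auto& step : steps) {
        std::cout << "Initializing topic '" << topic << "' partition "
                  << partition << ". Step " << step.Name << "\n";
        if (!step.Run()) {
            std::cerr << "initialization failed at " << step.Name << "\n";
            return 1;
        }
    }
    std::cout << "Initializing topic '" << topic << "' partition "
              << partition << ". Completed.\n";
    return 0;
}
// ---- end of sketch ----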
2024-11-21T08:54:09.646150Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 1 generation 1 [26:7439653225992677121:2372] 2024-11-21T08:54:09.646154Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:54:09.646412Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 1 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:54:09.646449Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2024-11-21T08:54:09.646478Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Completed. 2024-11-21T08:54:09.646493Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 0 generation 1 [25:7439653226735149567:2451] 2024-11-21T08:54:09.646498Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:54:09.646783Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:54:09.646845Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvPQ::TEvPartitionCounters PartitionId 0 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715679 CreateStep: 1732179249696 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: ... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 |88.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestChangeConfig [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:53:49.131292Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:53:49.132222Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:53:49.132287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T08:53:49.132293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:53:49.132295Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T08:53:49.132299Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:53:49.132310Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:49.132314Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T08:53:49.132317Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:178:2057] recipient: [1:14:2061] 2024-11-21T08:53:49.135738Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.135757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2024-11-21T08:53:49.135770Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:53:49.138137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:49.139020Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 
2024-11-21T08:53:49.139042Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:49.139213Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2024-11-21T08:53:49.139227Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitConfigStep 2024-11-21T08:53:49.139280Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T08:53:49.139335Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T08:53:49.139709Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Completed. 2024-11-21T08:53:49.139715Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2024-11-21T08:53:49.139720Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T08:53:49.140076Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2024-11-21T08:53:49.140081Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2024-11-21T08:53:49.140084Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2024-11-21T08:53:49.140086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2024-11-21T08:53:49.140113Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:53:49.140117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:49.140142Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:53:49.140224Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:49.140587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:53:49.140641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.140647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:192:2203], now have 1 active actors on pipe 2024-11-21T08:53:49.142202Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.142214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2207], now have 1 active actors on pipe 2024-11-21T08:53:49.142226Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T08:53:49.142230Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T08:53:49.142681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2024-11-21T08:53:49.143035Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2024-11-21T08:53:49.143408Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2024-11-21T08:53:49.143805Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2024-11-21T08:53:49.143852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2024-11-21T08:53:49.143856Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2024-11-21T08:53:49.143877Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2024-11-21T08:53:49.143896Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2024-11-21T08:53:49.143899Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2024-11-21T08:53:49.143947Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.143952Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2209], now have 1 active actors on pipe 2024-11-21T08:53:49.143965Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T08:53:49.143969Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 
2024-11-21T08:53:49.143987Z node 1 :PERSQUEUE INFO: new Cookie default|876b10ec-7f25613d-25cd8d4d-de5d902b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T08:53:49.144009Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:53:49.144034Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:53:49.144078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:53:49.144083Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2211], now have 1 active actors on pipe 2024-11-21T08:53:49.144095Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T08:53:49.144099Z node 1 :PERSQUEUE DEBUG: [PQ: 7205 ... s { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 20 Important: false } Consumers { Name: "bbb" Generation: 21 Important: true } Consumers { Name: "ccc" Generation: 21 Important: true } 2024-11-21T08:54:10.613779Z node 25 :PERSQUEUE INFO: new Cookie default|ec9370d2-9a2cfca6-3162365c-c23dbdaa_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.614445Z node 25 :PERSQUEUE INFO: new Cookie default|49731b08-a2c36bfd-28d1a26a-9b37bbae_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.615137Z node 25 :PERSQUEUE INFO: new Cookie default|54a709f2-355a6a0c-e8939624-f8acadda_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] 2024-11-21T08:54:10.829461Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:10.829479Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] Leader for TabletID 72057594037927938 is [26:151:2172] sender: [26:152:2057] recipient: [26:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:177:2057] recipient: [26:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.833027Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:10.833328Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 22 actor [26:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 22 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 22 ReadRuleGenerations: 22 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "aaa" Generation: 22 Important: true } 2024-11-21T08:54:10.833485Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [26:184:2197] 2024-11-21T08:54:10.833906Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [26:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:10.834354Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [26:185:2198] 2024-11-21T08:54:10.834771Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [26:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:10.835240Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [26:186:2199] 2024-11-21T08:54:10.835556Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [26:186:2199] 2024-11-21T08:54:10.835811Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [26:187:2200] 2024-11-21T08:54:10.836093Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [26:187:2200] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:10.836517Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [26:188:2201] 2024-11-21T08:54:10.836835Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [26:188:2201] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.838771Z node 26 :PERSQUEUE INFO: new Cookie 
default|c02b4bc8-7e7542a1-4137823-eef408ff_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.840054Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:10.841039Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] bootstrapping 6 [26:238:2238] 2024-11-21T08:54:10.841362Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [26:238:2238] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:10.841936Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] bootstrapping 7 [26:239:2239] 2024-11-21T08:54:10.842328Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [26:239:2239] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:10.842907Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [26:240:2240] 2024-11-21T08:54:10.843178Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [26:240:2240] 2024-11-21T08:54:10.843577Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [26:241:2241] 2024-11-21T08:54:10.843825Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [26:241:2241] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:10.844293Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] bootstrapping 5 [26:237:2237] 2024-11-21T08:54:10.844557Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [26:237:2237] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.850128Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 23 actor [26:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 
MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 23 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 22 ReadRuleGenerations: 23 ReadRuleGenerations: 23 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "bbb" Generation: 23 Important: true } Consumers { Name: "ccc" Generation: 23 Important: true } 2024-11-21T08:54:10.850770Z node 26 :PERSQUEUE INFO: new Cookie default|1fad96d9-cf20f19b-c54c3ed7-6245d86e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.851579Z node 26 :PERSQUEUE INFO: new Cookie default|fbcccb18-4c845b4d-250b2d7b-c7406749_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:10.852380Z node 26 :PERSQUEUE INFO: new Cookie default|cbac087a-7a938121-ea45524c-be578839_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:52:39.934762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:52:39.934794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.934800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:52:39.934805Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:52:39.934813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:52:39.934817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:52:39.934827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:52:39.934919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:39.944573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:39.944598Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:39.947528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:39.948340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:52:39.948381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:52:39.949910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:52:39.950146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:52:39.950247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.950333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:52:39.951723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.952027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:39.952041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.952079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:52:39.952087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:39.952094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:52:39.952111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.953415Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:52:39.969892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:52:39.969983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.970050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:52:39.970130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:52:39.970139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.970927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.970956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:52:39.971006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.971016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:52:39.971021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:52:39.971025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:52:39.971434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.971444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:52:39.971448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:52:39.971930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.971945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.971951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.971958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.972565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:39.972984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:52:39.973034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:52:39.973212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:52:39.973239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T08:52:39.973246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.973298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:52:39.973306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:52:39.973335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:39.973348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:52:39.973803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:52:39.973812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:52:39.973872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:52:39.973878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:52:39.973964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:52:39.973973Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:52:39.973986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:52:39.973991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.973996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:52:39.974002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:52:39.974006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:52:39.974010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:52:39.974022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:52:39.974028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:52:39.974032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:52:39.974335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:39.974354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:52:39.974359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:52:39.974364Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:52:39.974369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:52:39.974384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:54:10.891341Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:10.891526Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891534Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891536Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:54:10.891589Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891595Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891597Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:54:10.891674Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T08:54:10.891678Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:10.891747Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T08:54:10.891773Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2024-11-21T08:54:10.891775Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2024-11-21T08:54:10.891778Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2024-11-21T08:54:10.891979Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891986Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:10.891988Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:54:10.891991Z node 18 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:54:10.891993Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:54:10.892000Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2024-11-21T08:54:10.892107Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:10.892112Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:10.892137Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:54:10.892149Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2024-11-21T08:54:10.892151Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T08:54:10.892156Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2024-11-21T08:54:10.892163Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:379:2344] message: TxId: 103 2024-11-21T08:54:10.892166Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T08:54:10.892170Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:54:10.892172Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:54:10.892186Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:54:10.892188Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T08:54:10.892190Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T08:54:10.892193Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:54:10.892195Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T08:54:10.892197Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T08:54:10.892200Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:54:10.892218Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2024-11-21T08:54:10.892221Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T08:54:10.892223Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:54:10.892225Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2024-11-21T08:54:10.892227Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2024-11-21T08:54:10.892233Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:54:10.892312Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:54:10.892317Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:54:10.892323Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:54:10.892327Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:54:10.892330Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:54:10.892353Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.892650Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.892663Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.892666Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.892668Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.892993Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:54:10.893017Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:54:10.893021Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [18:763:2656] 2024-11-21T08:54:10.893046Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2024-11-21T08:54:10.893107Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:10.893132Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 30us result status StatusPathDoesNotExist 2024-11-21T08:54:10.893157Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:54:10.893187Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:10.893195Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 9us result status StatusPathDoesNotExist 2024-11-21T08:54:10.893204Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow |88.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldHandleReadAck >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping >> TraverseDatashard::TraverseOneTableServerless [GOOD] >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2024-11-21T08:54:09.332667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.332709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.332720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00274f/r3tmp/tmpQQRz1T/pdisk_1.dat 2024-11-21T08:54:09.409103Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15499, node 1 2024-11-21T08:54:09.499190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.499204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.499207Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.499270Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.503918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.578825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.578860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.590046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6871 2024-11-21T08:54:09.987933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.722090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.722117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.755410Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.756455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.809363Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.818030Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.818061Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.825681Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.825818Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.825835Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.825840Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.825846Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.825852Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.825856Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.825861Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.825951Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.998952Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.998975Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.999873Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:11.001364Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:11.001448Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:11.001962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:54:11.005040Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:11.005050Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:11.005057Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:54:11.006353Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:11.006374Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:11.007331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:11.008494Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:11.008514Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:11.010373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:11.021817Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:11.043566Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.151338Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.306451Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:54:12.033901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:12.627162Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:12.722670Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:54:12.722689Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:54:12.722698Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2484:2899], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:54:12.722893Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2486:2901] 2024-11-21T08:54:12.722915Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2486:2901], schemeshard id = 72075186224037899 2024-11-21T08:54:13.523207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2619:3193], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.523254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.526812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T08:54:13.669193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2911:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.669233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.671338Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2916:3243]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:13.671392Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:13.671436Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T08:54:13.671447Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2919:3246] 2024-11-21T08:54:13.671458Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2919:3246] 2024-11-21T08:54:13.671678Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2920:3132] 2024-11-21T08:54:13.671777Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2919:3246], server id = [2:2920:3132], tablet id = 72075186224037897, status = OK 2024-11-21T08:54:13.671841Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2920:3132], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:54:13.671853Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:54:13.671922Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:54:13.671935Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2916:3243], StatRequests.size() = 1 2024-11-21T08:54:13.675363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2924:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.675412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.675533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2929:3255], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.677106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:54:13.799421Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:54:13.799453Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:54:13.830403Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2919:3246], schemeshard count = 1 2024-11-21T08:54:14.130493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2931:3257], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:54:14.300921Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3079:3345]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:14.301852Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:54:14.301872Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3079:3345], StatRequests.size() = 1 2024-11-21T08:54:14.312084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yvpd5dhz1fmmps7s4n51d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZjOTU5Ni0zYmE5YTkyMy02MTc5MTZlMy1lMzRlNWNlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:54:14.342594Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3117:3185]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:14.343297Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:14.343311Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:54:14.343356Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:14.343364Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T08:54:14.343372Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:54:14.345000Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T08:54:14.345062Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] |88.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> DataShardReadIterator::ShouldNotReadAfterCancel |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> VectorIndexBuildTest::BaseCase [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> 
DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> TPQTest::TestPQPartialRead [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> TPQTest::TestPQRead >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture |88.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:03.745120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:03.745140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.745144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:03.745147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:03.745158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:03.745161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:03.745168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.745225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:03.753719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:03.753734Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:03.755599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:03.756117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:03.756139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:03.757038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:03.757161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:03.757236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T08:54:03.757275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:03.757961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.758184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.758192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.758217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:03.758221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.758225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:03.758233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.759077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:03.769677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:03.769735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.769780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:03.769821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:03.769826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:03.770389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:03.770397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:03.770401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:03.770658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770665Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:03.770903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.770912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.770916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.771315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:03.771584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:03.771625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:03.771753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.771770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:03.771775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.771811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:03.771816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.771836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.771844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:03.772104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.772109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.772136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.772139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:03.772199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.772218Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:03.772229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:03.772232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.772235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:03.772239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.772242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:03.772244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:03.772251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:03.772255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:03.772257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:03.772459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.772468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.772471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:03.772474Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:03.772477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.772485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
to activation from: 2024-11-21T08:54:15.927406Z node 1 :TX_DATASHARD INFO: 72075186233409586 Initiating switch from PreOffline to Offline state 2024-11-21T08:54:15.927870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:20178:20642], Recipient [1:16879:17609]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:54:15.927884Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:54:15.929072Z node 1 :TX_DATASHARD INFO: 72075186233409586 Reporting state Offline to schemeshard 72075186233409573 2024-11-21T08:54:15.929177Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [1:16869:17601], Recipient [1:16879:17609]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T08:54:15.929299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [1:20286:20750], Recipient [1:16879:17609]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:15.929306Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:15.929339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409573, message: Source { RawX1: 16879 RawX2: 4294984905 } TabletId: 72075186233409586 State: 4 2024-11-21T08:54:15.929359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409586, state: Offline, at schemeshard: 72075186233409573 2024-11-21T08:54:15.930284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409573:14 hive 72057594037968897 at ss 72075186233409573 2024-11-21T08:54:15.930361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:13581:14534], Recipient [1:16879:17609]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409573 State: 4 2024-11-21T08:54:15.930366Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2024-11-21T08:54:15.930372Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409586 state Offline 2024-11-21T08:54:15.930407Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:20286:20750], Recipient [1:16879:17609]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:54:15.930411Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T08:54:15.930514Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409573 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409586 2024-11-21T08:54:15.930591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:16869:17601], Recipient [1:16879:17609]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T08:54:15.930647Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409586 2024-11-21T08:54:15.930657Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409586 Forgetting tablet 72075186233409586 2024-11-21T08:54:15.931177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72075186233409573 ShardLocalIdx: 14, at schemeshard: 72075186233409573 2024-11-21T08:54:15.931258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409573, LocalPathId: 13] was 1 2024-11-21T08:54:15.931373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72075186233409573 2024-11-21T08:54:15.931379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409573, LocalPathId: 13], at schemeshard: 72075186233409573 2024-11-21T08:54:15.931393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409573, LocalPathId: 3] was 4 2024-11-21T08:54:15.933128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409573:14 2024-11-21T08:54:15.933155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409573:14 tabletId 72075186233409586 2024-11-21T08:54:15.933411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409573 2024-11-21T08:54:15.955624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 6450, transactions count in step: 1, at schemeshard: 72075186233409573 2024-11-21T08:54:15.955686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735772 AckTo { RawX1: 13691 RawX2: 4294981914 } } Step: 6450 MediatorID: 72075186233409575 TabletID: 72075186233409573, at schemeshard: 72075186233409573 2024-11-21T08:54:15.955703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409573] TDropLock TPropose opId# 281474976735772:0 HandleReply TEvOperationPlan: step# 6450 2024-11-21T08:54:15.955712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735772:0 128 -> 240 2024-11-21T08:54:15.956699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735772:0, at schemeshard: 72075186233409573 2024-11-21T08:54:15.956716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409573] TDone opId# 281474976735772:0 ProgressState 2024-11-21T08:54:15.956733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735772:0 progress is 1/1 2024-11-21T08:54:15.956738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735772 ready parts: 1/1 2024-11-21T08:54:15.956745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735772, ready parts: 1/1, is published: true 2024-11-21T08:54:15.956763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:13581:14534] message: TxId: 281474976735772 2024-11-21T08:54:15.956771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735772 ready parts: 1/1 2024-11-21T08:54:15.956776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735772:0 2024-11-21T08:54:15.956781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735772:0 2024-11-21T08:54:15.956798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409573, LocalPathId: 2] was 4 2024-11-21T08:54:15.958065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735772 2024-11-21T08:54:15.958105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735772 2024-11-21T08:54:15.958126Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735772, buildInfoId: 115 2024-11-21T08:54:15.958158Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735772, buildInfo: TBuildInfo{ IndexBuildId: 
115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14443:15352], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:15.958904Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2024-11-21T08:54:15.958923Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14443:15352], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:15.958932Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T08:54:15.959766Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2024-11-21T08:54:15.959802Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14443:15352], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:15.959808Z node 1 :BUILD_INDEX TRACE: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2024-11-21T08:54:15.959839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2024-11-21T08:54:15.959846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:14647:15516] TestWaitNotification: OK eventTxId 115 2024-11-21T08:54:15.961249Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2024-11-21T08:54:15.961400Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } |89.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.965163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.965190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.965196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.965200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.965216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.965220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.965229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.965332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.974796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.974813Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.976847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.977356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.977380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.978403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.978597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.978680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.978745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.979634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.979845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.979852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.979877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.979882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.979886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.979896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.980909Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.998014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.998116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.998183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.998259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.998269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.999165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.999181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.999186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.999601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.999961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.999976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.999983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T08:53:01.000651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:01.001048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:01.001099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:01.001294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:01.001318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:01.001338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:01.001392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:01.001400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:01.001449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:01.001462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:01.001892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:01.001900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:01.001941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:01.001947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:01.002038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:01.002045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:01.002057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:01.002062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:01.002068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:01.002073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T08:53:01.002077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:01.002082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:01.002093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:01.002099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:01.002104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:01.002413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:01.002428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:01.002434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:01.002438Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:01.002443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:01.002458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Shard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.122291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.122297Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.415141Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.415164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.416133Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.416145Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.479988Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:54:16.480031Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2024-11-21T08:54:16.480191Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2024-11-21T08:54:16.480338Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:305:2293], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 
RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 48 Memory: 123880 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 443 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:54:16.480348Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:54:16.480366Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0048 2024-11-21T08:54:16.480382Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:54:16.480390Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:54:16.480591Z node 3 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1, LoadedSize 102, Spent{0.000s wa 0.000s cnt 1}, HistogramKeys 1 2024-11-21T08:54:16.480648Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1053:2999], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2024-11-21T08:54:16.480659Z node 3 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186233409546, for tableId 2 2024-11-21T08:54:16.493082Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T08:54:16.493116Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T08:54:16.493149Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2024-11-21T08:54:16.493173Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:16.493181Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2024-11-21T08:54:16.493191Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2024-11-21T08:54:16.493196Z node 3 
:TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2024-11-21T08:54:16.525448Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:16.525480Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:16.525488Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2024-11-21T08:54:16.525515Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T08:54:16.525522Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2024-11-21T08:54:16.525557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2024-11-21T08:54:16.525581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2024-11-21T08:54:16.525593Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2024-11-21T08:54:16.525619Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2024-11-21T08:54:16.525672Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:16.536191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:16.536240Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:16.536247Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:54:16.743773Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.743799Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.743815Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.743818Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.981673Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.981706Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:16.981739Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:16.981745Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.239695Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.239730Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.239750Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.239755Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.496190Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.496627Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.496664Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.496669Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.735750Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.735793Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.735825Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.735831Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.756283Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:54:17.997914Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.997944Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:17.997965Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:17.997970Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |89.0%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIteratorBatchMode::RangeFull |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |89.0%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitTableAndCreateStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:32.284399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:32.284422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.284428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:32.284433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:32.284444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:32.284448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:32.284457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:32.284526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:32.296050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:32.296071Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:32.298423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:32.298511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:32.298533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:32.301116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:32.301181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:32.301304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.301504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.302180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.302413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.302420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.302431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:32.302438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.302445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:32.302477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:32.303715Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:32.320486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:32.320562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.320620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:32.320667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:32.320675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.321290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.321313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:32.321346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.321355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:32.321359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:32.321364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:32.321766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.321774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:32.321779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:32.322087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.322094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.322100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.322106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.322681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:32.323076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:32.323113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:32.323275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:32.323298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:32.323305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.323356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:32.323363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:32.323392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:32.323402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:32.323741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:32.323749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:32.323785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:32.323790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:32.323863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:32.323869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:32.323879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:32.323883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.323889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:32.323894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:32.323898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:32.323903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:32.323911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:51:32.323917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:32.323921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication 
details: tx: 1, [Own ... schemeshard: 72057594046678944 2024-11-21T08:54:16.676826Z node 235 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.676830Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:54:16.676834Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:54:16.676838Z node 235 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T08:54:16.677768Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:54:16.677787Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:54:16.677797Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:54:16.678266Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.678290Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.678304Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.678317Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.678411Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T08:54:16.678418Z node 235 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T08:54:16.678427Z node 235 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 3/3 2024-11-21T08:54:16.678431Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:54:16.678436Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/3, is published: true 2024-11-21T08:54:16.678447Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [235:398:2373] message: TxId: 1004 2024-11-21T08:54:16.678452Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 3/3 2024-11-21T08:54:16.678457Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:54:16.678461Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:54:16.678470Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:54:16.678474Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T08:54:16.678477Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T08:54:16.678493Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:54:16.678499Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 1004:2 2024-11-21T08:54:16.678502Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T08:54:16.678510Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:54:16.679423Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:54:16.679436Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [235:404:2379] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:54:16.679569Z node 235 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:16.679647Z node 235 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 83us result status StatusSuccess 2024-11-21T08:54:16.679886Z node 235 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\002\000\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:16.679952Z node 235 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:16.679977Z node 235 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 27us result status StatusSuccess 2024-11-21T08:54:16.680044Z node 235 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409550 } PersQueueGroup { Name: "streamImpl" 
PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409549 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409550 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> GroupLayoutSanitizer::Test3dc [GOOD] >> GroupLayoutSanitizer::MultipleRealmsOccupation >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> DataShardReadIteratorSysTables::ShouldRead >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] Test command err: 2024-11-21T08:53:50.147144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653145732316371:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:50.147179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:50.152002Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653145836515490:2234];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:50.167321Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:50.171174Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:50.172570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0014b4/r3tmp/tmpkLaWBq/pdisk_1.dat 2024-11-21T08:53:50.195941Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21274, node 1 2024-11-21T08:53:50.207072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0014b4/r3tmp/yandexyh6PKr.tmp 2024-11-21T08:53:50.207082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0014b4/r3tmp/yandexyh6PKr.tmp 2024-11-21T08:53:50.207127Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0014b4/r3tmp/yandexyh6PKr.tmp 2024-11-21T08:53:50.207159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:50.211031Z INFO: TTestServer started on Port 14926 GrpcPort 21274 TClient is connected to server localhost:14926 PQClient connected to localhost:21274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:53:50.247089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:50.247117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:50.248718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:50.275086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:50.275106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:50.276012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:50.276524Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:50.276799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:53:50.286864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:50.443451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653145732317329:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.443481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.443547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653145732317356:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.444484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:50.445254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653145732317387:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.445282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:50.448107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653145732317358:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:53:50.473551Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653145836515673:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:50.473656Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmNjYzJmMzItNjI5MmM0NzEtNTQyMDc5YjQtZTUyYmI0NTM=, ActorId: [2:7439653145836515634:2280], ActorState: ExecuteState, TraceId: 01jd6ytzqn7923m68j68xgjyvn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:50.474082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:50.474162Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:50.484094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:53:50.547952Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653145732317691:2328], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:50.548170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTgxZTIyODQtNjhlMTNiYTUtZTBiM2Y2NjYtNmY4OWI3OWI=, ActorId: [1:7439653145732317326:2300], ActorState: ExecuteState, TraceId: 01jd6ytzqbfpxxmtqhrcx1sbmm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:50.548363Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:50.548972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:50.580961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6ytzv27ry7k3qqhkhjywxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM1MTIyM2QtNjczNzE0OTQtODljN2VmOTctNDk2ZjNlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653145732317870:3034] 2024-11-21T08:53:55.147336Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653145732316371:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:55.147371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:55.149868Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653145836515490:2234];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:55.149898Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:53:55.675289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T08:53:55.773569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:53:55.815364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation par ... /Root, SessionId: ydb://session/3?node_id=9&id=MmZiY2U0OC02ZTZlY2Y3LWMyOWYwYjFiLWNhMWVjYzE3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7439653256474367406:3026] 2024-11-21T08:54:21.391417Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439653256474366032:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:21.391453Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:54:21.998794Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T08:54:22.060829Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.111396Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.170129Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.219826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.270893Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3445495608623639121, "Root", "00415F536F757263655F36", 1732179262313, 1732179262313, 0, 13); 2024-11-21T08:54:22.323932Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715697. Ctx: { TraceId: 01jd6yvyvb0ay4q7dymaw8dtgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MzFiZTQ3Y2EtMTZkODE4ZDctMWJkZjc5YmEtMWNlMWI2Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:54:22.327370Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:54:22.327384Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:54:22.327386Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:54:22.327394Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2024-11-21T08:54:22.327431Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439653282244172346:3727], Recipient [9:7439653277949204414:3320]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:22.327461Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204414:3320]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_6" 2024-11-21T08:54:22.327486Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7439653277949204414:3320], Recipient [9:7439653282244172345:3727]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T08:54:22.327495Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_6 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T08:54:22.327520Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204414:3320]: NActors::TEvents::TEvPoison 2024-11-21T08:54:22.327533Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439653256474365872:2049], Recipient [9:7439653282244172345:3727]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T08:54:22.327541Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:54:22.327875Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439653256474365890:2060], Recipient [9:7439653282244172345:3727]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=ZDViYWY3NzYtMzAyYmQ0NDYtYzZhMTcxNDMtNjNmNzdmYmM=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T08:54:22.327886Z node 9 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:54:22.342498Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439653256474365890:2060], Recipient [9:7439653282244172345:3727]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZDViYWY3NzYtMzAyYmQ0NDYtYzZhMTcxNDMtNjNmNzdmYmM=" PreparedQuery: "3b349983-4c7dd51d-f6c37ca9-b125e2a0" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd6yvyw46vbz63837mydwvv4" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732179262313 } items { uint64_value: 1732179262313 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 8 2024-11-21T08:54:22.342548Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2024-11-21T08:54:22.342559Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOldSeqNo 2024-11-21T08:54:22.342611Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439653282244172369:3727], Recipient [9:7439653277949204413:3319]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:22.344049Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271187968, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204413:3319]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000A_Source_6" } PipeClient { RawX1: 7439653282244172369 RawX2: 38654709391 } } 2024-11-21T08:54:22.344087Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) OnPartitionChosen 2024-11-21T08:54:22.344123Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204413:3319]: NActors::TEvents::TEvPoison 2024-11-21T08:54:22.344148Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439653282244172370:3727], Recipient [9:7439653277949204414:3320]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:22.344170Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204414:3320]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2024-11-21T08:54:22.344199Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [9:7439653277949204414:3320], Recipient [9:7439653282244172345:3727]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T08:54:22.344230Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Update the table 
2024-11-21T08:54:22.344287Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439653282244172345:3727], Recipient [9:7439653277949204414:3320]: NActors::TEvents::TEvPoison 2024-11-21T08:54:22.359471Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7439653256474365890:2060], Recipient [9:7439653282244172345:3727]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZDViYWY3NzYtMzAyYmQ0NDYtYzZhMTcxNDMtNjNmNzdmYmM=" PreparedQuery: "dc0ab325-7ed206e0-d1138f70-5f0c41f3" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 8 Received TEvChooseResult: 1 2024-11-21T08:54:22.359489Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:54:22.359502Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=157 2024-11-21T08:54:22.359513Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653282244172345:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3445495608623639121 AND Topic = "Root" AND ProducerId = "00415F536F757263655F36" 2024-11-21T08:54:22.378075Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715700. Ctx: { TraceId: 01jd6yvywt9wbf2jven9npntr4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=Nzk0MDgyNWMtNzA1OWE2NzYtNWZhZDIxN2ItOWYzNjJlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> GroupLayoutSanitizer::MultipleRealmsOccupation [GOOD] >> GroupLayoutSanitizer::ForbidMultipleRealmsOccupation >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> KqpYql::TableConcat >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersSimple::Partition >> GroupLayoutSanitizer::ForbidMultipleRealmsOccupation [GOOD] >> GroupReconfiguration::BsControllerConfigurationRequestIsFastEnough >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> IndexBuildTest::CancellationNoTable [GOOD] >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionFirstClass >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> PQCountersSimple::PartitionFirstClass [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::SelectingColumns >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::PartitionFirstClass [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2024-11-21T08:54:07.441976Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.441994Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.445075Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.446619Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:54:07.446753Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:54:07.447103Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:54:07.447341Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:54:07.447581Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR 2024-11-21T08:54:07.448807Z node 1 :PERSQUEUE INFO: new Cookie default|1d3bbf9f-86906517-a43030a9-6e7e0875_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.449406Z node 1 :PERSQUEUE INFO: new Cookie default|1119d8a7-3cd08053-2db261d2-75ee8a1e_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
2024-11-21T08:54:07.449940Z node 1 :PERSQUEUE INFO: new Cookie default|5fd25dfe-8d8d5e45-bdb7975c-88aa061e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": 
"rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": ... keup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2024-11-21T08:54:24.878241Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:24.878264Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:24.882136Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:24.882331Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } 2024-11-21T08:54:24.882452Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:184:2197] 2024-11-21T08:54:24.882989Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:24.883385Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:185:2198] 2024-11-21T08:54:24.883776Z 
node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:24.885333Z node 4 :PERSQUEUE INFO: new Cookie default|cdc9ec45-8b89f85c-679fdabb-74f7c96b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:24.886140Z node 4 :PERSQUEUE INFO: new Cookie default|a34308e-7c089d59-37650f63-9653bc1f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:24.886762Z node 4 :PERSQUEUE INFO: new Cookie default|6e8b0bcb-4d53fb1f-ede7538a-aabf7416_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:24.887425Z node 4 :PERSQUEUE INFO: new Cookie default|89ec9c60-36ca8734-5f45b5c6-83b33337_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T08:54:24.887593Z node 4 :PERSQUEUE INFO: new Cookie default|232bb70f-7053784e-9060ce2b-821d0ca1_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T08:54:25.152086Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:25.152115Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:25.157027Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:25.157273Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2024-11-21T08:54:25.157466Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:184:2197] 2024-11-21T08:54:25.157685Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:25.157944Z node 5 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:185:2198] 2024-11-21T08:54:25.158088Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [5:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:25.159455Z node 5 :PERSQUEUE INFO: new Cookie default|fa9fb806-a6ae69fd-178f574c-222fb682_0 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:25.160472Z node 5 :PERSQUEUE INFO: new Cookie default|7e398fb6-7a80c66e-a61ac98a-a487b34_1 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:25.161144Z node 5 :PERSQUEUE INFO: new Cookie default|a0b027be-5eb900a6-de4bff52-a281b989_2 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:25.161638Z node 5 :PERSQUEUE INFO: new Cookie default|b817397f-19c58b70-4ffffbb1-7d61ce89_3 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> KqpYql::SelectNoAsciiValue [GOOD] |89.0%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:02.882827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:02.882855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:02.882860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:02.882864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:02.882878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:02.882882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:02.882890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:02.882965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T08:54:02.889779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:02.889793Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:02.891498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:02.891990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:02.892011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:02.892892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:02.893022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:02.893091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.893127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:02.893800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.893978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:02.893983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.894004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:02.894009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:02.894012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:02.894021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.894835Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:02.906889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:02.906950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.906993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:02.907034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:02.907039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.907518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.907535Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:02.907574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.907582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:02.907585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:02.907588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:02.907870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.907877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:02.907879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:02.908106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.908111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.908114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.908119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.908487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:02.908728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:02.908763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:02.908871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:02.908885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:02.908889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.908921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:02.908925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:02.908946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T08:54:02.908953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:02.909192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:02.909197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:02.909220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:02.909222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:02.909276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:02.909280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:02.909287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:02.909289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.909292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:02.909295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:02.909298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:02.909300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:02.909306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:02.909310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:02.909312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:02.909508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:02.909517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:02.909520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:02.909522Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:02.909525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:02.909533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:24.900265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:24.900288Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.901761Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:237:2058] recipient: [2:15:2062] 2024-11-21T08:54:24.903475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:24.903515Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.903567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:24.903600Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:24.903605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904069Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:24.904101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904108Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:24.904111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:24.904114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:24.904413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904421Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904423Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:24.904656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904662Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.904667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:24.904672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2024-11-21T08:54:24.904695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:24.904929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:24.904955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:24.905097Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:24.905119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:24.905128Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:24.905169Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:24.905176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:24.905201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:24.905214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:24.905620Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:24.905627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:24.905663Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:24.905667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:24.905676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:24.905681Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:24.905693Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:24.905697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:24.905703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:24.905707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2024-11-21T08:54:24.905712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:24.905715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:24.905724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:24.905729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:24.905732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:24.905909Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:24.905918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:24.905922Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:24.905925Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:24.905927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:24.905938Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:54:24.906395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:54:24.906460Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:24.906592Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 } 2024-11-21T08:54:24.906634Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } SchemeStatus: 2 2024-11-21T08:54:24.906657Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] Bootstrap 2024-11-21T08:54:24.907617Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] Become StateWork (SchemeCache [2:272:2264]) 2024-11-21T08:54:24.907743Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:54:24.908142Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: 
\'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2024-11-21T08:54:24.908237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:54:24.908244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:54:24.908297Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:54:24.908315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:54:24.908319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:279:2271] TestWaitNotification: OK eventTxId 101 2024-11-21T08:54:24.908381Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2024-11-21T08:54:24.908398Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 32152, MsgBus: 28229 2024-11-21T08:54:24.109975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653293661719499:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:24.110028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0049a2/r3tmp/tmpgghSx9/pdisk_1.dat 2024-11-21T08:54:24.154907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32152, node 1 2024-11-21T08:54:24.172725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:24.172741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:24.172742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:24.172769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28229 TClient is connected to server localhost:28229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:24.210269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:24.210301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:24.211680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:24.221219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:24.229841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:24.292755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:24.310680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:24.321405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:24.380269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653293661721029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:24.380290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:24.420388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.427836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.439732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.445770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.453340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.460226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:24.469494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653293661721522:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:24.469519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653293661721527:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:24.469528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:24.470108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:24.473228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653293661721529:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 3859, MsgBus: 26682 2024-11-21T08:54:24.979514Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653293499964985:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:24.979700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0049a2/r3tmp/tmpBg9O4X/pdisk_1.dat 2024-11-21T08:54:24.991303Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3859, node 2 2024-11-21T08:54:25.002787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:25.002805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:25.002810Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:25.002860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26682 TClient is connected to server localhost:26682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:25.079990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:25.080035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:25.081893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:25.083565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:25.089813Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:25.097123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:25.111398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:25.174336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:25.191780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:25.288758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653297794933815:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:25.288793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:25.295062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.303699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.315171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.328669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.335978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.350492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.365949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653297794934316:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:25.365975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653297794934321:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:25.365981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:25.366657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:25.369233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653297794934323:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:25.572791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.687649Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179265733, txId: 281474976715675] shutting down >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> IndexBuildTest::RejectsCancel [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2024-11-21T08:53:53.928150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653156714771207:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:53.928430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:53.930880Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653157567838950:2242];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001400/r3tmp/tmpMRcy8y/pdisk_1.dat 2024-11-21T08:53:53.952782Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:53.953895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:53:53.955764Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:53:53.972772Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19484, node 1 2024-11-21T08:53:53.983642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/001400/r3tmp/yandexMLGCT0.tmp 2024-11-21T08:53:53.983655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/001400/r3tmp/yandexMLGCT0.tmp 2024-11-21T08:53:53.983710Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/001400/r3tmp/yandexMLGCT0.tmp 2024-11-21T08:53:53.983742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:53.987051Z INFO: TTestServer started on Port 19706 GrpcPort 19484 TClient is connected to server localhost:19706 PQClient connected to localhost:19484 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:53:54.028412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:54.028437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:54.029934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:54.049665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:54.049693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:54.050627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:54.050984Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:54.051207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:53:54.059887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:53:54.202282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653161862806395:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:54.202303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653161862806404:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:54.202309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:54.203099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:53:54.206637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653161862806409:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:53:54.224488Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653161009739524:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:54.224742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:53:54.224752Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZhOTY1MmMtN2JmYmQ5OTUtN2U5MTM1ZDgtMmI3NjczMjA=, ActorId: [1:7439653161009739463:2300], ActorState: ExecuteState, TraceId: 01jd6yv3cw9ef9kgehg4w3cg3a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:54.225068Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:54.283536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:53:54.294648Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653161862806488:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:53:54.294722Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTYwMTI1NWUtYzE1NWQ4OTgtNjNmNTMzNWEtYTYwNjM4MjE=, ActorId: [2:7439653161862806393:2280], ActorState: ExecuteState, TraceId: 01jd6yv3cs6a35w9x8m95hct6y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:53:54.294926Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:53:54.297708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:53:54.324548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd6yv3g517rbn021ck1mv7ka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc2MmI4Y2QtNWUwMGRhM2MtMjA0ZWFlOTgtN2YzMTA1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653161009739964:3023] 2024-11-21T08:53:58.928601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653156714771207:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:58.928634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:53:58.931162Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653157567838950:2242];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:53:58.931188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:53:59.344588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720677:1, at schemeshard: 72057594046644480 2024-11-21T08:53:59.441711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720680:0, at schemeshard: 72057594046644480 2024-11-21T08:53:59.483424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720683:0, at schemeshard: 72057594046644480 2024-11-21T08:53:59.532100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720686:0, at schemeshard: 72057594046644480 2024-11-21T08:53:59.578748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976720689:0, ... : "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:20.049200Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:20.093781Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439653273275704825:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:20.093877Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OGQ2MzdmZDYtNjU3ZDc3MzktODgwYjcwMjUtZWY4OGMyNjA=, ActorId: [9:7439653273275704508:2300], ActorState: ExecuteState, TraceId: 01jd6yvwkr2kz3k9npyvhqnb8j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:20.094189Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:20.126197Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:54:20.175116Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yvwqs7hkwjyy154t9dg9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=M2E2ZDFiNTEtNWVlMmQwMWItZjQ0ZTUxYWItMjBmOWY1NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7439653273275705058:3025] 2024-11-21T08:54:24.698272Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439653268980736255:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:24.698304Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:54:24.699119Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439653268831265682:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:24.699155Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:54:25.258076Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T08:54:25.353499Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.405978Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.467294Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.531837Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T08:54:25.613685Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1732179265674, 1732179265674, 0, 13); 2024-11-21T08:54:25.687569Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715697. Ctx: { TraceId: 01jd6yw24c9kymvkxxkptjny9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NjEyMzM5OTMtNjRhNTA1NTQtZmRkNGExM2UtZWU1ODZlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:54:25.690916Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T08:54:25.690929Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:54:25.690930Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:54:25.690936Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2024-11-21T08:54:25.690962Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439653294750542705:3729], Recipient [9:7439653294750542067:3320]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T08:54:25.690984Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439653294750542704:3729], Recipient [9:7439653294750542067:3320]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2024-11-21T08:54:25.691001Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7439653294750542067:3320], Recipient [9:7439653294750542704:3729]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T08:54:25.691005Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T08:54:25.691023Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439653294750542704:3729], Recipient [9:7439653294750542067:3320]: NActors::TEvents::TEvPoison 2024-11-21T08:54:25.691069Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439653268980736250:2049], Recipient [9:7439653294750542704:3729]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T08:54:25.691082Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2024-11-21T08:54:25.691474Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439653268980736473:2256], Recipient [9:7439653294750542704:3729]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=MmUzNDc0NjUtNzY1MzA2NTItODMwZjJkODktNTE1NGJiYzA=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T08:54:25.691483Z node 9 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2024-11-21T08:54:25.704748Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439653268980736473:2256], Recipient [9:7439653294750542704:3729]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=MmUzNDc0NjUtNzY1MzA2NTItODMwZjJkODktNTE1NGJiYzA=" PreparedQuery: "786609d6-77b8f805-a68cd1d1-e384fcbc" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd6yw256dz1263hdjj6kcpcv" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732179265674 } items { uint64_value: 1732179265674 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 7 2024-11-21T08:54:25.704804Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2024-11-21T08:54:25.704809Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen 2024-11-21T08:54:25.704823Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7439653294750542704:3729] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2024-11-21T08:54:25.720464Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715704. Ctx: { TraceId: 01jd6yw25abgjzdmszwh46yqe9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NTYyYWM4YmQtZmE0MjM2YjEtYzkxODc4MTgtZjhkZTYzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:03.626832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:03.626848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.626851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:03.626854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:03.626863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:03.626865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:03.626871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:03.626917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:03.634000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:03.634011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:03.635770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:03.636273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:03.636294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:03.637149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:03.637303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:03.637380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.637440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:03.638170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.638370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.638375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.638401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:03.638406Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.638410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:03.638418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.639176Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:03.650818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:03.650878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.650917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:03.650959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:03.650964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.651603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.651621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:03.651662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.651669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:03.651672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:03.651675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:03.651952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.651957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:03.651960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:03.652180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.652186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.652190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.652195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.652569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:03.652853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:03.652887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:03.653000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:03.653014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:03.653019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.653052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:03.653056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:03.653076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.653084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:03.653359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:03.653363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:03.653392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:03.653395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:03.653468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:03.653472Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:03.653480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:03.653483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.653486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:03.653490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:03.653493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:03.653495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:03.653501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:03.653505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:03.653509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:03.653699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.653707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:03.653710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:03.653713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:03.653715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:03.653723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:26.235656Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T08:54:26.236316Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:54:26.236341Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:54:26.236347Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T08:54:26.236382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:54:26.236389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1232:3086] TestWaitNotification: OK eventTxId 102 2024-11-21T08:54:26.236792Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T08:54:26.236834Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2024-11-21T08:54:26.237054Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute 
DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T08:54:26.237136Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2024-11-21T08:54:26.237450Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:26.237508Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 81us result status StatusSuccess 2024-11-21T08:54:26.237647Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T08:54:26.237949Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:26.238000Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 58us result status StatusSuccess 2024-11-21T08:54:26.238182Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> DataShardReadIteratorSysTables::ShouldNotAllowArrow >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> KqpScripting::ScanQueryInvalid >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert >> KqpYql::EvaluateExprPgNull >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:54:07.522425Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.522444Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.525392Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.527259Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2024-11-21T08:54:07.527423Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:54:07.527865Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:54:07.528379Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:54:07.528634Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.535327Z node 1 :PERSQUEUE INFO: new Cookie default|de0b8d0c-738ec5a5-b897f408-b756a642_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:216:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:219:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:220:2057] recipient: [1:218:2222] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:221:2223] sender: [1:222:2057] recipient: [1:218:2222] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.550842Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.550856Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:07.550915Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, 
State: StateInit] bootstrapping 0 [1:272:2266] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:07.551270Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:273:2267] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:07.552297Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:273:2267] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:07.559927Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:272:2266] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:221:2223] sender: [1:299:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T08:54:07.796130Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.796152Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.799668Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.799868Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "user1" Generation: 2 Important: true } 2024-11-21T08:54:07.799985Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T08:54:07.800626Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 
generation 2 [2:184:2197] 2024-11-21T08:54:07.801183Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T08:54:07.801658Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:203:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:206:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:207:2057] recipient: [2:205:2211] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:208:2212] sender: [2:209:2057] recipient: [2:205:2211] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.811661Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.811678Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:07.811775Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:263:2259] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:07.812158Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:264:2260] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:07.813355Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:263:2259] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:07.813445Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:264:2260] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:208:2212] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents: ... n: 0, State: StateIdle] Reading cookie 2. Send blob request. 2024-11-21T08:54:26.941288Z node 54 :PERSQUEUE DEBUG: No blob in L1. Partition 0 offset 12 actorID [54:530:2504] 2024-11-21T08:54:26.941293Z node 54 :PERSQUEUE DEBUG: Reading cookie 2. Have to read 1 of 1 from KV 2024-11-21T08:54:26.941316Z node 54 :PERSQUEUE DEBUG: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 12 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:26.941699Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:54:26.941752Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T08:54:26.941789Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 57 actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 57 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } 2024-11-21T08:54:26.944674Z node 54 :PERSQUEUE DEBUG: Got results. 1 of 1 from KV. Status 1 2024-11-21T08:54:26.944686Z node 54 :PERSQUEUE DEBUG: Got results. result 0 from KV. Status 0 2024-11-21T08:54:26.944691Z node 54 :PERSQUEUE DEBUG: Prefetched blob in L1. Partition 0 offset 12 count 6 size 6292734 actorID [54:530:2504] 2024-11-21T08:54:26.944718Z node 54 :PERSQUEUE DEBUG: FormAnswer 1 2024-11-21T08:54:26.944866Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2024-11-21T08:54:26.944994Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2024-11-21T08:54:26.945006Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2024-11-21T08:54:26.945034Z node 54 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 size 6292734 2024-11-21T08:54:26.945043Z node 54 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user another1 readTimeStamp done, result 282 queuesize 0 startOffset 12 2024-11-21T08:54:26.945139Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.945145Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:631:2581], now have 1 active actors on pipe 2024-11-21T08:54:26.945416Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.945424Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:636:2585], now have 1 active actors on pipe 2024-11-21T08:54:26.945444Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:54:26.945496Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 58(current 57) received from actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:26.946250Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } 2024-11-21T08:54:26.946260Z node 54 :PERSQUEUE 
NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:26.946302Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 58 done 2024-11-21T08:54:26.946336Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 58 done 2024-11-21T08:54:26.946353Z node 54 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:54:26.946418Z node 54 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:26.947338Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:54:26.947387Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T08:54:26.947535Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:54:26.947565Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T08:54:26.947597Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 58 actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } 2024-11-21T08:54:26.947687Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.947692Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:654:2600], now have 1 active actors on pipe 2024-11-21T08:54:26.947957Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.947967Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:659:2604], now have 1 active actors on pipe 2024-11-21T08:54:26.947984Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:54:26.947990Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T08:54:26.948014Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:54:26.948074Z node 54 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.948080Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:661:2606], now have 1 active actors on pipe 2024-11-21T08:54:26.948092Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:54:26.948095Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T08:54:26.948107Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:54:26.948166Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.948169Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:663:2608], now have 1 active actors on pipe 2024-11-21T08:54:26.948186Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:54:26.948193Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T08:54:26.948223Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:54:26.948287Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:26.948292Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:665:2610], now have 1 active actors on pipe 2024-11-21T08:54:26.948302Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T08:54:26.948306Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T08:54:26.948317Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpYql::TestUuidDefaultColumn >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> KqpYql::InsertIgnore >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 20402, MsgBus: 25154 2024-11-21T08:54:27.161904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653305056926985:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:27.162220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004997/r3tmp/tmpTKc1Y9/pdisk_1.dat 2024-11-21T08:54:27.205746Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20402, node 1 2024-11-21T08:54:27.223389Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.223404Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.223405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.223442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25154 TClient is connected to server localhost:25154 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:54:27.262076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.262106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:54:27.263170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:27.288195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.416472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653305056927583:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.416514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.447590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.514734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653305056927691:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.514788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.514794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653305056927696:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.515607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:27.517890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653305056927698:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpScripting::ScriptingCreateAndAlterTableTest >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> KqpYql::TestUuidDefaultColumn [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> KqpScripting::ScanQueryTruncate [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> KqpYql::EvaluateExprYsonAndType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 20369, MsgBus: 9544 2024-11-21T08:54:27.728695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653303700272441:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:27.728752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00498a/r3tmp/tmp9oNXGy/pdisk_1.dat 2024-11-21T08:54:27.774428Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20369, node 1 2024-11-21T08:54:27.793356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.793370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.793372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.793411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9544 TClient is connected to server localhost:9544 2024-11-21T08:54:27.828951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.828979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:54:27.830175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:27.840139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:28.008354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653307995240335:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.008390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.043749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.102293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653307995240435:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653307995240440:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:28.106061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653307995240442:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 7381, MsgBus: 6121 2024-11-21T08:54:26.981418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653300673072000:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:26.981624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00499f/r3tmp/tmpVHpD6R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7381, node 1 2024-11-21T08:54:27.033202Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:27.043196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.043209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.043210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.043237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6121 TClient is connected to server localhost:6121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:27.081868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.081902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:27.082941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:27.086545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.099109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:27.113382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.126668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.134124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.222615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653304968040825:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.222635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.246763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.251474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.259688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.267146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.273894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.280690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.289545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653304968041317:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.289568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.289594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653304968041322:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.290232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:27.293964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653304968041324:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:27.463615Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653304968041633:2462], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2024-11-21T08:54:27.463701Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzJiODRlMjEtNTBjMmFlODMtNjVkMDgzNS1jNGU5MDk5MQ==, ActorId: [1:7439653304968041631:2461], ActorState: ExecuteState, TraceId: 01jd6yw3w50vkmvffc5rhk7h9d, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2024-11-21T08:54:27.474157Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653304968041665:2475], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2024-11-21T08:54:27.474212Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2IwOGU2MWEtMzBlYTE5MjQtOWVlMjg5ZWEtOTRhMDQ1NjQ=, ActorId: [1:7439653304968041663:2474], ActorState: ExecuteState, TraceId: 01jd6yw3wg72ec22xf0gqt0fj8, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 9144, MsgBus: 61003 2024-11-21T08:54:27.662508Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653306721712080:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:27.662522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00499f/r3tmp/tmpQoxNbN/pdisk_1.dat 2024-11-21T08:54:27.672790Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9144, node 2 2024-11-21T08:54:27.683664Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.683683Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.683686Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.683740Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61003 TClient is connected to server localhost:61003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:27.760334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.760385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:27.761806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:27.765269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.775442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.785820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:27.805601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.817202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.945034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653306721713612:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.945061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.951141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.958820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.014229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.069134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.079640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.093791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.102092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311016681426:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311016681431:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.102708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:28.106124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653311016681433:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:28.302447Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439653311016681815:2057], tablet: [2:7439653306721712835:2303], scanId: 3, table: /Root/EightShard 2024-11-21T08:54:28.303632Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179268344, txId: 281474976715671] shutting down >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 6523, MsgBus: 19006 2024-11-21T08:54:27.310556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653302915405729:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:27.310739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00498c/r3tmp/tmpGT7hin/pdisk_1.dat 2024-11-21T08:54:27.358763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6523, node 1 2024-11-21T08:54:27.376637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.376656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.376658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.376700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19006 TClient is connected to server localhost:19006 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:54:27.410902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.410935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:54:27.411983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:27.436911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:27.441159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.456407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.473088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.485360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:27.581368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653302915407288:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.581394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.615805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.622359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.632335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.638130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.644876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.652552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:27.668355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653302915407794:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.668378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.672336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653302915407799:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:27.673162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:27.674400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653302915407801:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 3157, MsgBus: 19344 2024-11-21T08:54:28.008312Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653308707463769:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:28.008327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00498c/r3tmp/tmpDfsOlR/pdisk_1.dat 2024-11-21T08:54:28.018655Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3157, node 2 2024-11-21T08:54:28.024180Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:28.024190Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:28.024191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:28.024222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19344 TClient is connected to server localhost:19344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:28.108757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:28.108784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:28.109848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:28.110505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.120203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.127162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:28.141192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.149803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.243284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653308707465312:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.243348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.245820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.251787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.306304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.317078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.324705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.331435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.339674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653308707465821:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.339698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.339747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653308707465826:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.340286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:28.344393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653308707465828:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> KqpYql::BinaryJsonOffsetBound >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpYql::JsonCast [GOOD] >> KqpScripting::SecondaryIndexes >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> GroupWriteTest::WithRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 5615, MsgBus: 3302 2024-11-21T08:54:27.878276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653304251968132:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:27.878313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004987/r3tmp/tmpvp5tFl/pdisk_1.dat 2024-11-21T08:54:27.930723Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5615, node 1 2024-11-21T08:54:27.947509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:27.947524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:27.947526Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:27.947564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3302 2024-11-21T08:54:27.978465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:27.978490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:27.979576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:27.993177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.004593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.064647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.083671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.093842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.123621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653308546936966:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.123647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.164742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.219509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.226140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.232564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.286973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.296008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.304873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653308546937485:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.304898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.304957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653308546937490:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.305486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:28.309274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653308546937492:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 17185, MsgBus: 6133 2024-11-21T08:54:28.775791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653306969648158:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:28.775810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004987/r3tmp/tmpR5Rb7x/pdisk_1.dat 2024-11-21T08:54:28.785989Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17185, node 2 2024-11-21T08:54:28.790619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:28.790634Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:28.790636Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:28.790672Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6133 TClient is connected to server localhost:6133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:28.877012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:28.877046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:28.878122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:28.878298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.879125Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:28.888562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.897933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:28.916401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.927590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.039759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311264616995:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.040947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.041799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.047962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.059684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.065478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.072925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.079909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.088965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311264617489:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.088988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.089044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311264617494:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.089625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:29.093395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653311264617496:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [[#]] >> GroupWriteTest::ByTableName >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> GroupWriteTest::WithRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2024-11-21T08:54:30.074284Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T08:54:30.074473Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T08:54:30.074543Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074604Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [1:35:2058], tablet id = 2, status = OK 2024-11-21T08:54:30.074611Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:35:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074633Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T08:54:30.074669Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [1:36:2059], tablet id = 3, status = OK 2024-11-21T08:54:30.074675Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:36:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074740Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T08:54:30.074767Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T08:54:30.074792Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T08:54:30.074805Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T08:54:30.074808Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074812Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T08:54:30.074824Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [2:40:2056], tablet id = 4, status = OK 2024-11-21T08:54:30.074831Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:40:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074845Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T08:54:30.074852Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [3:43:2056], tablet id = 5, status = OK 2024-11-21T08:54:30.074858Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074862Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 
2024-11-21T08:54:30.074870Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T08:54:30.074873Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074878Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T08:54:30.074882Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T08:54:30.074892Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T08:54:30.074895Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T08:54:30.074906Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074910Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [4:45:2056], tablet id = 6, status = OK 2024-11-21T08:54:30.074915Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:45:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T08:54:30.074919Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T08:54:30.074924Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074932Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T08:54:30.074937Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:54:30.074948Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T08:54:30.074951Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T08:54:30.074986Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T08:54:30.075006Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T08:54:30.075011Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T08:54:30.075026Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T08:54:30.075031Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> KqpScripting::SecondaryIndexes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 13580020797403645845 2024-11-21T08:54:29.886852Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T08:54:29.889926Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T08:54:29.889943Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T08:54:29.890383Z 1 
00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T08:54:29.900077Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:29.900524Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T08:54:30.391517Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T08:54:30.391540Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:30.391546Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T08:54:30.391549Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:30.397678Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2024-11-21T08:54:30.397702Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> TSchemeShardLoginTest::BasicLogin >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] >> KqpYql::AnsiIn [GOOD] >> TSchemeShardLoginTest::BasicLogin [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 26501, MsgBus: 5930 2024-11-21T08:54:28.462314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653309149542640:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:28.462376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004984/r3tmp/tmpOZBgyN/pdisk_1.dat 2024-11-21T08:54:28.510533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26501, node 1 2024-11-21T08:54:28.531394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:28.531407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T08:54:28.531408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:28.531441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5930 2024-11-21T08:54:28.562827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:28.562860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:28.563861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:28.587685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:54:28.591518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.609645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:28.628927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:54:28.641458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.750937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653309149544174:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.750982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.782335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.837484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.849687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.863773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.869919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.877334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:28.886899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653309149544692:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.886922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.886971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653309149544697:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:28.887827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:28.890369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653309149544699:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:29.047605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.105452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179269149, txId: 281474976715673] shutting down 2024-11-21T08:54:29.111139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.120789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.129698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.136427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.148443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.157753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.203971Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179269247, txId: 281474976715682] shutting down 2024-11-21T08:54:29.210407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.216736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.250042Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179269296, txId: 281474976715687] shutting down Trying to start YDB, gRPC: 23914, MsgBus: 24927 2024-11-21T08:54:29.522999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653311694471239:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:29.523159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004984/r3tmp/tmpRNhQvR/pdisk_1.dat 2024-11-21T08:54:29.531258Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23914, node 2 2024-11-21T08:54:29.539751Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:29.539761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2024-11-21T08:54:29.539762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:29.539786Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24927 TClient is connected to server localhost:24927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:29.623099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:29.623127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:29.624195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:29.625539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.638138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.646562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.664383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.676062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.792466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311694472771:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.792497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.797875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.805857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.860957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.916473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.927479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.933791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.942891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311694473289:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.942915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.942928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653311694473294:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.943402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:29.947136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653311694473296:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:30.087737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.095579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.110204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132179815.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112179815.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112178615.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2024-11-21T08:53:35.907342Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T08:53:35.923785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:53:35.925472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:53:35.925502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:53:35.925556Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T08:53:35.926064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:35.926093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:35.926121Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:35.926133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:35.926145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:35.926156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:35.926171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:35.926187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:35.926203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:35.926235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.926251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:35.926267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:35.929640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:53:35.929659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T08:53:35.930873Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T08:53:35.930940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:53:35.930949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:53:35.930976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:35.931052Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:35.931063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:35.931067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:53:35.931073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:53:35.931080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:35.931085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:35.931088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:53:35.931099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:35.931104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:35.931109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:35.931112Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:53:35.931118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:53:35.931122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:35.931128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:35.931131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:53:35.931139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:35.931143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:35.931146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T08:53:35.931153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:35.931158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:35.931160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:53:35.931189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T08:53:35.931196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T08:53:35.931203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T08:53:35.931211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T08:53:35.931228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:53:35.931234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:53:35.931237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T08:53:35.931253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:53:35.931258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.931260Z node 1 :TX_COLUM ... 
m_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T08:54:30.436641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26668; 2024-11-21T08:54:30.436644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=timestamp; 2024-11-21T08:54:30.436656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:669:2674] send ScanData to [1:668:2673] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:669:2674]->[1:668:2673] 2024-11-21T08:54:30.436663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T08:54:30.436673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T08:54:30.436718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:54:30.436724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T08:54:30.436730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26664; 2024-11-21T08:54:30.436733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=timestamp; 2024-11-21T08:54:30.436747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:669:2674] send ScanData to [1:668:2673] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:669:2674]->[1:668:2673] 2024-11-21T08:54:30.436753Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2024-11-21T08:54:30.436812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:54:30.436818Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:54:30.436824Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:669:2674] finished for tablet 9437184 2024-11-21T08:54:30.436835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:669:2674] send ScanData to [1:668:2673] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:54:30.436885Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:669:2674] and sent to [1:668:2673] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.021}],"full":{"a":1732179270415014,"name":"_full_task","f":1732179270415014,"d_finished":0,"c":0,"l":1732179270436841,"d":21827},"events":[{"name":"bootstrap","f":1732179270415052,"d_finished":1045,"c":1,"l":1732179270416097,"d":1045},{"a":1732179270436811,"name":"ack","f":1732179270436136,"d_finished":387,"c":6,"l":1732179270436764,"d":417},{"a":1732179270436810,"name":"processing","f":1732179270416255,"d_finished":1378,"c":48,"l":1732179270436764,"d":1409},{"name":"ProduceResults","f":1732179270415485,"d_finished":863,"c":56,"l":1732179270436823,"d":863},{"a":1732179270436823,"name":"Finish","f":1732179270436823,"d_finished":0,"c":0,"l":1732179270436841,"d":18},{"name":"task_result","f":1732179270416257,"d_finished":940,"c":42,"l":1732179270436112,"d":940}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:669:2674]->[1:668:2673] 2024-11-21T08:54:30.436897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:54:30.414923Z;index_granules=0;index_portions=6;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9495312;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9495312;selected_rows=0; 2024-11-21T08:54:30.436900Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:54:30.436914Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.019087s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.025055s;size=7.584e-06;details={columns=1;};};]};; 2024-11-21T08:54:30.436919Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:669:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 160000/9495312 >> THealthCheckTest::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 26986, MsgBus: 23742 2024-11-21T08:54:29.267642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653312279639351:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:29.267672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004975/r3tmp/tmpqUI7Nq/pdisk_1.dat 2024-11-21T08:54:29.316938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26986, node 1 2024-11-21T08:54:29.326865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:29.326879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:29.326881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:29.326914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23742 TClient is connected to server localhost:23742 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:29.367967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:29.367985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:29.369107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:29.371008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.382263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.443218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.464899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.476650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:29.539914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653312279640904:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.539935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.571561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.578933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.634272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.647479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.654745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.669109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:29.684573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653312279641409:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.684598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653312279641414:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.684606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:29.685309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:29.688505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653312279641416:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:29.882553Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=ZTRhZDNmNTctNjFkMDNkOTUtNmI2NGZhMmYtMjBhMjg4OTI=, ActorId: [1:7439653312279641712:2454], ActorState: ExecuteState, TraceId: 01jd6yw67mbjwpt8rch79026tk, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2024-11-21T08:54:29.882571Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTRhZDNmNTctNjFkMDNkOTUtNmI2NGZhMmYtMjBhMjg4OTI=, ActorId: [1:7439653312279641712:2454], ActorState: ExecuteState, TraceId: 01jd6yw67mbjwpt8rch79026tk, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 23336, MsgBus: 18897 2024-11-21T08:54:30.157556Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653318757260685:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:30.157586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004975/r3tmp/tmpKLGMuM/pdisk_1.dat 2024-11-21T08:54:30.167520Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23336, node 2 2024-11-21T08:54:30.177216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:30.177230Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:30.177232Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:30.177283Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18897 TClient is connected to server localhost:18897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:30.257958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:30.257999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:30.259027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:30.260340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:30.267529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:30.275171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:30.289999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:30.298985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:30.460033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653318757262236:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:30.460067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:30.464143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.470349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.480247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.534650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.589206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.599209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:30.607838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653318757262745:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:30.607869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:30.607871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653318757262750:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:30.608423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:30.612389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653318757262752:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BasicLogin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:30.929877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:30.929900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:30.929904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:30.929907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:30.929922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:30.929924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:30.929932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:30.930006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:30.937652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:30.937673Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:30.939917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:30.940586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:30.940617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:30.941653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:30.941802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:30.941879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:30.941942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:30.942667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:30.942919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:30.942926Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:30.942958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:30.942963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:30.942967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:30.942976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.943910Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:30.955378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:30.955462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.955513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:30.955553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:30.955578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:30.956364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:30.956374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:30.956378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:30.956692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:30.956959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.956966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:54:30.956970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:30.956975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:30.957390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:30.957670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:30.957709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:30.957838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:30.957855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:30.957873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:30.957913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:30.957918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:30.957944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:30.957952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:30.958245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:30.958249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:30.958281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:30.958285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:30.958353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:30.958357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:30.958367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:30.958370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T08:54:30.958374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:30.958377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:30.958380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:30.958383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:30.958390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:30.958394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:30.958397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:30.958590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:30.958599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:30.958602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:30.958605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:30.958608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:30.958618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:54:30.959051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:54:30.959116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T08:54:30.959211Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T08:54:30.960094Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T08:54:30.960601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:30.963586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:30.963614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:54:30.963617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:54:30.963629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:30.963636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:54:30.963639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:54:30.963642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:54:30.963646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T08:54:30.963648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T08:54:30.963790Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:54:30.964386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:30.964418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T08:54:30.964476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:30.964482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:30.964516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:30.964522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:54:30.964697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:54:30.964710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:54:30.964715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:54:30.964723Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T08:54:30.964727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:30.964743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:54:30.964813Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:54:30.965064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T08:54:30.965122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T08:54:30.965127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T08:54:31.036779Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjIyNDcxLCJpYXQiOjE3MzIxNzkyNzEsInN1YiI6InVzZXIxIn0.wUBPpN69zQCxTQj8C85MeCmuqI4RzLiq8ezOuyAz0-cNmI6ff_uOOo_RDlmtddT-CiQ6_BdIBKF68QNWs3r5l5qVl0hkXFVj6U4T2CgfGYgPbutUVITvl6dFlgdCd_970OnRIWRg1isjpXe9p35GJ4sLfpmQeVuIRr83ymrnWuDHzHpI8JufqpRGu8DZ3pXUM09TNu_HtdqN-aXLcyWKYHUtTTBKBizsA_Iup6bllQ64P454SB-XNUCe0nNKXdhH9nvStyUhXxkHilL8GZcM6cR-1HApWVJvwMH8B7CPA3Nos5nB3X7EIA6VM1ge0_PPWosVzHG5je9S7cYpDAKEyg", at schemeshard: 72057594046678944 2024-11-21T08:54:31.036854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:31.036862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:31.036918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:31.036924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T08:54:31.037053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T08:54:31.037131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:31.037154Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status StatusSuccess 2024-11-21T08:54:31.037288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1OYM7LCDON54Z9K3MSQ/\ntd73rF+IL+eqJKR87NkPGncuPaxj+wg4UbvbZ4pcUcZzxo7rToLrhAD9oSjVLf4I\nNKw+gYb9Kq3tbfpLF5F03vAaeepkk85Y7+rCN5UKz4iWja2n1fTiS9c/ceRSCk9w\nSApQhUK/8C672LEgBpaxv17Q1qsiwzZd4vDA74h/XetL1YLPgHVf3+fYuAbg1DhT\nkrqcKrdjz8JBpIZgyFAgIg+pFMqdpXcvxDLV9pPIE1JtLQMulkQvu4agB3uznZkO\nX6E0tqmhG6D2m/Ym2Gt7QiUo4bYIM92aU3Mx1bh4IHQF9XcKjh1PBSGhs2os1ppl\nAwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732265671031 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132179818.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132179818.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132179818.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132179818.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132179818.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112179818.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132179818.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132179818.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112178618.000000s;Name=;Codec=}; 2024-11-21T08:53:38.328571Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T08:53:38.353615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:53:38.356579Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:53:38.356617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:53:38.356682Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T08:53:38.357392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:38.357449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:38.357495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:38.357513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:38.357530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:38.357547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:38.357563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:38.357581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:38.357600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:38.357629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:38.357646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:38.357664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:38.362180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:53:38.362206Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T08:53:38.363777Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T08:53:38.363913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:53:38.363928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:53:38.363970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:38.364098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:38.364119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:38.364126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:53:38.364140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:53:38.364150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:38.364158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:38.364163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:53:38.364182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:38.364191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:38.364199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:38.364227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:53:38.364239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:53:38.364246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:38.364255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:38.364260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:53:38.364274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:38.364282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:38.364287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:53:38.364299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:38.364308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:38.364313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:53:38.364353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2024-11-21T08:53:38.364378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=20; 2024-11-21T08:53:38.364387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=6; 2024-11-21T08:53:38.364400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2024-11-21T08:53:38.364425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTx ... 
CHARGE:tx_controllerLoadingTime=5; 2024-11-21T08:54:30.859298Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T08:54:30.859303Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2024-11-21T08:54:30.859313Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=6; 2024-11-21T08:54:30.859319Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T08:54:30.859342Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=20; 2024-11-21T08:54:30.859395Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=48; 2024-11-21T08:54:30.859407Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=6; 2024-11-21T08:54:30.859415Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=3; 2024-11-21T08:54:30.859422Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T08:54:30.859427Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T08:54:30.859433Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T08:54:30.859447Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2024-11-21T08:54:30.859454Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T08:54:30.859466Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2024-11-21T08:54:30.859472Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T08:54:30.859481Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T08:54:30.859486Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1692; 2024-11-21T08:54:30.859515Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T08:54:30.859537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T08:54:30.859547Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T08:54:30.859553Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T08:54:30.859573Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T08:54:30.859600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T08:54:30.859618Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T08:54:30.859624Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T08:54:30.859642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T08:54:30.859648Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:54:30.859659Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:54:30.859674Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T08:54:30.859688Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T08:54:30.859694Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:54:30.859707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:54:30.859712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:54:30.859718Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:54:30.859735Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:54:30.859823Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:54:30.859970Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1488:3386];tablet_id=9437184;parent=[1:1439:3344];fline=manager.h:99;event=ask_data;request=request_id=114;1={portions_count=15};; 2024-11-21T08:54:30.860156Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T08:54:30.860644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T08:54:30.860669Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:54:30.860672Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T08:54:30.860681Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:54:30.860698Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:54:30.860715Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T08:54:30.860726Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T08:54:30.860731Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:54:30.860740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:54:30.860743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:54:30.860749Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:54:30.860769Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:54:30.860884Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2024-11-21T08:54:30.860993Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2024-11-21T08:54:30.861102Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T08:54:30.861107Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1439:3344];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> DataStreams::TestGetShardIterator >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> DataStreams::TestPutRecordsOfAnauthorizedUser >> DataStreams::TestNonChargeableUser >> GroupWriteTest::ByTableName [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateDropRecreate[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:51:46.373431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:51:46.373452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.373457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:51:46.373461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:51:46.373474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:51:46.373477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:51:46.373486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:51:46.373572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:51:46.384961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:51:46.384980Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.386938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:51:46.387028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:51:46.387050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:51:46.389195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:51:46.389260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:51:46.389374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.389529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.390045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.390259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.390267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.390278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:51:46.390284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.390289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:51:46.390317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:51:46.391338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:51:46.415009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:51:46.415069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.415112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:51:46.415151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:51:46.415159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.416340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.416365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:51:46.416397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.416406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:51:46.416410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:51:46.416414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:51:46.416843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.416854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:51:46.416858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:51:46.417140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.417148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.417153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.417158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.417768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:51:46.418129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:51:46.418171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:51:46.418358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:51:46.418384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:51:46.418391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.418437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:51:46.418443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:51:46.418466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:51:46.418477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:51:46.418826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:51:46.418839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:51:46.418868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:51:46.418873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:51:46.418930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:51:46.418936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:51:46.418945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:51:46.418948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.418953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:51:46.418957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:51:46.418961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:51:46.418964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:51:46.418975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T08:51:46.418979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:51:46.418983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... eState::TProposedWaitParts operationId# 1005:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:54:22.281561Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1005:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 324 RawX2: 1451698948360 } Origin: 72075186233409546 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:54:22.281568Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1005:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:22.281570Z node 338 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T08:54:22.281573Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:54:22.281576Z node 338 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:1 129 -> 240 2024-11-21T08:54:22.281594Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:54:22.281651Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [338:201:2204], Recipient [338:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 2 } 2024-11-21T08:54:22.281654Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:54:22.281659Z node 338 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:54:22.281664Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:54:22.281666Z node 338 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:54:22.281668Z node 338 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T08:54:22.281670Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T08:54:22.281675Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 2/3, is published: true 2024-11-21T08:54:22.281676Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:54:22.282356Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282414Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282517Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 
72057594046678944 2024-11-21T08:54:22.282532Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:54:22.282534Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282739Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T08:54:22.282750Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282861Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:54:22.282867Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282880Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T08:54:22.282882Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.282886Z node 338 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1005:1 2024-11-21T08:54:22.282902Z node 338 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [338:324:2312] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1005 at schemeshard: 72057594046678944 2024-11-21T08:54:22.282974Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [338:121:2147], Recipient [338:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:54:22.282978Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:54:22.282983Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T08:54:22.282987Z node 338 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:1 ProgressState 2024-11-21T08:54:22.282997Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:54:22.282999Z node 338 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:1 progress is 3/3 2024-11-21T08:54:22.283002Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T08:54:22.283006Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2024-11-21T08:54:22.283014Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [338:397:2372] message: TxId: 1005 2024-11-21T08:54:22.283018Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T08:54:22.283022Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:54:22.283026Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:54:22.283033Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:54:22.283036Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:1 2024-11-21T08:54:22.283038Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:1 2024-11-21T08:54:22.283050Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:54:22.283053Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:2 2024-11-21T08:54:22.283055Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:2 2024-11-21T08:54:22.283061Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T08:54:22.283136Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:54:22.283139Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.283458Z node 338 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:22.283471Z node 338 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [338:397:2372] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1005 at schemeshard: 72057594046678944 2024-11-21T08:54:22.283513Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:54:22.283521Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [338:956:2805] 2024-11-21T08:54:22.283551Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [338:985:2832], Recipient [338:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:54:22.283555Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:54:22.283557Z node 338 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T08:54:22.283564Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [338:958:2807], Recipient [338:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:54:22.283566Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:54:22.283568Z node 338 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T08:54:22.283643Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [338:997:2844], Recipient [338:121:2147]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2024-11-21T08:54:22.283647Z node 338 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:54:22.283655Z node 338 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:54:22.283708Z node 338 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 47us result status StatusSuccess 2024-11-21T08:54:22.283784Z node 338 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409550 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataStreams::TestGetRecordsStreamWithSingleShard >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 16038374643548113879 2024-11-21T08:54:30.013460Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T08:54:30.015741Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T08:54:30.015754Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T08:54:30.016048Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T08:54:30.024579Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:30.025087Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T08:54:31.786120Z 8 00h01m24.010512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:7:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2718 2024-11-21T08:54:32.109112Z 
1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T08:54:32.109136Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:32.109142Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T08:54:32.109145Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T08:54:32.115237Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2024-11-21T08:54:32.115259Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> DataStreams::TestUpdateStorage >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> DataStreams::TestControlPlaneAndMeteringData >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> DataStreams::TestStreamStorageRetention >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> DataStreams::TestDeleteStream >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestListShards1Shard >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> 
DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutRecords >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataStreams::TestListShards1Shard [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> DataStreams::TestPutRecords [GOOD] >> DataStreams::TestPutEmptyMessage >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> GroupReconfiguration::BsControllerConfigurationRequestIsFastEnough [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2024-11-21T08:54:10.175065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:10.175390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:10.175405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d79/r3tmp/tmpmraQZj/pdisk_1.dat 2024-11-21T08:54:10.265577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.280538Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.322401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.322447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.332952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.436096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.448908Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:10.449036Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:10.449090Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:10.449126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:10.454143Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:10.454241Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:10.454256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:10.454350Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:10.454355Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:10.454360Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:10.454386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:10.456557Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:10.456599Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:10.456613Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:10.456616Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:10.456619Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:10.456622Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:10.456685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.456689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.456761Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:10.456769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:10.456777Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.456780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.456784Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:10.456789Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:10.456793Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:10.456798Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:10.456801Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:10.456803Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:10.456807Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:10.456810Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:10.456824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:10.456827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:10.456840Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:10.456881Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:10.456889Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:10.456900Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:10.456905Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:10.456907Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:10.456911Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:10.456914Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.456940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:10.456942Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:10.456945Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:10.456947Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.456953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:10.456955Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:10.456958Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:10.456960Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:10.456963Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:10.457114Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:10.457119Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:10.467311Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:10.467330Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.467335Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.467343Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:10.467352Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:10.640755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.640773Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.640778Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:10.640792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:10.640795Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:10.640833Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:10.640840Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:10.640843Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:10.640846Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:10.641438Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:10.641452Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:10.641538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.641542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.641546Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:10.641551Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:10.641554Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:10.641559Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:34.754026Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:34.754028Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:34.754043Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:34.754045Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:34.754048Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:54:34.754050Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:34.754052Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:34.754054Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T08:54:34.754056Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T08:54:34.754058Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:54:34.754060Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T08:54:34.754062Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T08:54:34.754064Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T08:54:34.754075Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T08:54:34.754077Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:54:34.754079Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T08:54:34.754081Z node 14 :TX_DATASHARD TRACE: Add 
[3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:34.754083Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:54:34.754085Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T08:54:34.754087Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T08:54:34.754089Z node 14 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T08:54:34.754092Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:54:34.754094Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:34.754095Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T08:54:34.754097Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T08:54:34.754106Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T08:54:34.754108Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T08:54:34.754111Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T08:54:34.754113Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T08:54:34.754115Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:54:34.754117Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T08:54:34.754118Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:54:34.754120Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:34.754136Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T08:54:34.754139Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:54:34.754141Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:54:34.754143Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:54:34.754145Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:54:34.754147Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:54:34.754149Z node 14 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T08:54:34.754151Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:34.754153Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:34.754154Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:54:34.754156Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:54:34.774616Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T08:54:34.774633Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T08:54:34.774644Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:34.774650Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:54:34.774666Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:34.774673Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:34.774737Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T08:54:34.774740Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T08:54:34.774744Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:54:34.774748Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:34.774752Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:34.774758Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:54:34.774994Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2024-11-21T08:54:34.775008Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:34.775016Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:34.775028Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:34.775032Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:34.775035Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:34.775037Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:34.775043Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T08:54:34.775046Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2024-11-21T08:54:34.775048Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:34.775050Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:34.775052Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:34.775061Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:34.775111Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T08:54:34.775116Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 1 2024-11-21T08:54:34.775121Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:34.775146Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} finished in read 2024-11-21T08:54:34.775153Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:34.775155Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:34.775157Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:34.775159Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:34.775166Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:34.775168Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:34.775170Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T08:54:34.775173Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:54:34.775185Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation |89.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc >> 
DataStreams::TestShardPagination [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> GroupReconfiguration::BsControllerConfigurationRequestIsFastEnough [GOOD] Test command err: RandomSeed# 3145438660856764188 2024-11-21T08:51:23.459936Z 9 00h01m40.010512s :BS_PROXY_PUT ERROR: [b00b2ed053ee17ae] Result# TEvPutResult {Id# [1:1:1:1:123:1000:0] Status# DEADLINE StatusFlags# { } ErrorReason# "Deadline timer hit" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 numNodes# 12 numGroups# 1 resetToNone# false numDecommitted# 0 nodeId# 1 pdiskId# 1000 2024-11-21T08:51:23.640746Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:23.641138Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8499291551665300963] 2024-11-21T08:51:23.641952Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 nodeId# 2 pdiskId# 1000 2024-11-21T08:51:23.775312Z 11 00h02m00.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:23.775557Z 11 00h02m00.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8113867273151811786] 2024-11-21T08:51:23.776059Z 11 00h02m00.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 3 pdiskId# 1000 2024-11-21T08:51:23.926978Z 12 00h03m30.015632s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:23.927266Z 12 00h03m30.015632s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6716492704343206488] 2024-11-21T08:51:23.927748Z 12 00h03m30.015632s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 numNodes# 12 numGroups# 1 resetToNone# false numDecommitted# 1 2024-11-21T08:51:24.248408Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:24.248726Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5116856655882237169] 2024-11-21T08:51:24.249481Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 nodeId# 1 pdiskId# 1000 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 nodeId# 2 pdiskId# 1000 2024-11-21T08:51:24.517328Z 11 00h03m00.062048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:24.517753Z 11 00h03m00.062048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2504803555289466810] 2024-11-21T08:51:24.518534Z 11 00h03m00.062048s 
:BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 3 pdiskId# 1000 2024-11-21T08:51:24.704038Z 12 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:24.704445Z 12 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5536245947528496733] 2024-11-21T08:51:24.705151Z 12 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 numNodes# 12 numGroups# 1 resetToNone# false numDecommitted# 2 2024-11-21T08:51:25.032304Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:25.032686Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7489636411293944975] 2024-11-21T08:51:25.033439Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:51:25.129856Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:25.130189Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15418269733727479592] 2024-11-21T08:51:25.130807Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 nodeId# 1 pdiskId# 1000 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 2 pdiskId# 1000 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 3 pdiskId# 1000 2024-11-21T08:51:25.559825Z 12 00h05m30.064096s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:25.560287Z 12 00h05m30.064096s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12120769628898409273] 2024-11-21T08:51:25.561052Z 12 00h05m30.064096s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 numNodes# 12 numGroups# 1 resetToNone# false numDecommitted# 3 2024-11-21T08:51:25.914703Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:25.915084Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1814145264493906085] 2024-11-21T08:51:25.915765Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:51:26.027868Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:26.028148Z 11 00h01m30.013584s :BS_SYNCER ERROR: 
VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7661046597346294476] 2024-11-21T08:51:26.028631Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:51:26.134285Z 12 00h02m30.014608s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:26.134535Z 12 00h02m30.014608s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1420362439799979103] 2024-11-21T08:51:26.135019Z 12 00h02m30.014608s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 nodeId# 1 pdiskId# 1000 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 nodeId# 2 pdiskId# 1000 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 nodeId# 3 pdiskId# 1000 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 12:1000 0 2 numNodes# 12 numGroups# 1 resetToNone# true numDecommitted# 0 nodeId# 1 pdiskId# 1000 1:1000 0 0 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 nodeId# 2 pdiskId# 1000 1:1000 0 0 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 nodeId# 3 pdiskId# 1000 1:1000 0 0 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 numNodes# 12 numGroups# 1 resetToNone# true numDecommitted# 1 2024-11-21T08:51:27.350051Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:27.350330Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 408171048613861987] 2024-11-21T08:51:27.351019Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 nodeId# 1 pdiskId# 1000 2:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 nodeId# 2 pdiskId# 1000 2024-11-21T08:51:27.596125Z 1 00h03m00.062048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:27.596589Z 1 00h03m00.062048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 936852037962662170] 2024-11-21T08:51:27.597347Z 1 00h03m00.062048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1:1000 0 1 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 nodeId# 3 pdiskId# 1000 2024-11-21T08:51:27.761771Z 2 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:27.762169Z 2 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14821465535576483155] 2024-11-21T08:51:27.762851Z 2 00h04m30.063584s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1:1000 0 1 2:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 numNodes# 12 numGroups# 1 resetToNone# true numDecommitted# 2 
2024-11-21T08:51:28.061154Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:28.061416Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8086061590891212680] 2024-11-21T08:51:28.061884Z 10 00h00m30.012560s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:51:28.169433Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:28.169791Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8433559240357235046] 2024-11-21T08:51:28.170446Z 11 00h01m30.013584s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 nodeId# 1 pdiskId# 1000 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 2 pdiskId# 1000 3:1000 0 2 4:1000 1 0 5:1000 1 1 6:1000 1 2 7:1000 2 0 8:1000 2 1 9:1000 2 2 10:1000 0 0 11:1000 0 1 nodeId# 3 pdiskId# 1000 2024-11-21T08:51:28.638090Z 12 00h05m30.064096s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:51:28.638493Z 12 00h05m30.064096s :BS_SYNCER ERROR: VDISK[82000000:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# ... ULLRECS ERROR: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.526576Z 4 00h00m00.010512s :BS_HULLRECS ERROR: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.526582Z 5 00h00m00.010512s :BS_HULLRECS ERROR: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.526587Z 6 00h00m00.010512s :BS_HULLRECS ERROR: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.526593Z 7 00h00m00.010512s :BS_HULLRECS ERROR: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.526601Z 8 00h00m00.010512s :BS_HULLRECS ERROR: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: empty garbage collection command TabletId# 1 2024-11-21T08:54:09.608301Z 1 00h00m06.060512s :BS_PROXY_GET ERROR: [59e769d48515e6f2] Response# TEvGetResult {Status# BLOCKED ResponseSz# 1 {[1:1:0:0:0:100:0] BLOCKED Size# 0} ErrorReason# "status# BLOCKED from# [82000000:1:0:0:0]"} Marker# BPG29 2024-11-21T08:54:09.692976Z 1 00h00m06.060512s :BS_PROXY_GET ERROR: [e9a62ad7e692b361] Response# TEvGetResult {Status# BLOCKED ResponseSz# 1 {[1:1:0:0:0:100:0] BLOCKED Size# 0} ErrorReason# "status# BLOCKED from# [82000000:1:0:0:0]"} Marker# BPG29 GroupId# 2181038084 Disks from different fail realms occupy the same Realm, first VDisk Id# {FailRealm# 0 FailDomain# 0 VDisk# 0} second VDisk Id# {FailRealm# 1 FailDomain# 1 VDisk# 0} 2024-11-21T08:54:10.715344Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000003:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.715387Z 4 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000004:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 
2024-11-21T08:54:10.715413Z 9 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.715436Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000001:_:0:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.715459Z 5 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000002:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.716924Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000003:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3545234058870233346] 2024-11-21T08:54:10.716937Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000001:_:0:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17524014568814932645] 2024-11-21T08:54:10.716943Z 4 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000004:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10559250933780531169] 2024-11-21T08:54:10.716949Z 9 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7283191297551735357] 2024-11-21T08:54:10.716954Z 5 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000002:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16139221800511584228] 2024-11-21T08:54:10.719971Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000003:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.719983Z 2 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000001:_:0:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.719987Z 4 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000004:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.719992Z 9 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.719996Z 5 00h01m40.001024s :BS_SYNCER ERROR: VDISK[82000002:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.892042Z 1 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000002:_:1:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.892083Z 7 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000003:_:1:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.892109Z 6 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000004:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.892134Z 10 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000000:_:2:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.892156Z 12 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000001:_:1:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:10.892522Z 10 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000000:_:2:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3626003979134199724] 2024-11-21T08:54:10.893883Z 7 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000003:_:1:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1188761026035912486] 2024-11-21T08:54:10.893897Z 1 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000002:_:1:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14817601033217897916] 2024-11-21T08:54:10.893902Z 6 00h01m55.001536s :BS_SYNCER ERROR: 
VDISK[82000004:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1709756078265200692] 2024-11-21T08:54:10.893908Z 12 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000001:_:1:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5257235707074743093] 2024-11-21T08:54:10.894481Z 10 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000000:_:2:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.897309Z 7 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000003:_:1:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.897321Z 1 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000002:_:1:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.897325Z 6 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000004:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:10.897330Z 12 00h01m55.001536s :BS_SYNCER ERROR: VDISK[82000001:_:1:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.054079Z 13 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000003:_:1:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.054129Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000004:_:1:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.054154Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000000:_:1:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.054176Z 7 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000001:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.054199Z 3 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000002:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.055538Z 13 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000003:_:1:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17727892437050200020] 2024-11-21T08:54:11.055551Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000004:_:1:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12366918380841982672] 2024-11-21T08:54:11.055556Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000000:_:1:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5239859846397887443] 2024-11-21T08:54:11.055561Z 7 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000001:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8070514024322777726] 2024-11-21T08:54:11.055566Z 3 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000002:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3730977943675446907] 2024-11-21T08:54:11.058606Z 13 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000003:_:1:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.058618Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000004:_:1:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.058623Z 14 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000000:_:1:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.058627Z 7 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000001:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.058631Z 3 00h02m10.002048s :BS_SYNCER ERROR: VDISK[82000002:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 
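Each entry in these unit-test logs has the same shape: a wall-clock timestamp, a node id (bare in the BS_SYNCER lines here, prefixed with "node" in the datashard lines), an optional virtual-time stamp such as 00h02m10.002048s, a component, a severity, and the message. A small illustrative filter for pulling out, say, only BS_SYNCER errors is sketched below; the field split is inferred from the lines above, it assumes one entry per line as the runner originally emits them, and it is not a YDB tool.

#include <iostream>
#include <regex>
#include <string>

int main() {
    // timestamp, optional "node", node id, optional virtual time, component, severity, message
    const std::regex entry(R"(^(\S+Z) (?:node )?(\d+) (?:(\S+) )?:(\w+) (\w+): (.*)$)");
    std::string line;
    while (std::getline(std::cin, line)) {
        std::smatch m;
        if (std::regex_match(line, m, entry) && m[4] == "BS_SYNCER" && m[5] == "ERROR") {
            std::cout << m[1] << " node " << m[2] << ": " << m[6] << "\n";
        }
    }
    return 0;
}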
2024-11-21T08:54:11.195574Z 1 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000003:_:2:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.195665Z 3 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000004:_:2:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.195689Z 8 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.195711Z 9 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000001:_:2:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.195754Z 1 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000003:_:2:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15875020475626164267] 2024-11-21T08:54:11.195774Z 3 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000004:_:2:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18326414470607480255] 2024-11-21T08:54:11.195779Z 8 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6826113811077073299] 2024-11-21T08:54:11.195783Z 9 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000001:_:2:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1146341160812761555] 2024-11-21T08:54:11.198010Z 1 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000003:_:2:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.198039Z 3 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000004:_:2:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.198043Z 8 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.198047Z 9 00h02m25.002560s :BS_SYNCER ERROR: VDISK[82000001:_:2:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.319292Z 11 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000004:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.319332Z 2 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000000:_:2:2:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:54:11.319375Z 11 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000004:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16845141786007012194] 2024-11-21T08:54:11.319384Z 2 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000000:_:2:2:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15773908652598942704] 2024-11-21T08:54:11.320698Z 11 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000004:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T08:54:11.320710Z 2 00h02m40.003072s :BS_SYNCER ERROR: VDISK[82000000:_:2:2:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2024-11-21T08:54:10.474237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:10.474679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:10.474698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d4c/r3tmp/tmpglIuLB/pdisk_1.dat 2024-11-21T08:54:10.572192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.588929Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.631690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.631733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.642265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.746755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.761575Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:10.761822Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:10.761909Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:10.761965Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:10.769043Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:10.769249Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:10.769277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:10.769460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:10.769471Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:10.769479Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:10.769529Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:10.772906Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:10.772994Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:10.773024Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:10.773030Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:10.773034Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:10.773039Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:10.773190Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.773199Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.773351Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:10.773373Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:10.773385Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.773391Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.773398Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:10.773405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:10.773412Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:10.773421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:10.773442Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:10.773445Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:10.773451Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:10.773457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:10.773479Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:10.773484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:10.773510Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:10.773577Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:10.773587Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:10.773606Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:10.773614Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:10.773633Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:10.773638Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:10.773642Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.773688Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:10.773692Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:10.773696Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:10.773699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.773710Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:10.773714Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:10.773717Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:10.773721Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:10.773726Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:10.773973Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:10.773981Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:10.784336Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:10.784374Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.784382Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.784395Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:10.784412Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:10.959497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.959536Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.959546Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:10.959581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:10.959588Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:10.959626Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:10.959637Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:10.959642Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:10.959650Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
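In the ReadContinue entries that follow, the read iterator {[14:557:2484], 1} returns the requested range in chunks of 99 rows / 4752 bytes and decrements a per-read quota that appears to start at 2^64 - 1 rows and 2^64 - 1 bytes: a value like "quota rows left# 18446744073709543002" corresponds to 8613 = 87 * 99 rows already delivered. The sketch below is a toy model of that bookkeeping with numbers taken from this log; the struct and method names are made up for illustration.

#include <cstdint>
#include <iostream>

// Toy model of the read-iterator quota accounting visible in the trace:
// the quota starts at 2^64 - 1 rows / bytes and shrinks with every chunk sent.
struct TReadQuota {
    uint64_t RowsLeft  = UINT64_MAX;   // 18446744073709551615
    uint64_t BytesLeft = UINT64_MAX;

    void OnChunkSent(uint64_t rows, uint64_t bytes) {
        RowsLeft  -= rows;
        BytesLeft -= bytes;
    }
};

int main() {
    TReadQuota q;
    for (int i = 0; i < 87; ++i) {
        q.OnChunkSent(99, 4752);       // "sends rowCount# 99, bytes# 4752"
    }
    // Matches the first ReadContinue entry below:
    // quota rows left# 18446744073709543002, quota bytes left# 18446744073709138191
    std::cout << q.RowsLeft << " " << q.BytesLeft << "\n";
    return 0;
}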
2024-11-21T08:54:10.960632Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:10.960662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:10.960827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.960843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.960855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:10.960866Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:10.960872Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:10.960883Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... :2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.042944Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.042969Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709543002, quota bytes left# 18446744073709138191, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.042980Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.042983Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.042986Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043010Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542903, quota bytes left# 18446744073709133439, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043024Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043027Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043029Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043056Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542804, quota bytes left# 18446744073709128687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043064Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043066Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043068Z node 14 
:TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043092Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542705, quota bytes left# 18446744073709123935, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043107Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043110Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043112Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043137Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542606, quota bytes left# 18446744073709119183, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043144Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043147Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043149Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043173Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542507, quota bytes left# 18446744073709114431, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043190Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043192Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043195Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043247Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542408, quota bytes left# 18446744073709109679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043255Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043258Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043260Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043284Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542309, quota bytes left# 18446744073709104927, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043298Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], 
Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043301Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043303Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043327Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542210, quota bytes left# 18446744073709100175, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043335Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043338Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043340Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043362Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542111, quota bytes left# 18446744073709095423, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043378Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043381Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043383Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043407Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542012, quota bytes left# 18446744073709090671, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043413Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043417Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043420Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043443Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541913, quota bytes left# 18446744073709085919, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043457Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043460Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043462Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043485Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota 
rows left# 18446744073709541814, quota bytes left# 18446744073709081167, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043497Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043499Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043502Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043525Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541715, quota bytes left# 18446744073709076415, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043535Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043538Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043540Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043564Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541616, quota bytes left# 18446744073709071663, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043571Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:35.043573Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T08:54:35.043576Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T08:54:35.043581Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 1, bytes# 48, quota rows left# 18446744073709541615, quota bytes left# 18446744073709071615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:35.043586Z node 14 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[14:557:2484], 1} finished in ReadContinue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2024-11-21T08:54:31.421568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653323156184398:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:31.421719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa5/r3tmp/tmpcCWpjb/pdisk_1.dat 2024-11-21T08:54:31.463804Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12247, node 1 2024-11-21T08:54:31.477115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:31.477130Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:31.477133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:31.477171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:31.522761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:31.522796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:31.523863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:31.547343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:31.773806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653319777735282:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:31.774102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa5/r3tmp/tmpf1Fa45/pdisk_1.dat 2024-11-21T08:54:31.781480Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7358, node 2 2024-11-21T08:54:31.791902Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:31.791922Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:31.791924Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:31.791981Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:31.875572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:31.875606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:31.876426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:31.876632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:54:32.635364Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:32.638568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:32.638641Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:32.638723Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:32.638977Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:32.639017Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa5/r3tmp/tmpqeG5IO/pdisk_1.dat 2024-11-21T08:54:32.725877Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15296, node 3 TClient is connected to server localhost:9153 2024-11-21T08:54:32.832412Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.832432Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.832436Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.832543Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:33.694884Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:33.694977Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:33.695005Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:33.695095Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:33.695131Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:33.695162Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa5/r3tmp/tmpPgQQPO/pdisk_1.dat 2024-11-21T08:54:33.763483Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20913, node 5 TClient is connected to server localhost:5423 2024-11-21T08:54:33.864979Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:33.864999Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:33.865003Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:33.865098Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T08:54:34.547547Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:34.547594Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:34.547608Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003fa5/r3tmp/tmprOdvaE/pdisk_1.dat 2024-11-21T08:54:34.607583Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24583, node 7 TClient is connected to server localhost:29526 2024-11-21T08:54:34.708416Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:34.708433Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:34.708436Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:34.708517Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "7-1-55" overall: GREEN pdisk { id: "7-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "9" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root" overall: GREEN storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: GREEN nodes { id: "7" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN } } } compute { overall: GREEN nodes { id: "8" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 7 host: "::1" port: 12001 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2024-11-21T08:54:31.971071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653320055351948:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:31.971194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004417/r3tmp/tmpV6mafd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27125, node 1 TClient is connected to server localhost:8895 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:32.062584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:54:32.062598Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:32.062782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.062796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.062798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.062854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:32.071082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.071104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.072751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:32.110850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.111733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.111751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.112159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.112241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.112254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:54:32.112595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.112601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:32.112668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.112867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.113674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272159, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.113687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:32.113738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:32.114134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.114173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.114185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:32.114197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:32.114207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:32.114218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:32.114539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:54:32.114555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:32.114561Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.114570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:32.123413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.123479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.123488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.123501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:32.123525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:32.123531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:32.124109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.124140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.124180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.124300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:32.124311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:32.124313Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.124325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:8895 2024-11-21T08:54:32.131850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.131913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.131930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.132501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:32.132536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:32.132865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.133106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272180, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.133114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179272180, at schemeshard: 72057594046644480 2024-11-21T08:54:32.133153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:32.133172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:32.133181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:32.133448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.133489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.133598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:32.133607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:32.133609Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.133619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:32.138741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetShardIterator, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.138825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.139477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: A ... 
.649243Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T08:54:34.649754Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:34.701096Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.701687Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.701858Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.701990Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.702163Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.702424Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.702437Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T08:54:34.702692Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:34.703283Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179274749, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:34.703293Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732179274749, at tablet: 72057594046644480 2024-11-21T08:54:34.703348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T08:54:34.703628Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:34.703690Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:34.703702Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T08:54:34.703709Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T08:54:34.703721Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T08:54:34.703754Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T08:54:34.703906Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:34.703920Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:34.703925Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T08:54:34.703956Z node 
10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:34.703960Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:34.703961Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:54:34.703965Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T08:54:34.705569Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestInvalidRetentionCombinationsa, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:54:34.705649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:34.706105Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestInvalidRetentionCombinationsa 2024-11-21T08:54:34.706133Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:34.706168Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:34.706178Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T08:54:34.706344Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T08:54:34.706378Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:54:34.706389Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T08:54:34.706392Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T08:54:34.706422Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:54:34.706428Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T08:54:34.706429Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T08:54:34.707440Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707475Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707490Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707504Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707519Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707534Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:34.707536Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 1 -> 3 2024-11-21T08:54:34.708261Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:34.713189Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.713471Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.714427Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.714720Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.714973Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.715519Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:34.715529Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 3 -> 128 2024-11-21T08:54:34.715819Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:54:34.716541Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179274763, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:34.716552Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715661:0 HandleReply TEvOperationPlan, step: 1732179274763, at tablet: 72057594046644480 2024-11-21T08:54:34.716591Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 128 -> 240 2024-11-21T08:54:34.716967Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:34.717022Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:34.717034Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715661:0 ProgressState 2024-11-21T08:54:34.717046Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2024-11-21T08:54:34.717053Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2024-11-21T08:54:34.717106Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 2, subscribers: 1 2024-11-21T08:54:34.717362Z node 
10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:54:34.717383Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T08:54:34.717388Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2024-11-21T08:54:34.717452Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T08:54:34.717463Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T08:54:34.717465Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:54:34.717472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2024-11-21T08:54:33.141922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653330591716762:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:33.142269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043d6/r3tmp/tmp42xB03/pdisk_1.dat 2024-11-21T08:54:33.191652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21453, node 1 2024-11-21T08:54:33.201085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:33.201098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:33.201099Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:33.201151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28701 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:33.242349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:33.242373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:33.243694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:33.270265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.271057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.271076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.271463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:33.271511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:33.271518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:54:33.271763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:33.271774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:33.272031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:33.272074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.272868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179273321, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:33.272878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:33.272922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:33.273240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.273283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.273295Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:33.273303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:33.273310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:33.273323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:33.273668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:54:33.273686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:33.273694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:33.273704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:33.286288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.286351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.286360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.286374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:33.286398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:33.286404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:33.286870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:33.286914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.286963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.287075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:33.287087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:33.287090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:33.287103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:28701 2024-11-21T08:54:33.295471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.295542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.295550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.296124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:33.296154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.296889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179273342, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:33.296900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179273342, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:33.296945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:33.296967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:33.296979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T08:54:33.297137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:33.297311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.297355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.297475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:33.297486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:33.297489Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:33.297498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2024-11-21T08:54:33.307685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestStreamStorageRetention, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.307830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660 ... 46644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:21545 2024-11-21T08:54:35.149853Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.149928Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.149938Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.150426Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:35.150476Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:35.150789Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:35.151082Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275197, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.151092Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179275197, at schemeshard: 72057594046644480 2024-11-21T08:54:35.151132Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:35.151158Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:35.151169Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:35.151390Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.151424Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.151517Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:35.151527Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:35.151534Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:35.151544Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:35.158943Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
TCreatePQ Propose, path: /Root/stream_TestShardPagination, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.159059Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.159759Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestShardPagination 2024-11-21T08:54:35.159806Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.159852Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.159873Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T08:54:35.159946Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:54:35.159976Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.159983Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.159987Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:54:35.160009Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.160017Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.160018Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:54:35.161450Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161498Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161513Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161531Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161542Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161557Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161571Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161585Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply 
TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161600Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161616Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.161624Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T08:54:35.162267Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:35.168742Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169256Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169322Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169386Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169482Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169583Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169653Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169705Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169772Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169854Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.169863Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T08:54:35.170050Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.170530Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275218, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.170540Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732179275218, at tablet: 72057594046644480 2024-11-21T08:54:35.170585Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T08:54:35.170814Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.170866Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.170877Z node 7 
:FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T08:54:35.170888Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T08:54:35.170897Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T08:54:35.170936Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T08:54:35.171073Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.171086Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.171089Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T08:54:35.171126Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.171133Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.171134Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:54:35.171138Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 >> DataStreams::TestReservedResourcesMetering >> DataStreams::ListStreamsValidation [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> DataStreams::TestListStreamConsumers [GOOD] >> THealthCheckTest::SpecificServerless >> DataStreams::TestUpdateStream >> THealthCheckTest::ShardsLimit999 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2024-11-21T08:54:32.774548Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653327058931986:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:32.774610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043dc/r3tmp/tmp9to3b8/pdisk_1.dat 2024-11-21T08:54:32.828903Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2492, node 1 2024-11-21T08:54:32.835129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.835148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.835150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.835178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:14182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:32.863289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.864489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.864510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.865412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.865487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.865496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:54:32.865950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.865958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:54:32.866011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.866310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.867164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272915, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.867175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:54:32.867224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:54:32.867535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.867577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.867588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:54:32.867606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:54:32.867618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:54:32.867632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:54:32.867997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:54:32.868011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:54:32.868015Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.868029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:54:32.874570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.874592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.876067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:32.879459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.879525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.879536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 
281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.879552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:54:32.879580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:54:32.879588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:54:32.880047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.880090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.880138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.880274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:54:32.880287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:54:32.880296Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.880309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:14182 2024-11-21T08:54:32.891126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.891203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.891213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.891849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:32.891879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:32.892340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.892593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272936, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.892603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732179272936, at schemeshard: 72057594046644480 2024-11-21T08:54:32.892640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:54:32.892662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:54:32.892672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T08:54:32.892984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.893030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.893188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:54:32.893215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:54:32.893224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.893237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T08:54:32.900548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/tdir, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.900588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.900600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/tdir/stream_TestControlPlaneAndMeteringData, opId: 281474976710660: ... 
oot, subject: user@builtin, status: StatusAlreadyExists, reason: Check failed: path: '/Root/stream_TestCreateExistingStream', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:342, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestCreateExistingStream 2024-11-21T08:54:35.528590Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439653338674028979:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:35.528654Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043dc/r3tmp/tmp3nln9o/pdisk_1.dat 2024-11-21T08:54:35.544218Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23047, node 10 2024-11-21T08:54:35.559245Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:35.559256Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:35.559258Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:35.559295Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:35.629102Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:35.629131Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:35.630601Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:35.636876Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.636962Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.636972Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.637302Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:35.637352Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:35.637361Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:54:35.637619Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:35.637629Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:35.637846Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:35.637874Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.638486Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275687, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.638496Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:35.638541Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:35.638795Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.638835Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.638842Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:35.638851Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:35.638863Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:35.638870Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:35.639005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T08:54:35.639022Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:35.639029Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:35.639038Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:35.650202Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.650291Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.650302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.650320Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:35.650349Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:35.650357Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:35.650904Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:35.650936Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.650986Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.651117Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:35.651128Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:35.651132Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:35.651145Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:29564 2024-11-21T08:54:35.660658Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.660728Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.660742Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.661319Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, 
operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:35.661356Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:35.661812Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:35.662057Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275708, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.662067Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179275708, at schemeshard: 72057594046644480 2024-11-21T08:54:35.662111Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:35.662133Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:35.662143Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:35.662455Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.662492Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.662593Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:35.662603Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:35.662607Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:35.662619Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestListStreamConsumers [GOOD] Test command err: 2024-11-21T08:54:32.226317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653328039855577:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:32.226481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004406/r3tmp/tmptXiZp8/pdisk_1.dat 2024-11-21T08:54:32.265604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13507, node 1 2024-11-21T08:54:32.281802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.281813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.281814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T08:54:32.281839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:32.326727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.326758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.328291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:32.347837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.348592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.348606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.349052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.349097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.349101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:54:32.349559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.349585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.349587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:32.349978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.350801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272397, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.350811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:32.350870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:32.351248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.351290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.351303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:32.351312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:32.351323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:32.351338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:32.351731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:54:32.351753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:32.351757Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.351769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:32.370425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.370489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.370498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.370510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:32.370531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:32.370537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:32.371037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.371066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.371108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.371231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:32.371248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:32.371251Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.371267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:29105 2024-11-21T08:54:32.379761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.379819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.379828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.380323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:32.380352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:32.380765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.381380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272425, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.381390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179272425, at schemeshard: 72057594046644480 2024-11-21T08:54:32.381425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:32.381459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:32.381470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:32.381917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.381951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.382096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:32.382109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:32.382111Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.382121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:32.389180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestNonChargeableUser, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.389287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.390081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: ... 
ProgressState, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.742589Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275792, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.742608Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715664:0 HandleReply TEvOperationPlan, step: 1732179275792, at tablet: 72057594046644480 2024-11-21T08:54:35.742674Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 240 2024-11-21T08:54:35.743066Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.743113Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.743125Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:0 ProgressState 2024-11-21T08:54:35.743136Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2024-11-21T08:54:35.743147Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T08:54:35.743184Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 1, subscribers: 1 2024-11-21T08:54:35.743356Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T08:54:35.743364Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T08:54:35.743366Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T08:54:35.743374Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 E0000 00:00:1732179275.744803 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179275.744831 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179275.745695 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179275.745704 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T08:54:35.747603Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropPQ Propose, path: /Root/stream_TestListStreamConsumers, pathId: 0, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.747690Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.747701Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 
72057594046644480 2024-11-21T08:54:35.748141Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: user@builtin, status: StatusAccepted, operation: DROP PERSISTENT QUEUE, path: /Root/stream_TestListStreamConsumers 2024-11-21T08:54:35.748174Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.748234Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.748279Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715665, at schemeshard: 72057594046644480 2024-11-21T08:54:35.748433Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:54:35.748443Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:54:35.748445Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T08:54:35.748467Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:54:35.748472Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:54:35.748473Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T08:54:35.749423Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 4 -> 5 2024-11-21T08:54:35.749679Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715665:0 ProgressState 2024-11-21T08:54:35.749686Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 5 -> 128 2024-11-21T08:54:35.749930Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 281474976715665:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:35.751397Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037892 disconnected 2024-11-21T08:54:35.751407Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037891 disconnected 2024-11-21T08:54:35.751427Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037892 not found 2024-11-21T08:54:35.751436Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037889 disconnected 2024-11-21T08:54:35.751438Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2024-11-21T08:54:35.751438Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037888 disconnected 2024-11-21T08:54:35.751439Z node 10 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037891 not found 2024-11-21T08:54:35.751440Z node 10 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976715665 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037890 disconnected 2024-11-21T08:54:35.751440Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037893 not found 2024-11-21T08:54:35.751441Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2024-11-21T08:54:35.751443Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2024-11-21T08:54:35.756986Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275806, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.757004Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 281474976715665:0 HandleReply TEvOperationPlan, step: 1732179275806, at schemeshard: 72057594046644480 2024-11-21T08:54:35.757077Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715665:0 128 -> 240 2024-11-21T08:54:35.757569Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.757620Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.757632Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715665:0 ProgressState 2024-11-21T08:54:35.757642Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2024-11-21T08:54:35.757653Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2024-11-21T08:54:35.757691Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2024-11-21T08:54:35.757717Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2024-11-21T08:54:35.757735Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2024-11-21T08:54:35.757741Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2024-11-21T08:54:35.757744Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2024-11-21T08:54:35.757746Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2024-11-21T08:54:35.757747Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:54:35.757749Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2024-11-21T08:54:35.757750Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:54:35.757752Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:54:35.757979Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T08:54:35.757989Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T08:54:35.757991Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2024-11-21T08:54:35.757998Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2024-11-21T08:54:35.758359Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 E0000 00:00:1732179275.759024 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732179275.759047 597458 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> THealthCheckTest::Issues100GroupsListing >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> THealthCheckTest::OneIssueListing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] Test command err: 2024-11-21T08:54:10.529278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:10.529667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:10.529686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d47/r3tmp/tmp9LNlJB/pdisk_1.dat 2024-11-21T08:54:10.621519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.637267Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.679315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.679343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.689768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.793596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.807500Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:10.807698Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:10.807773Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:10.807823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:10.814692Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:10.814865Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:10.814886Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:10.815020Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:10.815028Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:10.815034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:10.815072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:10.818334Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:10.818402Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:10.818427Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:10.818432Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:10.818436Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:10.818442Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:10.818560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.818567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:10.818686Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:10.818704Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:10.818716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.818721Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:10.818726Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:10.818733Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:10.818739Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:10.818744Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:10.818749Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:10.818753Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:10.818757Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:10.818761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:10.818778Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:10.818782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:10.818801Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:10.818851Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:10.818860Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:10.818876Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:10.818883Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:10.818887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:10.818892Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:10.818895Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.818935Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:10.818938Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:10.818942Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:10.818945Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.818953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:10.818956Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:10.818959Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:10.818962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:10.818967Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:10.819177Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:10.819183Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:10.829467Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:10.829493Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:10.829498Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:10.829509Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:10.829521Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:11.003632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:11.003653Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:11.003660Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:11.003675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:11.003678Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:11.003697Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:11.003705Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:11.003708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:11.003711Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:11.004251Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:11.004263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:11.004340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:11.004344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:11.004349Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:11.004354Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:11.004357Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:11.004362Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3001 txid# 281474976715667} 2024-11-21T08:54:36.977816Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2024-11-21T08:54:36.977825Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:36.977830Z node 14 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:54:36.977838Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:36.978077Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [14:1122:2896], Recipient [14:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:36.978087Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:36.978095Z node 14 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [14:1121:2895], serverId# [14:1122:2896], sessionId# [0:0:0] 2024-11-21T08:54:36.978224Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2024-11-21T08:54:36.978250Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:36.978262Z node 14 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T08:54:36.978268Z node 14 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2024-11-21T08:54:36.978278Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:36.978303Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:36.978309Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit 
CheckRead 2024-11-21T08:54:36.978314Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:36.978319Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:36.978336Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T08:54:36.978343Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:36.978346Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:36.978350Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:36.978354Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:36.978368Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T08:54:36.978455Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2024-11-21T08:54:36.978462Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2024-11-21T08:54:36.978471Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:54:37.009478Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1019:2817], Recipient [14:630:2536]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:54:37.009524Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T08:54:37.009532Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2024-11-21T08:54:37.009559Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T08:54:37.009625Z node 14 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1111:2871], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:37.009640Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:54:37.009652Z node 14 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2024-11-21T08:54:37.009741Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd6ywd458r460wzdyzjefqad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NWNlMjBiYi1hNzUwY2RmNi1hNTk1NDAzNS0yNzg2MjJjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T08:54:37.009794Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. 
Ctx: { TraceId: 01jd6ywd458r460wzdyzjefqad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NWNlMjBiYi1hNzUwY2RmNi1hNTk1NDAzNS0yNzg2MjJjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:54:37.009808Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd6ywd458r460wzdyzjefqad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NWNlMjBiYi1hNzUwY2RmNi1hNTk1NDAzNS0yNzg2MjJjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T08:54:37.009828Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:37.009834Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:37.010090Z node 14 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [14:557:2484], selfId: [14:50:2097], source: [14:1089:2871] 2024-11-21T08:54:37.010130Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:37.010222Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:37.010229Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:54:37.010234Z node 14 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2024-11-21T08:54:37.010240Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:37.010292Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T08:54:37.010406Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T08:54:37.010415Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 2 2024-11-21T08:54:37.010423Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2024-11-21T08:54:37.010462Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:37.010468Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:37.010474Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:37.010478Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:37.010492Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:54:37.010495Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:37.010501Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2024-11-21T08:54:37.010504Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:37.010508Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:37.010513Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:37.010516Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:37.010572Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:37.010582Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 2 2024-11-21T08:54:37.010598Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 2 2024-11-21T08:54:37.010612Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2024-11-21T08:54:37.010785Z node 14 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=14&id=NWNlMjBiYi1hNzUwY2RmNi1hNTk1NDAzNS0yNzg2MjJjMw==, workerId: [14:1089:2871], local sessions count: 0 2024-11-21T08:54:37.010811Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T08:54:37.010819Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 4 2024-11-21T08:54:37.010836Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 4 2024-11-21T08:54:37.010853Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2024-11-21T08:54:37.010866Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[14:557:2484], 1} finished in ReadContinue 2024-11-21T08:54:37.010896Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:54:2101], Recipient [14:1019:2817]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 14 Status: STATUS_NOT_FOUND >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] >> THealthCheckTest::Issues100Groups100VCardListing >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite |89.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |89.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] Test command err: 2024-11-21T08:54:11.838300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:11.838623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:11.838637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d45/r3tmp/tmpgLuv92/pdisk_1.dat 2024-11-21T08:54:11.927727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:11.942549Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:11.984361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:11.984386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:11.994811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:12.097773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:12.110555Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:12.110704Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:12.110756Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:12.110794Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:12.115620Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:12.115722Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:12.115736Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:12.115836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:12.115841Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:12.115845Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:12.115871Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:12.118028Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:12.118075Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:12.118090Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:12.118094Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:12.118097Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:12.118099Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:12.118185Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.118190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.118269Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:12.118279Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:12.118286Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.118289Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.118293Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:12.118298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:12.118302Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:12.118306Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:12.118309Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:12.118312Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:12.118316Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:12.118319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:12.118333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:12.118336Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:12.118350Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:12.118393Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:12.118400Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:12.118411Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:12.118417Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:12.118419Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:12.118424Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:12.118426Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:12.118454Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:12.118456Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:12.118458Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:12.118460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:12.118466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:12.118468Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:12.118471Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:12.118472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:12.118476Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:12.118631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:12.118635Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:12.128865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:12.128888Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:12.128893Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:12.128902Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:12.128913Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:12.302695Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.302735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.302743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:12.302766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:12.302770Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:12.302824Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:12.302833Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:12.302838Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:12.302842Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:12.303517Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:12.303534Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:12.303640Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.303644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.303648Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:12.303653Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:12.303657Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:12.303662Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 2024-11-21T08:54:37.447975Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:37.447979Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:37.447985Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:37.447989Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:37.447992Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T08:54:37.447996Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:54:37.448000Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2024-11-21T08:54:37.448027Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:37.448031Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:37.448035Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:37.448039Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T08:54:37.448043Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2024-11-21T08:54:37.448047Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:37.448061Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:37.448090Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T08:54:37.448094Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 3} after executionsCount# 2 
2024-11-21T08:54:37.448098Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:37.448108Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} finished in read 2024-11-21T08:54:37.448114Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:54:37.448117Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:37.448120Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:37.448123Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:37.448128Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:54:37.448131Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:37.448134Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T08:54:37.448137Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:37.448141Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:37.448146Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:37.448150Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:37.448280Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=YmNjOTI1MTEtNDIwOWFlOTctMjNiZDExMmEtZTQ0YmNkODA=, workerId: [15:1120:2893], local sessions count: 0 2024-11-21T08:54:37.448427Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T08:54:37.448448Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:37.448461Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:37.448475Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T08:54:37.448479Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:37.448483Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:37.448486Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:37.448499Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2024-11-21T08:54:37.448503Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T08:54:37.448506Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2024-11-21T08:54:37.448511Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:37.448514Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:37.448525Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:37.448551Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T08:54:37.448555Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 4} after executionsCount# 1 2024-11-21T08:54:37.448560Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:37.448573Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} finished in read 2024-11-21T08:54:37.448580Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T08:54:37.448584Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:37.448587Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:37.448590Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:37.448596Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T08:54:37.448599Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:37.448602Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2024-11-21T08:54:37.448606Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:54:37.448685Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T08:54:37.448693Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:37.448698Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:37.448705Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T08:54:37.448709Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:37.448712Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:37.448716Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:37.448722Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2024-11-21T08:54:37.448726Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T08:54:37.448728Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:37.448732Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:37.448734Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:37.448746Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:37.448765Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T08:54:37.448768Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 5} after executionsCount# 1 2024-11-21T08:54:37.448773Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:37.448783Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} finished in read 2024-11-21T08:54:37.448789Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T08:54:37.448793Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:37.448796Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:37.448799Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:37.448803Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T08:54:37.448807Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:37.448810Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2024-11-21T08:54:37.448813Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> THealthCheckTest::StaticGroupIssue >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> 
THealthCheckTest::Issues100Groups100VCardMerging >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2024-11-21T08:54:13.427084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:13.427559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:13.427582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d2b/r3tmp/tmpUndEyJ/pdisk_1.dat 2024-11-21T08:54:13.537674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:13.556685Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:13.599237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:13.599271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:13.609769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:13.718513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:13.732418Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:13.732603Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:13.732681Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:13.732743Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:13.741478Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:13.741625Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:13.741645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:13.741790Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:13.741798Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:13.741806Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:13.741843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:13.745562Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:13.745626Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:13.745647Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:13.745652Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:13.745657Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:13.745662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:13.745782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:13.745790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:13.745904Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:13.745921Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:13.745932Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:13.745936Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:13.745942Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:13.745949Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:13.745955Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:13.745962Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:13.745967Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:13.745971Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:13.745975Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:13.745982Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:13.746000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:13.746004Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:13.746022Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:13.746071Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:13.746078Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:13.746093Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:13.746100Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:13.746104Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:13.746108Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:13.746113Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:13.746155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:13.746159Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:13.746164Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:13.746169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:13.746177Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:13.746181Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:13.746186Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:13.746189Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:13.746194Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:13.746406Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:13.746414Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:13.756710Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:13.756743Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:13.756751Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:13.756765Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:13.756778Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:13.932914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:13.932939Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:13.932948Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:13.932968Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:13.932973Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:13.932999Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:13.933009Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:13.933014Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:13.933020Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:13.933841Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:13.933863Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:13.934000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:13.934006Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:13.934013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:13.934021Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:13.934026Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:13.934034Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:38.397248Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:38.397250Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:38.397263Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:842:2676], Recipient [13:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:38.397266Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:38.397268Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:54:38.397270Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:38.397272Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:38.397274Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2024-11-21T08:54:38.397276Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2024-11-21T08:54:38.397279Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T08:54:38.397280Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2024-11-21T08:54:38.397282Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T08:54:38.397284Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2024-11-21T08:54:38.397294Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2024-11-21T08:54:38.397296Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T08:54:38.397297Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T08:54:38.397299Z node 13 :TX_DATASHARD TRACE: Add 
[3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:38.397301Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:54:38.397304Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2024-11-21T08:54:38.397306Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2024-11-21T08:54:38.397307Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2024-11-21T08:54:38.397310Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T08:54:38.397312Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:38.397313Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T08:54:38.397315Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T08:54:38.397324Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T08:54:38.397326Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T08:54:38.397330Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T08:54:38.397332Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T08:54:38.397334Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T08:54:38.397335Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T08:54:38.397337Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:54:38.397339Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:38.397356Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2024-11-21T08:54:38.397358Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:54:38.397360Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:54:38.397362Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:54:38.397364Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T08:54:38.397366Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:54:38.397368Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2024-11-21T08:54:38.397369Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:38.397371Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:38.397373Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:54:38.397374Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:54:38.407671Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715665} 2024-11-21T08:54:38.407693Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T08:54:38.407704Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:38.407710Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:54:38.407724Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:38.407730Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:38.407748Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715665} 2024-11-21T08:54:38.407751Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T08:54:38.407756Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:54:38.407759Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:38.407763Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:38.407770Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:54:38.408015Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:557:2484], Recipient [13:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T08:54:38.408029Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:38.408037Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:38.408050Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:54:38.408053Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:38.408056Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:38.408058Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:38.408064Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2024-11-21T08:54:38.408067Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2024-11-21T08:54:38.408069Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:38.408071Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:38.408073Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:38.408099Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:38.408144Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2024-11-21T08:54:38.408149Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:557:2484], 1} after executionsCount# 1 2024-11-21T08:54:38.408154Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:38.408174Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} finished in read 2024-11-21T08:54:38.408181Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:54:38.408183Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:38.408185Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:38.408187Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:38.408194Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T08:54:38.408196Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:38.408199Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T08:54:38.408202Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:54:38.408260Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::StorageLimit95 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsNoLimit >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestOwnership >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning >> DataStreams::TestPutRecordsCornerCases [GOOD] >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit87 >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> 
THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecordsCornerCases [GOOD] Test command err: 2024-11-21T08:54:32.201812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653324817222225:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:32.202091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004412/r3tmp/tmpqTDi4I/pdisk_1.dat 2024-11-21T08:54:32.256477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20377, node 1 2024-11-21T08:54:32.263589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.263600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.263602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.263633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:32.302080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.302119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.303627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:32.331370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.332319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.332338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.332858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.332913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.332927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:54:32.333285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.333297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:32.333400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.333642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.334369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272383, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.334382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:32.334440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:32.334793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.334831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.334843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:32.334855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:32.334866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:32.334875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:32.335198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:54:32.335216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:32.335218Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.335230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:32.344581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.344652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.344662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.344676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:32.344701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:32.344707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:32.345332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.345375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.345422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.345533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:32.345543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:32.345546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.345560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:13328 2024-11-21T08:54:32.354387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.354452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.354461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.354949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2024-11-21T08:54:32.354981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:32.355362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.355595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272404, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.355603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179272404, at schemeshard: 72057594046644480 2024-11-21T08:54:32.355647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:32.355670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:32.355681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:32.355967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.356001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.356121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:32.356132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:32.356138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.356147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:32.362562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestPutRecordsOfAnauthorizedUser, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.362647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.363363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@built ... 
nerCases in database: Root, partition 2(assignId:3) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037889 Generation: 1 2024-11-21T08:54:39.057965Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037892 Generation: 1 2024-11-21T08:54:39.058647Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 2024-11-21T08:54:39.058652Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 2024-11-21T08:54:39.058660Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 2024-11-21T08:54:39.058663Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 2024-11-21T08:54:39.058721Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 2024-11-21T08:54:39.058941Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Confirm partition stream create. Partition stream id: 1. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 4. Read offset: (empty maybe) 2024-11-21T08:54:39.059033Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Confirm partition stream create. Partition stream id: 2. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 3. Read offset: (empty maybe) 2024-11-21T08:54:39.059078Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Confirm partition stream create. Partition stream id: 3. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 0. Read offset: (empty maybe) 2024-11-21T08:54:39.059105Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Confirm partition stream create. Partition stream id: 4. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:39.059332Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Confirm partition stream create. Partition stream id: 5. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 2. 
Read offset: (empty maybe) 2024-11-21T08:54:39.059340Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1), readOffset# 0, commitOffset# 0 2024-11-21T08:54:39.059373Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2), readOffset# 0, commitOffset# 0 2024-11-21T08:54:39.059378Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T08:54:39.059385Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T08:54:39.059442Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5), readOffset# 0, commitOffset# 0 2024-11-21T08:54:39.059458Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T08:54:39.059473Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4), readOffset# 0, commitOffset# 0 2024-11-21T08:54:39.059483Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T08:54:39.059554Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3), readOffset# 0, commitOffset# 0 2024-11-21T08:54:39.059565Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T08:54:39.059718Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:39.059763Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2024-11-21T08:54:39.059793Z :DEBUG: [/Root/] Take Data. Partition 4. 
Read: {0, 0} (0-0) 2024-11-21T08:54:39.059798Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 1} (1-1) 2024-11-21T08:54:39.059804Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2024-11-21T08:54:39.079724Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:39.079803Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2024-11-21T08:54:39.079820Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2024-11-21T08:54:39.079847Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2024-11-21T08:54:39.079867Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2024-11-21T08:54:39.079879Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 1} (1-1) 2024-11-21T08:54:39.079885Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (2-2) 2024-11-21T08:54:39.079903Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] The application data is transferred to the client. Number of messages 3, size 1049088 bytes 2024-11-21T08:54:39.079925Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-0) 2024-11-21T08:54:39.079938Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:39.079948Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T08:54:39.079957Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2024-11-21T08:54:39.079962Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2024-11-21T08:54:39.079967Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T08:54:39.079972Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 1} (1-1) 2024-11-21T08:54:39.079980Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2024-11-21T08:54:39.079996Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2024-11-21T08:54:39.080006Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2024-11-21T08:54:39.080011Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2024-11-21T08:54:39.080012Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (3-3) 2024-11-21T08:54:39.080016Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T08:54:39.080027Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2024-11-21T08:54:39.080029Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 1} (1-1) 2024-11-21T08:54:39.080031Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 2} (2-2) 2024-11-21T08:54:39.080034Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 3} (3-3) 2024-11-21T08:54:39.080038Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:54:39.080040Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 1} (5-5) 2024-11-21T08:54:39.080042Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (6-6) 2024-11-21T08:54:39.080044Z :DEBUG: [/Root/] Take Data. Partition 1. 
Read: {2, 1} (7-7) 2024-11-21T08:54:39.080050Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] The application data is transferred to the client. Number of messages 8, size 8388608 bytes 2024-11-21T08:54:39.080082Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] Closing read session. Close timeout: 0.000000s 2024-11-21T08:54:39.080094Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:4:1:1:0 null:stream_TestPutRecordsCornerCases:3:2:3:0 null:stream_TestPutRecordsCornerCases:2:5:0:0 null:stream_TestPutRecordsCornerCases:1:4:7:0 null:stream_TestPutRecordsCornerCases:0:3:1:0 2024-11-21T08:54:39.080098Z :INFO: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 24 BytesRead: 9437696 MessagesRead: 16 BytesReadCompressed: 9437696 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:54:39.080115Z :NOTICE: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:54:39.080123Z :DEBUG: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] [null] Abort session to cluster 2024-11-21T08:54:39.080284Z :NOTICE: [/Root/] [/Root/] [bec5aec9-8ad4dde3-a2bbad1f-d76178b4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:54:39.080436Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 grpc read failed 2024-11-21T08:54:39.080456Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 grpc closed 2024-11-21T08:54:39.080481Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10064724505320115002_v1 is DEAD >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2024-11-21T08:54:12.491587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:12.491914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:12.491929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d41/r3tmp/tmpJ4iGWc/pdisk_1.dat 2024-11-21T08:54:12.583738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:12.599276Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:12.641130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:12.641159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:12.651542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:12.755040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:12.768405Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:12.768583Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:12.768650Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:12.768705Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:12.774937Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:12.775074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:12.775094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:12.775239Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:12.775245Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:12.775250Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:12.775299Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:12.778205Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:12.778266Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:12.778284Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:12.778288Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:12.778292Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:12.778297Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:12.778423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.778431Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.778530Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:12.778550Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:12.778562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.778566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.778573Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:12.778580Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:12.778586Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:12.778592Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:12.778598Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:12.778601Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:12.778605Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:12.778610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:12.778626Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:12.778630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:12.778648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:12.778697Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:12.778705Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:12.778719Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:12.778725Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:12.778729Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:12.778733Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:12.778736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:12.778776Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:12.778779Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:12.778782Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:12.778785Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:12.778794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:12.778797Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:12.778800Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:12.778803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:12.778807Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:12.779022Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:12.779031Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:12.789296Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:12.789318Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:12.789324Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:12.789334Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:12.789346Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:12.963414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.963452Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:12.963460Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:12.963478Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:12.963482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:12.963507Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:12.963515Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:12.963519Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:12.963525Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:12.964267Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:12.964281Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:12.964389Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.964394Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:12.964401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:12.964408Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:12.964412Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:12.964420Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:39.767925Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:39.767927Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:39.767930Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:54:39.767932Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:39.767934Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:39.767936Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T08:54:39.767938Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T08:54:39.767941Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:39.767943Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T08:54:39.767945Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T08:54:39.767947Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T08:54:39.767960Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T08:54:39.767962Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:39.767964Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T08:54:39.767966Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:39.767968Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T08:54:39.767970Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T08:54:39.767972Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T08:54:39.767974Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T08:54:39.767977Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:39.767978Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:39.767980Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T08:54:39.767982Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T08:54:39.767993Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T08:54:39.767995Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T08:54:39.767998Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T08:54:39.768000Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T08:54:39.768002Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:39.768004Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T08:54:39.768006Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:54:39.768008Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:39.768026Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T08:54:39.768028Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:54:39.768030Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:54:39.768032Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:54:39.768034Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:39.768036Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:54:39.768038Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T08:54:39.768039Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:39.768041Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:39.768043Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:54:39.768044Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:54:39.778450Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T08:54:39.778484Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T08:54:39.778503Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:39.778513Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:54:39.778539Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:39.778551Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:39.778584Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T08:54:39.778588Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T08:54:39.778598Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:54:39.778602Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:39.778610Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:39.778617Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:54:39.779055Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T08:54:39.779081Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:39.779096Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:39.779117Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:39.779124Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:39.779129Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:39.779134Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:39.779143Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T08:54:39.779148Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:39.779152Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:39.779155Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:39.779159Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:39.779174Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:39.779283Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:54:39.779291Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T08:54:39.779298Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 10} after executionsCount# 1 2024-11-21T08:54:39.779306Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:39.779346Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} finished in read 2024-11-21T08:54:39.779358Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:39.779362Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:39.779366Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:39.779370Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:39.779382Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:39.779385Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:39.779392Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T08:54:39.779397Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:54:39.779415Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> THealthCheckTest::ShardsNoLimit [GOOD] >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2024-11-21T08:54:36.826105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:36.826142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:36.826150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f9c/r3tmp/tmpZTDXmA/pdisk_1.dat 2024-11-21T08:54:36.897681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14027, node 1 TClient is connected to server localhost:27225 2024-11-21T08:54:37.003774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:37.003791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:37.003795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:37.003908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "1-1-55" overall: GREEN pdisk { id: "1-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "2" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 1 host: "::1" port: 12001 } 2024-11-21T08:54:37.696085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:37.696133Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:37.696147Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f9c/r3tmp/tmpoBAheO/pdisk_1.dat 2024-11-21T08:54:37.761072Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19459, node 3 TClient is connected to server localhost:22182 2024-11-21T08:54:37.857061Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:37.857077Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:37.857080Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:37.857164Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "3-1-55" overall: GREEN pdisk { id: "3-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "5" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 3 host: "::1" port: 12001 } 2024-11-21T08:54:38.728355Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.728398Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.728414Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f9c/r3tmp/tmpxygs69/pdisk_1.dat 2024-11-21T08:54:38.795193Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18281, node 6 TClient is connected to server localhost:13677 2024-11-21T08:54:38.890324Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.890339Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.890342Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.890390Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-f65b-f489" status: RED message: "Database has compute issues" location { database { name: "/Root/serverless" } } reason: "RED-f65b-7469" type: "DATABASE" level: 1 } issue_log { id: "RED-f65b-7469" status: RED message: "There are no compute nodes" location { database { name: "/Root/serverless" } } type: "COMPUTE" level: 2 } database_status { name: "/Root/serverless" overall: RED storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "6-1-55" overall: GREEN pdisk { id: "6-1" overall: GREEN } } } } } compute { overall: RED } } database_status { name: "/Root" overall: GREEN storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: GREEN nodes { id: "6" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN } } } compute { overall: GREEN nodes { id: "7" overall: GREY } } } location { id: 6 host: "::1" port: 12001 } 2024-11-21T08:54:39.850513Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.850569Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.850600Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.850718Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.850771Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.850783Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f9c/r3tmp/tmpaTq5VL/pdisk_1.dat 2024-11-21T08:54:39.922441Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20254, node 8 TClient is connected to server localhost:2209 2024-11-21T08:54:40.022995Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.023011Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.023013Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.023108Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataStreams::TestUnsupported [GOOD] >> THealthCheckTest::ShardsLimit800 [GOOD] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse |89.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |89.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2024-11-21T08:54:36.581825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653341745841847:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:36.581935Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043bb/r3tmp/tmpvQQ58E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15599, node 1 2024-11-21T08:54:36.643090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:54:36.643104Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:36.643242Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:36.643253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:36.643255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:36.643295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:36.661218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.661983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.661993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.662373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:36.662427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:36.662437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:54:36.662760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:36.662771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:54:36.662820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:54:36.663019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.663619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179276709, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:36.663629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:54:36.663670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:54:36.663938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.663977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.663989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:54:36.663999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:54:36.664009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:54:36.664020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:54:36.664348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:54:36.664363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:54:36.664365Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:36.664381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:54:36.678051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.678141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.678158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.678178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:54:36.678217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:54:36.678225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:54:36.678919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:36.678972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.679032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.679171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:54:36.679190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:54:36.679199Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:36.679217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T08:54:36.682022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:36.682046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:36.683439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4908 2024-11-21T08:54:36.687817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.687872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.687883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.688381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:36.688430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:36.688824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:54:36.689134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179276737, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:36.689142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732179276737, at schemeshard: 72057594046644480 2024-11-21T08:54:36.689206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:54:36.689225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:54:36.689234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T08:54:36.689498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.689533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.689627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:54:36.689636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:54:36.689639Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:36.689647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T08:54:36.695934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestUpdateStream, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.696034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.696668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT ... 0, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:20482 2024-11-21T08:54:40.200587Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.200671Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:40.200684Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.201443Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:40.201496Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:40.201948Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:40.202401Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179280251, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:40.202413Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179280251, at schemeshard: 72057594046644480 2024-11-21T08:54:40.202454Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:40.202483Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:40.202499Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:40.202862Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.202914Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.203048Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:40.203070Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:40.203074Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:40.203086Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:40.211211Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.211316Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:40.212264Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T08:54:40.212333Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.212396Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.212422Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T08:54:40.212489Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:54:40.212654Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:40.212667Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:40.212671Z node 
7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:54:40.212714Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:40.212723Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:40.212724Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:54:40.214551Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:40.214616Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:40.214632Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:40.214648Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:40.214656Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T08:54:40.215159Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:40.266872Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:40.267103Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2024-11-21T08:54:40.267111Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:54:40.267176Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2024-11-21T08:54:40.267182Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:54:40.267195Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046644480 2024-11-21T08:54:40.267195Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T08:54:40.267198Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T08:54:40.267484Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.267797Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046644480 message# TabletId: 72075186224037888 TxId: 281474976715660 Status: OK 2024-11-21T08:54:40.267833Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 
72057594046644480 message# TabletId: 72075186224037889 TxId: 281474976715660 Status: OK 2024-11-21T08:54:40.267846Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046644480 message# TabletId: 72075186224037890 TxId: 281474976715660 Status: OK 2024-11-21T08:54:40.268157Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179280314, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:40.268167Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732179280314, at tablet: 72057594046644480 2024-11-21T08:54:40.270653Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715660 Step: 1732179280314 2024-11-21T08:54:40.271011Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715660 Step: 1732179280314 2024-11-21T08:54:40.271045Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046644480 message# Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715660 Step: 1732179280314 2024-11-21T08:54:40.271081Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T08:54:40.271377Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.271443Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.271456Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T08:54:40.271465Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T08:54:40.271477Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T08:54:40.271502Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T08:54:40.271676Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:40.271690Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:40.271698Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T08:54:40.271725Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:40.271731Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:40.271732Z node 7 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:54:40.271736Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2024-11-21T08:54:37.191871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:37.193977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:37.194067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:37.194094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.194332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.194341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f84/r3tmp/tmpwz9qhl/pdisk_1.dat 2024-11-21T08:54:37.262478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1517, node 1 TClient is connected to server localhost:4184 2024-11-21T08:54:37.351882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:37.351898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:37.351901Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:37.351947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-4ff1" reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "RED-9a33-4ff1" status: RED message: "Compute quota usage" location { database { name: "/Root" } } reason: "RED-9a33-3195" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "RED-9a33-3195" status: RED message: "Shards quota exhausted" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T08:54:38.229736Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:38.231563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.231607Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.231658Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.231806Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.231829Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f84/r3tmp/tmpMHFg0I/pdisk_1.dat 2024-11-21T08:54:38.297330Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63102, node 3 TClient is connected to server localhost:1928 2024-11-21T08:54:38.395899Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.395914Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.395917Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.395998Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "ORANGE-9a33-4ff1" reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "ORANGE-9a33-4ff1" status: ORANGE message: "Compute quota usage" location { database { name: "/Root" } } reason: "ORANGE-9a33-3f66" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "ORANGE-9a33-3f66" status: ORANGE message: "Shards quota usage is over 99%" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T08:54:39.308878Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.308949Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.308974Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.309055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.309088Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.309121Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f84/r3tmp/tmpcVSTXY/pdisk_1.dat 2024-11-21T08:54:39.375261Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23714, node 5 TClient is connected to server localhost:63550 2024-11-21T08:54:39.470633Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.470647Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.470651Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.470731Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-4ff1" type: "DATABASE" level: 1 id: "YELLOW-9a33-4ff1" status: YELLOW message: "Compute quota usage" location { database { name: "/Root" } } reason: "YELLOW-9a33-d159" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-d159" status: YELLOW message: "Shards quota usage is over 90%" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T08:54:40.433695Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.433776Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.433798Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.433909Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.433983Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.434005Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f84/r3tmp/tmpANxkCH/pdisk_1.dat 2024-11-21T08:54:40.513049Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23819, node 7 TClient is connected to server localhost:1245 2024-11-21T08:54:40.614043Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.614061Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.614065Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.614117Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-1c83" type: "DATABASE" level: 1 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-1c83" status: YELLOW message: "Storage usage over 75%" location { database { name: "/Root" } } type: "STORAGE" level: 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2024-11-21T08:54:32.602736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653325129732878:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:32.602920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ee/r3tmp/tmpAJoJ8R/pdisk_1.dat 2024-11-21T08:54:32.648069Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12119, node 1 2024-11-21T08:54:32.663998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.664009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.664010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.664051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:32.684532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.685442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.685456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.686124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.686173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.686180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:54:32.686521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.686532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:54:32.686576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.686770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.687465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272733, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.687475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:54:32.687517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:54:32.687806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.687848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.687860Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:54:32.687869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:54:32.687878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:54:32.687886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:54:32.688187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:54:32.688203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:54:32.688223Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.688232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:54:32.698055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.698109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.698120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.698132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:54:32.698149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:54:32.698158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:54:32.698623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.698657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.698691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.698780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:54:32.698794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:54:32.698798Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.698812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T08:54:32.703076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.703100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.704577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30797 2024-11-21T08:54:32.708340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.708412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.708423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.708852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:32.708891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:32.709348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.709578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272754, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.709586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732179272754, at schemeshard: 72057594046644480 2024-11-21T08:54:32.709632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:54:32.709652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:54:32.709661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T08:54:32.709963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.710014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.710139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:54:32.710153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:54:32.710157Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.710170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T08:54:32.716504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestUpdateStorage, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.716586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.717268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: Sta ... 
179276}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1732179276612-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179276,"finish":1732179277},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179277}' 2024-11-21T08:54:40.310428Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653360028660728:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:40.310620Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ee/r3tmp/tmpMVQpPI/pdisk_1.dat 2024-11-21T08:54:40.322487Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61866, node 7 2024-11-21T08:54:40.337060Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.337077Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.337080Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.337127Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:40.410845Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:40.410885Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:40.412597Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:40.414069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.414179Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:40.414192Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.414617Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:40.414669Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:40.414678Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:54:40.415074Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:40.415083Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:40.415210Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:40.415376Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.416134Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179280461, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:40.416144Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:40.416185Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:40.416640Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.416695Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.416711Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:40.416725Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:40.416734Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:40.416751Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:40.416927Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:54:40.416940Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:40.416944Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:40.416957Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:40.431177Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.431273Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:40.431284Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.431306Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:40.431341Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:40.431350Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:40.432309Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:40.432383Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.432461Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.432755Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:40.432765Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:40.432768Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:40.432793Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:22705 2024-11-21T08:54:40.442843Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.442931Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:40.442943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:40.443657Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2024-11-21T08:54:40.443713Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:40.444126Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:40.444568Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179280489, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:40.444580Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179280489, at schemeshard: 72057594046644480 2024-11-21T08:54:40.444621Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:40.444645Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:40.444657Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:40.445074Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:40.445127Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:40.445238Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:40.445256Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:40.445259Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:40.445271Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> JsonChangeRecord::DataChange [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> JsonChangeRecord::Heartbeat [GOOD] |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] Test command err: 2024-11-21T08:54:37.166078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:37.168201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:37.168320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:37.168350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.168597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.168606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpFXonZO/pdisk_1.dat 2024-11-21T08:54:37.236489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8160, node 1 TClient is connected to server localhost:61826 2024-11-21T08:54:37.326487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:37.326503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:37.326506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:37.326552Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:38.202652Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:38.204301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.204345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.204395Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.204547Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.204571Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpzMep8G/pdisk_1.dat 2024-11-21T08:54:38.270588Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25228, node 3 TClient is connected to server localhost:23769 2024-11-21T08:54:38.364784Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.364801Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.364805Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.364889Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-d6d1" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-d6d1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-9a33-258e-ab18" type: "STORAGE" level: 2 } issue_log { id: "RED-9a33-258e-ab18" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-9a33-819b-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-99d2-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 
host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpzMep8G/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpzMep8G/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpzMep8G/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-9a33-819b-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2024-11-21T08:54:39.308703Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.308776Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.308803Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.308885Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.308917Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.308947Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpvo7ihM/pdisk_1.dat 2024-11-21T08:54:39.374661Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3457, node 5 TClient is connected to server localhost:28804 2024-11-21T08:54:39.469642Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.469657Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.469660Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.469720Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:40.131214Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.131256Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.131271Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmpP67srs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4939, node 7 TClient is connected to server localhost:13223 self_check_result: EMERGENCY issue_log { id: "RED-f65b-f489" status: RED message: "Database has compute issues" location { database { name: "/Root/serverless" } } reason: "RED-f65b-7469" type: "DATABASE" level: 1 } issue_log { id: "RED-f65b-7469" status: RED message: "There are no compute nodes" location { database { name: "/Root/serverless" } } type: "COMPUTE" level: 2 } database_status { name: "/Root/serverless" overall: RED storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "7-1-55" overall: GREEN pdisk { id: "7-1" overall: GREEN } } } } } compute { overall: RED } } location { id: 7 host: "::1" port: 12001 } 2024-11-21T08:54:40.648249Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:330:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.648304Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.648315Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f94/r3tmp/tmp3cvDrg/pdisk_1.dat 2024-11-21T08:54:40.719253Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9503, node 8 TClient is connected to server localhost:23816 2024-11-21T08:54:40.817720Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.817737Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.817740Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.817810Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD] >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks >> JsonChangeRecord::DataChangeVersion [GOOD] |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2024-11-21T08:54:20.729363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:20.729906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:20.729939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d18/r3tmp/tmpX4FMkR/pdisk_1.dat 2024-11-21T08:54:20.838293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:20.855070Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:20.897333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:20.897369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:20.907881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:21.011658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:21.025521Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:21.025698Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:21.025795Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:21.025852Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:21.032885Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:21.033029Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:21.033050Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:21.033196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:21.033203Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:21.033209Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:21.033247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:21.035765Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:21.035815Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:21.035831Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:21.035834Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:21.035837Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:21.035840Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:21.035933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:21.035939Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:21.036029Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:21.036043Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:21.036055Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:21.036059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:21.036065Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:21.036072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:21.036078Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:21.036083Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:21.036088Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:21.036091Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:21.036096Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:21.036102Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:21.036117Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:21.036120Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:21.036138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:21.036180Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:21.036186Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:21.036200Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:21.036221Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:21.036226Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:21.036230Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:21.036234Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:21.036274Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:21.036278Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:21.036282Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:21.036285Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:21.036294Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:21.036298Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:21.036301Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:21.036304Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:21.036309Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:21.036487Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:21.036492Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:21.046728Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:21.046751Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:21.046756Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:21.046765Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:21.046776Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:21.220164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:21.220182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:21.220190Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:21.220232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:21.220237Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:21.220259Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:21.220266Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:21.220271Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:21.220276Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:21.221074Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:21.221086Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:21.221180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:21.221186Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:21.221192Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:21.221199Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:21.221203Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:21.221210Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... leId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\002\000\004\000\000\000\002\000\000\000\004\000\000\000\000\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "index" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 3 Name: "value" Type: 1 } MaxValueSizeBytes: 4 } } 2024-11-21T08:54:41.669268Z node 13 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Int32 : 2, Int32 : 0) 2024-11-21T08:54:41.669274Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 2, Int32 : 0) table: [72057594046644480:2:1] 2024-11-21T08:54:41.669282Z node 13 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:54:41.669286Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:54:41.669320Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:54:41.669331Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:54:41.669335Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:54:41.669338Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:41.669341Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:41.669347Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T08:54:41.669361Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T08:54:41.669364Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:54:41.669366Z node 13 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:41.669368Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:54:41.669371Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:54:41.669375Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T08:54:41.669383Z node 13 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715663 not found 2024-11-21T08:54:41.669386Z node 13 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715663 expected 2:5 found 0:0 2024-11-21T08:54:41.669395Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T08:54:41.669404Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:54:41.669406Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:54:41.669408Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:41.669411Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:41.669449Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:41.669453Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:41.669456Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:41.669459Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:41.669469Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T08:54:41.669472Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:41.669476Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T08:54:41.669584Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:41.669588Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:41.669593Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T08:54:41.669605Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:41.669685Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MjVmZmNhZjItYTgzZjk5Ni05MzEzNTQ0ZC00Mzg4MmM0Yw==, ActorId: [13:912:2730], ActorState: ExecuteState, TraceId: 01jd6ywhqyc6exrpkk0x2b58vm, Create QueryResponse for error on request, msg: 2024-11-21T08:54:41.669794Z node 13 
:KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ywhqyc6exrpkk0x2b58vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjVmZmNhZjItYTgzZjk5Ni05MzEzNTQ0ZC00Mzg4MmM0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:54:41.669835Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:964:2730], Recipient [13:837:2672]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 964 RawX2: 55834577578 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\002 \005)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2024-11-21T08:54:41.669841Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:41.669859Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:837:2672], Recipient [13:837:2672]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:54:41.669863Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T08:54:41.669870Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:41.669888Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T08:54:41.669895Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2024-11-21T08:54:41.669900Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T08:54:41.669902Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T08:54:41.669905Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:41.669909Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:41.669913Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/18446744073709551615 ImmediateWriteEdgeReplied# v3501/18446744073709551615 2024-11-21T08:54:41.669918Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2024-11-21T08:54:41.669920Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T08:54:41.669922Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:41.669924Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T08:54:41.669926Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T08:54:41.669932Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T08:54:41.669940Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 
Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T08:54:41.669948Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T08:54:41.669953Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T08:54:41.669955Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T08:54:41.669957Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:41.669959Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:41.669963Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T08:54:41.669971Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2024-11-21T08:54:41.669973Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:41.669975Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:41.669977Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:41.669981Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T08:54:41.669983Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:41.669985Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2024-11-21T08:54:41.669992Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:41.669994Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:41.669997Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:41.670107Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:54:2101], Recipient [13:837:2672]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 13 Status: STATUS_NOT_FOUND |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:53:00.766916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:53:00.766942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.766948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:53:00.766953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:53:00.766970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:53:00.766974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:53:00.766983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:53:00.767066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:53:00.774207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:53:00.774224Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:00.776282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:53:00.776771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:53:00.776795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:53:00.778319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:53:00.778567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:53:00.778687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.778770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:53:00.779763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.779966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.779974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.779999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:53:00.780004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.780009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:53:00.780020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.780929Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:53:00.790967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:53:00.791037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.791086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:53:00.791140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:53:00.791145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.791805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:53:00.791823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:53:00.791866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.791873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:53:00.791877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:53:00.791880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:53:00.792230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.792240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:53:00.792244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:53:00.792554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.792561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.792565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.792569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.792955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:53:00.793283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:53:00.793324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:53:00.793460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 
72057594046678944 2024-11-21T08:53:00.793476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:53:00.793489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.793526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:53:00.793530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:53:00.793554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.793562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:53:00.793881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:53:00.793886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:53:00.793915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:53:00.793918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:53:00.793980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:53:00.793984Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:53:00.793992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:53:00.793994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.793998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:53:00.794002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:53:00.794005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:53:00.794007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:53:00.794014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:53:00.794018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:53:00.794020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:53:00.794200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.794208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:53:00.794211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:53:00.794214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:53:00.794216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:53:00.794224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... emeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:41.156100Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:305:2293]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:54:41.156135Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409546 2024-11-21T08:54:41.156197Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409546, for tableId 2: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2024-11-21T08:54:41.156260Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2024-11-21T08:54:41.156311Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:41.156324Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:41.156406Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:305:2293], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 72 Memory: 123880 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:54:41.156412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:54:41.156427Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0072 2024-11-21T08:54:41.156446Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:54:41.156453Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:54:41.156468Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:41.156472Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:41.187307Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.187337Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.187344Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2024-11-21T08:54:41.187367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T08:54:41.187373Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2024-11-21T08:54:41.187418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2024-11-21T08:54:41.187438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2024-11-21T08:54:41.187449Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2024-11-21T08:54:41.187503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:41.197756Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.197789Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.197795Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:54:41.218166Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:704:2672]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T08:54:41.218198Z node 3 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186233409547 2024-11-21T08:54:41.218236Z node 3 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186233409547, for tableId 3: RowCount 100, DataSize 13940, IndexSize 102, PartCount 1 2024-11-21T08:54:41.218283Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2024-11-21T08:54:41.218378Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:704:2672], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 17 Memory: 123880 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 209 TableOwnerId: 72057594046678944 FollowerId: 0 2024-11-21T08:54:41.218385Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T08:54:41.218396Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0017 2024-11-21T08:54:41.218407Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T08:54:41.218415Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2024-11-21T08:54:41.248915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2024-11-21T08:54:41.248948Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2024-11-21T08:54:41.248957Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2024-11-21T08:54:41.248975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2024-11-21T08:54:41.248978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2024-11-21T08:54:41.249004Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.249009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.249012Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2024-11-21T08:54:41.249028Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T08:54:41.249031Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2024-11-21T08:54:41.249053Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2024-11-21T08:54:41.249068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0 2024-11-21T08:54:41.249077Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 100, DataSize 13940, with borrowed parts 2024-11-21T08:54:41.249110Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2024-11-21T08:54:41.249132Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:54:41.259274Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.259287Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T08:54:41.259291Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T08:54:41.453004Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:41.453036Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:54:41.453057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:54:41.453061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |89.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::StorageLimit50 [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> 
ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::StorageLimit50 [GOOD] Test command err: 2024-11-21T08:54:39.030799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:39.032834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.032919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.032945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.033187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.033196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f69/r3tmp/tmpHI763p/pdisk_1.dat 2024-11-21T08:54:39.113214Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30003, node 1 TClient is connected to server localhost:31248 2024-11-21T08:54:39.204677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.204694Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.204696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.204744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-8d1d" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-8d1d" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-4847-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-9a33-ef3e-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9a33-4847-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 
12001 } 2024-11-21T08:54:40.088462Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:40.090373Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.090416Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.090465Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.090616Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.090639Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f69/r3tmp/tmpLh67B7/pdisk_1.dat 2024-11-21T08:54:40.164507Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13252, node 3 TClient is connected to server localhost:28625 2024-11-21T08:54:40.269935Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.269954Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.269959Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.270061Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.151155Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.151228Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.151255Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.151339Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.151372Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.151402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f69/r3tmp/tmpEAmpca/pdisk_1.dat 2024-11-21T08:54:41.219100Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1051, node 5 TClient is connected to server localhost:18313 2024-11-21T08:54:41.316366Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:41.316379Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:41.316382Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:41.316449Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:42.330744Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.330802Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:42.330817Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:42.330887Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.330932Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:42.330946Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f69/r3tmp/tmpeoMiUl/pdisk_1.dat 2024-11-21T08:54:42.400311Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3752, node 7 TClient is connected to server localhost:26564 2024-11-21T08:54:42.498659Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:42.498676Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:42.498679Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:42.498722Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: 2024-11-21T08:54:38.044819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:38.046963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.047046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.047073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.047304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.047313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6e/r3tmp/tmp4FRFkc/pdisk_1.dat 2024-11-21T08:54:38.120783Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16990, node 1 TClient is connected to server localhost:30052 2024-11-21T08:54:38.209986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.209998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.210001Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.210045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:39.091589Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:39.093417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.093481Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.093538Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.093699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.093724Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6e/r3tmp/tmpBmscXA/pdisk_1.dat 2024-11-21T08:54:39.166088Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21599, node 3 TClient is connected to server localhost:9627 2024-11-21T08:54:39.261917Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.261932Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.261934Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.262000Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:40.231219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.231308Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.231336Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.231420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.231452Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.231481Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6e/r3tmp/tmpzX05Tk/pdisk_1.dat 2024-11-21T08:54:40.297276Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15826, node 5 TClient is connected to server localhost:9998 2024-11-21T08:54:40.394880Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.394895Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.394897Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.394967Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.247417Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.247471Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.247484Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.247549Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.247589Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.247602Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6e/r3tmp/tmp0w6WpA/pdisk_1.dat 2024-11-21T08:54:41.315467Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9305, node 7 TClient is connected to server localhost:65047 2024-11-21T08:54:41.414461Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:41.414479Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:41.414483Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:41.414531Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:42.116348Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.116390Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:42.116402Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6e/r3tmp/tmp7Pi7nk/pdisk_1.dat 2024-11-21T08:54:42.186723Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26963, node 9 TClient is connected to server localhost:23389 2024-11-21T08:54:42.283008Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:42.283022Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:42.283025Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:42.283093Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-06ec-be81" status: RED message: "Database has storage issues" location { database { name: "/Root/database" } } reason: "RED-06ec-caea" type: "DATABASE" level: 1 } issue_log { id: "RED-06ec-caea" status: RED message: "There are no storage pools" location { database { name: "/Root/database" } } type: "STORAGE" level: 2 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: GREEN nodes { id: "10" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 9 host: "::1" port: 12001 } >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] Test command err: 2024-11-21T08:54:38.193826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:38.197143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.197298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.197350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.197774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.197789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6b/r3tmp/tmpHwbiKt/pdisk_1.dat 2024-11-21T08:54:38.265901Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3717, node 1 TClient is connected to server localhost:28172 2024-11-21T08:54:38.355506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.355521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.355524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.355577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:39.309008Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:39.311058Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.311104Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.311154Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.311327Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.311353Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6b/r3tmp/tmpW8Nhc0/pdisk_1.dat 2024-11-21T08:54:39.378620Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10926, node 3 TClient is connected to server localhost:64723 2024-11-21T08:54:39.475621Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.475637Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.475639Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.475711Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:40.379149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.379226Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.379260Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.379348Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.379381Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.379413Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6b/r3tmp/tmpnNDsUy/pdisk_1.dat 2024-11-21T08:54:40.462905Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10819, node 5 TClient is connected to server localhost:4471 2024-11-21T08:54:40.563915Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.563930Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.563933Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.563995Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T08:54:41.469390Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.469484Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.469503Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.469606Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.469671Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.469691Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6b/r3tmp/tmpAVJzql/pdisk_1.dat 2024-11-21T08:54:41.546356Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31649, node 7 TClient is connected to server localhost:21985 2024-11-21T08:54:41.651027Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:41.651047Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:41.651052Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:41.651116Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 7 host: "::1" port: 12001 } 2024-11-21T08:54:42.314826Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.314870Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:42.314881Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f6b/r3tmp/tmp6WjFic/pdisk_1.dat 2024-11-21T08:54:42.381319Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28033, node 9 TClient is connected to server localhost:10411 2024-11-21T08:54:42.479717Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:42.479732Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:42.479735Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:42.479808Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } database_status { name: "/Root" overall: YELLOW storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: YELLOW nodes { id: "9" overall: YELLOW load { overall: YELLOW load: 120.309082 cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "9-1-55" overall: GREEN pdisk { id: "9-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "10" overall: GREY } } } location { id: 9 host: "::1" port: 12001 } >> CompressExecutor::TestReorderedExecutor >> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T08:54:43.147838Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.147843Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.147846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.147940Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T08:54:43.148107Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.149157Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.149247Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:43.149631Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.149635Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.149638Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.149702Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.149802Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.149839Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.149873Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:43.149948Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T08:54:43.150158Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150161Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150163Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.150216Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.150336Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.150359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150383Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:43.150546Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150641Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:43.150668Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.150675Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T08:54:43.150846Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150848Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.150851Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.150891Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T08:54:43.150990Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.151032Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.151067Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T08:54:43.151244Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:43.151261Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:54:43.151325Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:54:43.151338Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:54:43.151355Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.151358Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:43.151362Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:43.151381Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T08:54:43.151393Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:54:43.151396Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:54:43.151397Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.151406Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T08:54:43.151412Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T08:54:43.151414Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2024-11-21T08:54:43.151416Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:43.151423Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T08:54:43.151429Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T08:54:43.151431Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T08:54:43.151432Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.151439Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2024-11-21T08:54:43.169550Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.169557Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.169560Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.169629Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.169729Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.169779Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.169837Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T08:54:43.170025Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:43.170055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:54:43.170109Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:54:43.170118Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:54:43.170142Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.170148Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:43.170153Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:54:43.170156Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:54:43.170162Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.170217Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2024-11-21T08:54:43.170245Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T08:54:43.170248Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T08:54:43.170251Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T08:54:43.170253Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T08:54:43.170257Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.170271Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2024-11-21T08:54:43.171116Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.171120Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.171123Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.171196Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.171284Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.171314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.171357Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:43.171492Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:43.171583Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:43.171628Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T08:54:43.171643Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T08:54:43.171660Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.171663Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:43.171666Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T08:54:43.171668Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T08:54:43.171672Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T08:54:43.171674Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T08:54:43.171691Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2024-11-21T08:54:43.171709Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> KqpQueryService::TableSink_Olap_Replace >> KqpQueryService::Write ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T08:54:42.975537Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.975543Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.975545Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:42.975631Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:42.975770Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:42.976811Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.976899Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:42.977166Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:42.977223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:42.977290Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T08:54:42.977302Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:42.977325Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:42.977329Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T08:54:42.977336Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T08:54:42.977338Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T08:54:42.977577Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.977580Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.977581Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:42.977636Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:42.977934Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:42.977984Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.978019Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T08:54:42.978129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:42.978144Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:54:42.978189Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:54:42.978203Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:54:42.978233Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:42.978236Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:42.978240Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:42.978280Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T08:54:42.978293Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:54:42.978296Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:54:42.978298Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:42.978309Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T08:54:42.978316Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T08:54:42.978318Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T08:54:42.978319Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:42.978325Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T08:54:42.978330Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T08:54:42.978332Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T08:54:42.978334Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:42.978343Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2024-11-21T08:54:42.978713Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.978715Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.978717Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:42.978760Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:42.978836Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:42.978878Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:42.978906Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T08:54:42.978980Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:42.978994Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T08:54:42.979029Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T08:54:42.979042Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:54:42.979057Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:42.979061Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:42.979071Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2024-11-21T08:54:42.979080Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:42.979082Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:42.979086Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2024-11-21T08:54:42.979091Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:54:42.979094Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T08:54:42.979098Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2024-11-21T08:54:42.979104Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T08:54:42.979106Z :DEBUG: [db] [sessionid] [cluster] ... estTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.328818Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2024-11-21T08:54:43.355433Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T08:54:43.355441Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T08:54:43.355445Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.355530Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.355646Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.355696Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T08:54:43.355739Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T08:54:43.382239Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T08:54:43.382322Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.382334Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:43.382338Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T08:54:43.382340Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T08:54:43.382344Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T08:54:43.382346Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T08:54:43.382349Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T08:54:43.382351Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T08:54:43.382355Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T08:54:43.382358Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T08:54:43.382370Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T08:54:43.382422Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T08:54:43.384061Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2024-11-21T08:54:43.385556Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.385559Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.385562Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.385613Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.385696Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.385724Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.385794Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T08:54:43.385894Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T08:54:43.386119Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.386122Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.386125Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:43.386170Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:43.386236Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:43.386269Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.386398Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:43.386441Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:43.386475Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:43.386483Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T08:54:43.386520Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2024-11-21T08:52:35.934448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:52:35.934476Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:52:35.934502Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:52:35.937816Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:52:35.937967Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:52:35.938033Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:52:35.938998Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:52:35.948028Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:52:35.948172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:52:35.948352Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:52:35.948368Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:52:35.948375Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:52:35.948414Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:52:35.951989Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:52:35.952054Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:52:35.952098Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:52:35.952105Z node 1 :TX_DATASHARD DEBUG: 
Trying to activate change sender: at tablet: 9437184 2024-11-21T08:52:35.952110Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:52:35.952115Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:35.952227Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.952235Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:35.952264Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:52:35.952286Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:52:35.952340Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:35.952348Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:52:35.952356Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:52:35.952361Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:52:35.952365Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:52:35.952370Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:52:35.952376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:52:35.978065Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.978090Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:35.978099Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:52:35.978670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:52:35.978687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:52:35.978714Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:52:35.978751Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:52:35.978762Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:52:35.978771Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:52:35.978778Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:35.978783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:52:35.978788Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:52:35.978793Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
StoreSchemeTx 2024-11-21T08:52:35.978873Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:52:35.978877Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:52:35.978881Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:52:35.978884Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:35.978894Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:52:35.978898Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:52:35.978901Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:52:35.978904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:35.978909Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:52:36.000081Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:52:36.000111Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:52:36.000118Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:52:36.000131Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:52:36.000145Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:52:36.000315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:36.000324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:52:36.000331Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:52:36.000351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:52:36.000355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:52:36.000390Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:52:36.000399Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:36.000403Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:52:36.000408Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:52:36.001126Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:52:36.001151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:52:36.001218Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
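The TX_DATASHARD TRACE lines above walk a scheme transaction through a fixed sequence of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), with each unit reporting a status such as Executed, DelayComplete, or "not ready". The sketch below is a minimal, self-contained model of that pipeline: the unit names are taken from the log, but the types, lambdas, and control flow are illustrative assumptions only and are not the actual NKikimr::NDataShard classes.

```cpp
// Illustrative only: a simplified model of the execution-unit pipeline the
// TX_DATASHARD TRACE lines describe. Unit names come from the log; the types
// and control flow here are assumptions, not the real YDB implementation.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;  // stand-in for a unit's Execute()
};

int main() {
    // Order observed in the log for scheme tx [0:1] before it is planned.
    std::vector<TUnit> pipeline = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose",  [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",    [] { return EStatus::NotReady; }},  // waits for TEvPlanStep
    };

    std::vector<std::string> delayedComplete;  // finished later, after commit
    for (const auto& unit : pipeline) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        const EStatus st = unit.Run();
        if (st == EStatus::NotReady) {
            std::cout << "Operation is not ready to execute on unit " << unit.Name << "\n";
            break;  // parked until the plan step arrives
        }
        if (st == EStatus::DelayComplete) {
            delayedComplete.push_back(unit.Name);
        }
        std::cout << "Advance execution plan, executed unit " << unit.Name << "\n";
    }
    for (const auto& name : delayedComplete) {
        std::cout << "Complete execution for unit " << name << "\n";
    }
    return 0;
}
```

Under these assumptions, the output mirrors the TRACE sequence in the log: the operation advances through the first three units, records two of them for delayed completion, and parks on WaitForPlan until the 1000001 plan step is delivered.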
2024-11-21T08:52:36.001224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:52:36.001233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:52:36.001240Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:52:36.001244Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:52:36.001253Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:52:36.001257Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:52:36.001264Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:36.001268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:52:36.001272Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:52:36.001276Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:52:36.001318Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:52:36.001322Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:36.001326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:52:36.001330Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:52:36.001336Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:52:36.001347Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:52:36.001350Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:52:36.001353Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:52:36.001356Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:52:36.001366Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:52:36.001370Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:52:36.001372Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:52:36.001378Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:52:36.001381Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:52:36.001384Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
9437184 txId 521 2024-11-21T08:54:38.581855Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2024-11-21T08:54:38.581857Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581859Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2024-11-21T08:54:38.581880Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T08:54:38.581882Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581886Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2024-11-21T08:54:38.581902Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2024-11-21T08:54:38.581904Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581906Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2024-11-21T08:54:38.581915Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2024-11-21T08:54:38.581917Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581918Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2024-11-21T08:54:38.581933Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2024-11-21T08:54:38.581935Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581937Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2024-11-21T08:54:38.581952Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2024-11-21T08:54:38.581954Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581956Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2024-11-21T08:54:38.581970Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2024-11-21T08:54:38.581972Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581974Z node 16 
:TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2024-11-21T08:54:38.581995Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2024-11-21T08:54:38.581997Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.581999Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2024-11-21T08:54:38.582007Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2024-11-21T08:54:38.582009Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582011Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2024-11-21T08:54:38.582032Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T08:54:38.582035Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582037Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2024-11-21T08:54:38.582048Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2024-11-21T08:54:38.582050Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582052Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2024-11-21T08:54:38.582065Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2024-11-21T08:54:38.582066Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582068Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2024-11-21T08:54:38.582081Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2024-11-21T08:54:38.582083Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582085Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2024-11-21T08:54:38.582098Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2024-11-21T08:54:38.582100Z node 16 :TX_DATASHARD TRACE: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582102Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2024-11-21T08:54:38.582119Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2024-11-21T08:54:38.582120Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582122Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2024-11-21T08:54:38.582133Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2024-11-21T08:54:38.582135Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582137Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2024-11-21T08:54:38.582155Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2024-11-21T08:54:38.582157Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582159Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2024-11-21T08:54:38.582171Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T08:54:38.582173Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582175Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2024-11-21T08:54:38.582191Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2024-11-21T08:54:38.582193Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.582195Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2024-11-21T08:54:38.594520Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:54:38.594543Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2024-11-21T08:54:38.594564Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T08:54:38.594579Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T08:54:38.594587Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2024-11-21T08:54:38.594640Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:54:38.594645Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2024-11-21T08:54:38.594652Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:54:38.594657Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:54:38.594687Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T08:54:38.594691Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:54:38.594696Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 22 31 26 29 30 31 28 31 31 31 29 30 31 30 15 29 5 25 22 29 30 21 21 21 - 21 8 - 8 24 24 - actual 22 31 26 29 30 31 28 31 31 31 29 30 31 30 15 29 5 25 22 29 30 21 21 21 - 21 8 - 8 24 24 - interm 22 28 26 29 30 30 28 29 29 15 29 30 30 30 15 29 5 25 22 29 30 21 21 21 - 21 8 - 8 24 24 - >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> KqpQueryService::DdlUser >> KqpQueryServiceScripts::ValidateScript >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryService::AlterTempTable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> KqpQueryService::Write [GOOD] >> KqpQueryService::TempTablesDrop >> KqpQueryServiceScripts::TestPaging >> THealthCheckTest::TestTabletIsDead [GOOD] |89.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |89.1%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> KqpQueryService::CloseConnection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2024-11-21T08:54:16.619791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:16.620171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:16.620190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d26/r3tmp/tmpdI4GR6/pdisk_1.dat 2024-11-21T08:54:16.723819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:16.739548Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:16.781522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:16.781550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:16.791966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:16.895466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:16.909884Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:16.910064Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:16.910131Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:16.910179Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:16.917276Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:16.917465Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:16.917492Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:16.917644Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:16.917652Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:16.917658Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:16.917701Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:16.921097Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:16.921162Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:16.921182Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:16.921186Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:16.921190Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:16.921195Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:16.921306Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:16.921313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:16.921440Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:16.921458Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:16.921469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:16.921473Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:16.921479Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:16.921485Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:16.921491Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:16.921497Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:16.921502Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:16.921506Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:16.921510Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:16.921515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:16.921533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:16.921537Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:16.921556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:16.921605Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:16.921615Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:16.921630Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:16.921636Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:16.921640Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:16.921645Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:16.921648Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:16.921686Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:16.921689Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:16.921692Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:16.921696Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:16.921705Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:16.921708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:16.921711Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:16.921714Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:16.921718Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:16.921914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:16.921921Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:16.932247Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:16.932273Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:16.932280Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:16.932291Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:16.932304Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:17.109083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:17.109105Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:17.109112Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:17.109128Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:17.109132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:17.109158Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:17.109166Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:17.109170Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:17.109175Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:17.109808Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:17.109824Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:17.109934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:17.109939Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:17.109943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:17.109948Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:17.109952Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:17.109959Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:44.401787Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:44.401789Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:44.401793Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T08:54:44.401795Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:44.401797Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:44.401799Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T08:54:44.401801Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T08:54:44.401804Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:44.401806Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T08:54:44.401807Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T08:54:44.401809Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T08:54:44.401820Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T08:54:44.401822Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:44.401824Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T08:54:44.401826Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:44.401828Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T08:54:44.401831Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T08:54:44.401833Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T08:54:44.401834Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T08:54:44.401837Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:44.401839Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:44.401840Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T08:54:44.401843Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T08:54:44.401854Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T08:54:44.401856Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T08:54:44.401859Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T08:54:44.401862Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T08:54:44.401864Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:44.401866Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T08:54:44.401867Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:54:44.401870Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:44.401884Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T08:54:44.401886Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:54:44.401888Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:54:44.401890Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:54:44.401892Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T08:54:44.401894Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:54:44.401896Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T08:54:44.401898Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:44.401899Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:54:44.401901Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:54:44.401903Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:54:44.412301Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T08:54:44.412327Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T08:54:44.412341Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:44.412350Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:54:44.412373Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:44.412384Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:44.412468Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T08:54:44.412471Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T08:54:44.412476Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:54:44.412479Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:54:44.412484Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:54:44.412490Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:54:44.412815Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T08:54:44.412837Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:54:44.412851Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T08:54:44.412872Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:44.412877Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:54:44.412881Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:54:44.412884Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:54:44.412891Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T08:54:44.412894Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:44.412896Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:54:44.412898Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:54:44.412900Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:54:44.412910Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T08:54:44.412985Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T08:54:44.412989Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T08:54:44.412996Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 3} after executionsCount# 1 2024-11-21T08:54:44.413005Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T08:54:44.413039Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} finished in read 2024-11-21T08:54:44.413052Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:44.413056Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:54:44.413059Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:54:44.413064Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:54:44.413075Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T08:54:44.413077Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:54:44.413082Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T08:54:44.413085Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:54:44.413100Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2024-11-21T08:54:39.514750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:39.516874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.516970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.516997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.517249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.517258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f61/r3tmp/tmp84GJLF/pdisk_1.dat 2024-11-21T08:54:39.588113Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1409, node 1 TClient is connected to server localhost:29335 2024-11-21T08:54:39.678586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.678603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.678606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.678661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:40.538724Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:40.540637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.540701Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.540782Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.541013Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.541049Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f61/r3tmp/tmp06aXSg/pdisk_1.dat 2024-11-21T08:54:40.607389Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14868, node 3 TClient is connected to server localhost:22217 2024-11-21T08:54:40.707931Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.707945Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.707948Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.708020Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.626897Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:285:2188], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.626969Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.627002Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.627110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:692:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.627143Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.627158Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f61/r3tmp/tmpegAfHJ/pdisk_1.dat 2024-11-21T08:54:41.703940Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23057, node 5 TClient is connected to server localhost:13026 2024-11-21T08:54:42.044069Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:42.044087Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:42.044090Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:42.044135Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:42.047822Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:42.047856Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:42.080176Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2024-11-21T08:54:42.080339Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" reason: "YELLOW-9a33-e9e2-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T08:54:43.139666Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:337:2187], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:43.139735Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:43.139754Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:43.139845Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:749:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:43.139895Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:43.139907Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f61/r3tmp/tmpE3iY8A/pdisk_1.dat 2024-11-21T08:54:43.211438Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10205, node 8 TClient is connected to server localhost:64664 2024-11-21T08:54:43.718400Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:43.718420Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:43.718423Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:43.718934Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:43.719133Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1221:2663]) [8:1477:2667] 2024-11-21T08:54:43.719184Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2024-11-21T08:54:43.720568Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2024-11-21T08:54:43.720587Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T08:54:43.720651Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2024-11-21T08:54:43.720657Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T08:54:43.720701Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2024-11-21T08:54:43.720706Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T08:54:43.720728Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2024-11-21T08:54:43.721030Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T08:54:43.722559Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([10:1447:2333] ... 
y 1.000002048 2024-11-21T08:54:43.768164Z node 8 :HIVE DEBUG: HIVE#72057594037968897 [FBN] Finding best node for tablet PersQueue.72075186224037888.Leader.0 2024-11-21T08:54:43.768170Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 family {PersQueue.72075186224037888.Leader.0 Booting} 2024-11-21T08:54:43.768178Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 node 8 is not alive 2024-11-21T08:54:43.768196Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected usage 0.000005171 of node 10 2024-11-21T08:54:43.768201Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2024-11-21T08:54:43.768226Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2024-11-21T08:54:43.768232Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2024-11-21T08:54:43.768238Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2024-11-21T08:54:43.768245Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T08:54:43.768258Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T08:54:43.768268Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T08:54:43.768280Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:54:43.768306Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2024-11-21T08:54:43.768340Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:54:43.779442Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T08:54:43.779466Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:54:43.779480Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1446:2333]} 2024-11-21T08:54:43.779600Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2024-11-21T08:54:43.795847Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T08:54:43.795881Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1446:2333] 2024-11-21T08:54:43.795889Z node 8 
:HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 2024-11-21T08:54:43.795896Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T08:54:43.795920Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T08:54:43.795925Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T08:54:43.795931Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T08:54:43.795934Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T08:54:43.795938Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T08:54:43.795961Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T08:54:43.795963Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T08:54:43.795997Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2024-11-21T08:54:43.796002Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:54:43.796005Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T08:54:43.796009Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:54:43.807249Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1220:2662]} 2024-11-21T08:54:43.807278Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:54:44.031579Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2024-11-21T08:54:44.031609Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2024-11-21T08:54:44.031614Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T08:54:44.031634Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T08:54:44.031682Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2024-11-21T08:54:44.031706Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2024-11-21T08:54:44.031713Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T08:54:44.031725Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T08:54:44.031729Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T08:54:44.031735Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2024-11-21T08:54:44.031739Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2024-11-21T08:54:44.031746Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T08:54:44.031748Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T08:54:44.031780Z node 8 :HIVE DEBUG: HIVE#72057594037968897 
THive::TTxKillNode(10)::Execute 2024-11-21T08:54:44.031792Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:54:44.031796Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2024-11-21T08:54:44.031800Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2024-11-21T08:54:44.031804Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1517:2672] 2024-11-21T08:54:44.031808Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2024-11-21T08:54:44.032026Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1447:2333]) [8:1517:2672] 2024-11-21T08:54:44.032660Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1873:2691]) [8:1874:2696] 2024-11-21T08:54:44.033578Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:1873:2691]) [8:1874:2696] 2024-11-21T08:54:44.033814Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:1846:2333]) [8:1908:2699] 2024-11-21T08:54:44.034876Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:1845:2333] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T08:54:44.034892Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2024-11-21T08:54:44.034912Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.034918Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:54:44.034921Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T08:54:44.034923Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T08:54:44.034924Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T08:54:44.034931Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.034974Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: 
"YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-10" reason: "YELLOW-9a33-e9e2-11" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2024-11-21T08:54:40.497140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:40.499935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.500058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.500095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.500440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.500452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpKQEbAE/pdisk_1.dat 2024-11-21T08:54:40.573908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14213, node 1 TClient is connected to server localhost:20243 2024-11-21T08:54:40.662055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.662072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.662075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.662124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.503478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:41.505385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.505447Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.505513Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.505677Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:41.505702Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpRdvRGG/pdisk_1.dat 2024-11-21T08:54:41.572624Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11483, node 3 TClient is connected to server localhost:8194 2024-11-21T08:54:41.676820Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:41.676840Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:41.676844Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:41.676948Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size 
is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpRdvRGG/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpRdvRGG/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpRdvRGG/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2024-11-21T08:54:42.652149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.652259Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:42.652293Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:42.652388Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:42.652423Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:42.652456Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmp5ZFZ34/pdisk_1.dat 2024-11-21T08:54:42.723688Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24319, node 5 TClient is connected to server localhost:10562 2024-11-21T08:54:42.821958Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:42.821975Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:42.821978Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:42.822047Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmp5ZFZ34/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmp5ZFZ34/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmp5ZFZ34/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { 
pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T08:54:43.727812Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:690:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:43.727902Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:43.727910Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:43.728064Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:688:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:43.728105Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:43.728121Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f5c/r3tmp/tmpEeEawz/pdisk_1.dat 2024-11-21T08:54:43.791826Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17636, node 7 TClient is connected to server localhost:23247 2024-11-21T08:54:44.119093Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.119110Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.119113Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.119177Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:44.121339Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.121362Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.165410Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2024-11-21T08:54:44.165579Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.201974Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T08:54:44.202089Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" 
level: 4 } location { id: 7 host: "::1" port: 12001 } >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlSecret >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::CloseConnection [GOOD] >> KqpQueryService::CreateAndDropTopic >> KqpQueryService::TableSink_ReplaceDuplicatesOlap >> KqpDocumentApi::RestrictWrite >> KqpService::SessionBusy >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 20273, MsgBus: 23546 2024-11-21T08:54:43.659082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653374848923494:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.659242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f58/r3tmp/tmpnJVu4r/pdisk_1.dat 2024-11-21T08:54:43.700807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20273, node 1 2024-11-21T08:54:43.715910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:43.715921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:43.715923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:43.715950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23546 TClient is connected to server localhost:23546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:43.759136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.759157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.759545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.760310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:54:43.772070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.832110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.848668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.857781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.969029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374848925029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.969058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.994936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.001625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.011502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.018107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.072357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.081219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.089138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379143892842:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.089172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379143892847:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.089174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.089765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.094248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653379143892849:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 17419, MsgBus: 5855 2024-11-21T08:54:44.523852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653378454626992:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.523868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f58/r3tmp/tmpMaJchf/pdisk_1.dat 2024-11-21T08:54:44.531163Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17419, node 2 2024-11-21T08:54:44.539496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.539507Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.539510Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.539537Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5855 TClient is connected to server localhost:5855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.623907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.623940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.625019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.626175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.626859Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:44.766781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653378454627598:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.766800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653378454627587:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.766814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.767378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.768859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653378454627601:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:44.848013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:2, at schemeshard: 72057594046644480 2024-11-21T08:54:44.875435Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T08:54:44.875670Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653378454627909:2332], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:44.875748Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2QwZTYyMjQtMjAxNDJmYmQtYjExYjU1ZGItNDIwN2I3OWM=, ActorId: [2:7439653378454627583:2297], ActorState: ExecuteState, TraceId: 01jd6ywmw9fxb1yep1332wc38f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:44.881579Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653378454627934:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:44.881630Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2IxNGI5MDMtNWI5YjE4MTYtZDZjMzI2NWEtOGUxMGU1ZmU=, ActorId: [2:7439653378454627930:2337], ActorState: ExecuteState, TraceId: 01jd6ywmwg0jkxs9ce7bfergn7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15475, MsgBus: 28519 2024-11-21T08:54:44.993712Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653377186235905:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.993740Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f58/r3tmp/tmpH7egbW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15475, node 3 2024-11-21T08:54:45.007986Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:45.009305Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.009317Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.009318Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.009346Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28519 TClient is connected to server localhost:28519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.094021Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.094059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.095074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.096464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.105363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:45.112931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.129419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.138757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.316981Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653381481204751:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.317022Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.321543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.327978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.333868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.341216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.347552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.355204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.363603Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653381481205252:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.363626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.363632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653381481205257:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.364105Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.367996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653381481205259:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:45.558997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.559300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.559442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |89.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 3158, MsgBus: 16530 2024-11-21T08:54:44.351104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653379084556017:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.351246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002efe/r3tmp/tmpY1CU11/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3158, node 1 2024-11-21T08:54:44.396030Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:44.405314Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.405332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.405334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.405364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16530 TClient is connected to server localhost:16530 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.447855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.450980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.450997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.452151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.459630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.472391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.487013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.494443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.582428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379084557546:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.582450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.606017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.610407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.619792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.626754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.633578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.640747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.649253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379084558039:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.649277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.649284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379084558044:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.649835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.653954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653379084558046:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 30652, MsgBus: 12969 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002efe/r3tmp/tmpIk16gY/pdisk_1.dat 2024-11-21T08:54:45.037911Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:45.039601Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 30652, node 2 2024-11-21T08:54:45.051176Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.051194Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.051195Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.051257Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12969 TClient is connected to server localhost:12969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.129505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.129539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.130590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.131249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.139667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.147949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.166110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:45.174762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.435909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653380332597562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.435934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.443032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.449398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.504007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.516426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.523130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.529770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.538621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653380332598078:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.538653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.538676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653380332598083:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.539160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.543304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653380332598085:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:45.715525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.715780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.716107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.104588Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179286152, txId: 281474976715695] shutting down >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2024-11-21T08:54:33.477453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653332196686996:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:33.477485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043d1/r3tmp/tmp3yHQjK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28662, node 1 2024-11-21T08:54:33.539235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:54:33.539249Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:33.540192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:33.540223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:33.540225Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:33.540254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:33.577453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:33.577490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:33.578766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:33.607565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.608530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.608556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.609106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:33.609168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:33.609177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:54:33.609592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:33.609670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:33.609683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:33.610061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.611060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179273657, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:33.611074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:33.611144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:33.611608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.611675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.611693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:33.611728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:33.611744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:33.611759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:33.612161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:54:33.612187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:33.612196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:33.612231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:33.621004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.621063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.621073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.621085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:33.621106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:33.621112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:33.621607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:33.621638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.621677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.621788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:33.621797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:33.621800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:33.621810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:16363 2024-11-21T08:54:33.630144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.630214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.630225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.630861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2024-11-21T08:54:33.630915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:33.631304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:33.631730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179273678, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:33.631739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179273678, at schemeshard: 72057594046644480 2024-11-21T08:54:33.631775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:33.631802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:33.631813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:33.632060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:33.632092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:33.632224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:33.632231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:33.632235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:33.632248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:33.638309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestDeleteStream, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:33.638397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:33.639109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUD ... 
, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:35.912306Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.912389Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.912401Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.912419Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:35.912448Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:35.912456Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:35.913130Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:35.913180Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.913241Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.913384Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:35.913396Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:35.913400Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:35.913419Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:20279 2024-11-21T08:54:35.923239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.923315Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.923322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.923985Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:35.924023Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:35.924398Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:35.925019Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179275974, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.925034Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179275974, at schemeshard: 72057594046644480 2024-11-21T08:54:35.925094Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:35.925120Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:35.925131Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:35.925607Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.925673Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.925816Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:35.925830Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:35.925834Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:35.925851Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:35.934493Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.934609Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:35.935382Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo 2024-11-21T08:54:35.935433Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.935492Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.935518Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T08:54:35.935576Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:54:35.935715Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.935726Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.935730Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:54:35.935760Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.935768Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.935770Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:54:35.937534Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.937614Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:54:35.937623Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T08:54:35.938121Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:35.989741Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.990069Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:54:35.990077Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T08:54:35.990460Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:35.991256Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179276037, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:35.991275Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732179276037, at tablet: 72057594046644480 2024-11-21T08:54:35.991325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T08:54:35.991645Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:35.991713Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:35.991727Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T08:54:35.991741Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T08:54:35.991748Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T08:54:35.991771Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T08:54:35.991942Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.991955Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.991958Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T08:54:35.991979Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:54:35.991982Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:54:35.991982Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:54:35.991986Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T08:54:40.795999Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439653338322368013:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:40.796034Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] Test command err: Trying to start YDB, gRPC: 14582, MsgBus: 1670 2024-11-21T08:54:44.486691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653376182382645:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.486935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eee/r3tmp/tmpeMNRes/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14582, node 1 2024-11-21T08:54:44.536690Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:44.544183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.544194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.544195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.544251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1670 TClient is connected to server localhost:1670 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:44.587040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.587077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.588125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.588168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.723598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653376182383242:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.723618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653376182383251:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.723624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.724313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.725835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653376182383256:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T08:54:44.818877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2024-11-21T08:54:44.880182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.894346Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T08:54:44.896143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.936692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.956947Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T08:54:44.958460Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653376182383812:2374], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:44.958524Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmU3OThmOWQtZjYxNTNjZjMtNjFiNGU3NTItNTUzY2ZlMzU=, ActorId: [1:7439653376182383807:2373], ActorState: ExecuteState, TraceId: 01jd6ywmywefy14e8ed7tn9jry, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:44.963819Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653376182383826:2379], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:44.963885Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmViNmRkZmEtNTAzZTZlZWEtZDZhMjAwZjUtZjgwOWFjMzI=, ActorId: [1:7439653376182383824:2378], ActorState: ExecuteState, TraceId: 01jd6ywmz17snddmdy485s8dm6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 4438, MsgBus: 27169 2024-11-21T08:54:45.146213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653383191317682:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:45.146273Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eee/r3tmp/tmpg2dqRF/pdisk_1.dat 2024-11-21T08:54:45.154396Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4438, node 2 2024-11-21T08:54:45.170492Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.170504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.170505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.170541Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27169 TClient is connected to server localhost:27169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.246564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.246596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.247635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.248318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.431877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653383191318265:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.431904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.431959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653383191318292:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.432609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.433949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653383191318294:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:45.529555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9910, MsgBus: 18522 2024-11-21T08:54:45.816735Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653381572361419:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:45.816929Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eee/r3tmp/tmpzHaJvV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9910, node 3 2024-11-21T08:54:45.830407Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:45.840051Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.840065Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.840070Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.840114Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18522 TClient is connected to server localhost:18522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.917271Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.917301Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.918334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.918992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.927444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:45.935158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.950005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.005823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.070940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653385867330255:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.070970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.075156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.081254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.089918Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.096891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.103838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.110606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.119322Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653385867330757:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.119348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.119389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653385867330762:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.119956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.124258Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653385867330764:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:46.293838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.304741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.316223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.359545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.377167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.390705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.407514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.476344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.540557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpDocumentApi::AllowRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:54:07.456318Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.456340Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.459458Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.461860Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2024-11-21T08:54:07.462047Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:54:07.462622Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:54:07.463266Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:54:07.463703Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.467673Z node 1 :PERSQUEUE INFO: new Cookie default|826c8dce-67634098-9b6c9220-1699785c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T08:54:07.706018Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.706039Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] 
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:179:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:182:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:183:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:07.713178Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:07.713196Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:184:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:261:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:09.180823Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:09.181044Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2024-11-21T08:54:09.181187Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2260] 2024-11-21T08:54:09.181622Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:268:2260] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:09.182071Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:269:2261] 2024-11-21T08:54:09.182361Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:269:2261] Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup ... ration 8 [65:803:2661] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:46.901190Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T08:54:46.907451Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:838:2057] recipient: [65:14:2061] 2024-11-21T08:54:46.914625Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:845:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:848:2057] recipient: [65:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:849:2057] recipient: [65:847:2688] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:851:2057] recipient: [65:847:2688] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:46.924532Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:46.924548Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:46.924637Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:913:2744] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:46.925169Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:914:2745] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:46.927002Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [65:914:2745] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:46.930702Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [65:913:2744] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:46.937199Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T08:54:46.942736Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:948:2057] recipient: [65:14:2061] 2024-11-21T08:54:46.954676Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:955:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:958:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:959:2057] recipient: [65:957:2771] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:961:2057] recipient: [65:957:2771] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:46.963057Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:46.963074Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:46.963185Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1025:2829] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:46.963726Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1026:2830] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:46.965159Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [65:1026:2830] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:46.968809Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [65:1025:2829] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:46.974904Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T08:54:46.980997Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1060:2057] recipient: [65:14:2061] 2024-11-21T08:54:46.989264Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1067:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1070:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1071:2057] recipient: [65:1069:2856] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1073:2057] recipient: [65:1069:2856] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:46.997774Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:46.997794Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:46.997921Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1139:2916] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:46.998527Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1140:2917] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:47.000340Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [65:1140:2917] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:47.003876Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [65:1139:2916] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:47.010047Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T08:54:47.015845Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1176:2057] recipient: [65:14:2061] 2024-11-21T08:54:47.022580Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1183:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1186:2057] recipient: [65:1185:2945] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1187:2057] recipient: [65:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1188:2946] sender: [65:1189:2057] recipient: [65:1185:2945] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:54:47.031386Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:54:47.031413Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:54:47.031543Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1257:3007] 2024-11-21T08:54:47.032097Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1258:3008] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:54:47.034050Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [65:1258:3008] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:47.037714Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [65:1257:3007] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:54:47.044648Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T08:54:47.050736Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpQueryService::ExecuteQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] Test command err: Trying to start YDB, gRPC: 19962, MsgBus: 5047 2024-11-21T08:54:43.577881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653374920255221:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.578165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f80/r3tmp/tmpOZyic4/pdisk_1.dat 2024-11-21T08:54:43.624434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19962, node 1 2024-11-21T08:54:43.642312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:43.642329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:43.642330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:43.642356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5047 TClient is connected to server localhost:5047 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:54:43.678191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.678240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:54:43.679283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:43.705325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.708817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:43.770403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.787082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.796084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:43.819813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374920256756:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.819833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.846188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.851068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.856686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.863824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.871133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.877798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.886722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374920257247:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.886732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374920257252:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.886745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.887283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:43.891395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653374920257254:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:44.042140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.042398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.042526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28617, MsgBus: 17680 2024-11-21T08:54:44.434029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653378956214753:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.434159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f80/r3tmp/tmpoNaWgG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28617, node 2 2024-11-21T08:54:44.447197Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:44.447991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.447999Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.447999Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.448023Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17680 TClient is connected to server localhost:17680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.534176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.534199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.535270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.536544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:44.537225Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:44.542342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.550596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.566905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.579412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11- ... etch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.629686Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.634550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.641004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.695851Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.705249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.711838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.719187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.727672Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653380216399962:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.727696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.727699Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653380216399967:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.728250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.731898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653380216399969:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:45.884191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.884464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.884696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.996857Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmJjNWM4MjYtOGUyZjk4YmEtZTY3YWJjNmMtNGMwZDZkZjI=, ActorId: [3:7439653380216400492:2475], ActorState: ExecuteState, TraceId: 01jd6ywnvvc76zq1e87exxepdx, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24309, MsgBus: 23576 2024-11-21T08:54:46.254650Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653385818228651:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.254862Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f80/r3tmp/tmpS3DySj/pdisk_1.dat 2024-11-21T08:54:46.262835Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24309, node 4 2024-11-21T08:54:46.272284Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.272306Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.272307Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.272347Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23576 TClient is connected to server localhost:23576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:46.354883Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.354910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.355969Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.357129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.361754Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.416955Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.435044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.442846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.519545Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653385818230189:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.519568Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.561692Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.567159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.580414Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.586799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.593859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.601672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.617033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653385818230693:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.617054Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653385818230698:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.617057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.617714Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.621422Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653385818230700:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:46.785531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.785857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.786039Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateAndAlterTopic [GOOD] Test command err: Trying to start YDB, gRPC: 21431, MsgBus: 28137 2024-11-21T08:54:44.924599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653378731731180:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.924872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb8/r3tmp/tmpodh3nu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21431, node 1 2024-11-21T08:54:44.976581Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:44.976900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.976913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.976915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.976951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28137 TClient is connected to server localhost:28137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.017867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:45.024820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.024844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.026022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.031209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.052983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.071926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.086336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.182302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653383026700006:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.182332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.216108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.271979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.278582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.333350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.341076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.347623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.356944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653383026700524:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.356968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.357017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653383026700529:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.357688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.361510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653383026700531:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:45.526869Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.529567Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.533557Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.538453Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.538484Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653383026700843:2466] TxId: 281474976710671. Ctx: { TraceId: 01jd6ywngx85pd2bbt8y2y0kxw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ3N2ZiOGEtNjc2ZjY0YmItODY4OTczOTktYWMzNjNhNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:45.539185Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ3N2ZiOGEtNjc2ZjY0YmItODY4OTczOTktYWMzNjNhNzA=, ActorId: [1:7439653383026700841:2466], ActorState: ExecuteState, TraceId: 01jd6ywngx85pd2bbt8y2y0kxw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15347, MsgBus: 6615 2024-11-21T08:54:45.984849Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653380405790735:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:45.985303Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb8/r3tmp/tmpR8TeVD/pdisk_1.dat 2024-11-21T08:54:45.993908Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15347, node 2 2024-11-21T08:54:46.002489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.002500Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.002501Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.002530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6615 TClient is connected to server localhost:6615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.085147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.085181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.086371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.087016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.241647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653384700758627:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.241669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.241705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653384700758639:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don ... nce;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb8/r3tmp/tmppvJWY0/pdisk_1.dat 2024-11-21T08:54:46.654621Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6907, node 3 2024-11-21T08:54:46.665146Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.665158Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.665159Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.665188Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10265 TClient is connected to server localhost:10265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.746203Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.746233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.747375Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.748643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.751526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.763262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.782256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.792367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:46.952355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653385977150571:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.952384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.956721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.963886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.972658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.979662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.993681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.000718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.016905Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653390272118373:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.016941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.016990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653390272118378:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.017984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:47.020726Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653390272118380:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:47.217797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.228650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.240269Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653390272119444:2579], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 2024-11-21T08:54:47.240332Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTI4ZDFiZGQtNTlmMDY3ZDktYjQxYTk0ZWEtNzM5YWI3ODg=, ActorId: [3:7439653390272118670:2457], ActorState: ExecuteState, TraceId: 01jd6ywq66d2nfr002qe76m0t2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Query failed, status: GENERIC_ERROR:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1732179287069, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286859, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286824, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287048, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287062, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287006, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287020, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287027, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286838, tx_id: 281474976715660 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1732179287258, tx_id: 281474976715671 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287041, tx_id: 281474976715665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286810, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1732179287069, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286859, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286824, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287048, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287062, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287006, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287020, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179287027, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286838, tx_id: 281474976715660 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1732179287258, tx_id: 281474976715671 } } Scheme entry: { name: Test, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1732179287041, tx_id: 281474976715665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179286810, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::SessionBusyRetryOperation [GOOD] Test command err: Trying to start YDB, gRPC: 24317, MsgBus: 8302 2024-11-21T08:54:46.091179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653386831269678:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.091377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea6/r3tmp/tmpbu8Qgs/pdisk_1.dat 2024-11-21T08:54:46.131984Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24317, node 1 2024-11-21T08:54:46.149779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.149793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.149795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.149825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8302 TClient is connected to server localhost:8302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:46.191533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.191572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.192558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.214893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:46.224567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.285994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.300152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.308350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.341333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386831271212:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.341359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.367627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.372608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.383625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.390769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.397727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.404703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.412962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386831271706:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.412981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.412983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386831271711:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.413473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.417845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653386831271713:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:46.599158Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2024-11-21T08:54:46.599184Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2024-11-21T08:54:46.599188Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2024-11-21T08:54:46.599192Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2024-11-21T08:54:46.599251Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2024-11-21T08:54:46.599259Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2024-11-21T08:54:46.599527Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2024-11-21T08:54:46.599541Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2024-11-21T08:54:46.599546Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDlhOGJhMGYtYTgwZGQ1MTItODAyYjcwYWUtOTE4MTZiNTE=, ActorId: [1:7439653386831271998:2454], ActorState: ExecuteState, TraceId: 01jd6ywpj52rqmhwzsfx48ajmj, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 62174, MsgBus: 12395 2024-11-21T08:54:46.751564Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653388060023116:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.751735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea6/r3tmp/tmpdFLFZE/pdisk_1.dat 
2024-11-21T08:54:46.764104Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62174, node 2 2024-11-21T08:54:46.771571Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.771589Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.771591Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.771630Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12395 TClient is connected to server localhost:12395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 ... ing previous query completion proxyRequestId: 14 2024-11-21T08:54:47.237842Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTE3MzJiODUtY2YyZGQ4MGYtMjUzZDdkZDctNjQ2ZWVmZDU=, ActorId: [2:7439653392354992752:2454], ActorState: ExecuteState, TraceId: 01jd6ywq642q7gddrkxz57an3n, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2024-11-21T08:54:47.249324Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 18 2024-11-21T08:54:47.249496Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 19 2024-11-21T08:54:47.249510Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 20 2024-11-21T08:54:47.249620Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 21 2024-11-21T08:54:47.249632Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 22 2024-11-21T08:54:47.249732Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 23 2024-11-21T08:54:47.249745Z node 2 :KQP_SESSION 
WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 24 2024-11-21T08:54:47.249944Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTcwZWZjODktOWQ3ZjE1YjItNTM4MzJiOWEtOWU5NThhMDM=, ActorId: [2:7439653392354992803:2473], ActorState: ExecuteState, TraceId: 01jd6ywq6hfz0v2ynt3cdfk113, Reply query error, msg: Pending previous query completion proxyRequestId: 25 2024-11-21T08:54:47.260564Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 28 2024-11-21T08:54:47.260698Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 29 2024-11-21T08:54:47.260812Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 30 2024-11-21T08:54:47.260824Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 31 2024-11-21T08:54:47.260911Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 32 2024-11-21T08:54:47.260928Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 33 2024-11-21T08:54:47.260931Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRjYjI0MzItNjhhYzMxN2UtNmU5NDQyMTctODg2ZTg1ZTM=, ActorId: [2:7439653392354992838:2488], ActorState: ExecuteState, TraceId: 01jd6ywq6w1bwjjp1agwv18c1b, Reply query error, msg: Pending previous query completion proxyRequestId: 34 2024-11-21T08:54:47.271350Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 37 2024-11-21T08:54:47.271384Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 38 2024-11-21T08:54:47.271502Z node 2 :KQP_SESSION 
WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 39 2024-11-21T08:54:47.271570Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 40 2024-11-21T08:54:47.271715Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 41 2024-11-21T08:54:47.271726Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGY3YWQ2ZGUtNmMwNDFmMzgtZjAxZWU4NjMtNTRiYTlkNDI=, ActorId: [2:7439653392354992873:2502], ActorState: ExecuteState, TraceId: 01jd6ywq77eg1y7w1777hkd577, Reply query error, msg: Pending previous query completion proxyRequestId: 42 2024-11-21T08:54:47.281800Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDZhZTczNjQtYzBhM2RjMWItZDk0NWQ2MjItM2Y0MzhlN2Y=, ActorId: [2:7439653392354992904:2515], ActorState: ExecuteState, TraceId: 01jd6ywq7h4j33ffk3gw0ngv7d, Reply query error, msg: Pending previous query completion proxyRequestId: 45 2024-11-21T08:54:47.281910Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDZhZTczNjQtYzBhM2RjMWItZDk0NWQ2MjItM2Y0MzhlN2Y=, ActorId: [2:7439653392354992904:2515], ActorState: ExecuteState, TraceId: 01jd6ywq7h4j33ffk3gw0ngv7d, Reply query error, msg: Pending previous query completion proxyRequestId: 46 2024-11-21T08:54:47.281951Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDZhZTczNjQtYzBhM2RjMWItZDk0NWQ2MjItM2Y0MzhlN2Y=, ActorId: [2:7439653392354992904:2515], ActorState: ExecuteState, TraceId: 01jd6ywq7h4j33ffk3gw0ngv7d, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2024-11-21T08:54:47.282088Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDZhZTczNjQtYzBhM2RjMWItZDk0NWQ2MjItM2Y0MzhlN2Y=, ActorId: [2:7439653392354992904:2515], ActorState: ExecuteState, TraceId: 01jd6ywq7h4j33ffk3gw0ngv7d, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2024-11-21T08:54:47.282159Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDZhZTczNjQtYzBhM2RjMWItZDk0NWQ2MjItM2Y0MzhlN2Y=, ActorId: [2:7439653392354992904:2515], ActorState: ExecuteState, TraceId: 01jd6ywq7h4j33ffk3gw0ngv7d, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2024-11-21T08:54:47.292359Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjVmMmQxNzEtYjE0MjRkNzQtZWMxNzgyZDQtNzJlODhkZDE=, ActorId: [2:7439653392354992934:2527], ActorState: ExecuteState, TraceId: 01jd6ywq7waq03jm9j2pg83z7s, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2024-11-21T08:54:47.292525Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjVmMmQxNzEtYjE0MjRkNzQtZWMxNzgyZDQtNzJlODhkZDE=, ActorId: [2:7439653392354992934:2527], ActorState: ExecuteState, TraceId: 01jd6ywq7waq03jm9j2pg83z7s, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2024-11-21T08:54:47.292536Z node 2 :KQP_SESSION 
WARN: SessionId: ydb://session/3?node_id=2&id=NjVmMmQxNzEtYjE0MjRkNzQtZWMxNzgyZDQtNzJlODhkZDE=, ActorId: [2:7439653392354992934:2527], ActorState: ExecuteState, TraceId: 01jd6ywq7waq03jm9j2pg83z7s, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2024-11-21T08:54:47.292673Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjVmMmQxNzEtYjE0MjRkNzQtZWMxNzgyZDQtNzJlODhkZDE=, ActorId: [2:7439653392354992934:2527], ActorState: ExecuteState, TraceId: 01jd6ywq7waq03jm9j2pg83z7s, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2024-11-21T08:54:47.302537Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmI4MDU0OTgtZTRjODFjZDgtYTA5MGIzYWEtNDNlNzEwMmE=, ActorId: [2:7439653392354992972:2538], ActorState: ExecuteState, TraceId: 01jd6ywq867zdbtyrkd9njjcqv, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2024-11-21T08:54:47.302557Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmI4MDU0OTgtZTRjODFjZDgtYTA5MGIzYWEtNDNlNzEwMmE=, ActorId: [2:7439653392354992972:2538], ActorState: ExecuteState, TraceId: 01jd6ywq867zdbtyrkd9njjcqv, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2024-11-21T08:54:47.302747Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmI4MDU0OTgtZTRjODFjZDgtYTA5MGIzYWEtNDNlNzEwMmE=, ActorId: [2:7439653392354992972:2538], ActorState: ExecuteState, TraceId: 01jd6ywq867zdbtyrkd9njjcqv, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2024-11-21T08:54:47.312690Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGJjMmNkZDUtMjZmYjRlM2ItNGI0MmVhM2MtZmI2NzI0NDg=, ActorId: [2:7439653392354992998:2548], ActorState: ExecuteState, TraceId: 01jd6ywq8g4ezmc6ey9jha998f, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2024-11-21T08:54:47.312846Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGJjMmNkZDUtMjZmYjRlM2ItNGI0MmVhM2MtZmI2NzI0NDg=, ActorId: [2:7439653392354992998:2548], ActorState: ExecuteState, TraceId: 01jd6ywq8g4ezmc6ey9jha998f, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2024-11-21T08:54:47.322608Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzQ4MGNmYjYtY2MxZjJkMTEtYWQ2OThiNjMtNDQyNmNhMQ==, ActorId: [2:7439653392354993022:2557], ActorState: ExecuteState, TraceId: 01jd6ywq8t05551zecx3m28t45, Reply query error, msg: Pending previous query completion proxyRequestId: 67 >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> KqpQueryServiceScripts::ExecuteScript >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::DdlSecret [GOOD] >> KqpQueryService::DdlTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4721, MsgBus: 1184 2024-11-21T08:54:46.019471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653386969656605:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.019493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb7/r3tmp/tmpPbS5z1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4721, node 1 2024-11-21T08:54:46.070435Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:46.078402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.078417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.078419Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.078449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1184 TClient is connected to server localhost:1184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:46.119402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.119425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.120575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.120673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.258477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386969657201:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.258502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.282320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.293965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:46.293971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:46.293999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:46.294014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:46.294088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:46.294089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:46.294104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:46.294107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:46.294121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:46.294121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:46.294135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:46.294139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:46.294151Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:46.294157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:46.294177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:46.294181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:46.294207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:46.294208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:46.294227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:46.294228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:46.294239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:46.294249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:46.294256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653386969657319:2303];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:46.294268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653386969657332:2306];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:46.294650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:46.294662Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:46.294677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:46.294680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:46.294689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:46.294692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:46.294697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:54:46.294702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:54:46.294708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186 ... doublechecking } 2024-11-21T08:54:46.407698Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653386969657799:2402] TxId: 281474976710661. Ctx: { TraceId: 01jd6ywp9k0ngctd7t319e2925, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZmMGU3MzAtODljZGEwYmYtZmUyMzAxYzAtYzE4NGZmNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:54:46.419518Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653386969657854:2419] TxId: 281474976710663. Ctx: { TraceId: 01jd6ywpcj2q27r8zee4jb9j4t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgwNjBmNmQtYTAyMzc2ZTUtMWNiZDllYWYtODZmZGE5NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:54:46.443731Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653386969657891:2428] TxId: 281474976710665. Ctx: { TraceId: 01jd6ywpcv2q9twak72cpnaajx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgzOWU4OC03NGE3NGY4Ny1jYTlmMDM3Zi0xYjYwMGMzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root Trying to start YDB, gRPC: 23034, MsgBus: 19846 2024-11-21T08:54:46.678505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653388290094550:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.678646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb7/r3tmp/tmpAceSX6/pdisk_1.dat 2024-11-21T08:54:46.692018Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23034, node 2 2024-11-21T08:54:46.703593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.703610Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.703612Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.703655Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19846 TClient is connected to server localhost:19846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.778939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.778964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.780045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.780679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.915501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653388290095145:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.915541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.919410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.942541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.967022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653388290096406:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.967051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.967179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653388290096411:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.967915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.971637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653388290096413:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 4261, MsgBus: 12288 2024-11-21T08:54:47.369351Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653390218488478:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:47.369370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb7/r3tmp/tmpTWYkHF/pdisk_1.dat 2024-11-21T08:54:47.376608Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4261, node 3 2024-11-21T08:54:47.392390Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:47.392403Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:47.392405Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:47.392446Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12288 TClient is connected to server localhost:12288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:47.469657Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:47.469690Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:47.470750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:47.471387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.619201Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653390218489074:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.619222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.623018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.640805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.660407Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653390218490340:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.660432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.660520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653390218490345:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.661183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:54:47.664312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653390218490347:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::Scripting [GOOD] Test command err: Trying to start YDB, gRPC: 30489, MsgBus: 63676 2024-11-21T08:54:46.053406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653386811529922:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.053734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eaa/r3tmp/tmpCW0F52/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30489, node 1 2024-11-21T08:54:46.101603Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:46.106419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.106440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.106442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.106474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63676 TClient is connected to server localhost:63676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.151297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.153525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.153544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.154674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.162142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.223105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:46.236650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.244350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.289711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386811531454:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.289738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.318980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.324936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.378808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.383359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.391053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.397989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.406170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386811531957:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.406192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653386811531962:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.406192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.406653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.411087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653386811531964:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:46.566437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.574550Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653386811532313:2464], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2024-11-21T08:54:46.574622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDNhYjdiM2UtOTlhNmNmNzgtOTFlNjI5MzAtZTkzNmM5ZGI=, ActorId: [1:7439653386811532237:2454], ActorState: ExecuteState, TraceId: 01jd6ywphbf1bq9y20hpxmzx2k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 23523, MsgBus: 5582 2024-11-21T08:54:46.728940Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653384940031526:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:46.728961Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eaa/r3tmp/tmpjGxGLw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23523, node 2 2024-11-21T08:54:46.744060Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:46.745189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:46.745198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:46.745200Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:46.745233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5582 TClient is connected to server localhost:5582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:46.829507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:46.829547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:46.830711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:46.831175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.831897Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:46.839735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.847694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:46.866356Z node 2 :FLAT_TX_SCHEMESH ... [TPoolFetcherActor] ActorId: [2:7439653389235000369:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.043179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.049493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.056056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.063228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.070606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.084628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.099050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.113402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653389235000872:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.113411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653389235000877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.113422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.113929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:47.118829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653389235000879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:47.275736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.284362Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653389235001229:2464], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2024-11-21T08:54:47.284423Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTAzNDk5ZDEtZDFkMWViMWEtM2YwZmZjODYtZDAxNjAyNWU=, ActorId: [2:7439653389235001161:2454], ActorState: ExecuteState, TraceId: 01jd6ywq7ha7ms98xjq4xbwna2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 12895, MsgBus: 23961 2024-11-21T08:54:47.414955Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653391505629502:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:47.414979Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eaa/r3tmp/tmpcrDqMn/pdisk_1.dat 2024-11-21T08:54:47.427536Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12895, node 3 2024-11-21T08:54:47.433990Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:47.434002Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:47.434004Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:47.434041Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23961 TClient is connected to server localhost:23961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:47.515557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:47.515585Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:47.516608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:47.517892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.539846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.548682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:47.562747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.575820Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.672813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653391505631034:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.672855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.676739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.682561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.693555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.699829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.707477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.714779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.730179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653391505631536:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.730202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.730207Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653391505631541:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.730850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:47.734620Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653391505631543:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:47.875294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:4:61: Error: At function: KiAlterTable!
:4:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> KqpQueryService::QueryOnClosedSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] Test command err: Trying to start YDB, gRPC: 8271, MsgBus: 4781 2024-11-21T08:54:44.722297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653377668028504:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.722331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ed8/r3tmp/tmpAANiBY/pdisk_1.dat 2024-11-21T08:54:44.772608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8271, node 1 2024-11-21T08:54:44.781539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.781555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.781558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.781604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4781 TClient is connected to server localhost:4781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:44.822502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.822533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.823669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.823698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.836823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
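For context: the GENERIC_ERROR compile issues above (code 2008, raised at KiWriteTable! and KiAlterTable!) are KQP rejecting YQL data and schema statements aimed at a Document API table. The failing statements themselves are not reproduced in this log; the lines below are only an illustrative YQL sketch of the kind of query that would be rejected for /Root/DocumentApiTest (the column names are assumed, not taken from the log):

    -- Illustrative only; column names are assumed, not taken from the log.
    UPSERT INTO `/Root/DocumentApiTest` (Key, Value) VALUES ("k1", "v1"); -- rejected, code 2008 (KiWriteTable!)
    ALTER TABLE `/Root/DocumentApiTest` DROP COLUMN Value;                -- rejected, code 2008 (KiAlterTable!)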
2024-11-21T08:54:44.897175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.911506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.921622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.957365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653377668030038:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.957404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.996233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.002943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.068744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.123367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.131090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.138010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.147076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653381962997851:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.147095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.147118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653381962997856:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:45.147705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:45.151601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653381962997858:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:45.333950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.334251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.334395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:45.600871Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653381962998769:2595] TxId: 281474976710690. Ctx: { TraceId: 01jd6ywnjed36fhjcsr2t4bza1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzcxZDdjNS00OGQzMjIyOS1mODhjMmM1Mi0zZTkzMzcyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:54:45.602151Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.602288Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179285648, txId: 281474976710689] shutting down 2024-11-21T08:54:45.625915Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653381962998841:2611] TxId: 281474976710693. Ctx: { TraceId: 01jd6ywnk4ffgf0f20c4heah1e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM0NzE0ZDAtZWM1YjhjMTAtZGRhMDEwN2YtMzhlY2MzYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:54:45.626733Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.626859Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179285669, txId: 281474976710692] shutting down 2024-11-21T08:54:45.647352Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653381962998905:2627] TxId: 281474976710696. Ctx: { TraceId: 01jd6ywnkwa7fnxh2kqj9xz6a3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA1NWZiZDQtYjA3Y2ZiNDItZjFhYmRjNDEtNTJlNDNiNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T08:54:45.648082Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:45.648221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179285690, txId: 281474976710695] shutting down Trying to start YDB, gRPC: 24372, MsgBus: 11907 2024-11-21T08:54:45.779289Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653380677578547:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:45.779610Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ed8/r3tmp/tmp3PqGbd/pdisk_1.dat 2024-11-21T08:54:45.786730Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24372, node 2 2024-11-21T08:54:45.795339Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.795349Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.795352Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.795381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11907 TClient is connected to server localhost:11907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:45.879866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.879897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.880961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.881601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.891744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:45.898743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.913110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.922745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:46.079865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653384972547384:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.079887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.084226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.090240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.097006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.104058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.111286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.117936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.126840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653384972547887:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.126880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653384972547892:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.126882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:46.127583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:46.131404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653384972547894:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:46.275122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.275447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:46.275588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.277623Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179287314, txId: 281474976715734] shutting down 2024-11-21T08:54:48.147592Z node 2 :RPC_REQUEST WARN: Client lost 2024-11-21T08:54:48.147697Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439653393562486370:3668] TxId: 281474976715803. Ctx: { TraceId: 01jd6ywr1k1vwb82y92d80a1r2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTFkMTJlNGMtMjdjNzAxOTgtMjU5NDdiN2ItZTE2OGEzMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:48.148374Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTFkMTJlNGMtMjdjNzAxOTgtMjU5NDdiN2ItZTE2OGEzMmQ=, ActorId: [2:7439653393562486348:3668], ActorState: ExecuteState, TraceId: 01jd6ywr1k1vwb82y92d80a1r2, Create QueryResponse for error on request, msg: 2024-11-21T08:54:48.148475Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179288182, txId: 281474976715802] shutting down 2024-11-21T08:54:48.148676Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439653393562486375:3673], TxId: 281474976715803, task: 2. Ctx: { TraceId : 01jd6ywr1k1vwb82y92d80a1r2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTFkMTJlNGMtMjdjNzAxOTgtMjU5NDdiN2ItZTE2OGEzMmQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439653393562486370:3668], status: ABORTED, reason: {
: Error: Terminate execution } >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> KqpService::RangeCache+UseCache >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> KqpQueryService::ExecStats >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpQueryService::DdlTx [GOOD] >> KqpDocumentApi::RestrictDrop [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlTx [GOOD] Test command err: Trying to start YDB, gRPC: 61099, MsgBus: 19745 2024-11-21T08:54:44.323992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653378905746771:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.324255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f1b/r3tmp/tmpJ2Sw1v/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61099, node 1 2024-11-21T08:54:44.376824Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:44.380102Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.380117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.380118Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.380148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19745 TClient is connected to server localhost:19745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:44.422820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:44.424527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.424558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.425561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.434604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.495641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.510190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.523150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.560564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653378905748308:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.560594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.586109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.591729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.599022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.653783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.708513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.717847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.726201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653378905748825:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.726223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653378905748830:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.726231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.726700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.731308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653378905748832:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:44.885078Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDM5NDRiNWMtZDk3NjhhYjItMTk2Mzc4MDQtM2JhNzc5NWY=, ActorId: [1:7439653378905749130:2460], ActorState: ExecuteState, TraceId: 01jd6ywmwj7t8aa6n8akbx5bmd, Create QueryResponse for error on request, msg: 2024-11-21T08:54:44.893466Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmUzYTQ1NjEtMzk2YzFjZDktNDA3ODY5Y2EtNzVmYWEyMmY=, ActorId: [1:7439653378905749178:2469], ActorState: ExecuteState, TraceId: 01jd6ywmwv0wyh7kzgffa5vxqa, Create QueryResponse for error on request, msg: 2024-11-21T08:54:44.895622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmMyNTRhNTAtNTIyNGZiNTYtZjA2YWM2YjgtNGMxOWRkYTc=, ActorId: [1:7439653378905749194:2472], ActorState: ExecuteState, TraceId: 01jd6ywmwy23y4jwfp3mxx4yfv, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 22865, MsgBus: 3788 2024-11-21T08:54:45.194588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653380354744592:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f1b/r3tmp/tmpajiTu4/pdisk_1.dat 2024-11-21T08:54:45.201484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:45.202711Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22865, node 2 2024-11-21T08:54:45.209700Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:45.209713Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:45.209715Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:45.209756Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3788 TClient is connected to server localhost:3788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:45.293812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:45.293855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:45.294859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:45.296087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.297158Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:45.307126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.315296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.330373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:45.341702Z node 2 :FL ... ers { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_1" } items { text_value: "qwerty" } } } } } ; Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Execute SQL: CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2024-11-21T08:54:48.039055Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439653393239651739:3476], TxId: 281474976715792, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6ywqy5br2zsjg5vrh971xc. SessionId : ydb://session/3?node_id=2&id=ZmM1YWZlMTUtYTFmMDM1MjctODZlZDFlYjgtMTUwZmY4NmU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T08:54:48.039188Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439653393239651740:3477], TxId: 281474976715792, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd6ywqy5br2zsjg5vrh971xc. SessionId : ydb://session/3?node_id=2&id=ZmM1YWZlMTUtYTFmMDM1MjctODZlZDFlYjgtMTUwZmY4NmU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439653393239651736:3405], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T08:54:48.039457Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM1YWZlMTUtYTFmMDM1MjctODZlZDFlYjgtMTUwZmY4NmU=, ActorId: [2:7439653388944684265:3405], ActorState: ExecuteState, TraceId: 01jd6ywqy5br2zsjg5vrh971xc, Create QueryResponse for error on request, msg: 2024-11-21T08:54:48.040647Z node 2 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd6ywqwx5hpfqkdvezvgegvj" } } } } ;request=session_id: "ydb://session/3?node_id=2&id=ZmM1YWZlMTUtYTFmMDM1MjctODZlZDFlYjgtMTUwZmY4NmU=" tx_control { tx_id: "01jd6ywqwx5hpfqkdvezvgegvj" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2024-11-21T08:54:48.040734Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM0ZGMwMjItNzc5NTdkNC05N2RkNDM4NS1mMWM3MDRjZQ==, ActorId: [2:7439653388944684257:3400], ActorState: ExecuteState, TraceId: 01jd6ywqvgaqz5jdsrggcdy8m0, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 30223, MsgBus: 1787 2024-11-21T08:54:48.578979Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653394756030546:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.579055Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f1b/r3tmp/tmpissX9J/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30223, node 3 2024-11-21T08:54:48.599088Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.599104Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.599106Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.599156Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:48.599411Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1787 TClient is connected to server localhost:1787 WaitRootIsUp 'Root'... 
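As the request dump above shows, CREATE OBJECT ... (TYPE SECRET) is executed as an INSERT INTO `//Root/.metadata/secrets/values`, so creating a secret that already exists fails with "Conflict with existing key" (code 2012), whereas UPSERT OBJECT is the statement the test uses to overwrite the stored value. A minimal YQL sketch of the two statements, copied from the Execute SQL lines above:

    UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba";    -- overwrites the existing secret value
    CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty");   -- fails with code 2012 when my_secret_2 already exists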
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.679225Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.679258Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.680299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:48.681986Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.686275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.695092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.710926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.723612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.875871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653394756032086:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.875893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.880901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.887167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.897328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.903789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.958502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.967548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.982705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653394756032597:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.982722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.982740Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653394756032602:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.983205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.987111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653394756032604:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.160400Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmJhNTI3ZjktMjg4MzczNDgtZjViNGM1ZjQtYmI0MWIwM2I=, ActorId: [3:7439653399051000186:2454], ActorState: ExecuteState, TraceId: 01jd6yws246pw9txy8ht0y184x, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 6791, MsgBus: 6162 2024-11-21T08:54:47.481379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653390952972794:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:47.481656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea5/r3tmp/tmp87dPNK/pdisk_1.dat 2024-11-21T08:54:47.534617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6791, node 1 2024-11-21T08:54:47.548893Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:47.548907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:47.548909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:47.548945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6162 TClient is connected to server localhost:6162 2024-11-21T08:54:47.581488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:47.581519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:54:47.582542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:47.592309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:47.603340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.663408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.677704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.686817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.746739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653390952974325:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.746770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.780280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.786740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.798685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.805423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.811877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.819246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:47.830940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653390952974819:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.830968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.830992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653390952974824:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:47.831637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:47.839481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653390952974826:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:48.017501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4426, MsgBus: 20460 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea5/r3tmp/tmpzUkNI0/pdisk_1.dat 2024-11-21T08:54:48.158770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:48.159465Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4426, node 2 2024-11-21T08:54:48.170594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.170611Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.170612Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.170640Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20460 TClient is connected to server localhost:20460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.251142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.251170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.252248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:48.252899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.256972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.267618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:48.283449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.296599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.425373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653396320430634:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.425417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.428964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.433971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.488528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.497889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.505048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.512274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.521005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653396320431150:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.521019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653396320431155:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.521042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.521580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.525751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653396320431157:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:48.673074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2024-11-21T08:54:48.685264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22048, MsgBus: 25951 2024-11-21T08:54:48.853645Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653396548947250:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.853851Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea5/r3tmp/tmpk0tVz1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22048, node 3 2024-11-21T08:54:48.869864Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:48.871047Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.871063Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.871065Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.871106Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25951 TClient is connected to server localhost:25951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.954143Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.954182Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.955264Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:48.956515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.967328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:48.974379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.990924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.003422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.123575Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653400843916075:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.123605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.130740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.139952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.150684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.163613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.171210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.184689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.193480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653400843916590:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.193509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.193549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653400843916595:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.194251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.197128Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653400843916597:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.350395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 3921, MsgBus: 28845 2024-11-21T08:54:47.727503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653388713561065:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:47.727639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e8b/r3tmp/tmp5nZu7D/pdisk_1.dat 2024-11-21T08:54:47.767826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3921, node 1 2024-11-21T08:54:47.785006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:47.785042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:47.785043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:47.785075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28845 TClient is connected to server localhost:28845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:47.827614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:47.827643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:47.828762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:47.849568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.853430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.866861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:47.883604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:47.893483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.004151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653393008529896:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.004194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.030868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.037565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.050359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.056719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.064222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.070719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.079095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653393008530410:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.079115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.079135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653393008530415:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.079683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.084417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653393008530417:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6391, MsgBus: 8268 2024-11-21T08:54:48.402120Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653394006534445:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e8b/r3tmp/tmpv41J0b/pdisk_1.dat 2024-11-21T08:54:48.405105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:48.412499Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6391, node 2 2024-11-21T08:54:48.419048Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.419062Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.419064Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.419102Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8268 TClient is connected to server localhost:8268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.501957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.501982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.503030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:48.504228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.505021Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:48.507852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.519740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:48.538748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.549080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.681668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653394006535971:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.681695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.686714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.692922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.701097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.707527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.714710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.722082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.730529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653394006536485:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.730549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.730584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653394006536490:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.731051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.735123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653394006536492:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:48.919068Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yjk3ZWU1LWEzNTc3MGEtY2ZkZmJhMzctYTBkM2U2MWE=, ActorId: [2:7439653394006536777:2454], ActorState: ReadyState, TraceId: 01jd6ywrtp27ybaf08kdzyscz5, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 12831, MsgBus: 20544 2024-11-21T08:54:49.120258Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653400756866546:2156];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.120360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e8b/r3tmp/tmply0rdX/pdisk_1.dat 2024-11-21T08:54:49.142591Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12831, node 3 2024-11-21T08:54:49.152790Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.152804Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.152806Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.152866Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20544 TClient is connected to server localhost:20544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:49.219818Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:49.219852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:49.220901Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:49.223610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.363986Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653400756867042:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.364005Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653400756867018:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.364034Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.364586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.366009Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653400756867047:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:49.465095Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGJkNWViLWU4ODhjZDMtZjIxNTU4MmEtM2JhNjljODY=, ActorId: [3:7439653400756867016:2297], ActorState: ExecuteState, TraceId: 01jd6yws4j4z2mtkq4x82ea6ms, Create QueryResponse for error on request, msg: >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> THealthCheckTest::NoBscResponse [GOOD] >> KqpService::RangeCache+UseCache [GOOD] >> KqpService::RangeCache-UseCache >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> KqpQueryService::QueryOnClosedSession [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoBscResponse [GOOD] Test command err: 2024-11-21T08:54:37.502932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:37.505381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:37.505501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:37.505530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.505780Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:37.505789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f74/r3tmp/tmpbRlBDG/pdisk_1.dat 2024-11-21T08:54:37.577836Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2864, node 1 TClient is connected to server localhost:21889 2024-11-21T08:54:37.674372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:37.674391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:37.674395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:37.674473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:38.494045Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:54:38.495767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:38.495809Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.495858Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:38.496008Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:38.496031Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f74/r3tmp/tmpMPxzTi/pdisk_1.dat 2024-11-21T08:54:38.558868Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14801, node 3 TClient is connected to server localhost:22303 2024-11-21T08:54:38.658731Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:38.658748Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:38.658752Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:38.658840Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:39.439170Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.439240Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:39.439266Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.439347Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:39.439378Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:39.439407Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f74/r3tmp/tmpM9bqTX/pdisk_1.dat 2024-11-21T08:54:39.506949Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2595, node 5 TClient is connected to server localhost:9666 2024-11-21T08:54:39.602419Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:39.602437Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:39.602441Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:39.602521Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:40.514273Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.514340Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:40.514355Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.514442Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:40.514497Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:54:40.514516Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f74/r3tmp/tmpSUJd1u/pdisk_1.dat 2024-11-21T08:54:40.583243Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22906, node 7 TClient is connected to server localhost:10458 2024-11-21T08:54:40.680918Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:40.680933Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:40.680936Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:40.680983Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.383205Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:41.383253Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:41.383267Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f74/r3tmp/tmpUDs0VV/pdisk_1.dat 2024-11-21T08:54:41.468513Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25322, node 9 TClient is connected to server localhost:25505 2024-11-21T08:54:41.568249Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:41.568271Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:41.568275Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:41.568351Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:41.641577Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:41.641624Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:41.653356Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-4e47" reason: "RED-9a33-53b5" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-9a33-4e47" status: RED message: "Compute has issues with system tablets" location { database { name: "/Root" } } reason: "RED-9a33-c138-BSController" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-c138-BSController" status: RED message: "System tablet is unresponsive" location { compute { tablet { type: "BSController" id: "72057594037932033" } } database { name: "/Root" } } type: "SYSTEM_TABLET" level: 3 } issue_log { id: "RED-9a33-53b5" status: RED message: "System tablet BSC didn\'t provide information" location { database { name: "/Root" } } type: "STORAGE" level: 2 } database_status { name: "/Root" overall: RED storage { overall: RED pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN vdisks { id: "0-1-0-0-0" overall: GREEN pdisk { id: "9-1" overall: GREEN } } } } } compute { overall: RED nodes { id: "9" overall: YELLOW load { overall: YELLOW load: 114.242188 cores: 64 } } } } location { id: 9 host: "::1" port: 12001 } |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst >> TSchemeShardUserAttrsTest::VariousUse >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> 
TSchemeShardUserAttrsTest::SetAttrs >> TSchemeShardUserAttrsTest::SpecialAttributes >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] |89.2%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:50.949547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:50.949569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.949575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:50.949579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:50.949593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:50.949597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:50.949605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.949683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:50.958320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:50.958342Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:50.960734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:50.961294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:50.961321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:50.962380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:50.962522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:50.962601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.962649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:50.963373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.963588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.963595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.963627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:50.963632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.963637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:50.963646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.964555Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:50.976396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:50.976470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.976525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:50.976564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:50.976577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:50.977242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:50.977253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:50.977257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:50.977530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:54:50.977538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:50.977790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.977804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.978217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:50.978473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:50.978527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:50.978668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.978685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:50.978690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.978727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:50.978731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.978755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.978765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:50.979154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.979164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.979196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.979200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:50.979276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.979282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:50.979291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:50.979293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.979297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:50.979301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.979304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:50.979306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:50.979317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:50.979321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:50.979324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:50.979546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.979556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.979558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:50.979562Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:50.979565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.979574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T08:54:51.056340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:54:51.056350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T08:54:51.057119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T08:54:51.057149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000010 2024-11-21T08:54:51.057256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T08:54:51.057287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 112:0, step: 5000010, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T08:54:51.057349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T08:54:51.057356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:54:51.057362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:54:51.057366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2024-11-21T08:54:51.057370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T08:54:51.057372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T08:54:51.057374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T08:54:51.057381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:54:51.057384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2024-11-21T08:54:51.057387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2024-11-21T08:54:51.057388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:54:51.057638Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T08:54:51.057942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:54:51.057965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:54:51.057981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.057984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3 2024-11-21T08:54:51.057987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T08:54:51.058057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2024-11-21T08:54:51.058071Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T08:54:51.058078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:54:51.058146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2024-11-21T08:54:51.058158Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:54:51.058160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:54:51.058168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2024-11-21T08:54:51.058194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:54:51.058197Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:54:51.058203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:54:51.058688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T08:54:51.058771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T08:54:51.058825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T08:54:51.058830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T08:54:51.058895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T08:54:51.058907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T08:54:51.058910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:488:2480] TestWaitNotification: OK eventTxId 112 2024-11-21T08:54:51.058973Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:51.058991Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 25us result status StatusSuccess 2024-11-21T08:54:51.059036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2024-11-21T08:54:51.059558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:51.059588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.059602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:54:51.060035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.060059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:50.913272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:50.913298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.913304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:50.913308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:50.913324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:50.913327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:50.913335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.913424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:50.921356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:50.921379Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:50.923523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:50.924061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:50.924091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:54:50.925470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:50.925683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:50.925784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.925843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:50.926616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.926873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.926885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.926930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:50.926938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.926945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:50.926958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.927985Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:50.939782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:50.939833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.939880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:50.939916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:50.939929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:50.940453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:50.940466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:50.940469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:50.940721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:50.940963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.940973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.940978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.941366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:50.941665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:50.941710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:50.941825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.941841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:50.941849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.941881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:50.941885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.941905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.941913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:50.942190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.942195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.942224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.942228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:50.942318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.942323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:50.942331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:50.942333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.942337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:50.942341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.942343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:50.942346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:50.942353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:50.942357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:50.942359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:50.942533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.942541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.942544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:50.942547Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:50.942550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.942559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
CHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:54:50.948974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:54:50.948978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:317:2309] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 101 2024-11-21T08:54:50.949013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949029Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 19us result status StatusSuccess 2024-11-21T08:54:50.949085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T08:54:50.949382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:50.949391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.949410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:50.949412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949683Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2024-11-21T08:54:50.949702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T08:54:50.949723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:50.949928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T08:54:50.949940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T08:54:50.949975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:50.949989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2024-11-21T08:54:50.950005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T08:54:50.950007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:54:50.950013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:50.950017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T08:54:50.950021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:50.950023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:54:50.950025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:54:50.950027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:54:50.950030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:54:50.950033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2024-11-21T08:54:50.950035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 
72057594046678944, LocalPathId: 1], 6 2024-11-21T08:54:50.950292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.950301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.950323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.950327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T08:54:50.950384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:50.950391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:50.950397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:54:50.950401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T08:54:50.950404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:50.950413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T08:54:50.950651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:54:50.950686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:54:50.950690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:54:50.950722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:54:50.950732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:54:50.950734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 103 2024-11-21T08:54:50.950770Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:50.950784Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T08:54:50.950831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] Test command err: Trying to start YDB, gRPC: 28503, MsgBus: 62273 2024-11-21T08:54:48.117232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653395097989040:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.117290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e83/r3tmp/tmpMcrTo6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28503, node 1 2024-11-21T08:54:48.167180Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:48.175220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.175233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.175234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.175262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62273 TClient is connected to server localhost:62273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:48.217552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.217578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.218648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.245089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.249653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.264287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.277428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.290960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.374093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395097990578:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.374129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.401348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.407155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.413791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.421189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.428118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.435207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.443469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395097991080:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.443489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.443529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395097991085:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.444059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.448017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653395097991087:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:48.602709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.602901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.603131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.899053Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653395097991973:2585] TxId: 281474976715689. Ctx: { TraceId: 01jd6ywrsd0kfd9mcyb065haj6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWVlOTUxZDAtZmZkMjQ1ZjMtMjAzYWZiMTgtNDdlNjkwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:54:48.900413Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179288945, txId: 281474976715688] shutting down 2024-11-21T08:54:48.905403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395097992013:2599], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:54:48.905403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395097992015:2601], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:54:48.905415Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439653395097992014:2600], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZGMyZmZkY2EtZTNkNWY4OTAtYTliZTJmMzYtOGJiNjAzYmE=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:54:48.905418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:54:48.905421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439653395097992014:2600], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZGMyZmZkY2EtZTNkNWY4OTAtYTliZTJmMzYtOGJiNjAzYmE=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T08:54:48.905432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7439653395097992011:2598]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T08:54:48.905450Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGMyZmZkY2EtZTNkNWY4OTAtYTliZTJmMzYtOGJiNjAzYmE=, ActorId: [1:7439653395097992011:2598], ActorState: ExecuteState, TraceId: 01jd6ywrt5bez42vgbq4w9tsk7, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2024-11-21T08:54:48.905482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7439653395097992011:2598]: Pool another_pool_id not found Trying to start YDB, gRPC: 8668, MsgBus: 26916 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e83/r3tmp/tmpSWdsHf/pdisk_1.dat 2024-11-21T08:54:49.203049Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:49.204560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8668, node 2 2024-11-21T08:54:49.212255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.212268Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.212270Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: ... le, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.317941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.333416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.346123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.497057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653399250123351:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.497085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.500908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.555856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.561713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.568916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.575518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.583021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.591568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653399250123865:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.591589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.591942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653399250123870:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.592726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.596265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653399250123872:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.774584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.774892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.775199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17082, MsgBus: 16192 2024-11-21T08:54:50.276335Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653401416472222:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:50.276586Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e83/r3tmp/tmpjVpxRF/pdisk_1.dat 2024-11-21T08:54:50.285215Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17082, node 3 2024-11-21T08:54:50.294526Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.294544Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.294547Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.294592Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16192 TClient is connected to server localhost:16192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.376695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.376742Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.377750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.378915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:50.383046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.396106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.417520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.429514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.540021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401416473761:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.540051Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.546062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.552674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.563174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.571863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.626825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.633584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.641637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401416474276:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.641665Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401416474281:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.641668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.642267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:50.646383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653401416474283:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:50.962310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:50.962339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.962345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:50.962349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:50.962363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:50.962367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:50.962376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:50.962471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:50.970826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:50.970849Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:50.973351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:50.974058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:50.974083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:50.975113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:50.975259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:50.975353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.975403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:50.976103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.976334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.976343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.976369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:50.976373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T08:54:50.976378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:50.976387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.977313Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:50.988631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:50.988691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.988745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:50.988789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:50.988802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.989420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.989459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:50.989490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.989497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:50.989500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:50.989503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:50.989791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.989798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:50.989800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:50.990054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.990058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.990062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.990067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.990475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:50.990729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:50.990766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:50.990895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:50.990911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:50.990915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.990957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:50.990962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:50.990986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.990994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:50.991268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:50.991272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:50.991300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:50.991303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:50.991374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:50.991379Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:50.991387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:50.991390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.991394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:50.991397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:50.991400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:54:50.991402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:50.991408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:50.991412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:50.991415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:50.991595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.991603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:50.991606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:50.991610Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:50.991613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:50.991622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:54:51.001310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:54:51.001313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:54:51.001347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:54:51.001356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:54:51.001359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 102 2024-11-21T08:54:51.001405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:51.001420Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 19us result status StatusSuccess 2024-11-21T08:54:51.001482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 
50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T08:54:51.001837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:51.001850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.001859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T08:54:51.001874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:51.001877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2024-11-21T08:54:51.002306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T08:54:51.002328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:51.002619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T08:54:51.002635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T08:54:51.002674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:54:51.002688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2024-11-21T08:54:51.002722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T08:54:51.002725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:54:51.002731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:54:51.002735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T08:54:51.002741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:54:51.002743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:54:51.002746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:54:51.002748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:54:51.002752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:54:51.002755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2024-11-21T08:54:51.002757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:54:51.003109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.003119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:54:51.003140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.003143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T08:54:51.003223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:51.003229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:54:51.003232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:54:51.003235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:54:51.003238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:54:51.003248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T08:54:51.003548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:54:51.003586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:54:51.003590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:54:51.003625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:54:51.003634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:54:51.003637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:344:2336] TestWaitNotification: OK eventTxId 103 2024-11-21T08:54:51.003675Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:51.003690Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 19us result status StatusSuccess 2024-11-21T08:54:51.003733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryService::ExecStatsAst [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:51.055879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:51.055901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:51.055904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:51.055907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:51.055920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:51.055922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:51.055929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:51.055995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:51.064143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:51.064168Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:51.066924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:51.067463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:51.067489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:51.068908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:51.069084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:51.069162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.069217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:51.070278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.070618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.070628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.070667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:51.070674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.070680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:51.070696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.072013Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:51.089780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:51.089860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.089929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:51.089996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:51.090014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.090940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.090967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:51.091005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.091014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:51.091017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:51.091021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:51.091490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.091502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:51.091507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:51.091847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.091855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.091859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.091865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.092376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:51.092793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:51.092842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:51.093020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.093046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.093054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.093110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:51.093119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.093148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:51.093161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:51.093575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.093581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.093615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.093618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:51.093689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.093694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:51.093704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:51.093720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.093724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:51.093727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.093731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:51.093734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:51.093741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:51.093745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:51.093748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:51.093970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:51.093980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:51.093983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:51.093987Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:51.093989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:51.093999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:54:51.099462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.099465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:54:51.099468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:54:51.099524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.099529Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:51.099534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T08:54:51.099552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:51.099613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.099620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.099622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:54:51.099625Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T08:54:51.099628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:51.099695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.099701Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.099703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:54:51.099705Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:54:51.099707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:54:51.099712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T08:54:51.099960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:54:51.099982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2024-11-21T08:54:51.100241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100262Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T08:54:51.100301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:51.100306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:54:51.100433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:54:51.100461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:54:51.100715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.100735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:54:51.100744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100746Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:54:51.100749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T08:54:51.100767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.100771Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:54:51.100778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:54:51.100780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:54:51.100784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T08:54:51.100787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:54:51.100790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:54:51.100792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:54:51.100798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:54:51.100802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T08:54:51.100804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:54:51.100806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:54:51.100899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.100906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.100908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:54:51.100911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:54:51.100913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:51.101101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.101107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:54:51.101109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:54:51.101111Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:54:51.101113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:54:51.101118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T08:54:51.101638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:54:51.101735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2024-11-21T08:54:51.102147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:51.102172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.102182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2024-11-21T08:54:51.102591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.102609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |89.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 62597, MsgBus: 18079 2024-11-21T08:54:48.093301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653395137438576:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.093389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e5f/r3tmp/tmptaytJt/pdisk_1.dat 2024-11-21T08:54:48.150980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62597, node 1 2024-11-21T08:54:48.167748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.167767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.167768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.167805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18079 2024-11-21T08:54:48.193139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.193167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.194279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.211596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.221253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.282340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:48.299967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.309411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.373741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395137439975:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.373783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.401327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.407676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.413617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.421141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.475931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.483934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.492890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395137440485:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.492910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653395137440490:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.492912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.493450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.497372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653395137440492:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:48.664492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.664793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.664934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.940507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179288987, txId: 281474976710689] shutting down Trying to start YDB, gRPC: 9018, MsgBus: 28623 2024-11-21T08:54:49.188279Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653401090107652:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.188535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e5f/r3tmp/tmpH5iiSq/pdisk_1.dat 2024-11-21T08:54:49.198772Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9018, node 2 2024-11-21T08:54:49.207036Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.207048Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.207049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.207079Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28623 TClient is connected to server localhost:28623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:49.288304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:49.288340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:49.289502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:49.290607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.294141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.303683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.319373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.332621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 w ... efault, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.439618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.445239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.457975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.463930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.471128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.478225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.487459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653401090109684:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.487480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653401090109689:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.487493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.488198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.491533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653401090109691:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.617647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.617990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.618210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.968158Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179290009, txId: 281474976715695] shutting down 2024-11-21T08:54:49.991844Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179290037, txId: 281474976715698] shutting down Trying to start YDB, gRPC: 25012, MsgBus: 18780 2024-11-21T08:54:50.282812Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653401855171630:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e5f/r3tmp/tmpZbNmZD/pdisk_1.dat 2024-11-21T08:54:50.288434Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:50.291147Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25012, node 3 2024-11-21T08:54:50.300683Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.300695Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.300697Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.300741Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18780 TClient is connected to server localhost:18780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:50.382936Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.382961Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.384029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.385260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.395958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.404551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.423899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:54:50.434035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.573309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401855173138:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.573332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.578591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.586649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.598051Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.605008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.611580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.618856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.627517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401855173675:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.627546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.627550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653401855173680:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.628102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:50.631897Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653401855173682:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:50.779488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.779759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.780079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.078076Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179291122, txId: 281474976715690] shutting down >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecStatsAst [GOOD] Test command err: Trying to start YDB, gRPC: 26967, MsgBus: 24517 2024-11-21T08:54:49.089174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653397835847101:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.089418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1c/r3tmp/tmp4ct7dU/pdisk_1.dat 2024-11-21T08:54:49.135025Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26967, node 1 2024-11-21T08:54:49.152773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.152786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.152788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.152824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24517 TClient is connected to server localhost:24517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:54:49.188863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:49.188884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:54:49.190018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:49.197666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.208766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.268006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.282724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.291017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.389699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397835848631:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.389722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.421957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.427430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.436078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.442859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.497885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.506369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.514747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397835849146:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.514769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397835849151:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.514771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.515296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.519200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653397835849153:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 17037, MsgBus: 24470 2024-11-21T08:54:49.967347Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653398309714093:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1c/r3tmp/tmpOwQsTS/pdisk_1.dat 2024-11-21T08:54:49.970636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:49.978750Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17037, node 2 2024-11-21T08:54:49.987146Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.987163Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.987165Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.987211Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24470 TClient is connected to server localhost:24470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.067084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.067114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.068240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.069991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.075636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.084575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:50.102890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.113026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.230827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402604682771:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.230851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.236607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.242540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.296956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.304334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.359452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.367345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.375710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402604683288:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.375732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.375739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402604683293:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.376294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:50.380168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653402604683295:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9449, MsgBus: 25701 2024-11-21T08:54:50.846779Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653402300079261:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:50.846838Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1c/r3tmp/tmpWSwiBH/pdisk_1.dat 2024-11-21T08:54:50.858552Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9449, node 3 2024-11-21T08:54:50.866188Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.866198Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.866199Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.866226Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25701 TClient is connected to server localhost:25701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.947196Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.947224Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.948322Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.949503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.961815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.970743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:51.030868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.039774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.110355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653406595048093:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.110375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.114145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.119807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.129891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.137057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.144114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.150934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.159262Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653406595048587:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.159288Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.159323Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653406595048592:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.159834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:51.164628Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653406595048594:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:51.327076Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653406595048893:2461], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2024-11-21T08:54:51.327151Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWIxMDQyNGEtNGUyYmVlYjItZmI2ZGJmMzUtZDljM2VmYTE=, ActorId: [3:7439653406595048891:2460], ActorState: ExecuteState, TraceId: 01jd6ywv5v5mxrtf9d2av497zs, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:54:51.334994Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439653406595048911:2465], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd6ywv5z1rjn8q5rmv5xy6gs. SessionId : ydb://session/3?node_id=3&id=MmU1MjNlYmUtMTllMzVhYS0xMTNhNTdiYS01YTA5ZmViNQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2024-11-21T08:54:51.335504Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmU1MjNlYmUtMTllMzVhYS0xMTNhNTdiYS01YTA5ZmViNQ==, ActorId: [3:7439653406595048904:2465], ActorState: ExecuteState, TraceId: 01jd6ywv5z1rjn8q5rmv5xy6gs, Create QueryResponse for error on request, msg: |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> KqpService::RangeCache-UseCache [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:51.815183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:51.815205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:51.815208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:51.815211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:51.815223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:51.815225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:51.815232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:51.815290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:51.822736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:51.822757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:51.824980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:51.825519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:51.825547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:51.826611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:51.826749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:51.826815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.826861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:51.827648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.827856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.827862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.827886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:51.827890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.827894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:51.827903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.828837Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:51.838366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:51.838422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.838467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:51.838499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:51.838510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:51.839242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:51.839251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:51.839254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:51.839554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:51.839784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.839793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.839797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.840172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:51.840463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:51.840502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:51.840640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.840660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.840666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.840703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:51.840707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:51.840729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:51.840739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:51.841031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.841037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.841064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.841067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:51.841136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.841141Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TDone opId# 1:0 ProgressState 2024-11-21T08:54:51.841148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:51.841151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.841154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:51.841157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:51.841160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:51.841162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:51.841169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:51.841173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:51.841175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:51.841348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:51.841357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:51.841359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:51.841362Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:51.841365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:51.841372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
EBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:54:51.855511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T08:54:51.855526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2024-11-21T08:54:51.855658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:54:51.855690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:54:51.855695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:54:51.855699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:54:51.855702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T08:54:51.855705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:54:51.855708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:54:51.855710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:54:51.855714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:54:51.855716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2024-11-21T08:54:51.855718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:54:51.855720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:54:51.855767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:51.855791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:51.855954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:51.855971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:54:51.855984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:51.855986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2024-11-21T08:54:51.855988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T08:54:51.856036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:51.856048Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:54:51.856050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:54:51.856080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:54:51.856088Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:54:51.856090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:54:51.856095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T08:54:51.856126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:54:51.856128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:54:51.856133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 3 2024-11-21T08:54:51.856400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:54:51.856536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:54:51.856563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:54:51.856567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:54:51.856609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:54:51.856619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:54:51.856622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:396:2388] TestWaitNotification: OK eventTxId 105 2024-11-21T08:54:51.856662Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:51.856677Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 19us result status StatusPathDoesNotExist 2024-11-21T08:54:51.856697Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:54:51.856725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:51.856733Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 9us result status StatusSuccess 2024-11-21T08:54:51.856765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 
11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 62296, MsgBus: 29412 2024-11-21T08:54:49.014831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653399650307048:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.015045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e3f/r3tmp/tmpksAG6j/pdisk_1.dat 2024-11-21T08:54:49.053553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62296, node 1 2024-11-21T08:54:49.067315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.067329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.067331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.067391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29412 TClient is connected to server localhost:29412 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:49.115612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:49.115648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:49.116266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:49.136560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:54:49.142715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.205190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.220419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.228971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.305292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653399650308580:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.305329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.335140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.390400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.402074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.415369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.469852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.478182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.487617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653399650309097:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.487651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653399650309102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.487650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.488243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.491204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653399650309104:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } took: 0.717979s took: 0.717977s took: 0.717965s took: 0.718029s took: 0.718241s took: 0.718250s took: 0.718249s took: 0.718265s took: 0.718247s took: 0.718278s Trying to start YDB, gRPC: 21487, MsgBus: 4472 2024-11-21T08:54:50.515256Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653402750672353:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:50.515467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e3f/r3tmp/tmpOaw5y2/pdisk_1.dat 2024-11-21T08:54:50.525480Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21487, node 2 2024-11-21T08:54:50.542192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.542207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.542209Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.542246Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4472 TClient is connected to server localhost:4472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.615723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.615749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.616805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.618147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.621950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.634192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:50.649560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.662487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.804708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402750673888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.804730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.810905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.865671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.878027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.884969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.892264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.898933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.907829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402750674403:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.907856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402750674408:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.907856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.908391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:50.912335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653402750674410:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } took: 0.696561s took: 0.697396s took: 0.697651s took: 0.697684s took: 0.697681s took: 0.697749s took: 0.697802s took: 0.697801s took: 0.697911s took: 0.697926s >> TSchemeShardUserAttrsTest::MkDir >> TSchemeShardUserAttrsTest::Boot |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 30597, MsgBus: 22687 2024-11-21T08:54:48.581085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653396367334049:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.581129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e58/r3tmp/tmpdy9iRr/pdisk_1.dat 2024-11-21T08:54:48.622378Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30597, node 1 2024-11-21T08:54:48.641269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.641281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.641282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.641312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22687 TClient is connected to server localhost:22687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:48.681522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.681549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.682665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.705949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:48.716384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.776629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.791481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.800992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.865217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653396367335592:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.865244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.899709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.905403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.960228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.967267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.973710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.980818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:48.989665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653396367336097:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.989691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.989692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653396367336102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:48.990214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:48.994018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653396367336104:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1148, MsgBus: 16272 2024-11-21T08:54:49.424480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653398102701361:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.424645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e58/r3tmp/tmpxHD2VS/pdisk_1.dat 2024-11-21T08:54:49.437397Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1148, node 2 2024-11-21T08:54:49.442522Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:49.442534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:49.442536Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:49.442561Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16272 TClient is connected to server localhost:16272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:49.524878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:49.524908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:49.525977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:49.527166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.536385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.543281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:49.556649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.569610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.689388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653398102702894:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.689408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.695560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.701637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.709913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.723994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.730866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.744798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.760073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653398102703408:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.760105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.760197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653398102703413:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.760908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.764372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653398102703415:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.949874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.950311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.950519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15243, MsgBus: 22554 2024-11-21T08:54:51.139395Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653407184482079:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:51.139720Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e58/r3tmp/tmpqjftAd/pdisk_1.dat 2024-11-21T08:54:51.148154Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15243, node 3 2024-11-21T08:54:51.157371Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:51.157384Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:51.157385Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:51.157416Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22554 TClient is connected to server localhost:22554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:51.240032Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:51.240063Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:51.241092Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:51.242262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:51.242963Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:51.252574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.259442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.275249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.285113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:51.415750Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653407184483614:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.415779Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.420793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.476352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.487337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.494314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.501708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.515550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.531167Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653407184484132:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.531197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.531214Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653407184484137:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:51.531824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:51.535089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653407184484139:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:51.726742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.727230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.727449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:52.475479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:52.475494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:52.475498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:52.475500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:52.475508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:52.475510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:52.475516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:52.475564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:52.482976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:52.483001Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:52.485253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:52.486193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:52.486218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:54:52.487491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:52.487718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:52.487812Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.487877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:52.488636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:52.488839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:52.488843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:52.488851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.489638Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:52.502601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:52.502698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.502783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:52.502848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:52.502863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:52.503677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:52.503690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:52.503694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:52.504067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:52.504412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.504433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.504925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:52.505246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:52.505283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:52.505414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.505498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:52.505503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.505525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:52.505535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:52.505889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:52.505921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505925Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:52.505988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:52.506006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:52.506010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.506014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:52.506019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.506023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:52.506027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:52.506038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:52.506042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:52.506046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:52.506303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:52.506316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:52.506320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:52.506324Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:52.506328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:52.506339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ionSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T08:54:52.520808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:54:52.520810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:54:52.520867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:54:52.520887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:54:52.520890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:371:2363] 2024-11-21T08:54:52.520911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:54:52.520924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:54:52.520928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:54:52.520930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:371:2363] 2024-11-21T08:54:52.520939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:54:52.520942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:54:52.520944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:371:2363] 2024-11-21T08:54:52.520952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:54:52.520954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:371:2363] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T08:54:52.521001Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521018Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2024-11-21T08:54:52.521098Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521138Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521148Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 11us result status StatusSuccess 2024-11-21T08:54:52.521172Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521228Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 8us result status StatusSuccess 2024-11-21T08:54:52.521246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521276Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 7us result status StatusSuccess 2024-11-21T08:54:52.521302Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521328Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:54:52.521335Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 8us 
result status StatusSuccess 2024-11-21T08:54:52.521352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:54:52.474524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:54:52.474545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:52.474549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:54:52.474552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:54:52.474564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:54:52.474567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:54:52.474574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:54:52.474644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:52.482076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:54:52.482098Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:52.484556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:52.485509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:54:52.485554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:54:52.486859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:54:52.487032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:54:52.487121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.487202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:54:52.488013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.488395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:54:52.488402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:52.488408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:54:52.488419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.489629Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:54:52.502709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:54:52.502757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.502804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:54:52.502837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:54:52.502849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:54:52.503635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:54:52.503645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:54:52.503649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:54:52.503957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:54:52.503976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:54:52.504220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.504233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.504240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.504619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:54:52.504904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:54:52.504947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:54:52.505084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.505148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:54:52.505152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:54:52.505187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:52.505197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:54:52.505495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:54:52.505533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:54:52.505603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:54:52.505607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:54:52.505617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:54:52.505620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.505625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:54:52.505629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:54:52.505632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:54:52.505634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:54:52.505642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:54:52.505646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:54:52.505649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:54:52.505846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:52.505855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:54:52.505858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:54:52.505861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:54:52.505864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:54:52.505872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:54:52.506284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:54:52.506340Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |89.2%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |89.2%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TProxyActorTest::TestCreateSemaphoreInterrupted >> TProxyActorTest::TestAttachSession >> TProxyActorTest::TestCreateSemaphore >> KqpService::ToDictCache+UseCache >> KqpQueryService::ExecuteQueryExplicitTxTLI >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> TProxyActorTest::TestAttachSession [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> KqpService::ToDictCache+UseCache [GOOD] >> KqpQueryService::TableSink_OltpDelete [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpService::ToDictCache-UseCache >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |89.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> KqpQueryService::MaterializeTxResults [GOOD] >> DataStreams::TestReservedConsumersMetering [GOOD] >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> KqpService::SwitchCache+UseCache >> KqpService::ToDictCache-UseCache [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> KqpQueryService::Ddl >> KqpQueryService::Tcl >> KqpService::Shutdown >> TPersQueueTest::TestBigMessage [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> KqpQueryService::DdlGroup >> KqpQueryService::ReplaceIntoWithDefaultValue >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> KqpService::SwitchCache+UseCache [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TPersQueueTest::SetMeteringMode >> KqpQueryService::Tcl [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> KqpQueryService::TableSink_BadTransactions >> KqpService::SwitchCache-UseCache >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> KqpQueryService::Ddl [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::DdlPermission >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::CreateTempTable >> KqpQueryService::ReadManyRanges >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlMixedDml >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] >> KqpQueryService::TableSink_OlapDelete >> KqpQueryService::AlterCdcTopic >> KqpQueryService::ReadManyShardsRange |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2024-11-21T08:54:36.234202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653342491716433:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:36.234378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043c3/r3tmp/tmpWzCMY9/pdisk_1.dat 2024-11-21T08:54:36.290631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18980, node 1 2024-11-21T08:54:36.299523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:36.299534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:36.299536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:36.299569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:36.328640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.329686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.329707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.330503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:36.330568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:36.330576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:54:36.330972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:36.330982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:54:36.331250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:54:36.331373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.332314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179276380, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:36.332327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:54:36.332402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:54:36.332794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.332847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.332859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:54:36.332877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:54:36.332885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:54:36.332897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:54:36.333415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:54:36.333445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:54:36.333450Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:36.333467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:54:36.334268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:36.334292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:36.335778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:36.350180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.350267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.350277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 
281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.350296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:54:36.350329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:54:36.350333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:54:36.351074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:36.351121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.351180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.351298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:54:36.351313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:54:36.351316Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:36.351333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:31825 2024-11-21T08:54:36.363313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.363388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.363398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.364050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T08:54:36.364087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:36.364489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:54:36.364692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179276408, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:36.364703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732179276408, at schemeshard: 72057594046644480 2024-11-21T08:54:36.364755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:54:36.364779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:54:36.364785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T08:54:36.365134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:36.365173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:36.365326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:54:36.365339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:54:36.365342Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:36.365352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T08:54:36.373845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestReservedResourcesMetering, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:36.373961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:36.374736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, ... 
older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179290210-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1732179290,"finish":1732179290},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179290}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179290210-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1732179290,"finish":1732179290},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179290}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179290210-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1732179290,"finish":1732179290},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179290}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1732179290224-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1732179290,"finish":1732179291},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179291}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179290224-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1732179290,"finish":1732179291},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179291}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179290224-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1732179290,"finish":1732179291},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179291}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179290224-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179290,"finish":1732179291},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179291}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1732179291238-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1732179291,"finish":1732179292},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179292}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179291238-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1732179291,"finish":1732179292},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179292}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179291238-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1732179291,"finish":1732179292},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179292}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179291238-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179291,"finish":1732179292},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179292}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1732179292242-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1732179292,"finish":1732179293},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179293}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179292242-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1732179292,"finish":1732179293},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179293}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179292242-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1732179292,"finish":1732179293},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179293}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179292242-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179292,"finish":1732179293},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179293}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1732179293245-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1732179293,"finish":1732179294},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179294}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179293245-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1732179293,"finish":1732179294},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179294}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179293245-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1732179293,"finish":1732179294},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179294}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179293245-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179293,"finish":1732179294},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179294}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1732179294248-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1732179294,"finish":1732179295},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179295}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179294248-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1732179294,"finish":1732179295},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179295}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1732179294248-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1732179294,"finish":1732179295},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1732179295}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1732179294248-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1732179294,"finish":1732179295},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1732179295}' >> KqpQueryService::TableSink_BadTransactions [GOOD] >> KqpQueryService::TableSink_DisableSink >> KqpQueryService::ReadManyShardsRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] Test command err: Trying to start YDB, gRPC: 17595, MsgBus: 6646 2024-11-21T08:54:53.990487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653415990673855:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:53.990668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dfc/r3tmp/tmpHXmygk/pdisk_1.dat 2024-11-21T08:54:54.035015Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 17595, node 1 2024-11-21T08:54:54.044738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:54.044748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:54.044749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:54.044776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6646 TClient is connected to server localhost:6646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:54.090462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:54.090489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:54.091577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:54.111948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.117983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.177676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.190876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.203849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.225055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420285642690:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.225085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.249001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.254510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.309004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.315026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.369382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.378200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.386028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420285643210:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.386056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420285643215:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.386056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.386535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:54.391047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653420285643217:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:54.559823Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmEwYzg2NjAtMzk2ZTM5ZjItY2U4MzlmZDEtNDZiYmMzYzE=, ActorId: [1:7439653420285643482:2454], ActorState: ExecuteState, TraceId: 01jd6ywyaxawpedrh7mktcnhqj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 27311, MsgBus: 11578 2024-11-21T08:54:54.649406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653420456219523:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:54.649599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dfc/r3tmp/tmpdho8RH/pdisk_1.dat 2024-11-21T08:54:54.656225Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27311, node 2 2024-11-21T08:54:54.663964Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:54.663973Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:54.663975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:54.664003Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11578 TClient is connected to server localhost:11578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:54.749999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:54.750024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:54.751010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:54.751716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.752381Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:54.757058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:54.763221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.778661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.791685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.875753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653420456221052:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.875775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.878965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.884316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.895868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.902561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.909630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.916551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:54.924756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653420456221544:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.924772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.924809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653420456221549:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.925244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:54.930217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653420456221551:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27563, MsgBus: 15234 2024-11-21T08:54:55.332837Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653423475192663:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:55.333136Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dfc/r3tmp/tmpEBLYjr/pdisk_1.dat 2024-11-21T08:54:55.343630Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27563, node 3 2024-11-21T08:54:55.349281Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:55.349294Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:55.349295Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:55.349324Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15234 TClient is connected to server localhost:15234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:55.433029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:55.433066Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:55.434167Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:55.435353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.444782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.452132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:55.467044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.477929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.579165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653423475194194:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.579197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.584147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.590211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.645556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.651771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.659209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.666037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:55.674631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653423475194710:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.674653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.674660Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653423475194715:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.675319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:55.679689Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653423475194717:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> KqpQueryService::CreateTempTable [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> KqpQueryService::CreateOrDropTopicOverTable >> TColumnShardTestSchema::HotTiersWithStat [GOOD] >> KqpQueryService::DdlMixedDml [GOOD] >> KqpQueryService::AlterCdcTopic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 24800, MsgBus: 29776 2024-11-21T08:54:43.629664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653374895172774:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.630024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f77/r3tmp/tmpEFiPoF/pdisk_1.dat 2024-11-21T08:54:43.679571Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24800, node 1 2024-11-21T08:54:43.692725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:43.692739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:43.692741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:43.692786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29776 TClient is connected to server localhost:29776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:43.729654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.729697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.730834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:43.757962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:43.882050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374895173371:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.882078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.906414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.919581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:43.919627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:43.919641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:43.919655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:43.919673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:43.919676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:43.919690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:43.919708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:43.919712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:43.919724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:43.919733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:43.919742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:43.919752Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:43.919760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:43.919796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:43.919806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:43.919812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:43.919826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653374895173501:2305];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:43.919827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:43.919856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:43.919875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:43.919894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:43.919914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:43.919933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439653374895173499:2303];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:43.920231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:43.920242Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:43.920251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:43.920254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:43.920270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:43.920276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:43.920282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:54:43.920286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:54:43.920295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720 ... pl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.613243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.613262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.615788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653379398655840:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.615793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653379398655845:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.615804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.616317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.619076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653379398655847:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:54:44.696332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"0;"}}]}; 2024-11-21T08:54:44.698334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"1;"}}]}; 2024-11-21T08:54:44.703980Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.704978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.749577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"0;"}}]}; 2024-11-21T08:54:44.752072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"1;"}}]}; 2024-11-21T08:54:44.757166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.758201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:44.761983Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653379398656254:2510], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2024-11-21T08:54:44.762043Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODVmZjFkNGEtNGFhY2FiYTgtY2JlM2Y4NGEtOWM1YjE3NTM=, ActorId: [2:7439653379398656252:2509], ActorState: ExecuteState, TraceId: 01jd6ywmrqfce0jajynms6cckt, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 2024-11-21T08:54:45.608679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7439653379398655505:2308];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:54:45.608873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7439653379398655504:2307];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:54:45.611786Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=419094a6-a7e611ef-87d1b68d-1b24b237;fline=with_appended.cpp:80;portions=3,;task_id=419094a6-a7e611ef-87d1b68d-1b24b237; 2024-11-21T08:54:45.612311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=41909f3c-a7e611ef-8eaa35cc-67a4b46d;fline=with_appended.cpp:80;portions=3,;task_id=41909f3c-a7e611ef-8eaa35cc-67a4b46d; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:54:49.306769Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653379398654813:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.306803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2116, MsgBus: 14968 2024-11-21T08:54:50.042065Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653404773218872:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:50.042108Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f77/r3tmp/tmpnZTVPn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2116, node 3 2024-11-21T08:54:50.055662Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:50.062144Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.062165Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.062167Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.062201Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14968 TClient is connected to server localhost:14968 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.142563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.142598Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.143627Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.144901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.147385Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:50.326157Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653404773219474:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.326185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.328533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:50.385791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653404773219575:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.385826Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.385874Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653404773219580:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.386517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:50.394170Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653404773219582:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:54:55.042195Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439653404773218872:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:55.042226Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2078, MsgBus: 30776 2024-11-21T08:54:44.141856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653379695546255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:44.142024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f45/r3tmp/tmp4F5FQC/pdisk_1.dat 2024-11-21T08:54:44.183634Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2078, node 1 2024-11-21T08:54:44.195656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:44.195668Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:44.195669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:44.195693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30776 TClient is connected to server localhost:30776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:44.241790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:44.241812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:44.242942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:44.262102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.270916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.330558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.344053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.398817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:44.419499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379695547788:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.419519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.446893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.453357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.466490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.473313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.480124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.487084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.495798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379695548281:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.495822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653379695548286:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.495828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:44.496410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:44.500021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653379695548288:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:49.142788Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653379695546255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:49.142822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27427, MsgBus: 23211 2024-11-21T08:54:55.841599Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653422835389724:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:55.841862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002f45/r3tmp/tmpm8EB5c/pdisk_1.dat 2024-11-21T08:54:55.852033Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27427, node 2 2024-11-21T08:54:55.856332Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:55.856341Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:55.856342Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:55.856368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23211 TClient is connected to server localhost:23211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:55.942054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:55.942089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:55.943130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:55.943810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.948875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:55.956575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.969971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.981784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:56.070292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653427130358546:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:56.070313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Ro ... 72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:54:56.898519Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:54:56.898523Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:54:56.898537Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:54:56.898541Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:54:56.898551Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:54:56.898554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:54:56.898569Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:54:56.898573Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:54:56.898582Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:54:56.898584Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:54:56.898604Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:56.898607Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:56.898613Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:56.898616Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:56.898625Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:56.898627Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:56.898632Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:54:56.898634Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:54:56.898639Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:54:56.898647Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:54:56.898650Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:54:56.898652Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:54:56.898672Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:54:56.898675Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:54:56.898683Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:54:56.898686Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:54:56.898692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:54:56.898692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:56.898695Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:54:56.898697Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:56.898703Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:54:56.898705Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:56.898706Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:54:56.898709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:56.898712Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:54:56.898714Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:54:56.898726Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:56.898729Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:56.898737Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:54:56.898740Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:54:56.898747Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:54:56.898750Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:54:56.898756Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:54:56.898759Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:54:56.898782Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:54:56.898786Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:54:56.898800Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:54:56.898804Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:54:56.898815Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:54:56.898819Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:54:56.898833Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:54:56.898836Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:54:56.898846Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:54:56.898849Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 21131, MsgBus: 6970 2024-11-21T08:54:53.952681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653416572072568:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:53.953036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e03/r3tmp/tmpNtbtB2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21131, node 1 2024-11-21T08:54:54.001538Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:54.008941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:54.008951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:54.008952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:54.008971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6970 TClient is connected to server localhost:6970 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:54.052617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:54.052642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:54.053696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:54.083379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:54.205844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040469:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.205866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040489:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.205870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040490:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.205886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.205907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040488:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.206078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040506:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.206150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653420867040501:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.206163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:54.206648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:54.207994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653420867040519:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:54.207998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653420867040497:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:54.208003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653420867040498:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:54.208021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653420867040496:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } took: 0.271320s took: 0.271345s took: 0.271367s took: 0.277392s took: 0.067288s took: 0.067297s took: 0.067299s took: 0.067349s took: 0.065275s took: 0.065434s took: 0.065459s took: 0.065481s took: 0.064923s took: 0.064971s took: 0.064978s took: 0.064981s took: 0.070240s took: 0.070247s took: 0.070229s took: 0.070291s took: 0.070947s took: 0.070971s took: 0.070968s took: 0.071014s took: 0.072773s took: 0.072801s took: 0.072895s took: 0.072897s took: 0.070060s took: 0.070086s took: 0.070099s took: 0.070120s took: 0.067765s took: 0.067800s took: 0.067801s took: 0.067841s took: 0.064287s took: 0.064292s took: 0.064501s took: 0.064513s took: 0.067313s took: 0.067338s took: 0.067341s took: 0.067344s took: 0.072853s took: 0.072897s took: 0.072903s took: 0.072904s took: 0.073201s took: 0.073319s took: 0.073324s took: 0.073501s took: 0.079925s took: 0.079935s took: 0.079921s took: 0.079920s took: 0.080769s took: 0.080764s took: 0.080753s took: 0.080776s took: 0.082810s took: 0.082805s took: 0.082808s took: 0.082807s Trying to start YDB, gRPC: 17731, MsgBus: 3359 2024-11-21T08:54:55.613591Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653425569530981:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:55.613732Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e03/r3tmp/tmpNlqB97/pdisk_1.dat 2024-11-21T08:54:55.627081Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17731, node 2 2024-11-21T08:54:55.637724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:55.637737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:55.637738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:55.637777Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3359 TClient is connected to server localhost:3359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:55.714113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:55.714157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:55.715178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:55.715780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:55.834971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653425569531610:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.834989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653425569531611:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.834995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653425569531590:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.834999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653425569531612:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.835017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.835080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653425569531613:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:55.835587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:54:55.837067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653425569531619:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:54:55.837083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653425569531620:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:54:55.837087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653425569531618:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:54:55.837093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653425569531624:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } took: 0.235882s took: 0.235882s took: 0.253105s took: 0.266161s took: 0.065072s took: 0.065344s took: 0.065383s took: 0.065399s took: 0.063869s took: 0.063874s took: 0.063918s took: 0.063909s took: 0.066286s took: 0.066273s took: 0.066280s took: 0.066273s took: 0.072762s took: 0.072927s took: 0.072967s took: 0.073005s took: 0.071039s took: 0.071061s took: 0.071047s took: 0.071092s took: 0.075846s took: 0.075921s took: 0.075950s took: 0.075968s took: 0.078875s took: 0.079004s took: 0.079005s took: 0.079047s took: 0.073969s took: 0.074091s took: 0.074110s took: 0.074313s took: 0.075635s took: 0.075751s took: 0.075773s took: 0.075804s took: 0.077262s took: 0.077417s took: 0.077421s took: 0.077726s took: 0.077937s took: 0.077952s took: 0.078278s took: 0.078322s took: 0.079448s took: 0.079694s took: 0.079715s took: 0.079757s took: 0.085501s took: 0.085531s took: 0.085569s took: 0.085589s took: 0.088890s took: 0.089167s took: 0.089202s took: 0.089369s took: 0.084093s took: 0.084108s took: 0.084122s took: 0.084153s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyShardsRange [GOOD] Test command err: Trying to start YDB, gRPC: 28496, MsgBus: 5528 2024-11-21T08:54:58.146488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653439739523895:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.146509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d56/r3tmp/tmpOHiQa2/pdisk_1.dat 2024-11-21T08:54:58.199807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28496, node 1 2024-11-21T08:54:58.210817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.210827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.210828Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.210854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5528 TClient is connected to server localhost:5528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:54:58.246567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.246588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:54:58.247701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.255827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.400819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653439739524492:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.400834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653439739524503:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.400838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.401280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.402475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653439739524506:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T08:54:58.528385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26741, MsgBus: 16460 2024-11-21T08:54:58.794530Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653437691057977:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.794692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d56/r3tmp/tmpgWKNWQ/pdisk_1.dat 2024-11-21T08:54:58.801116Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26741, node 2 2024-11-21T08:54:58.814749Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.814760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.814762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.814788Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16460 TClient is connected to server localhost:16460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.894817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.894837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.895886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.896062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.046181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653441986025866:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.046202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.048912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.064255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653441986026390:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.064277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653441986026395:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.064278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.064818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.066903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653441986026397:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 6569, MsgBus: 26768 2024-11-21T08:54:59.487493Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653442004245963:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:59.487519Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d56/r3tmp/tmpkZpYE9/pdisk_1.dat 2024-11-21T08:54:59.498145Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6569, node 3 2024-11-21T08:54:59.513590Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:59.513600Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:59.513602Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:59.513628Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26768 TClient is connected to server localhost:26768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:59.587581Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:59.587605Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:59.588711Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:59.589310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.729118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442004246560:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.729140Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.731528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.740563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442004246765:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.740586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.740585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442004246770:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.741115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.746017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653442004246772:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 11173, MsgBus: 25613 2024-11-21T08:54:58.138925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653437623537786:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.139226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d4f/r3tmp/tmpFIVrFM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11173, node 1 2024-11-21T08:54:58.192514Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:58.193995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.194005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.194007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.194028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25613 TClient is connected to server localhost:25613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.238965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.239006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.240093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.269058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.280743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.293713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:58.306505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.314509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.403549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437623539326:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.403568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.425663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.430050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.437708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.444235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.451571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.506122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.516808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437623539843:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.516833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437623539848:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.516836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.517362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.521465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653437623539850:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:58.678693Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmIxMGMxN2MtNDdhNDMyN2MtNDdjYTg0NTEtYjBmNWMwYjM=, ActorId: [1:7439653437623540151:2460], ActorState: ExecuteState, TraceId: 01jd6yx2bk9f8zqzbjw7nndmwc, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.685249Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODlhYzAyMGQtMjQ4MTc3MWUtZDE0OWVmNDktMTdhMDk5Nzg=, ActorId: [1:7439653437623540197:2469], ActorState: ExecuteState, TraceId: 01jd6yx2bv96zrfh7kv8y89kx6, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.695051Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjQzNGQ0OTAtNjhkNGQzMDUtNGUwYzAzY2YtZjAxNGM5OWE=, ActorId: [1:7439653437623540255:2481], ActorState: ExecuteState, TraceId: 01jd6yx2c563bmmm6cvmbxwvgn, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.710678Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjAxZDY1N2YtMzljOTAwZGUtMmI3NjhlZTMtODgzMmIwNDU=, ActorId: [1:7439653437623540327:2496], ActorState: ExecuteState, TraceId: 01jd6yx2cm34mgggq02vztkw6t, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.721428Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM3NGMxZTctMjg0MTI5NTMtYTRjZTBiMzctODRlMDRjNw==, ActorId: [1:7439653437623540385:2508], ActorState: ExecuteState, TraceId: 01jd6yx2cz55z97gse1pm0s53w, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.741811Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY0NTJiZWYtZGQ3YjQ2ZmUtNWE2MjkzNmYtYzRmYmEyNDk=, ActorId: [1:7439653437623540530:2532], ActorState: ExecuteState, TraceId: 01jd6yx2dk2esryqj5tsmtna51, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.749790Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA1ZGNiZGMtY2JiYjYzNzEtYzdkNTMzOTctNWY3OGIyMg==, ActorId: [1:7439653437623540574:2541], ActorState: ExecuteState, TraceId: 01jd6yx2dv1kdpvcrgn5yfcw2g, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.751997Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FiNDY5NTYtYWUzODNkZWUtYTY2M2YyNzgtNzRhODA2Nzk=, ActorId: [1:7439653437623540610:2545], ActorState: ExecuteState, TraceId: 01jd6yx2dy6ys312fzhfyhpdkj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29604, MsgBus: 29961 2024-11-21T08:54:59.003084Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653442354679516:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:59.003236Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d4f/r3tmp/tmp73ylgN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29604, node 2 2024-11-21T08:54:59.017290Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:59.018229Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:59.018237Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:59.018238Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:59.018256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:29961 TClient is connected to server localhost:29961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:59.103287Z node 2 :HIVE WARN: HIVE#720575 ... vhke, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:54:59.438290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.445886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.454170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.458983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.463273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.463640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.467161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.470734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.474554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.478762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.482698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.487026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.491936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.496311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.500895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.505157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.510482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.510845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10091, MsgBus: 16782 2024-11-21T08:54:59.695818Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653442317337593:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:59.695834Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d4f/r3tmp/tmp6Rc3q1/pdisk_1.dat 2024-11-21T08:54:59.706460Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10091, node 3 2024-11-21T08:54:59.713256Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:59.713269Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:59.713270Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:59.713307Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16782 TClient is connected to server localhost:16782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:59.796007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:59.796042Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:59.797111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:59.798713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.800923Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.810018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.825553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.838507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.924998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442317339126:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.925018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.928988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.934078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.942665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.949710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.956569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.963735Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.972435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442317339619:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.972458Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.972470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653442317339624:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.972940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.976890Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653442317339626:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:00.146890Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653446612307215:2458], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2024-11-21T08:55:00.146979Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjQ5M2Q3MjgtOGY1MTcyNTItY2U0ZGNjMjctMmM3MWE5ZDU=, ActorId: [3:7439653446612307208:2454], ActorState: ExecuteState, TraceId: 01jd6yx3sfc3ycw0t3fex3tfm0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpQueryService::TableSink_DisableSink [GOOD] |89.3%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 5048, MsgBus: 11139 2024-11-21T08:54:58.179916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653436384545873:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.180096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d6d/r3tmp/tmpJhBcEN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5048, node 1 2024-11-21T08:54:58.230423Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:58.237494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.237504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.237505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.237544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11139 TClient is connected to server localhost:11139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:58.280076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.280095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:58.281324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.281628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.413283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653436384546481:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.413303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653436384546470:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.413317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.413929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.415225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653436384546484:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T08:54:58.551024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2024-11-21T08:54:58.629630Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653436384546682:2324], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T08:54:58.629722Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM5ZTg4ZWEtZTA0NTM5ODMtOTcxNmNhOTItYjUxMWUxMDc=, ActorId: [1:7439653436384546680:2323], ActorState: ExecuteState, TraceId: 01jd6yx2a20v6g9ypk7rvhps83, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:54:58.632902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16135, MsgBus: 20648 2024-11-21T08:54:58.825697Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653436940360396:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.825739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d6d/r3tmp/tmp7drPCn/pdisk_1.dat 2024-11-21T08:54:58.834037Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16135, node 2 2024-11-21T08:54:58.850433Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.850446Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.850447Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.850478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20648 TClient is connected to server localhost:20648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.925805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.925838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.926892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.927542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:59.067816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653441235328271:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.067837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653441235328294:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.067843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.068400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.069728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653441235328300:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:54:59.147469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2024-11-21T08:54:59.169035Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653441235328495:2324], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T08:54:59.169126Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjRjOTA2NDQtYmU3NTdjNjAtN2E2ODlhMGMtMTY1MzgxY2E=, ActorId: [2:7439653441235328493:2323], ActorState: ExecuteState, TraceId: 01jd6yx2tyamnstcs56b0vysyr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:54:59.172942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.179369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22715, MsgBus: 14054 2024-11-21T08:54:59.509872Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653443478794013:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:59.509926Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d6d/r3tmp/tmpxMb7Jg/pdisk_1.dat 2024-11-21T08:54:59.520576Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22715, node 3 2024-11-21T08:54:59.534698Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:59.534710Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:59.534711Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:59.534745Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14054 TClient is connected to server localhost:14054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:59.610345Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:59.610378Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:59.611482Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:59.612137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.613179Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:59.621956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.630089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.645518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.656978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.763939Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653443478795548:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.763966Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.767731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.773025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.781736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.789327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.796029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.802912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.811069Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653443478796051:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.811087Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.811187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653443478796056:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.811641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.816161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653443478796058:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:59.987044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.107112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.115269Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGM1NmZhZDMtYWVhNTc0MjktYjIxMzEwZDUtZWUzNzIxNGM=, ActorId: [3:7439653447773763914:2489], ActorState: ExecuteState, TraceId: 01jd6yx3rhabt49zszjmgjpgs5, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132179819.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132179819.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132179819.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112179819.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132179819.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132179819.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112178619.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112179819.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112179819.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112178619.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112178619.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112178619.000000s;Name=;Codec=}; 2024-11-21T08:53:39.553747Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T08:53:39.571795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:53:39.574466Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:53:39.574499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:53:39.574557Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T08:53:39.575187Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:39.575222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:39.575246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:39.575260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:39.575276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:39.575292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:39.575303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:39.575315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:39.575326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:39.575342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:39.575351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:39.575362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:39.579153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:53:39.579176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T08:53:39.580586Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T08:53:39.580657Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:53:39.580665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:53:39.580692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:39.580760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:39.580770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:39.580788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:53:39.580796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:53:39.580803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:39.580808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:39.580811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:53:39.580822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:53:39.580827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:39.580832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:39.580834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:53:39.580840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:53:39.580844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:53:39.580850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:53:39.580852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:53:39.580859Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:53:39.580864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:53:39.580866Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:53:39.580873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:53:39.580879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:53:39.580881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T08:53:39.580909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T08:53:39.580916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T08:53:39.580922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T08:53:39.580929 ... AN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:55:00.208014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2024-11-21T08:55:00.208025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:55:00.208080Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1732179300192947,"name":"_full_task","f":1732179300192947,"d_finished":0,"c":0,"l":1732179300208031,"d":15084},"events":[{"name":"bootstrap","f":1732179300192979,"d_finished":522,"c":1,"l":1732179300193501,"d":522},{"a":1732179300207987,"name":"ack","f":1732179300207008,"d_finished":225,"c":3,"l":1732179300207861,"d":269},{"a":1732179300207985,"name":"processing","f":1732179300193611,"d_finished":955,"c":24,"l":1732179300207861,"d":1001},{"name":"ProduceResults","f":1732179300193246,"d_finished":535,"c":29,"l":1732179300208012,"d":535},{"a":1732179300208012,"name":"Finish","f":1732179300208012,"d_finished":0,"c":0,"l":1732179300208031,"d":19},{"name":"task_result","f":1732179300193614,"d_finished":690,"c":21,"l":1732179300206965,"d":690}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2024-11-21T08:55:00.208094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:55:00.192861Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T08:55:00.208098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:55:00.208116Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.012787s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.012828s;size=3.792e-06;details={columns=1;};};]};; 2024-11-21T08:55:00.208123Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T08:55:00.208662Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T08:55:00.208708Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T08:55:00.208740Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T08:55:00.208802Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T08:55:00.208813Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T08:55:00.208976Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:953:2953];trace_detailed=; 2024-11-21T08:55:00.209036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T08:55:00.209053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T08:55:00.209069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:55:00.209074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:55:00.209096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:55:00.209103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:55:00.209107Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T08:55:00.209110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] finished for tablet 9437184 2024-11-21T08:55:00.209116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] send ScanData to [1:952:2952] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:55:00.209144Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:953:2953] and sent to [1:952:2952] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732179300208963,"name":"_full_task","f":1732179300208963,"d_finished":0,"c":0,"l":1732179300209119,"d":156},"events":[{"name":"bootstrap","f":1732179300208994,"d_finished":83,"c":1,"l":1732179300209077,"d":83},{"a":1732179300209093,"name":"ack","f":1732179300209093,"d_finished":0,"c":0,"l":1732179300209119,"d":26},{"a":1732179300209092,"name":"processing","f":1732179300209092,"d_finished":0,"c":0,"l":1732179300209119,"d":27},{"name":"ProduceResults","f":1732179300209064,"d_finished":22,"c":2,"l":1732179300209109,"d":22},{"a":1732179300209109,"name":"Finish","f":1732179300209109,"d_finished":0,"c":0,"l":1732179300209119,"d":10}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:953:2953]->[1:952:2952] 2024-11-21T08:55:00.209155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:55:00.208820Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T08:55:00.209159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:55:00.209162Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T08:55:00.209166Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 |89.3%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 9985, MsgBus: 1473 2024-11-21T08:54:57.855878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653432340230598:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.856019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dce/r3tmp/tmphYOysj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9985, node 1 2024-11-21T08:54:57.908734Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:57.911937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.911947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.911950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.911974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1473 TClient is connected to server localhost:1473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:57.955071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.955967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:57.955993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:54:57.957134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.103613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653436635198490:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.103646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.127867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.141171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.141179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.141203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.141216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.141231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.141239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.141257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:58.141259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:58.141283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:58.141286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:58.141302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:58.141302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:58.141313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:58.141319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:58.141333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:58.141334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:58.141347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:58.141360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:58.141364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:58.141377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:58.141382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:58.141394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653436635198644:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:58.141399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:58.141423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439653436635198645:2311];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:58.141738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:58.141749Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:58.141757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:58.141766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:58.141777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:58.141784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:58.141791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:54:58.141799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:54:58.141805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186 ... dup;id=8; 2024-11-21T08:55:00.305762Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:55:00.305768Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:55:00.305771Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:55:00.305774Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:55:00.305774Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:55:00.305784Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:55:00.305788Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:55:00.305795Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:55:00.305802Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:55:00.305803Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:55:00.305807Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:55:00.305812Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:55:00.305814Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:55:00.305821Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:55:00.305826Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:55:00.305826Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:55:00.305830Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:55:00.305845Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:55:00.305855Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:55:00.305872Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:55:00.305881Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:55:00.305892Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:55:00.305900Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:55:00.306039Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:55:00.306051Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:55:00.306058Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:55:00.306062Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:55:00.306080Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:55:00.306088Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:55:00.306095Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:55:00.306104Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:55:00.306111Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:55:00.306120Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:55:00.306126Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:55:00.306130Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:55:00.306156Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:55:00.306165Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:55:00.306178Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:55:00.306186Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:55:00.306197Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:55:00.306206Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:55:00.306221Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:55:00.306229Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:55:00.306238Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:55:00.306246Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:55:00.334987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653446290028152:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.335006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.335050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653446290028157:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.335663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:55:00.337227Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653446290028159:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:00.408833Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439653446290028235:2374] TxId: 281474976715661. Ctx: { TraceId: 01jd6yx3ze7778hvh3a8dsvp5q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDRjODM3NGUtZjdiZTE4YzQtY2EwYTdjZmUtZWUwYzE4MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2024-11-21T08:55:00.410307Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDRjODM3NGUtZjdiZTE4YzQtY2EwYTdjZmUtZWUwYzE4MzA=, ActorId: [3:7439653446290028150:2374], ActorState: ExecuteState, TraceId: 01jd6yx3ze7778hvh3a8dsvp5q, Create QueryResponse for error on request, msg: >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::Restart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: Trying to start YDB, gRPC: 16884, MsgBus: 30043 2024-11-21T08:54:57.893249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653434088377629:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.893542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d99/r3tmp/tmp2FeiHn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16884, node 1 2024-11-21T08:54:57.948445Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:57.949682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.949701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.949703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.949745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30043 TClient is connected to server localhost:30043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:57.993422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:57.993461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:57.994457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.017925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.024197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.038795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.055688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.068179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.169100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653438383346463:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.169127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.194362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.248612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.255880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.263448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.269888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.277209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.333044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653438383346981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.333070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.333072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653438383346986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.333671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.339393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653438383346988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:58.489856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.517884Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479 2024-11-21T08:54:58.517920Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjMxYWYzN2QtZDE2ODFiNDQtNDk1ZjA2NmUtZjhjMDMxM2Q=, ActorId: [1:7439653438383347389:2479], ActorState: ExecuteState, TraceId: 01jd6yx26jazsv7sqy90xwtn2n, Create QueryResponse for error on request, msg: 2024-11-21T08:54:58.534223Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2024-11-21T08:54:58.534843Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653438383347497:2504], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:58.534932Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM1ZjI3ODctYTNmYjAyZTktNzMxYTVhZjQtN2UzZWJmYg==, ActorId: [1:7439653438383347494:2503], ActorState: ExecuteState, TraceId: 01jd6yx2743rcnc6qkfzby5q0w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:58.537290Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653438383347516:2508], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:58.537359Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjk5MzVhMmEtZGQ3MGMyYjgtZWYzMTUyY2UtNjgxY2ZiZDI=, ActorId: [1:7439653438383347514:2507], ActorState: ExecuteState, TraceId: 01jd6yx277f9q185f4x0bdwgrv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:58.542182Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653438383347536:2516], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:58.542248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzBkNzVmMTUtMjUzYjIwYWItOGYyOWI2ZDMtNGE1NDE4ZDI=, ActorId: [1:7439653438383347534:2515], ActorState: ExecuteState, TraceId: 01jd6yx27c2js8556bmzp740sh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:58.544283Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653438383347544:2520], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:58.544337Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjlkN2RjNTItY2QzYzVhZmEtOGFkNjc4MC0xNGUyNDFjNg==, ActorId: [1:7439653438383347542:2519], ActorState: ExecuteState, TraceId: 01jd6yx27e77va5e8h8e34pdf2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:58.547927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.583793Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:54:58.585539Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653438383347751:2560], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable! < ... peration type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.589111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653446656649000:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.589130Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653446656649005:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.589134Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.589737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:00.593145Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653446656649007:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:00.788751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.800746Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:342 2024-11-21T08:55:00.800779Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg5YzU4LTg2ZjM4YjZhLTRhOWY2ZmNlLTFkYWU5YmRk, ActorId: [3:7439653446656649376:2470], ActorState: ExecuteState, TraceId: 01jd6yx4dy0nachft6xcx4xbr3, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:342 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1732179300642, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300453, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300425, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300628, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300635, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300551, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300558, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300607, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300439, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300621, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300838, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300411, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2024-11-21T08:55:00.804462Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:342 2024-11-21T08:55:00.804504Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg5YzU4LTg2ZjM4YjZhLTRhOWY2ZmNlLTFkYWU5YmRk, ActorId: [3:7439653446656649376:2470], ActorState: ExecuteState, TraceId: 01jd6yx4e26sdpgns2ncrsm7c6, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:342 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1732179300642, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300453, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300425, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300628, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300635, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300551, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300558, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300607, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300439, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300621, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300838, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300411, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2024-11-21T08:55:00.808142Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:341 2024-11-21T08:55:00.808180Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg5YzU4LTg2ZjM4YjZhLTRhOWY2ZmNlLTFkYWU5YmRk, ActorId: [3:7439653446656649376:2470], ActorState: ExecuteState, TraceId: 01jd6yx4e66095shdnbnv0046v, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:341 2024-11-21T08:55:00.810318Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:341 2024-11-21T08:55:00.810350Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg5YzU4LTg2ZjM4YjZhLTRhOWY2ZmNlLTFkYWU5YmRk, ActorId: [3:7439653446656649376:2470], ActorState: ExecuteState, TraceId: 01jd6yx4e8d1k9z61h4d09b8xc, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:341 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1732179300642, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300453, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300425, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300628, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300635, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300551, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300558, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300607, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300439, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300621, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300838, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732179300411, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2024-11-21T08:55:00.813213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> KqpService::SessionBusyRetryOperationSync [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier |89.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] 
>> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> KqpService::SwitchCache-UseCache [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.512779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.512811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.512817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.512823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.512856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.512861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.512872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.512979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.521273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.521296Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.523444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.523928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.523953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.525070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.525248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.525313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.525377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.526142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.526361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.526367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T08:55:01.526394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.526399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.526404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.526413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.527286Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.538912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.538976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.539024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.539078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.539083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.539607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.539629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.539664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.539672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.539676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.539680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.539996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.540004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.540006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.540278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.540286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.540289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.540294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.540681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.540948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.540984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.541118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.541136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.541141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.541178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.541183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.541201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.541209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.541515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.541520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.541546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.541549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.541608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.541612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.541622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.541625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.541630Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.541633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.541636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.541638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.541646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.541650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.541652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.541841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.541849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.541851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.541854Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.541856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.541864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
-11-21T08:55:01.550140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:01.550299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:01.550324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:55:01.550513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.550519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:01.550536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:01.550547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.550551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T08:55:01.550555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T08:55:01.550563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.550568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:55:01.550576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:55:01.550579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:01.550584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:55:01.550591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:01.550595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:55:01.550598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:55:01.550607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:01.550612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:55:01.550615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:55:01.550618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:01.550718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:01.550726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T08:55:01.550730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:01.550734Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:55:01.550737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:01.550802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:01.550809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:01.550813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:01.550816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:01.550819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:01.550825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:55:01.551543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:01.551621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:55:01.551666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:01.551671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:55:01.551722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:01.551737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:01.551744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:332:2324] TestWaitNotification: OK eventTxId 101 2024-11-21T08:55:01.551796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.551820Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status StatusSuccess 2024-11-21T08:55:01.551938Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.551999Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.552013Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 16us result status StatusSuccess 2024-11-21T08:55:01.552050Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.552086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T08:55:01.552096Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 11us result status StatusSuccess 2024-11-21T08:55:01.552121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.492999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.493024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.493028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.493032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.493045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.493048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.493055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.493141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.502512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.502538Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.505861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.506880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.506932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T08:55:01.509270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.509360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.509501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.509844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.511731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.512078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.512092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.512140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.512149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.512155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.512174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.513544Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.529876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.529951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.530023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.530101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.530109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.530759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.530784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.530820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.530831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.530835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.530840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.531229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.531237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.531242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.531544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.531550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.531556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.531562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.532056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.532417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.532464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.532624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.532644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.532651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.532699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.532705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.532730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.532742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.533136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.533143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.533178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.533183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.533262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.533267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.533279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.533283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.533290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.533295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.533299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.533304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.533314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.533320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.533324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.533590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.533603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.533607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.533611Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.533616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.533626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.551993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:458:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:461:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:462:2058] recipient: [1:460:2413] Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:464:2058] recipient: [1:460:2413] 2024-11-21T08:55:01.556744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.556760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.556763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.556767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.556770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.556772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.556778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.556814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.557472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.557672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.557699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.557707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.557711Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.557773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.557817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:01.557844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T08:55:01.557897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:01.557936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:01.557938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:01.557947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.557973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.558148Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.559141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.559150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.559204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.559209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.559213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.559538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:522:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.600420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600475Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 69us result status StatusSuccess 2024-11-21T08:55:01.600562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600634Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2024-11-21T08:55:01.600694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::SessionBusyRetryOperationSync [GOOD] Test command err: Trying to start YDB, gRPC: 19424, MsgBus: 24838 2024-11-21T08:54:58.028529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653437518077605:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.028545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d8d/r3tmp/tmpz55A00/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19424, node 1 2024-11-21T08:54:58.081466Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:58.084912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.084923Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.084924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.084948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24838 TClient is connected to server localhost:24838 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.128539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.128557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.128571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.129722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:54:58.140007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.200725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.216980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.228862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.279619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437518079143:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.279644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.302448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.307620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.318802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.325920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.332499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.339534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.348044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437518079636:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.348057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437518079641:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.348061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.348512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.352853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653437518079643:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:59.532612Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059841:2557] TxId: 281474976712090. Ctx: { TraceId: 01jd6yx35p3skc9qexvb0kexsf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3NjM3ZTItNWNjYzA5ZTktZTQzYWE4NjQtMmYyZTcyZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.532612Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059821:2548] TxId: 281474976712080. Ctx: { TraceId: 01jd6yx34bc9vzsdhqm28cm7hz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM2YTNmYzgtZDIwYzA0YTctMjFhMDY3NzUtNzhkY2MwYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.534471Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzI3NjM3ZTItNWNjYzA5ZTktZTQzYWE4NjQtMmYyZTcyZmU=, ActorId: [1:7439653437518080039:2557], ActorState: ExecuteState, TraceId: 01jd6yx35p3skc9qexvb0kexsf, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.534475Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDM2YTNmYzgtZDIwYzA0YTctMjFhMDY3NzUtNzhkY2MwYTk=, ActorId: [1:7439653437518080026:2548], ActorState: ExecuteState, TraceId: 01jd6yx34bc9vzsdhqm28cm7hz, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.534652Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059836:2492] TxId: 281474976712086. Ctx: { TraceId: 01jd6yx35q0q7ajpkkm3ymsqmd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWViYTUyNmItNWE1ZDIyMjMtNzkzYjBhMzQtZGI5OTMwMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.534683Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWViYTUyNmItNWE1ZDIyMjMtNzkzYjBhMzQtZGI5OTMwMDA=, ActorId: [1:7439653437518079967:2492], ActorState: ExecuteState, TraceId: 01jd6yx35q0q7ajpkkm3ymsqmd, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.534732Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059839:2511] TxId: 281474976712088. Ctx: { TraceId: 01jd6yx35j2vgtym58j2b95590, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBiODAzMjktODFjMjA5YzUtYTcwODdkOWMtOTRmMzUxZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.534754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBiODAzMjktODFjMjA5YzUtYTcwODdkOWMtOTRmMzUxZmY=, ActorId: [1:7439653437518079986:2511], ActorState: ExecuteState, TraceId: 01jd6yx35j2vgtym58j2b95590, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.534829Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059804:2476] TxId: 281474976712078. Ctx: { TraceId: 01jd6yx34c6med8wzchy4a1mzq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA0N2Y3MzYtMTdjN2UzMDEtYjc2MTE1OWQtMjdjZWIwMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.534852Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTA0N2Y3MzYtMTdjN2UzMDEtYjc2MTE1OWQtMjdjZWIwMGY=, ActorId: [1:7439653437518079951:2476], ActorState: ExecuteState, TraceId: 01jd6yx34c6med8wzchy4a1mzq, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.535259Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059848:2469] TxId: 281474976712097. Ctx: { TraceId: 01jd6yx365czh4zssr3h5kv2x2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU2MjA3Y2UtOTI2MDIyNDgtNGJhYzczYTgtZWE1NGQ2YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.535304Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTU2MjA3Y2UtOTI2MDIyNDgtNGJhYzczYTgtZWE1NGQ2YmU=, ActorId: [1:7439653437518079944:2469], ActorState: ExecuteState, TraceId: 01jd6yx365czh4zssr3h5kv2x2, Create QueryResponse for error on request, msg: 2024-11-21T08:54:59.535346Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059852:2464] TxId: 281474976712101. Ctx: { TraceId: 01jd6yx35mcdtvnpfss5dxrgpp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc1NzhiNjAtYzBkMWZlODEtZGMzNTM4OWQtODgyNzk0YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:54:59.535379Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439653441813059844:2552] TxId: 281474976712093. Ctx: { TraceId: 01jd6yx364emyrfsvf28hkvgr7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUxNjM4YzktZmVlZDFiODYtMzhmZDUxMzEtZjY ... Pending previous query completion proxyRequestId: 14 2024-11-21T08:55:01.302032Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTY5M2VhYjctOWQwNDAxNjAtZjRiYTNmY2YtMzIzY2JjZjg=, ActorId: [2:7439653452699267868:2454], ActorState: ExecuteState, TraceId: 01jd6yx4xm3scr11839wzh39zv, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2024-11-21T08:55:01.316529Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 18 2024-11-21T08:55:01.316555Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 19 2024-11-21T08:55:01.316781Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 20 2024-11-21T08:55:01.316823Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 21 2024-11-21T08:55:01.316879Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 22 2024-11-21T08:55:01.316929Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 23 2024-11-21T08:55:01.316990Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 24 2024-11-21T08:55:01.317084Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ0ZjhkZS00NzliMmQ0Ny1lMzdkZTExMC1lNjgyYTViNg==, ActorId: [2:7439653452699267915:2473], ActorState: ExecuteState, TraceId: 01jd6yx4y44hzh21brk03tfdn2, Reply query error, msg: Pending previous query completion proxyRequestId: 25 2024-11-21T08:55:01.329565Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous 
query completion proxyRequestId: 28 2024-11-21T08:55:01.329727Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 29 2024-11-21T08:55:01.329829Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 30 2024-11-21T08:55:01.329898Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 31 2024-11-21T08:55:01.329959Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 32 2024-11-21T08:55:01.330002Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 33 2024-11-21T08:55:01.330017Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE4Y2JiY2UtOGY5ZjhiY2ItYzZjN2FlZGMtODdlNWYzNWU=, ActorId: [2:7439653452699267951:2488], ActorState: ExecuteState, TraceId: 01jd6yx4yh8aze9weda73637nn, Reply query error, msg: Pending previous query completion proxyRequestId: 34 2024-11-21T08:55:01.344265Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous query completion proxyRequestId: 37 2024-11-21T08:55:01.344294Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous query completion proxyRequestId: 38 2024-11-21T08:55:01.344583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous query completion proxyRequestId: 39 2024-11-21T08:55:01.344742Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous query completion proxyRequestId: 40 2024-11-21T08:55:01.344759Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous 
query completion proxyRequestId: 41 2024-11-21T08:55:01.344764Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGUzMWNjOTgtNDEzNjkwMzItNGU1YjJjMGEtZWRkMTk3NTI=, ActorId: [2:7439653452699267985:2502], ActorState: ExecuteState, TraceId: 01jd6yx4yz0w9gsrhfv2pxz00a, Reply query error, msg: Pending previous query completion proxyRequestId: 42 2024-11-21T08:55:01.358424Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhhY2EwNjctNmM5YzZhZTItNTllOGFhZi02ZWI2MWVlMg==, ActorId: [2:7439653452699268019:2515], ActorState: ExecuteState, TraceId: 01jd6yx4ze2hqa1kbaanbzy3g3, Reply query error, msg: Pending previous query completion proxyRequestId: 45 2024-11-21T08:55:01.358496Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhhY2EwNjctNmM5YzZhZTItNTllOGFhZi02ZWI2MWVlMg==, ActorId: [2:7439653452699268019:2515], ActorState: ExecuteState, TraceId: 01jd6yx4ze2hqa1kbaanbzy3g3, Reply query error, msg: Pending previous query completion proxyRequestId: 46 2024-11-21T08:55:01.358665Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhhY2EwNjctNmM5YzZhZTItNTllOGFhZi02ZWI2MWVlMg==, ActorId: [2:7439653452699268019:2515], ActorState: ExecuteState, TraceId: 01jd6yx4ze2hqa1kbaanbzy3g3, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2024-11-21T08:55:01.358704Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhhY2EwNjctNmM5YzZhZTItNTllOGFhZi02ZWI2MWVlMg==, ActorId: [2:7439653452699268019:2515], ActorState: ExecuteState, TraceId: 01jd6yx4ze2hqa1kbaanbzy3g3, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2024-11-21T08:55:01.358729Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhhY2EwNjctNmM5YzZhZTItNTllOGFhZi02ZWI2MWVlMg==, ActorId: [2:7439653452699268019:2515], ActorState: ExecuteState, TraceId: 01jd6yx4ze2hqa1kbaanbzy3g3, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2024-11-21T08:55:01.370412Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGVkNDJkNTYtNGMwY2FjODItZTgzNDVhODYtZDI5Njk3YTQ=, ActorId: [2:7439653452699268049:2527], ActorState: ExecuteState, TraceId: 01jd6yx4zt4xe8dss4ycrdtm1f, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2024-11-21T08:55:01.370542Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGVkNDJkNTYtNGMwY2FjODItZTgzNDVhODYtZDI5Njk3YTQ=, ActorId: [2:7439653452699268049:2527], ActorState: ExecuteState, TraceId: 01jd6yx4zt4xe8dss4ycrdtm1f, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2024-11-21T08:55:01.370628Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGVkNDJkNTYtNGMwY2FjODItZTgzNDVhODYtZDI5Njk3YTQ=, ActorId: [2:7439653452699268049:2527], ActorState: ExecuteState, TraceId: 01jd6yx4zt4xe8dss4ycrdtm1f, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2024-11-21T08:55:01.370733Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGVkNDJkNTYtNGMwY2FjODItZTgzNDVhODYtZDI5Njk3YTQ=, ActorId: [2:7439653452699268049:2527], ActorState: ExecuteState, TraceId: 01jd6yx4zt4xe8dss4ycrdtm1f, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2024-11-21T08:55:01.383210Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTIzMjljMDgtNzY5ZWQ4N2YtMjYxZGNiMWMtODBhY2Y4NGU=, ActorId: [2:7439653452699268085:2538], ActorState: ExecuteState, TraceId: 01jd6yx50706n526ezvmk2grrf, Reply query error, msg: Pending previous 
query completion proxyRequestId: 58 2024-11-21T08:55:01.383370Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTIzMjljMDgtNzY5ZWQ4N2YtMjYxZGNiMWMtODBhY2Y4NGU=, ActorId: [2:7439653452699268085:2538], ActorState: ExecuteState, TraceId: 01jd6yx50706n526ezvmk2grrf, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2024-11-21T08:55:01.383420Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTIzMjljMDgtNzY5ZWQ4N2YtMjYxZGNiMWMtODBhY2Y4NGU=, ActorId: [2:7439653452699268085:2538], ActorState: ExecuteState, TraceId: 01jd6yx50706n526ezvmk2grrf, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2024-11-21T08:55:01.393268Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmRjODQ4NTAtYzNlMzQwMTEtYTBjYzIzMS0yMTc1ZTc3MA==, ActorId: [2:7439653452699268114:2548], ActorState: ExecuteState, TraceId: 01jd6yx50h1cxeaw0ck5hgnatb, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2024-11-21T08:55:01.393322Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmRjODQ4NTAtYzNlMzQwMTEtYTBjYzIzMS0yMTc1ZTc3MA==, ActorId: [2:7439653452699268114:2548], ActorState: ExecuteState, TraceId: 01jd6yx50h1cxeaw0ck5hgnatb, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2024-11-21T08:55:01.406394Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGJkZmNiYS0zOGVjYmQ1LWExMzE0ZjE0LTZkMDk2NmQ0, ActorId: [2:7439653452699268138:2557], ActorState: ExecuteState, TraceId: 01jd6yx50y86qnz3xgmjgvnvry, Reply query error, msg: Pending previous query completion proxyRequestId: 67 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T08:54:43.166333Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1732179283166327 2024-11-21T08:54:43.257903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653372363468130:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.258198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.260461Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653375544902837:2231];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004660/r3tmp/tmpkIdRYN/pdisk_1.dat 2024-11-21T08:54:43.283364Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.284624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.285771Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.306926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31547, node 1 2024-11-21T08:54:43.317364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004660/r3tmp/yandextNPuPk.tmp 2024-11-21T08:54:43.317378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004660/r3tmp/yandextNPuPk.tmp 
2024-11-21T08:54:43.317456Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004660/r3tmp/yandextNPuPk.tmp 2024-11-21T08:54:43.317507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:43.322096Z INFO: TTestServer started on Port 12878 GrpcPort 31547 TClient is connected to server localhost:12878 PQClient connected to localhost:31547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:43.358302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.358336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.360045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:43.380488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.380512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.381490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.381570Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:43.381853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:54:43.543452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653375544902949:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.543488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653375544902938:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.543508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.543696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653372363469013:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.543711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653372363469039:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.543716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.544426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:54:43.545456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653372363469072:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.545485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.548234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653372363469042:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:54:43.548269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653375544902952:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:54:43.568448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.627272Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653375544903042:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.627376Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjgyNjE5YTItYzk2ZWE5ZjItNjE5MDNiMi05N2E4YTZhNQ==, ActorId: [2:7439653375544902936:2277], ActorState: ExecuteState, TraceId: 01jd6ywkjn2f8awr1b50v29xxk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.630878Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.632754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.650144Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653372363469324:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.650250Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTA2MWU3MDctMzcyYTc4MTktY2ZjZjhjYmQtYmI0NzE4YTk=, ActorId: [1:7439653372363469009:2299], ActorState: ExecuteState, TraceId: 01jd6ywkjn7sk3snersk3h8ncv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.650525Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.696335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:31547", true, true, 1000); 2024-11-21T08:54:43.724457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6ywkqx5j5t2n8ar5kz5f64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMxNmJlYmEtNjI3ZmEyYWMtNDZmMDJhMTAtMTdmMmU5M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653372363469515:2939] 2024-11-21T08:54:48.258421Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653372363468130:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.258454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... 9565987 2024-11-21T08:54:59.279030Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T08:54:59.279053Z :DEBUG: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T08:54:58.173000Z WriteTime: 2024-11-21T08:54:58.173000Z Ip: "ipv6:[::1]:55450" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55450" } } } } 2024-11-21T08:54:59.279094Z :INFO: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] Closing read session. 
Close timeout: 3.000000s 2024-11-21T08:54:59.279104Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T08:54:59.279112Z :INFO: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1230 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:54:59.279378Z :INFO: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] Closing read session. Close timeout: 0.000000s 2024-11-21T08:54:59.279385Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T08:54:59.279375Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_7040871032854889045_v1 grpc read done: success# 0, data# { } 2024-11-21T08:54:59.279381Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_7040871032854889045_v1 grpc read failed 2024-11-21T08:54:59.279390Z :INFO: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1230 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:54:59.279384Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_7040871032854889045_v1 grpc closed 2024-11-21T08:54:59.279397Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_7040871032854889045_v1 is DEAD 2024-11-21T08:54:59.279409Z :NOTICE: [/Root] [/Root] [40e4d3b2-3ee1a8b9-8b6d9f53-f4a21505] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:54:59.279522Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:54:59.279532Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_7040871032854889045_v1 2024-11-21T08:54:59.279541Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653435985190582:2508] destroyed 2024-11-21T08:54:59.279557Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_7040871032854889045_v1 2024-11-21T08:54:59.279611Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653435985190579:2505] disconnected; active server actors: 1 2024-11-21T08:54:59.279618Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653435985190579:2505] client user disconnected session shared/user_3_1_7040871032854889045_v1 2024-11-21T08:54:59.713686Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.713692Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.713695Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:59.713780Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:59.713910Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:59.713964Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.714031Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T08:54:59.714250Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.714252Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.714254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:59.714293Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:59.714380Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:59.714417Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.714445Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:59.714588Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:59.714744Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T08:54:59.714757Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T08:54:59.714794Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:59.714799Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T08:54:59.714802Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2024-11-21T08:54:59.714806Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T08:54:59.715179Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715184Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715186Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:59.715251Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:59.715323Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:59.715342Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715368Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:59.715458Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715516Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:59.715551Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:59.715557Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T08:54:59.715563Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2024-11-21T08:54:59.715841Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715844Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:54:59.715888Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:54:59.715934Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:54:59.715946Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:54:59.715976Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T08:54:59.716065Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:59.716088Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T08:54:59.716123Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T08:54:59.716135Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:54:59.716143Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:54:59.716148Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T08:54:59.716176Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T08:54:59.716183Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T08:55:01.716670Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:01.716675Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:01.716678Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:55:01.716748Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T08:55:01.716860Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T08:55:01.716910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:01.717145Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:01.717195Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T08:55:01.717223Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T08:55:01.717247Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.565488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.565516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.565521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.565526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.565541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.565545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.565554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.565632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.576626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.576648Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.579215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.579933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.579970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.581985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.582203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.582290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.582368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.583230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.583502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.583511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.583551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.583558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.583564Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.583576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.584848Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.599669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.599757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.599829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.599901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.599908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.600781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.600790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.600794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.600799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.601271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.601287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.601292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.601670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.601677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.601683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.601690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.602294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.602616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.602660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.602820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.602841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.602850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.602903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.602909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.602934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.602945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.603305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.603312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.603340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.603345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.603423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.603429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.603441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.603445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.603451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.603457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.603462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.603466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2024-11-21T08:55:01.603477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.603482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.603486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.603754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.603765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.603770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.603775Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.603779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.603792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... AT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:01.608413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:55:01.608416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T08:55:01.608420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:01.608427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T08:55:01.608641Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:55:01.608711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.608717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.608722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T08:55:01.609040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:01.609164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:01.609233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.609239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 
72057594046678944 2024-11-21T08:55:01.609244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:01.609249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T08:55:01.609273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.609533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T08:55:01.609557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T08:55:01.609617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.609631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.609636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:01.609696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T08:55:01.609702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:01.609722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.609729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:01.609736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:01.610025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.610031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.610050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:01.610061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.610065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T08:55:01.610070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T08:55:01.610129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.610134Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T08:55:01.610144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T08:55:01.610148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:55:01.610154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T08:55:01.610159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:55:01.610166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T08:55:01.610170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T08:55:01.610177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:01.610183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2024-11-21T08:55:01.610187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:55:01.610190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:55:01.610248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:01.610256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:01.610260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:55:01.610264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:55:01.610268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.610330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:01.610337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:01.610341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:55:01.610344Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:55:01.610348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:01.610355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2024-11-21T08:55:01.610359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:272:2264] 2024-11-21T08:55:01.610944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:01.610994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:01.611004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:01.611007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:273:2265] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2024-11-21T08:55:01.611079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.611102Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusSuccess 2024-11-21T08:55:01.611188Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T08:55:01.629757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.629777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.629780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.629784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.629795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.629798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.629804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.629860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.637668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.637687Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.639811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.640324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.640351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.641480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.641676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.641739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.641796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.642542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.642780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.642788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.642817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.642823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.642828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.642838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.643800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.657063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.657150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.657227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.657299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.657328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.658183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.658216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.658272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.658284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.658289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.658294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.658731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.658743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.658747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.659088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.659097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.659102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.659109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.659593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.659947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.659993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.660156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.660176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.660181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.660251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.660258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.660289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.660301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.660663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.660670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.660711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.660715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.660808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.660813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.660825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.660828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.660834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.660839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.660843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.660847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.660857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.660862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.660866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T08:55:01.661121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.661140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.661146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.661151Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.661156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.661169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T08:55:01.661851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T08:55:01.661949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T08:55:01.662081Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T08:55:01.663250Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T08:55:01.663832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.663885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.663898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2024-11-21T08:55:01.664082Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:55:01.664773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.664807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2024-11-21T08:55:01.664909Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 
101 2024-11-21T08:55:01.665390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.665422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.665433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2024-11-21T08:55:01.665800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.665816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T08:55:01.665859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:55:01.665876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T08:55:01.665897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:01.665902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:55:01.665965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:55:01.665982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:01.665989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:01.665994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:285:2277] 2024-11-21T08:55:01.666009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:01.666012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:285:2277] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T08:55:01.666066Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.666086Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 25us result status StatusPathDoesNotExist 
2024-11-21T08:55:01.666125Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:01.666177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.666185Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 9us result status StatusPathDoesNotExist 2024-11-21T08:55:01.666196Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:01.666229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.666250Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status StatusSuccess 2024-11-21T08:55:01.666312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 16419, MsgBus: 13063 2024-11-21T08:54:57.294692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653432662157210:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.295053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dd4/r3tmp/tmpuRqkQ7/pdisk_1.dat 2024-11-21T08:54:57.339680Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16419, node 1 2024-11-21T08:54:57.356094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.356108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.356110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.356149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13063 TClient is connected to server localhost:13063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:57.394822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:57.394845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:57.395912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:57.420942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:57.432245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.492795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.510595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.521690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.544510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653432662158751:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.544532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.571704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.577151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.584092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.590640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.645107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.653973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.662538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653432662159247:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.662565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.662576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653432662159252:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.663183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:57.667425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653432662159254:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:57.848821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 took: 1.414109s took: 1.414268s took: 1.414440s took: 1.414467s took: 1.414564s took: 1.414543s took: 1.414558s took: 1.414679s took: 1.414671s took: 1.414690s Trying to start YDB, gRPC: 17273, MsgBus: 1455 2024-11-21T08:54:59.628480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653443281163214:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:59.628668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dd4/r3tmp/tmpRhAzke/pdisk_1.dat 2024-11-21T08:54:59.641738Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17273, node 2 2024-11-21T08:54:59.649646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:59.649661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:59.649662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:59.649712Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1455 TClient is connected to server localhost:1455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:59.729032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:59.729072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:59.730139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:59.730777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.734520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:59.747379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.762794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.775895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:59.897262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653443281164753:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.897295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.902005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.907702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.914524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:59.969074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.023978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.034391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:00.042315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653447576132568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.042332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.042331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653447576132573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:00.042826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:00.047044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653447576132575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:00.221318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 took: 1.469172s took: 1.469656s took: 1.469679s took: 1.469676s took: 1.469715s took: 1.469702s took: 1.469725s took: 1.469740s took: 1.469709s took: 1.469713s >> TSchemeShardSubDomainTest::RestartAtInFly >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.730689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.730713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.730716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.730721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.730734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.730737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.730744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.730798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.738441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.738461Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.740742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.741261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.741287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.742544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.742701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.742766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.742827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.743677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.743915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.743922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.743951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.743957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.743961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.743972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.745021Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.756021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.756096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.756159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.756237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.756245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.756889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.756910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.756949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.756957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.756960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.756964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.757259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.757266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.757269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.757548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.757555Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.757559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.757564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.757958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.758251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.758290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.758423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.758440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.758445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.758483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.758487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.758509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.758518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.758829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.758834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.758865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.758868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.758937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.758942Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.758951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
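The bootstrap portion of this test log walks the ESchemeOpAlterSubDomain operation on //MyRoot through its whole lifecycle: the "Change state for txid 1:0" messages advance 2 -> 3 -> 128 -> 240, with TCreateParts driving the first step (no shards to create), NSubDomainState::TConfigureParts the second, NSubDomainState::TPropose waiting for the coordinator's TEvOperationPlan at step 5000001, and TDone finishing up before the scheme board publication. Purely as a reading aid, the codes visible here map onto stages roughly as follows (an assumed, partial mapping inferred from this log, not the real schemeshard state enum):

// Stage names as they appear in the log; the numeric codes are copied
// verbatim from the "Change state for txid 1:0" messages above.
enum class EObservedTxState : int {
    CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create, do next state")
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128,  // NSubDomainState::TPropose, waits for TEvOperationPlan
    Done           = 240,  // TDone, followed by TTxPublishToSchemeBoard
};

The same bootstrap sequence appears again in the SchemeLimitsRejectsWithIndexedTables output further down.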
2024-11-21T08:55:01.758954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.758959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.758962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.758965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.758968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.758976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.758980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.758983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.759179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.759189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.759192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.759196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.759199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.759208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
9551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T08:55:01.929357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2024-11-21T08:55:01.929360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:55:01.929362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:01.929711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T08:55:01.929730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T08:55:01.929735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2024-11-21T08:55:01.929740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:55:01.939881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:01.939940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T08:55:01.940626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:01.940635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:01.940638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:01.940640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:01.940643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:01.940764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T08:55:01.940975Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T08:55:01.941008Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:55:01.941044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2024-11-21T08:55:01.941455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 106 Forgetting tablet 72075186233409546 2024-11-21T08:55:01.941517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:01.941585Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:55:01.941678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T08:55:01.941758Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:55:01.941824Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2024-11-21T08:55:01.941900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2024-11-21T08:55:01.941987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2024-11-21T08:55:01.942041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:01.942052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:01.942180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:01.942185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:01.942193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:01.942367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:01.942372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:01.942385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:01.942532Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:55:01.942539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:55:01.942684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:01.942688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:01.942745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:01.942747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:01.942753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:01.942755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:01.942762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:01.942765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:01.942964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:01.942977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:01.942980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:01.942987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.943006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:01.943181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:55:01.943232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T08:55:01.943236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T08:55:01.943291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T08:55:01.943303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:55:01.943306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:762:2678] TestWaitNotification: OK eventTxId 106 2024-11-21T08:55:01.943366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:01.943392Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 35us result status StatusSuccess 
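The teardown above shows the subdomain being fully dismantled: the five subdomain shards (shardIdx 1 through 5 of schemeshard 72057594046678944, tablets 72075186233409546 through 72075186233409550) are freed back to hive 72057594037968897, forgotten, and their pipes closed, after which TTxCleanDroppedPaths and TTxCleanDroppedSubDomains remove the dropped path entries. The closing describe of "/MyRoot" that follows reports PathsInside: 0 and ShardsInside: 0, which is the invariant a drop test of this kind would be expected to assert. A hedged sketch of such a check, with hypothetical names standing in for whatever accessors the real test uses:

#include <cstdint>

// Illustrative only: the struct and function below are assumptions, not the
// actual helpers from the ydb/core/tx/schemeshard unit tests.
struct TDomainCounters {
    std::uint64_t PathsInside = 0;
    std::uint64_t ShardsInside = 0;
};

bool SubDomainFullyCleaned(const TDomainCounters& root) {
    // Matches the final DomainDescription in the log: both counters back to zero.
    return root.PathsInside == 0 && root.ShardsInside == 0;
}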
2024-11-21T08:55:01.943448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.962619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.962637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.962641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.962644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.962652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.962655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.962661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.962730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.969793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.969810Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.971944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.972474Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.972498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.973615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.973781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.973841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.973900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.974725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.974935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.974943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.974971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.974976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.974980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.974988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.975926Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.986864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.986937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.986990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.987046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.987054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.987745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.987763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.987796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.987803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.987806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.987809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.988142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.988149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.988151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.988457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.988463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.988467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.988472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.988864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.989170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.989204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.989350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.989366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.989372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.989408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.989413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.989436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.989459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.989838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T08:55:01.989845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.989874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.989878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.989940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.989945Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.989953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.989956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.989960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.989963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.989966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.989969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.989978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.989981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.989983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.990172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.990180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.990183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.990186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.990189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.990196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:55:02.199684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T08:55:02.199689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T08:55:02.199693Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:55:02.199696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:55:02.199703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/3, is published: true 2024-11-21T08:55:02.199730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 212 } } 2024-11-21T08:55:02.199733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:02.199741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 212 } } 2024-11-21T08:55:02.199748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 212 } } 2024-11-21T08:55:02.199784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T08:55:02.199787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:02.199793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T08:55:02.199796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:55:02.199800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T08:55:02.199804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 
72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.199806Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.199808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:55:02.199810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T08:55:02.200435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:55:02.200587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:55:02.200598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:55:02.200648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2024-11-21T08:55:02.200663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 2/3 2024-11-21T08:55:02.200667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2024-11-21T08:55:02.200671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2024-11-21T08:55:02.200774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T08:55:02.200784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.200817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2024-11-21T08:55:02.200821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 3/3 2024-11-21T08:55:02.200823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2024-11-21T08:55:02.200826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2024-11-21T08:55:02.200835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:475:2426] message: TxId: 107 2024-11-21T08:55:02.200838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2024-11-21T08:55:02.200842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T08:55:02.200844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T08:55:02.200857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:55:02.200859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2024-11-21T08:55:02.200861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2024-11-21T08:55:02.200864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:55:02.200866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2024-11-21T08:55:02.200867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2024-11-21T08:55:02.200871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:55:02.201172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:55:02.201180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:526:2477] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T08:55:02.201745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:02.201787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2024-11-21T08:55:02.201796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2024-11-21T08:55:02.201799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2024-11-21T08:55:02.202127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:02.202144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T08:55:02.202189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T08:55:02.202193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T08:55:02.202244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T08:55:02.202257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T08:55:02.202260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:715:2636] TestWaitNotification: OK eventTxId 108 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::DeleteAdd ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.911879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.911900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.911903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.911907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.911915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.911918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.911923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.911981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.919331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.919349Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.921555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.922166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.922198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.923789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:55:01.924037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.924132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.924244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.925314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.925574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.925582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.925613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.925618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.925622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.925634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.926728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.938641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.938709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.938767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.938822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.938826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.939456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.939475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.939507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.939514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:01.939516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.939519Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.939807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.939814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.939817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.940057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.940063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.940067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.940071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.940475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.940813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.940849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.940979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.940997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.941040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.941044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.941064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.941073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.941417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T08:55:01.941465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.941530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.941534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.941542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.941544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.941548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.941551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.941554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.941556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.941564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.941568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.941570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.941781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.941791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.941795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.941799Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.941801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.941811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
d: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:02.347589Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:02.347591Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:55:02.347593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:55:02.347666Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:02.347672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:02.347674Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:02.347676Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:55:02.347679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:02.347683Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T08:55:02.347871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:02.347880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:02.347883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:02.347887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:02.347925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:02.348030Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:55:02.348235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.348286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2024-11-21T08:55:02.348419Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:55:02.348598Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 
TabletID: 72075186233409547 2024-11-21T08:55:02.348620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:02.349063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:02.349231Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:55:02.349274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:02.349300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2024-11-21T08:55:02.349412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:02.349432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T08:55:02.349686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:02.349694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:02.349705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:02.349934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:02.349942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:02.349968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:02.350042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:02.350103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:02.350116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:02.350122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:02.350361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:02.350366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:02.350378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:02.350380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 
tabletId 72075186233409547 2024-11-21T08:55:02.350642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:02.350648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:02.350670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:02.350678Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:02.350685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:02.350688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:02.350696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:02.350902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:02.350956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:02.350962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:02.351013Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:02.351024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:02.351027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:654:2608] TestWaitNotification: OK eventTxId 105 2024-11-21T08:55:02.351080Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:02.351103Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 32us result status StatusPathDoesNotExist 2024-11-21T08:55:02.351142Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T08:55:02.351186Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:02.351195Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 10us result status StatusPathDoesNotExist 2024-11-21T08:55:02.351204Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:02.391040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:02.391061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.391066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:02.391070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:02.391082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:02.391085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:02.391092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.391169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:02.399030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:02.399051Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:02.401446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:02.402045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:02.402069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:02.403253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:02.403399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:02.403463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.403525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:02.404348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.404566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:02.404574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.404600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:02.404605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:02.404610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:02.404620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.405630Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:02.417284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:02.417376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.417459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:02.417522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:02.417527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:02.418325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:02.418335Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:02.418339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:02.418616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:02.418882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.418892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.418897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.419362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:02.419734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:02.419785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:02.419926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.419947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:02.419952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.419997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:02.420002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.420024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:02.420032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:02.420417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:02.420427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:02.420462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.420465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:02.420538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.420542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:02.420552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:02.420556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.420560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:02.420563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.420566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:02.420569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:02.420578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:02.420582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:02.420585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:02.420805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:02.420813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:02.420817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:02.420820Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:02.420822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:02.420831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
D 72057594046678944 is [1:121:2147] sender: [1:455:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:456:2058] recipient: [1:454:2407] Leader for TabletID 72057594046678944 is [1:457:2408] sender: [1:458:2058] recipient: [1:454:2407] 2024-11-21T08:55:02.439502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:02.439522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.439526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:02.439532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:02.439536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:02.439539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:02.439545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.439588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:02.440241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:02.440508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:02.440537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:02.440549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:02.440553Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:02.440585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:02.440631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:02.440660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T08:55:02.440718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 
2024-11-21T08:55:02.440756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:02.440758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:02.440760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:02.440767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.440968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.442184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:02.442196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.442227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:02.442232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:02.442235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T08:55:02.442254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2024-11-21T08:55:02.483061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:55:02.483087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:457:2408] sender: [1:516:2058] recipient: [1:15:2062] 2024-11-21T08:55:02.483236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:55:02.483260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:02.483263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:514:2453] TestWaitNotification: OK eventTxId 100 2024-11-21T08:55:02.483321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:02.483380Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 70us result status StatusSuccess 2024-11-21T08:55:02.483477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:02.483545Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:02.483556Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2024-11-21T08:55:02.483585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpQueryService::ReadDatashardAndColumnshard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.025576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.025600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.025605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.025610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.025622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.025624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.025631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.025687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.033175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.033189Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.035245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.035799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.035821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.036798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.036925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.036985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.037030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.037717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.037911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.037916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.037947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.037951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.037955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.037963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.038736Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.054027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.054097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.054153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.054219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.054226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.054806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.054831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.054868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.054876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.054880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.054885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.055220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.055230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.055237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.055555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.055565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.055570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.055577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.056137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.056543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.056584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.056704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.056724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.056730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:55:03.056783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.056789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.056809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.056817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.057374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.057382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.057409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.057413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.057485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.057491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.057499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.057502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.057507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.057511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.057514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.057516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.057525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.057528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.057531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.057754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.057766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.057770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.057774Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.057778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.057790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 4 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.144455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T08:55:03.144479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2024-11-21T08:55:03.144544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.144563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.144568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet 72057594046678944 2024-11-21T08:55:03.144636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T08:55:03.144643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet 72057594046678944 2024-11-21T08:55:03.144670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.144680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T08:55:03.144688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:55:03.145093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.145100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.145137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:03.145151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.145156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T08:55:03.145162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T08:55:03.145170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2024-11-21T08:55:03.145176Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T08:55:03.145186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T08:55:03.145190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:55:03.145196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T08:55:03.145201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T08:55:03.145205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:55:03.145209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:55:03.145249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T08:55:03.145254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T08:55:03.145258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:55:03.145261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:03.145415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:55:03.145426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:55:03.145430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:55:03.145434Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:55:03.145452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.145543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:55:03.145552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:55:03.145556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:55:03.145561Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:03.145564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T08:55:03.145572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 
2024-11-21T08:55:03.146179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:55:03.146386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:55:03.146439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:55:03.146443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:55:03.146496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:55:03.146513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.146518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:963:2789] TestWaitNotification: OK eventTxId 102 2024-11-21T08:55:03.146580Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.146605Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 32us result status StatusSuccess 2024-11-21T08:55:03.146673Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.146733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.146745Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 14us result status StatusSuccess 
2024-11-21T08:55:03.146785Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.005512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.005534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.005538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.005541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.005552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.005555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.005561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.005630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.012849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.012868Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.014735Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.015217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.015239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.016374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.016584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.016669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.016759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.017680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.017929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.017938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.017966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.017970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.017975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.017986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.018878Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.028866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.028929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.028980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.029040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.029044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.029610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.029630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.029663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:55:03.029672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.029676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.029680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.029938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.029944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.029946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.030138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.030142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.030146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.030150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.030507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.030770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.030800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.030915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.030930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.030935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.030968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.030972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.030991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.030998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T08:55:03.031363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.031370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.031399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.031402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.031462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.031467Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.031475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.031478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.031482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.031485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.031487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.031503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.031511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.031516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.031518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.031767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.031778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.031781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.031784Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.031786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.031796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.035675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T08:55:03.036040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:03.036115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:03.036127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T08:55:03.036147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.036175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:03.036390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T08:55:03.036409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T08:55:03.036457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:03.036529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T08:55:03.036533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T08:55:03.036549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.036554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.036560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T08:55:03.036905Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.036947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.036959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.036963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T08:55:03.036968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T08:55:03.037029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.037035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T08:55:03.037045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T08:55:03.037049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:55:03.037053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T08:55:03.037056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T08:55:03.037059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T08:55:03.037061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T08:55:03.037071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.037076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T08:55:03.037078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:55:03.037079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:55:03.037143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:03.037149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:03.037152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:55:03.037155Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:55:03.037157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.037231Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:03.037236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T08:55:03.037238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T08:55:03.037242Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:55:03.037244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.037249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T08:55:03.037928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T08:55:03.038026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T08:55:03.038073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:55:03.038086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T08:55:03.038102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:03.038104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:55:03.038146Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:55:03.038159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.038162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:308:2300] 2024-11-21T08:55:03.038170Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:03.038179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.038180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T08:55:03.038217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.038236Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusSuccess 
2024-11-21T08:55:03.038306Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:02.086618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:02.086642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.086648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:02.086652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:02.086665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:02.086669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:02.086679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:02.086737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:02.096356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:02.096374Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:02.098784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:02.099352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:02.099375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T08:55:02.100328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:02.100481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:02.100551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.100631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:02.101508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.101719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:02.101726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.101751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:02.101755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:02.101759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:02.101768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.102597Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:02.113278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:02.113334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.113381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:02.113430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:02.113435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.113874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.113889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:02.113915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.113921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:02.113924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:02.113927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:02.114158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.114164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:02.114167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:02.114350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.114355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.114358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.114362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.114725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:02.115023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:02.115057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:02.115204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:02.115222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:02.115228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.115268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:02.115274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:02.115296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:02.115306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:02.115627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:02.115633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:02.115662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:02.115667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:02.115739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:02.115745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:02.115756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:02.115760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.115765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:02.115769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:02.115773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:02.115777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:02.115786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:02.115790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:02.115794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:02.116024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:02.116035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:02.116039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:02.116043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:02.116047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:02.116056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nerId: 72057594046678944, cookie: 139 2024-11-21T08:55:03.198844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2024-11-21T08:55:03.198846Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2024-11-21T08:55:03.198849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2024-11-21T08:55:03.199073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2024-11-21T08:55:03.199082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2024-11-21T08:55:03.199084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2024-11-21T08:55:03.199087Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:55:03.199091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.199099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2024-11-21T08:55:03.199332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.199338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.199340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.199342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.199345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.199449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T08:55:03.199524Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:55:03.212613Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2024-11-21T08:55:03.212959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.213059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:03.213317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2024-11-21T08:55:03.213358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409556 2024-11-21T08:55:03.213665Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2024-11-21T08:55:03.214192Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:55:03.214282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2024-11-21T08:55:03.214344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2024-11-21T08:55:03.214454Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 2024-11-21T08:55:03.214582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.214610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409555 Forgetting tablet 72075186233409547 2024-11-21T08:55:03.215013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2024-11-21T08:55:03.215067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409557 2024-11-21T08:55:03.215270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T08:55:03.216385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.216397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2024-11-21T08:55:03.216411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.216453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T08:55:03.216481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.216486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.216510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:03.216634Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.216640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:03.216652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T08:55:03.216655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2024-11-21T08:55:03.217282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2024-11-21T08:55:03.217291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2024-11-21T08:55:03.217307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.217310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.217316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2024-11-21T08:55:03.217320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2024-11-21T08:55:03.217341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.217403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.217784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2024-11-21T08:55:03.217981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2024-11-21T08:55:03.217988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2024-11-21T08:55:03.218109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2024-11-21T08:55:03.218126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.218130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2095:3892] TestWaitNotification: OK eventTxId 139 2024-11-21T08:55:03.218296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.218338Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 59us result status 
StatusSuccess 2024-11-21T08:55:03.218431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.037935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.037953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.037956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.037960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.037971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.037973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.037979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.038036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.044836Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.044849Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.046682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.047159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.047179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.048095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.048255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.048327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.048374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.049247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.049511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.049523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.049557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.049564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.049570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.049581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.050313Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.062285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.062345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.062393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.062451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.062455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.062891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.062907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T08:55:03.062929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.062935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.062937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.062940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.063184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.063190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.063193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.063380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.063386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.063389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.063394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.063768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.064004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.064036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.064153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.064168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.064173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.064233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.064240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.064267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.064277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.064655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.064663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.064701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.064705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.064781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.064787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.064800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.064804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.064810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.064815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.064820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.064824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.064834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.064840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.064843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.065109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.065121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.065125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.065130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.065135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.065145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
precated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2024-11-21T08:55:03.212990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:55:03.213134Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:55:03.213582Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409552 2024-11-21T08:55:03.213739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.213747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:55:03.213764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:03.213772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:03.213778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T08:55:03.213930Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T08:55:03.213972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.214012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T08:55:03.214196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:03.214225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2024-11-21T08:55:03.214402Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T08:55:03.214686Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:55:03.214774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T08:55:03.214804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2024-11-21T08:55:03.214986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: 
OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.215009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2024-11-21T08:55:03.215119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:03.215139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.215249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:03.215458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:03.215804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:03.215872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.215880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.215913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:03.216270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:55:03.216282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:55:03.216297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T08:55:03.216301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2024-11-21T08:55:03.216315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.216331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.216336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.216350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.216379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.216383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:03.216983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:03.216993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:03.217006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T08:55:03.217009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T08:55:03.217018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.217022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.217030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:03.217036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:03.217070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:55:03.217747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:55:03.217753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:55:03.217821Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.217847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:777:2667] TestWaitNotification: OK eventTxId 103 2024-11-21T08:55:03.217914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.217947Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 50us result status StatusPathDoesNotExist 2024-11-21T08:55:03.217994Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.218030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.218048Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 
2024-11-21T08:55:03.218099Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.165063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.165088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.165091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.165095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.165107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.165110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.165117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.165178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.172300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.172318Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.174601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.175073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T08:55:03.175098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.175974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.176092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.176153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.176225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.177196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.177538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.177557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.177604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.177614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.177621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.177636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.178999Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.190039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.190108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.190161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.190224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.190228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.190751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.190772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.190801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.190807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.190810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.190814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.191188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.191204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.191208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.191568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.191579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.191585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.191591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.192030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.192356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.192396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.192530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.192549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.192556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.192596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.192601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.192622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.192633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.192976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T08:55:03.192982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.193006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.193010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.193059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.193064Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.193071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.193074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.193078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.193081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.193084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.193086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.193094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.193098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.193100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.193326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.193337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.193340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.193343Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.193345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.193357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
BUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.233045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.233264Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T08:55:03.233336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:55:03.233389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T08:55:03.233471Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:55:03.233576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.233599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T08:55:03.233692Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:55:03.233790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:03.233811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2024-11-21T08:55:03.234007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.234060Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2024-11-21T08:55:03.234116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T08:55:03.234137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:03.234181Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:55:03.234272Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:55:03.234374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.234398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.234435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:03.234450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409551 2024-11-21T08:55:03.234580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.234585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.234608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T08:55:03.234941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.234949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.234959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.235178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:55:03.235187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:55:03.235215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.235219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:03.235294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T08:55:03.235677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:03.235685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:03.235699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T08:55:03.235702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T08:55:03.235714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T08:55:03.235727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.235730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.235778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:03.235783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:03.236026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
2024-11-21T08:55:03.236041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T08:55:03.236097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:55:03.236113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T08:55:03.236132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:03.236135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:55:03.236193Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:55:03.236227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.236232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:611:2518] 2024-11-21T08:55:03.236246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:03.236258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.236261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:611:2518] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T08:55:03.236321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.236354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusPathDoesNotExist 2024-11-21T08:55:03.236405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.236455Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.236476Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 24us result status 
StatusSuccess 2024-11-21T08:55:03.236535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::Redefine ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.241807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.241825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.241829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.241832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.241841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.241844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.241850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.241905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.249129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.249143Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T08:55:03.251235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.251838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.251863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.252803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.252910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.252982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.253036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.253705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.253918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.253928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.253963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.253970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.253976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.253988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.254975Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.266480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.266565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.266638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.266714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.266722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.267477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.267505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.267551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.267561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.267566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.267570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.267944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.267956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.267961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.268301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.268313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.268318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.268324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.268860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.269267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.269311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.269499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.269522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.269528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.269577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.269586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.269615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.269627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.270031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.270038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.270080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.270085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.270168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.270175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.270187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.270191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.270197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.270202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.270208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.270211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.270222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.270228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.270231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.270520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.270536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.270540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.270545Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.270550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.270562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
n, txId: 107, at schemeshard: 72057594046678944 2024-11-21T08:55:03.346475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.346480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:568:2523] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T08:55:03.346933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.346954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.346992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:03.347023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.347026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2024-11-21T08:55:03.347359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.347367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.347370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2024-11-21T08:55:03.347608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#108:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2024-11-21T08:55:03.347859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.347869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet 72057594046678944 2024-11-21T08:55:03.347872Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2024-11-21T08:55:03.347891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.348111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2024-11-21T08:55:03.348131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2024-11-21T08:55:03.348175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.348188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.348192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet 72057594046678944 2024-11-21T08:55:03.348256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2024-11-21T08:55:03.348261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet 72057594046678944 2024-11-21T08:55:03.348283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.348292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2024-11-21T08:55:03.348795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.348802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.348829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.348834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 108, path id: 2 2024-11-21T08:55:03.348888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.348894Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2024-11-21T08:55:03.348902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T08:55:03.348906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T08:55:03.348912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: 
false 2024-11-21T08:55:03.348916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T08:55:03.348920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T08:55:03.348924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T08:55:03.348933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T08:55:03.348938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2024-11-21T08:55:03.348942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2024-11-21T08:55:03.349023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T08:55:03.349035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T08:55:03.349039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2024-11-21T08:55:03.349043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T08:55:03.349047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.349058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2024-11-21T08:55:03.349507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T08:55:03.349546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T08:55:03.349551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T08:55:03.349602Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T08:55:03.349613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.349615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:593:2548] TestWaitNotification: OK eventTxId 108 2024-11-21T08:55:03.349669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.349687Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 25us result status StatusSuccess 2024-11-21T08:55:03.349736Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" 
PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.467204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.467243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.467249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.467254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.467266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.467270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.467280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.467350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.475949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.475971Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.478197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.478720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.478746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.479782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.479915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.479976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.480027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.480892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.481115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.481123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.481153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.481160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.481165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.481177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.482248Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.492857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.492920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.492975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.493037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.493045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.493614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.493630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.493660Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.493667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.493670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.493673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.493912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.493920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.493924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.494138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.494144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.494148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.494152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.494521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.494792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.494824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.494950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.494964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.494969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.495011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.495018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.495038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.495046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.495455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.495465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.495497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.495502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.495587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.495593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.495601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.495604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.495608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.495612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.495615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.495618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.495629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.495633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.495636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.495853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.495863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.495867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.495870Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.495872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.495882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
xId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.505573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T08:55:03.505583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.505586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.505609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T08:55:03.505627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.505634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.505789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.505913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:55:03.505979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.505984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.505998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.506011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.506014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:55:03.506016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T08:55:03.506045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.506050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T08:55:03.506054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:55:03.506056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:03.506060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:55:03.506062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:03.506065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:55:03.506067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:55:03.506073Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.506077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:55:03.506079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:55:03.506081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T08:55:03.506133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:03.506143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:55:03.506145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.506199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:03.506210Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:55:03.506212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.506219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:55:03.506365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.506372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.506384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:03.506426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.506430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.506435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.506682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.506969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.506977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:55:03.507002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:03.507006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T08:55:03.507048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:03.507058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.507061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 101 2024-11-21T08:55:03.507107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.507122Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 23us result status StatusPathDoesNotExist 2024-11-21T08:55:03.507158Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.507201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.507211Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 
2024-11-21T08:55:03.507253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Redefine [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:01.263064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:01.263091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.263097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:01.263102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:01.263118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:01.263122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:01.263131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:01.263201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:01.270736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:01.270763Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.273958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:01.274637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:01.274667Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:01.276027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:01.276249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:01.276326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.276396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:01.277490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.277751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.277759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.277789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:01.277794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.277799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:01.277811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.278922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:01.291374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:01.291463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.291531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:01.291596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:01.291601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.292409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.292432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:01.292484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.292491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T08:55:01.292494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:01.292498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:01.292942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.292961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:01.292967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:01.293422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.293433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.293448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.293454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.293893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:01.294291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:01.294340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:01.294485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:01.294521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:01.294527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.294570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:01.294575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:01.294600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.294608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:01.294994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:01.295001Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:01.295034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:01.295038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:01.295109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:01.295114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:01.295123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:01.295126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.295130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:01.295134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:01.295137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:01.295139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:01.295148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:01.295152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:01.295154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:01.295394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.295406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:01.295409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:01.295412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:01.295415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:01.295423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
Stats { ShardId: 72075186233409548 CpuTimeUsec: 172 } } 2024-11-21T08:55:03.484433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:55:03.484442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:03.484455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:55:03.484460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:55:03.484529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.484535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:55:03.484542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:55:03.484546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T08:55:03.484556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:55:03.484583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T08:55:03.484604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:03.484614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:55:03.485090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.485426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:03.485462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T08:55:03.485472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, 
txId: 103, path id: 3 2024-11-21T08:55:03.485561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:55:03.485581Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:55:03.485591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T08:55:03.485733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:03.485743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:03.485747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:55:03.485752Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T08:55:03.485756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.485902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:03.485912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:03.485915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:55:03.485921Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:55:03.485925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:03.485934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T08:55:03.486646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.486657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.486741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:55:03.486774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T08:55:03.486779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:55:03.486785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T08:55:03.486796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T08:55:03.486801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:55:03.486805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:55:03.486809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:55:03.486825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:03.486899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.486903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.487036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:03.487322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:03.487601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.487610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T08:55:03.487628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.487632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:730:2666] 2024-11-21T08:55:03.487769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T08:55:03.487991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.488020Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 38us result status StatusSuccess 2024-11-21T08:55:03.488119Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] |89.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |89.3%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.324362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.324392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.324398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.324404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.324418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.324422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.324433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.324517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.335847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.335872Z node 
1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.338967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.339754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.339792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.341428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.341652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.341742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.341821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.343108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.343395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.343407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.343442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.343449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.343455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.343469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.344837Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.362853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.362939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.362993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.363050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.363055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.363861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.363887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.363939Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.363949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.363952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.363958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.364430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.364443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.364448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.364846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.364859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.364864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.364870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.365431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.365882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.365922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.366070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.366089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.366094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.366137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.366143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.366170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.366183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.366747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.366757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.366801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.366806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.366894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.366902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.366913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.366917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.366923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.366928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.366932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.366936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.366947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.366952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.366956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.367239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.367255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.367260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.367265Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.367269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.367282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2024-11-21T08:55:03.746321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2024-11-21T08:55:03.746323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2024-11-21T08:55:03.746330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2024-11-21T08:55:03.746332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2024-11-21T08:55:03.746341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2024-11-21T08:55:03.746343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2024-11-21T08:55:03.746350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2024-11-21T08:55:03.746352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2024-11-21T08:55:03.746364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2024-11-21T08:55:03.746366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2024-11-21T08:55:03.746375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2024-11-21T08:55:03.746377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2024-11-21T08:55:03.746452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2024-11-21T08:55:03.746455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2024-11-21T08:55:03.746494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:03.746499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:03.746834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2024-11-21T08:55:03.746839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2024-11-21T08:55:03.746849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2024-11-21T08:55:03.746851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2024-11-21T08:55:03.746857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2024-11-21T08:55:03.746859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2024-11-21T08:55:03.746867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T08:55:03.746869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2024-11-21T08:55:03.746877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2024-11-21T08:55:03.746880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2024-11-21T08:55:03.746887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2024-11-21T08:55:03.746892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:25 tabletId 72075186233409570 2024-11-21T08:55:03.746899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:20 2024-11-21T08:55:03.746901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2024-11-21T08:55:03.746910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2024-11-21T08:55:03.746912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2024-11-21T08:55:03.747750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2024-11-21T08:55:03.747764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2024-11-21T08:55:03.747782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.747784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.748088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2024-11-21T08:55:03.748101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2024-11-21T08:55:03.748119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2024-11-21T08:55:03.748122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2024-11-21T08:55:03.748129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T08:55:03.748131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T08:55:03.748144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2024-11-21T08:55:03.748146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2024-11-21T08:55:03.748154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T08:55:03.748157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2024-11-21T08:55:03.748168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2024-11-21T08:55:03.748171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2024-11-21T08:55:03.748342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T08:55:03.748346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T08:55:03.748357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T08:55:03.748360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T08:55:03.748367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2024-11-21T08:55:03.748369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2024-11-21T08:55:03.748375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2024-11-21T08:55:03.748377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2024-11-21T08:55:03.748385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.748388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 
tabletId 72075186233409546 2024-11-21T08:55:03.748395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2024-11-21T08:55:03.748399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2024-11-21T08:55:03.748440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.748453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.748462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.748466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.748488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.748963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:55:03.749010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:55:03.749015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:55:03.749069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:55:03.749085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.749088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2056:3657] TestWaitNotification: OK eventTxId 103 2024-11-21T08:55:03.749145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.749177Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 42us result status StatusPathDoesNotExist 2024-11-21T08:55:03.749212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T08:55:03.749257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.749265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 8us result status StatusPathDoesNotExist 2024-11-21T08:55:03.749274Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.916713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.916741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.916745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.916751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.916764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.916768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.916777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.916851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.926916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.926938Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.929150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.929640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.929661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:55:03.930694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.930835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.930890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.930940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.931647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.931853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.931860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.931887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.931891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.931895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.931904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.932982Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.944486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.944562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.944624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.944692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.944699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.945397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.945417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.945464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.945471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.945474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.945477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.945795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.945802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.945805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.946041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.946046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.946050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.946054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.946434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.946741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.946788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.946956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.946975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.946982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.947029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.947036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.947061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.947069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.947459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.947464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.947498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.947501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.947565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.947569Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.947576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.947579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.947584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.947587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.947590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.947592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.947599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.947603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.947605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.947799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.947807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.947810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.947813Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.947817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.947827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
on IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:03.976715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T08:55:03.976718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:03.976722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:55:03.976726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:55:03.976750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T08:55:03.976754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T08:55:03.976758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:55:03.976761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T08:55:03.976836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:03.976842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:03.976844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:03.976847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:55:03.976849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.976897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:03.976902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:03.976904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:03.976906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:55:03.976908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.976913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T08:55:03.977217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.977226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.977229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:03.977529Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:55:03.977724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.977766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:03.977837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:03.977900Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:55:03.977975Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:55:03.977998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:03.978077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:03.978098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T08:55:03.978218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.978232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.978294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.978298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.978314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:03.978477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.978482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.978492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.978614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.978620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:03.978806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:03.978815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:03.978858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.978862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.979070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T08:55:03.979119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T08:55:03.979122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T08:55:03.979163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.979178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:572:2527] TestWaitNotification: OK eventTxId 104 2024-11-21T08:55:03.979226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979247Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 27us result status StatusPathDoesNotExist 2024-11-21T08:55:03.979282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979330Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 
2024-11-21T08:55:03.979370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.767075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.767097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.767101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.767105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.767117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.767120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.767127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.767186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.774531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.774552Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.776784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.777328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.777356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.778481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.778624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.778687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.778745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.779563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.779775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.779782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.779808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.779813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.779818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.779826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.781094Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.796380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.796466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.796540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.796608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.796616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.797492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.797520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.797572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.797582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.797586Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.797591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.798015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.798026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.798031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.798353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.798362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.798367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.798374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.798918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.799289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.799338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.799510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.799531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.799537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.799585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.799592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.799618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.799629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.800030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.800037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.800077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.800082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.800165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.800170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.800184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.800188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.800194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.800199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.800221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.800226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.800237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.800242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.800246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.800534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.800549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.800554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.800558Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.800562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.800574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
2057594046678944 2024-11-21T08:55:03.831493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:03.831556Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2024-11-21T08:55:03.831740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T08:55:03.831765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T08:55:03.831879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.831899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.832019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:03.832041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.832156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.832162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.832193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:03.832715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:55:03.832729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:55:03.832747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T08:55:03.832775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T08:55:03.832787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:55:03.832790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:55:03.833271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.833301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:03.833307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:03.833321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.833350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:03.833355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:03.833406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T08:55:03.833411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T08:55:03.833422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:55:03.833425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:55:03.833505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:03.833510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:03.833527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T08:55:03.833771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2024-11-21T08:55:03.834110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T08:55:03.834125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T08:55:03.834142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:03.834144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T08:55:03.834149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:55:03.834151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:55:03.834205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.834228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:626:2531] 2024-11-21T08:55:03.834236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.834244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
101: satisfy waiter [1:626:2531] 2024-11-21T08:55:03.834253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.834255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:626:2531] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T08:55:03.834318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834351Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 52us result status StatusPathDoesNotExist 2024-11-21T08:55:03.834407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834484Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 12us result status StatusPathDoesNotExist 2024-11-21T08:55:03.834499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834539Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:03.834557Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status 
StatusSuccess 2024-11-21T08:55:03.834626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteRetryQuery >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:03.944432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.944454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.944458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.944462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.944474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.944477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.944483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.944557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.952796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T08:55:03.952818Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.955423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.955952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.955981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.957331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.957575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.957664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.957754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.958973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.959256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.959265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.959302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.959309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.959315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.959329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.960499Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.979491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.979587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.979665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.979750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.979760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.980627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.980662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T08:55:03.980707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.980717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.980722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.980728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.981255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.981272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.981278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.981696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.981705Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.981709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.981714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.982192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.982684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.982739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.982934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.982961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.982969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.983029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.983038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.983074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.983087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.983543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.983552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.983595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.983601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.983693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.983700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.983713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.983718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.983725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.983731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.983736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.983740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.983752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.983759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.983763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:03.984104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.984120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.984126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.984132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.984137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.984150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T08:55:03.989809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.989912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.990226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.990237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.990244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T08:55:03.990251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T08:55:03.990286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.990606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T08:55:03.990644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T08:55:03.990720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.990740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.990747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T08:55:03.990839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T08:55:03.990846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T08:55:03.990878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.990887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.990898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T08:55:03.991335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.991344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.991378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:03.991392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.991395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T08:55:03.991398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T08:55:03.991463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.991471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:55:03.991485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T08:55:03.991490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:03.991496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T08:55:03.991502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T08:55:03.991507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:55:03.991511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:55:03.991525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:03.991534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T08:55:03.991538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T08:55:03.991542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T08:55:03.991653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.991664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.991668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:03.991673Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T08:55:03.991677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.991778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T08:55:03.991789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T08:55:03.991793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T08:55:03.991797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T08:55:03.991800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:03.991809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T08:55:03.992751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:55:03.992834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T08:55:03.993811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.993860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.993909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.994507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T08:55:03.994545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T08:55:03.994612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T08:55:03.994630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T08:55:03.994656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T08:55:03.994659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T08:55:03.994747Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:55:03.994775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.994780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:310:2302] 2024-11-21T08:55:03.994798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:55:03.994815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:55:03.994818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TGroupMapperTest::ReassignGroupTest3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 27925, MsgBus: 15120 2024-11-21T08:54:48.792044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653393477436366:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.792220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e55/r3tmp/tmpDALRdm/pdisk_1.dat 2024-11-21T08:54:48.850163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27925, node 1 2024-11-21T08:54:48.866643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:48.866658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:48.866660Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:48.866690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15120 2024-11-21T08:54:48.892234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:48.892258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:48.893350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15120 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:48.928644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.942399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.956990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.973501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:48.981946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:49.069763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397772405193:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.069802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.104687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.110741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.165630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.177286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.231849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.239806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:49.248195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397772405711:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.248236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653397772405716:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.248241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:49.248749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:49.253233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653397772405718:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 23374, MsgBus: 20615 2024-11-21T08:54:50.693247Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653402784509232:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:50.693511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e55/r3tmp/tmpFdf6eL/pdisk_1.dat 2024-11-21T08:54:50.703510Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23374, node 2 2024-11-21T08:54:50.713547Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:50.713561Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:50.713562Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:50.713607Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20615 TClient is connected to server localhost:20615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:50.793821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:50.793862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:50.794844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:50.796054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.798951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.809013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:54:50.824420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.836932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:50.967780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653402784510767:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.967815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:50.972791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:51.027455Z node 2 :FLAT_TX_SCHEMESH ... 03.794258Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:55:03.794267Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:55:03.794274Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:55:03.794277Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:55:03.794282Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:55:03.794285Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:55:03.794290Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:55:03.794298Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:55:03.794316Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:55:03.794326Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:55:03.794339Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:55:03.794347Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:55:03.794354Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:55:03.794362Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:55:03.794372Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:55:03.794380Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:55:03.794387Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:55:03.794389Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:55:03.794993Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:55:03.795011Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:55:03.795037Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:55:03.795053Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:55:03.795068Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:55:03.795082Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:55:03.795117Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:55:03.795136Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:55:03.795154Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:55:03.795169Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:55:03.795187Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:55:03.795203Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[3:7439653459185299850:2320];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:55:03.795536Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:55:03.795548Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:55:03.795555Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:55:03.795559Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:55:03.795572Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:55:03.795581Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:55:03.795589Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:55:03.795597Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:55:03.795602Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:55:03.795609Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:55:03.795613Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:55:03.795619Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:55:03.795645Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:55:03.795654Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:55:03.795662Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:55:03.795669Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:55:03.795676Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:55:03.795682Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:55:03.795692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:55:03.795698Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:55:03.795705Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:55:03.795711Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> TGroupMapperTest::MonteCarlo >> TGroupMapperTest::NonUniformCluster >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T08:54:43.423490Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732179283423485 2024-11-21T08:54:43.522969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653371624881958:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.523007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.525722Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653373946362616:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.525838Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.548359Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004653/r3tmp/tmp4cbQGz/pdisk_1.dat 2024-11-21T08:54:43.557797Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.574894Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10643, node 1 2024-11-21T08:54:43.587530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004653/r3tmp/yandexlvt9uc.tmp 2024-11-21T08:54:43.587542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004653/r3tmp/yandexlvt9uc.tmp 2024-11-21T08:54:43.587592Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004653/r3tmp/yandexlvt9uc.tmp 2024-11-21T08:54:43.587627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:43.592023Z INFO: TTestServer started on Port 10800 GrpcPort 10643 TClient is connected to server localhost:10800 PQClient connected to localhost:10643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:43.622954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.622989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.624492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:43.651544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.651574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.652778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:43.652939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.653145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 
2024-11-21T08:54:43.784248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653371624882834:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.784271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653371624882860:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.784277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.784985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:54:43.785800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653371624882892:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.785837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.788248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653371624882863:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:54:43.803599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.843055Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653373946362960:2285], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.843154Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjBjN2YwOS04YTE2MDgyNS1mZDdmOTYzOC1iYjFmYTE2Yg==, ActorId: [2:7439653373946362920:2279], ActorState: ExecuteState, TraceId: 01jd6ywkvyfyszqayh97yd2s1y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.843655Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.863987Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653371624883045:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.864080Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ1MzU3YTUtZWRkZGE2YjYtMjk5NmI4YWEtMTg1NTk0MmE=, ActorId: [1:7439653371624882831:2299], ActorState: ExecuteState, TraceId: 01jd6ywkt7b9q0xjfpnxq0b49a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.864315Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.867760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.931826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10643", true, true, 1000); 2024-11-21T08:54:43.959677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6ywkz83n4yvfg9yr6r1xb0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE2OWFlNWMtNWIyNTZmZDgtZDgwMjlmZTgtZTU3MGVkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653371624883341:2934] 2024-11-21T08:54:48.523129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653371624881958:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.523170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:54:48.526179Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653373946362616:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.526210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:54:49.009490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:10643 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:54:49.022085Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:10643 MetaRequest { Cmd ... quest 2024-11-21T08:55:04.227096Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:2297 2024-11-21T08:55:04.227407Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:04.227520Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:55:04.227532Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T08:55:04.227615Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:04.227634Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:54752 2024-11-21T08:55:04.227637Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54752 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:55:04.227639Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:55:04.227942Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:55:04.227966Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:55:04.227972Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:55:04.227973Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:04.227976Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:04.228459Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:04.240373Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T08:55:04.240488Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439653462160615618:2470] connected; active server actors: 
1 2024-11-21T08:55:04.240526Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T08:55:04.240541Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T08:55:04.240614Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439653462160615618:2470] disconnected; active server actors: 1 2024-11-21T08:55:04.240622Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439653462160615618:2470] disconnected no session 2024-11-21T08:55:04.255374Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:55:04.255397Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T08:55:04.255401Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439653462160615588:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T08:55:04.255412Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:55:04.255695Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:04.255713Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439653462160615636:2470], now have 1 active actors on pipe 2024-11-21T08:55:04.255750Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T08:55:04.255835Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:04.255848Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:04.255890Z node 6 :PERSQUEUE INFO: new Cookie src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:55:04.255928Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:55:04.255954Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:04.256173Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:04.256180Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:04.256197Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:04.256616Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 2024-11-21T08:55:04.257071Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179304257 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:04.257111Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:04.257245Z :INFO: [] MessageGroupId [src] SessionId [src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:04.257254Z :INFO: [] MessageGroupId [src] SessionId [src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0] Write session will now close 2024-11-21T08:55:04.257260Z :DEBUG: [] MessageGroupId [src] SessionId [src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0] Write session: aborting 2024-11-21T08:55:04.257393Z :INFO: [] MessageGroupId [src] SessionId [src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:04.257399Z :DEBUG: [] MessageGroupId [src] SessionId [src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0] Write session: destroy 2024-11-21T08:55:04.257521Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 grpc read done: success: 0 data: 2024-11-21T08:55:04.257540Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 grpc read failed 2024-11-21T08:55:04.257546Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 grpc closed 2024-11-21T08:55:04.257551Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|97d87b22-e6fe7f8f-944bd934-ed13ddd_0 is DEAD 2024-11-21T08:55:04.257840Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:04.257972Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:04.257998Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439653462160615636:2470] destroyed 2024-11-21T08:55:04.258019Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:55:04.259741Z :INFO: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Starting read session 2024-11-21T08:55:04.259753Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Starting session to cluster null (localhost:2297) 2024-11-21T08:55:04.260007Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:04.260012Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:04.260015Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T08:55:04.260067Z :ERROR: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T08:55:04.260072Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:04.260073Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:55:04.260085Z :INFO: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T08:55:04.260118Z :NOTICE: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:55:04.260123Z :DEBUG: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T08:55:04.260131Z :INFO: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Closing read session. Close timeout: 0.000000s 2024-11-21T08:55:04.260134Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:55:04.260139Z :INFO: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:04.260144Z :NOTICE: [/Root] [/Root] [299cdc1c-3b9c5338-966226e5-bdcaed39] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpQueryService::TableSink_OlapDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2024-11-21T08:54:32.470645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653327675964855:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:32.470963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043f9/r3tmp/tmpdGb706/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1274, node 1 2024-11-21T08:54:32.530294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T08:54:32.530312Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:32.533199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:32.533211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:32.533213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:32.533244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:32.570661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:32.570688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:32.572118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:32.603682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.604424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.604447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.604942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:54:32.605018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:54:32.605027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:54:32.605495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:54:32.605515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:54:32.605604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:54:32.605881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.606801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272656, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.606813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:54:32.606870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:54:32.607255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.607303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.607317Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:54:32.607332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:54:32.607344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:54:32.607358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:54:32.607884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:54:32.607911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:54:32.607916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:54:32.607936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:54:32.619717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.619812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.619823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.619841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:54:32.619870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:54:32.619878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:54:32.620594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:54:32.620656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.620719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.620844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:54:32.620855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:54:32.620859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:54:32.620874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:27592 2024-11-21T08:54:32.629263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.629338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.629350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.629963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2024-11-21T08:54:32.630008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:32.630381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:54:32.630751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179272677, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:54:32.630759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179272677, at schemeshard: 72057594046644480 2024-11-21T08:54:32.630802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:54:32.630826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:54:32.630838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:54:32.631239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:54:32.631282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:54:32.631397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:54:32.631410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:54:32.631414Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:54:32.631424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:54:32.638839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecordsStreamWithSingleShard, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:54:32.638930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:54:32.639503Z node 1 :FLAT_TX_SCHEM ... 
269Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:04.040272Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:04.040285Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:55:04.050234Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.050342Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:04.050354Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.050374Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T08:55:04.050408Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:55:04.050420Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T08:55:04.051235Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T08:55:04.051279Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:04.051330Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:04.051445Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:04.051455Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:04.051457Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:55:04.051473Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:18427 2024-11-21T08:55:04.061829Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.061914Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:04.061924Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.062734Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 
2024-11-21T08:55:04.062786Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.063192Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:55:04.063678Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179304107, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:04.063688Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732179304107, at schemeshard: 72057594046644480 2024-11-21T08:55:04.063731Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T08:55:04.063757Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T08:55:04.063768Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T08:55:04.064087Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:04.064130Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:04.064258Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T08:55:04.064271Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T08:55:04.064274Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T08:55:04.064288Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T08:55:04.072468Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecordsWithBigSeqno, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.072571Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:04.073365Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecordsWithBigSeqno 2024-11-21T08:55:04.073418Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:04.073479Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:04.073499Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T08:55:04.073545Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:55:04.073690Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 
PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:55:04.073701Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:55:04.073704Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T08:55:04.073738Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:55:04.073744Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:55:04.073745Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T08:55:04.075141Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:55:04.075188Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T08:55:04.075197Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T08:55:04.075548Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:04.127011Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:55:04.127492Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T08:55:04.127505Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T08:55:04.127815Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.128565Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179304177, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:04.128579Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732179304177, at tablet: 72057594046644480 2024-11-21T08:55:04.128648Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T08:55:04.129039Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:04.129107Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:04.129123Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T08:55:04.129133Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T08:55:04.129141Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T08:55:04.129170Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T08:55:04.129364Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:55:04.129379Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:55:04.129383Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T08:55:04.129422Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T08:55:04.129430Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T08:55:04.129432Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:55:04.129448Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> TGroupMapperTest::Block42_1disk |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> KqpQueryService::ExecuteRetryQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 4371, MsgBus: 10388 2024-11-21T08:54:58.021425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653439835649935:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.021662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d7f/r3tmp/tmpKeO5Ej/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4371, node 1 2024-11-21T08:54:58.074556Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:58.078605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.078616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.078617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.078645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10388 TClient is connected to server localhost:10388 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:58.121334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.121364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.122527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.126434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.269392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653439835650532:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.269418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.292738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.305533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.305533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.305568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.305585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.305604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.305605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.305624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:58.305633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:58.305652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:58.305652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:58.305671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:58.305681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:58.305698Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:58.305698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:58.305720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:58.305722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:58.305763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:58.305764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:58.305781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:58.305785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:58.305802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:58.305806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:58.305827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439653439835650664:2309];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:58.305827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439653439835650665:2310];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:58.309452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653439835650663:2308];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.309452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7439653439835650666:2311];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:58.309467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439653439835650666:2311];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.309472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653439835650663:2308];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:58.309506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653439835650663:2308];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.309507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439653439835650666:2311];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:58.309526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439653439835650663:2308];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLAS ... estoreV2Chunks;id=16; 2024-11-21T08:54:59.632682Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:54:59.632771Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633010Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633069Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633332Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633358Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633655Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633672Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.633936Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.637641Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.637668Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.638952Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.638969Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.638972Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.638989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.638996Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.639008Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.639079Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.639375Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.641916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653444118998466:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.641934Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653444118998471:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.641937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:59.642442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:54:59.648520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653444118998473:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:54:59.718826Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 0 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"0;"}}]}; 2024-11-21T08:54:59.720857Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037894;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"1;"}}]}; 2024-11-21T08:54:59.722888Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"2;"}}]}; 2024-11-21T08:54:59.727960Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.727960Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.729041Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.793043Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=13;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_not_include":1},"id":281474976715666}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715666}]},"p":{"include":2147483647}}]}; 2024-11-21T08:54:59.798602Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.798621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.798631Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.798660Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.798719Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.799688Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.799689Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 
2024-11-21T08:54:59.799703Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.799712Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.799713Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.847250Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849325Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849331Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849344Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849347Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849353Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849356Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849363Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849372Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.849373Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:54:59.864181Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T08:55:00.634361Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439653444118998156:2312];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:55:00.634515Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7439653444118998142:2311];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:55:00.636773Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=4a855254-a7e611ef-84260dc2-7df84cfa;fline=with_appended.cpp:80;portions=;task_id=4a855254-a7e611ef-84260dc2-7df84cfa; 2024-11-21T08:55:00.636919Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=4a855ace-a7e611ef-92228090-85817749;fline=with_appended.cpp:80;portions=;task_id=4a855ace-a7e611ef-92228090-85817749; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:55:04.364896Z node 3 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439653444118997435:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:04.364940Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TMultiversionObjectMap::MonteCarlo >> TGroupMapperTest::MakeDisksNonoperational [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 19167, MsgBus: 9764 2024-11-21T08:55:01.025819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653450506770192:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:01.025834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d11/r3tmp/tmp2bvwCW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19167, node 1 2024-11-21T08:55:01.078853Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:01.084923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:01.084935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:01.084936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:01.084957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9764 TClient is connected to server localhost:9764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:55:01.125668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:01.125689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:01.126796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:01.128225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:01.140088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:01.200922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:01.215177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:01.224364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:01.257302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653450506771728:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:01.257331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:01.283230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.288932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.293651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.300662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.307839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.314785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.322871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653450506772220:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:01.322894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:01.322907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653450506772225:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:01.323352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:01.327945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653450506772227:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:55:01.481591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653450506772543:2469], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:55:01.481597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653450506772541:2467], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:55:01.481613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:55:01.481618Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439653450506772542:2468], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=MmQ5NTQ3ZjUtYzBlMDA1NzgtYTkyYTExODAtNmU4NGU5Y2Y=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T08:55:01.481626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439653450506772542:2468], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=MmQ5NTQ3ZjUtYzBlMDA1NzgtYTkyYTExODAtNmU4NGU5Y2Y=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T08:55:01.481641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7439653450506772539:2466]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T08:55:01.481653Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ5NTQ3ZjUtYzBlMDA1NzgtYTkyYTExODAtNmU4NGU5Y2Y=, ActorId: [1:7439653450506772539:2466], ActorState: ExecuteState, TraceId: 01jd6yx53826vveg7m08c9r0zk, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2024-11-21T08:55:01.481711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7439653450506772539:2466]: Pool another_pool_id not found Trying to start YDB, gRPC: 6999, MsgBus: 21105 2024-11-21T08:55:01.685201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653448997585516:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:01.685399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d11/r3tmp/tmpIf4RtX/pdisk_1.dat 2024-11-21T08:55:01.691027Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6999, node 2 2024-11-21T08:55:01.700197Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:01.700220Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:01.700221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:01.700254Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21105 TClient is connected to server localhost:21105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ... 
ation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.931651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.986483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:01.994170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.001342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.010945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653453292554855:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:02.010975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:02.011011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653453292554860:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:02.011791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:02.014351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653453292554862:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:02.170241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.686750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2024-11-21T08:55:02.740164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.782724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 2024-11-21T08:55:02.840606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.894471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:55:02.938496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T08:55:03.122744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.008085s: status = SUCCESS, issues = 2024-11-21T08:55:04.133689Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q4MjFkNjYtZjBkMGM0MDAtNWU1MmQ3MDYtN2Y1MDljNmU=, ActorId: [2:7439653461882490754:2691], ActorState: ExecuteState, TraceId: 01jd6yx7p41v5nmwmhs3h6mwre, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 2788, MsgBus: 23143 2024-11-21T08:55:04.445136Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653465173778220:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:04.445158Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d11/r3tmp/tmpDkF8TM/pdisk_1.dat 2024-11-21T08:55:04.460067Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2788, node 3 2024-11-21T08:55:04.466518Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:04.466532Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:04.466533Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:04.466567Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23143 TClient is connected to server localhost:23143 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:04.545340Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:04.545369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:04.546434Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:04.548085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.548860Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:04.554595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.563148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.579249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.591790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:04.711918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653465173779748:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:04.711945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:04.717686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.723428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.731304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.737904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.744766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.751925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:04.760053Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653465173780251:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:04.760074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:04.760103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653465173780256:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:04.760607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:04.765202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653465173780258:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2024-11-21T08:54:20.221133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:20.221716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:20.221755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d19/r3tmp/tmp6slbnI/pdisk_1.dat 2024-11-21T08:54:20.338095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:20.358303Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:20.400873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:20.400911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:20.411479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:20.522262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:20.536694Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:20.536857Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:20.536936Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:20.536996Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:20.544653Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:20.544835Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:20.544859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:20.545025Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:20.545034Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:20.545041Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:20.545087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:20.548883Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:20.548955Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:20.548977Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:20.548982Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:20.548986Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:20.548991Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:20.549120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:20.549127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:20.549243Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:20.549261Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:20.549272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:20.549277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:20.549282Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:20.549289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:20.549296Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:20.549302Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:20.549307Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:20.549310Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:20.549315Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:20.549320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:20.549338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:20.549342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:20.549363Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:20.549416Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:20.549436Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:20.549451Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:20.549459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:20.549463Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:20.549468Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:20.549472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:20.549513Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:20.549518Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:20.549521Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:20.549524Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:20.549534Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:20.549538Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:20.549541Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:20.549545Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:20.549549Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:20.549781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:20.549789Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:20.560514Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:20.560550Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:20.560557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:20.560572Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:20.560590Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:20.762037Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:20.762062Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:20.762071Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:20.762091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:20.762096Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:20.762121Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:20.762130Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:20.762135Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:20.762141Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:20.762920Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:20.762940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:20.763064Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:20.763071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:20.763077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:20.763085Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:20.763089Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:20.763097Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 037889 2024-11-21T08:55:05.108833Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T08:55:05.108836Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T08:55:05.108840Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:55:05.108843Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T08:55:05.108846Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T08:55:05.108850Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T08:55:05.108867Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T08:55:05.108871Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:55:05.108874Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T08:55:05.108877Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T08:55:05.108880Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T08:55:05.108884Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T08:55:05.108887Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T08:55:05.108890Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T08:55:05.108894Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:55:05.108896Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T08:55:05.108899Z node 15 
:TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T08:55:05.108902Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T08:55:05.108917Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T08:55:05.108921Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T08:55:05.108925Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T08:55:05.108928Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T08:55:05.108931Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:55:05.108934Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T08:55:05.108936Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T08:55:05.108940Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:55:05.108968Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T08:55:05.108972Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T08:55:05.108975Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T08:55:05.108978Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T08:55:05.108982Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T08:55:05.108984Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T08:55:05.108987Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T08:55:05.108990Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:05.108993Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T08:55:05.108995Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T08:55:05.108998Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T08:55:05.129950Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T08:55:05.130015Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T08:55:05.130046Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:05.130057Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T08:55:05.130082Z node 15 
:TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:55:05.130094Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:05.130277Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T08:55:05.130282Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T08:55:05.130290Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T08:55:05.130293Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T08:55:05.130300Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:55:05.130307Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T08:55:05.130687Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2024-11-21T08:55:05.130707Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T08:55:05.130718Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T08:55:05.130740Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:55:05.130744Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T08:55:05.130748Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T08:55:05.130751Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T08:55:05.130761Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T08:55:05.130766Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:55:05.130769Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T08:55:05.130773Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T08:55:05.130776Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T08:55:05.130791Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2024-11-21T08:55:05.130796Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T08:55:05.130826Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2024-11-21T08:55:05.130829Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T08:55:05.130831Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T08:55:05.130834Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T08:55:05.130843Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T08:55:05.130845Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T08:55:05.130847Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T08:55:05.130851Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T08:55:05.130863Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T08:55:05.130981Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanStarted 2024-11-21T08:55:05.131001Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanFinished 2024-11-21T08:55:05.131039Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:55:05.131043Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:55:05.131047Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:05.131052Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:05.131055Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:55:05.131059Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:55:05.131062Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:55:05.131066Z node 15 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:55:05.131070Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 |89.4%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Worker::Basic |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ut/ydb-core-security-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] >> TBoardSubscriberTest::DropByDisconnect |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::NotAvailableByShutdown >> TBoardSubscriberTest::ReconnectReplica >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] |89.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 32462, MsgBus: 14419 2024-11-21T08:54:57.041678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653431654702318:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.041712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dec/r3tmp/tmp82b1pn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32462, node 1 2024-11-21T08:54:57.092024Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:57.099277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.099287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.099288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.099317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14419 TClient is connected to server localhost:14419 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:57.141618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:57.141640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:57.142722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:57.169870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.173854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.234834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.251378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.262874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:57.286722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653431654703854:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.286756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.314422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.321261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.332315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.339019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.346153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.353030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.362009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653431654704346:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.362031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.362037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653431654704351:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:57.362631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:57.366056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653431654704353:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:54:57.516621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.516790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.517036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:54:57.622338Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTA3NTVmZTgtYWExMmFlNDQtMTNkN2UwMTctNjM1ODcwMjM=, ActorId: [1:7439653431654704896:2475], ActorState: ExecuteState, TraceId: 01jd6yx17b4fct9jm8faqw50yx, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4156, MsgBus: 9696 2024-11-21T08:54:57.909739Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653433452149337:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.909957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dec/r3tmp/tmpN18lVu/pdisk_1.dat 2024-11-21T08:54:57.917346Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4156, node 2 2024-11-21T08:54:57.925840Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.925855Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.925857Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.925893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9696 TClient is connected to server localhost:9696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:58.010079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.010115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.011167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.012296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.020803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.028350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.046182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.055398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is ... WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304282, txId: 281474976715808] shutting down 2024-11-21T08:55:04.281474Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304324, txId: 281474976715811] shutting down 2024-11-21T08:55:04.339287Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304380, txId: 281474976715814] shutting down 2024-11-21T08:55:04.390860Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304436, txId: 281474976715817] shutting down 2024-11-21T08:55:04.437906Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304478, txId: 281474976715820] shutting down 2024-11-21T08:55:04.483695Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304527, txId: 281474976715823] shutting down 2024-11-21T08:55:04.527361Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304569, txId: 281474976715826] shutting down 2024-11-21T08:55:04.564803Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304611, txId: 281474976715829] shutting down 2024-11-21T08:55:04.608166Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304653, txId: 281474976715832] shutting down 2024-11-21T08:55:04.651331Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304695, txId: 281474976715835] shutting down 2024-11-21T08:55:04.693009Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304737, txId: 281474976715838] shutting down 2024-11-21T08:55:04.727677Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179304772, txId: 281474976715841] shutting down 2024-11-21T08:55:04.741099Z node 4 
:KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 582fb2f8-afc479f-d01e9151-94c9b7e8, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=ODYzNWYyNDctNGUzNGI4NzUtZTJiODQwODgtMzBkZTI5ODE=, TxId: Trying to start YDB, gRPC: 16307, MsgBus: 21999 2024-11-21T08:55:05.139405Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653466383955147:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:05.139428Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dec/r3tmp/tmpDZF6K2/pdisk_1.dat 2024-11-21T08:55:05.150818Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16307, node 5 2024-11-21T08:55:05.160058Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:05.160074Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:05.160076Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:05.160122Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21999 TClient is connected to server localhost:21999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:05.240017Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:05.240048Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:05.241123Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:05.241719Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:05.247736Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:05.258400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:05.275472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:05.285637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:05.421235Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653466383956704:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.421260Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.426458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.433618Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.445999Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.460479Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.473731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.480375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.488824Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653466383957207:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.488850Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.488864Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653466383957212:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.489478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:05.493214Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653466383957214:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:05.726870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.727141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:05.727429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:06.452358Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=Njg4ZmJjOC1kMzVlZDA5NC1jY2E0YWZkNy0yMGUwZGQ0MA==, TxId: 2024-11-21T08:55:06.554365Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=YjJlZGM3NjUtYjQ1YjZiZDYtNmUwMTgwOWQtZGJkZTdkYjM=, TxId: 2024-11-21T08:55:06.584006Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, reply NOT_FOUND, issues: {
: Error: No such execution } 2024-11-21T08:55:06.589302Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=YTI2M2U3OTMtMWY2NWUxZjctN2E0MzFiNWItNDFkNDlkMmM=, TxId: 2024-11-21T08:55:06.589345Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, check lease failed 2024-11-21T08:55:06.644168Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: d9c5445f-11b0e3e0-86a52a00-e8d1ba54, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=ZTdkZWE1NC03YTZmZTllZi1iZTk2NDg2MC1kNGU2NjFlZA==, TxId: >> TBoardSubscriberTest::SimpleSubscriber >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> Worker::Basic [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |89.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |89.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |89.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |89.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |89.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TMultiversionObjectMap::MonteCarlo [GOOD] |89.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TPersQueueTest::InflightLimit [GOOD] |89.4%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2024-11-21T08:55:06.101007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653473624385354:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:06.101026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034ec/r3tmp/tmp8Q6bMe/pdisk_1.dat 2024-11-21T08:55:06.156583Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28426 TServer::EnableGrpc on GrpcPort 5494, node 1 2024-11-21T08:55:06.197829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:06.197841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:06.197843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:06.197887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:06.198151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:06.198174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:06.199307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28426 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:06.244330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:06.252712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:06.321061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732179306424 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T08:55:06.380362Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handshake: worker# [1:7439653473624386094:2390] 2024-11-21T08:55:06.380385Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handshake: worker# [1:7439653473624386094:2390] 2024-11-21T08:55:06.380473Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:55:06.380524Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T08:55:06.380549Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T08:55:06.380551Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439653473624386094:2390] Handshake with writer: sender# [1:7439653473624386096:2390] 2024-11-21T08:55:06.381069Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Create read session: session# [1:7439653473624386099:2277] 2024-11-21T08:55:06.381084Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T08:55:06.381085Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439653473624386094:2390] Handshake with reader: sender# [1:7439653473624386095:2390] 2024-11-21T08:55:06.381089Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.099394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T08:55:07.108575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653477919353570:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:07.108575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653477919353560:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:07.108591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:07.109149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2024-11-21T08:55:07.117466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653477919353574:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:55:07.272662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:07.330003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T08:55:07.392852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:07.450509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:55:07.505528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:55:07.599227Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 36b Codec: RAW }] } } 2024-11-21T08:55:07.599268Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 2024-11-21T08:55:07.599276Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 2024-11-21T08:55:07.599306Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2024-11-21T08:55:07.599348Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T08:55:07.599360Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T08:55:07.599376Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T08:55:07.600535Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:55:07.600557Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 
2024-11-21T08:55:07.600565Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2024-11-21T08:55:07.600576Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.600582Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.711838Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 36b Codec: RAW }] } } 2024-11-21T08:55:07.711860Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 2024-11-21T08:55:07.711866Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 2024-11-21T08:55:07.711891Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2024-11-21T08:55:07.711917Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T08:55:07.713143Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:55:07.713165Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T08:55:07.713172Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2024-11-21T08:55:07.713181Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.713187Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.825880Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 2 Data: 36b Codec: RAW }] } } 2024-11-21T08:55:07.825904Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 
2024-11-21T08:55:07.825911Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2024-11-21T08:55:07Z }] } 2024-11-21T08:55:07.825938Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2024-11-21T08:55:07.825961Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T08:55:07.826749Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439653477919354132:2390] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T08:55:07.826774Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T08:55:07.826782Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439653473624386096:2390] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2024-11-21T08:55:07.826791Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.826796Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T08:55:07.931598Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2024-11-21T08:55:07.931618Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7439653473624386095:2390] Leave 2024-11-21T08:55:07.931632Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439653473624386094:2390] Reader has gone: sender# [1:7439653473624386095:2390] 2024-11-21T08:55:07.931645Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653477919354283:2390] Handshake: worker# [1:7439653473624386094:2390] 2024-11-21T08:55:07.931923Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653477919354283:2390] Create read session: session# [1:7439653477919354284:2277] 2024-11-21T08:55:07.931941Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439653473624386094:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T08:55:07.931944Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439653473624386094:2390] Handshake with reader: sender# [1:7439653477919354283:2390] 2024-11-21T08:55:07.931950Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439653477919354283:2390] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TTicketParserTest::AuthorizationRetryError >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood |89.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute |89.5%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest::LdapServerIsUnavailable >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |89.5%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |89.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2024-11-21T08:52:49.141415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652885816111966:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.141734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:49.144779Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652884225544935:2060];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.170837Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f22/r3tmp/tmp3tnnIu/pdisk_1.dat 2024-11-21T08:52:49.179600Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:49.180860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:49.210369Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4616, node 1 2024-11-21T08:52:49.223261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/jptk/003f22/r3tmp/yandexi5Arte.tmp 2024-11-21T08:52:49.223273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003f22/r3tmp/yandexi5Arte.tmp 2024-11-21T08:52:49.227200Z INFO: TTestServer started on Port 22093 GrpcPort 4616 2024-11-21T08:52:49.236712Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003f22/r3tmp/yandexi5Arte.tmp 2024-11-21T08:52:49.236820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22093 PQClient connected to localhost:4616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:49.241938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.241991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.245057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:49.252943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:52:49.271455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:52:49.272261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.272282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.277248Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:49.277759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:52:49.475436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885816113007:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.475454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885816113003:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.475501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.475151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652884225545210:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.475190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439652884225545221:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.475201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.476173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:52:49.478228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652885816113047:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.478390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:49.481028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652885816113017:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:52:49.481124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439652884225545225:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:52:49.502088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.557891Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652884225545305:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:49.557963Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ3ZmQxMTgtNDQ3ZDc4NjItMWI3NjVkNjctNDllZTdiNzg=, ActorId: [2:7439652884225545194:2277], ActorState: ExecuteState, TraceId: 01jd6ys4614bqbwv3ch8e7ns3s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:49.558488Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:49.561859Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652885816113180:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:49.561949Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBhN2Y5Yy1kYzE5N2I3Ny00N2U2ZWIyLTEzMmRmMzNi, ActorId: [1:7439652885816112999:2303], ActorState: ExecuteState, TraceId: 01jd6ys461ecbfn8dxy4w3p4xp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:49.562244Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:49.562752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.583183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:52:49.615400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd6ys49w5wvqt0q9dr9x5vy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY0NTM1MDYtNWFmNDAwOWEtY2U2NDMxYTUtYmVkNTI2MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439652885816113476:3049] 2024-11-21T08:52:54.143939Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439652885816111966:2053];send_to=[0:7307199536658146131:7762515]; 20 ... 
4998205307508174_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T08:54:59.962515Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2024-11-21T08:54:59.962530Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 got read request: guid# 96f5f821-19b965f3-b3cf1d61-788621ae 2024-11-21T08:54:59.962534Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_14214998205307508174_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T08:54:59.962537Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2024-11-21T08:54:59.962558Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1732179296733, sizeLag# 82536 2024-11-21T08:54:59.962570Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1TEvPartitionReady. Aval parts: 1 2024-11-21T08:54:59.962582Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 performing read request: guid# dd7da27e-c0e9eb00-e9058d48-86c449b7, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T08:54:59.962596Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid dd7da27e-c0e9eb00-e9058d48-86c449b7 2024-11-21T08:54:59.962681Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T08:54:59.962689Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T08:54:59.962697Z node 28 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:55:00.683269Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:55:03.686107Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T08:55:03.686120Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:55:03.686403Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2024-11-21T08:55:03.686426Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:55:03.686480Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:03.686772Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1732179296733 CreateTimestampMS: 1732179296733 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1732179296735 CreateTimestampMS: 1732179296735 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732179296737 CreateTimestampMS: 1732179296737 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732179296739 CreateTimestampMS: 1732179296739 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3724 } Cookie: 0 } 2024-11-21T08:55:03.686851Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T08:55:03.686864Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 8f89a48d-cf8e13e4-44534d65-5605c88b has messages 1 2024-11-21T08:55:03.686914Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 read done: guid# 8f89a48d-cf8e13e4-44534d65-5605c88b, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T08:55:03.686931Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 response to read: guid# 8f89a48d-cf8e13e4-44534d65-5605c88b Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T08:55:03.687859Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 Process answer. 
Aval parts: 0 2024-11-21T08:55:03.687918Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_12290695836455955885_v1 grpc read done: success# 0, data# { } 2024-11-21T08:55:03.687924Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_12290695836455955885_v1 grpc read failed 2024-11-21T08:55:03.687929Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_12290695836455955885_v1 grpc closed 2024-11-21T08:55:03.687940Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_12290695836455955885_v1 is DEAD 2024-11-21T08:55:03.688077Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:03.688093Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_12290695836455955885_v1 2024-11-21T08:55:03.688106Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439653442662658700:2550] destroyed 2024-11-21T08:55:03.688140Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_12290695836455955885_v1 2024-11-21T08:55:03.948350Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:55:05.207742Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:05.207762Z node 27 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:05.683605Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:55:07.687957Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T08:55:07.687976Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:55:07.688021Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2024-11-21T08:55:07.688029Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:55:07.688099Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:07.688479Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1732179296733 CreateTimestampMS: 1732179296733 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1732179296735 CreateTimestampMS: 1732179296735 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732179296737 CreateTimestampMS: 1732179296737 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732179296739 CreateTimestampMS: 1732179296739 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7725 } Cookie: 0 } 2024-11-21T08:55:07.688548Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T08:55:07.688561Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid dd7da27e-c0e9eb00-e9058d48-86c449b7 has messages 1 2024-11-21T08:55:07.688617Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 read done: guid# dd7da27e-c0e9eb00-e9058d48-86c449b7, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T08:55:07.688633Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 response to read: guid# dd7da27e-c0e9eb00-e9058d48-86c449b7 2024-11-21T08:55:07.688869Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 Process answer. Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T08:55:07.689733Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_14214998205307508174_v1 grpc read done: success# 0, data# { } 2024-11-21T08:55:07.689745Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_14214998205307508174_v1 grpc read failed 2024-11-21T08:55:07.689749Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_14214998205307508174_v1 grpc closed 2024-11-21T08:55:07.689762Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_14214998205307508174_v1 is DEAD 2024-11-21T08:55:07.689966Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:07.689984Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_14214998205307508174_v1 2024-11-21T08:55:07.690000Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439653442662658702:2551] destroyed 2024-11-21T08:55:07.690017Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_14214998205307508174_v1 |89.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |89.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates 
[GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |89.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> GenericFederatedQuery::YdbManagedSelectConstant >> GenericFederatedQuery::ClickHouseManagedSelectAll >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::ClickHouseManagedSelectConstant |89.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |89.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> KqpNewEngine::PkRangeSelect1 >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> KqpSort::ReverseRangeOptimized >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpSort::ReverseOptimized >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::TClusterTrackerTest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28537, MsgBus: 3399 2024-11-21T08:54:57.917900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653433674034552:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:57.918122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002db8/r3tmp/tmpQZjjNJ/pdisk_1.dat 2024-11-21T08:54:57.961239Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28537, node 1 2024-11-21T08:54:57.977124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:57.977137Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:57.977138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:57.977165Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3399 TClient is connected to server localhost:3399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:54:58.018163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.018186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.019313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.020928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.032376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.092956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.107586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.116678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.173892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437969003380:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.173929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.201490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.207259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.213510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.221052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.227844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.234590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:54:58.243264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437969003873:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.243290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.243299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653437969003878:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.243885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:54:58.248347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653437969003880:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:54:58.407610Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653437969004169:2458], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2024-11-21T08:54:58.407680Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWVmMjU0Zi02YzFlZTQwYy0zNzQ4ZjlkNC01MjQ4NTRhYg==, ActorId: [1:7439653437969004162:2454], ActorState: ExecuteState, TraceId: 01jd6yx23454b133pj1h3kj2ap, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:54:58.410145Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653437969004177:2461], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2024-11-21T08:54:58.410223Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjVjNTQ5ZjMtNjcxZWY4ZjEtYzQwMDE2ZTktMzFkMmU2ZTA=, ActorId: [1:7439653437969004175:2460], ActorState: ExecuteState, TraceId: 01jd6yx238cz5kyy2cr59190fk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13912, MsgBus: 7028 2024-11-21T08:54:58.578912Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653436626160884:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:58.579211Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002db8/r3tmp/tmpkMM97U/pdisk_1.dat 2024-11-21T08:54:58.585488Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13912, node 2 2024-11-21T08:54:58.594633Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:58.594647Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:58.594648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:58.594678Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7028 TClient is connected to server localhost:7028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:58.679227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:58.679254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:58.680312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:58.681065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:54:58.818886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653436626161479:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:58.818910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; [identical FALLBACK_ACTOR_LOGGING notices for N5arrow9Int32TypeE/N5arrow9Int64TypeE from columnshard_ut_common.h:485 repeated; duplicates omitted]
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=120192;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=120192;columns=2; 2024-11-21T08:55:05.518529Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653467078230145:14348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.518553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.518557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653467078230150:14351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:05.519542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:55:05.521050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653467078230152:14352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] Test command err: 2024-11-21T08:55:08.976597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653482064651781:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:08.976617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmpUY8DYd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10538, node 1 2024-11-21T08:55:09.037029Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:09.041218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.041228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.041231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.041264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.078733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.078766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.079923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.089844Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.092772Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29164, port: 29164 2024-11-21T08:55:09.092800Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:09.110853Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.156459Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.200385Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:09.200557Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:09.200581Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.245956Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.292344Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.292930Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****yOVg 
(C139AE17) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.423145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653483818102020:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.423163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmpsnJ3yQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24297, node 2 2024-11-21T08:55:09.438248Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:09.438909Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.438916Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.438918Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.438942Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.459643Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.461267Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:17307 ldap://localhost:17307 ldap://localhost:11111, port: 17307 2024-11-21T08:55:09.461299Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:09.475379Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.516443Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.523170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.523202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.524282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.560371Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:09.560586Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:09.560610Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.608455Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.652409Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.652791Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****5ACg (3E09C273) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.891593Z node 3 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653483895023248:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.891716Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmpvPnYTg/pdisk_1.dat 2024-11-21T08:55:09.906496Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2190, node 3 2024-11-21T08:55:09.913301Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.913313Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.913315Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.913354Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.992038Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.992158Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.992199Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.993336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.993484Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15922, port: 15922 2024-11-21T08:55:09.993513Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:10.009174Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.052510Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T08:55:10.096673Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****73PA (03F2F07D) () has now valid token of ldapuser@ldap 2024-11-21T08:55:10.369657Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653487368807270:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.369706Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmps5lRId/pdisk_1.dat 2024-11-21T08:55:10.377062Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3468, node 4 2024-11-21T08:55:10.387919Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.387937Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.387939Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.387980Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.469798Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.469834Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.470930Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2024-11-21T08:55:10.602847Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.604483Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30254, port: 30254 2024-11-21T08:55:10.604505Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:10.616520Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.660471Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:30254. Invalid credentials 2024-11-21T08:55:10.660691Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****c6Sw (463D14F6) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.881229Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653487722628584:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.881676Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmpC6JB7Q/pdisk_1.dat 2024-11-21T08:55:10.892791Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14061, node 5 2024-11-21T08:55:10.908388Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.908402Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.908404Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.908443Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.983915Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.983946Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.985997Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.150874Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.152720Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29275, port: 29275 2024-11-21T08:55:11.152753Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:11.165866Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:11.208459Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:29275. 
Invalid credentials 2024-11-21T08:55:11.208728Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****H0gQ (402B06CD) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:11.356629Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653495616434235:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.356752Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033be/r3tmp/tmp4tTNrp/pdisk_1.dat 2024-11-21T08:55:11.365785Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22156, node 6 2024-11-21T08:55:11.378270Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.378282Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.378284Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.378318Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.414454Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.416610Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24275, port: 24275 2024-11-21T08:55:11.416639Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:11.452364Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:11.458521Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.458549Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.459632Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.497708Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:11.497866Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:24275 return no entries 2024-11-21T08:55:11.498068Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****7KQQ (9DE1EED2) () has now permanent error message 'Could not login via LDAP' >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::OnlineRO_Consistent ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] Test command err: 2024-11-21T08:55:09.240359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653487194656802:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpNBFQMZ/pdisk_1.dat 2024-11-21T08:55:09.262830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:09.288239Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24574, node 1 2024-11-21T08:55:09.307397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.307407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.307409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.307438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.330152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.330176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.331627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.367479Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.369483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20370, port: 20370 2024-11-21T08:55:09.369867Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.385970Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.432383Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:09.432573Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:09.432594Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.480578Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.524390Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: 
subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:09.525011Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****ed8w (9F4505F2) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.675602Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653485534396508:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.675756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpVsvTpO/pdisk_1.dat 2024-11-21T08:55:09.682275Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2478, node 2 2024-11-21T08:55:09.692339Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.692356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.692358Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.692392Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.757042Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.758649Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11743, port: 11743 2024-11-21T08:55:09.758683Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.766907Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.777655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.777682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.778745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.808519Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****0S6w (5F831953) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.974407Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653483086508340:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.974645Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpcGFa7E/pdisk_1.dat 2024-11-21T08:55:09.988584Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63533, node 3 2024-11-21T08:55:10.003266Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.003279Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.003281Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.003314Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.041166Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.043175Z node 3 
:LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:8568 ldap://localhost:8568 ldap://localhost:11111, port: 8568 2024-11-21T08:55:10.043213Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.065739Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:10.074653Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.074692Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.075776Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.108393Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:10.108626Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:10.108641Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.152425Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.200433Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.201215Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****3yhA (C88A4214) () has now valid token of ldapuser@ldap 2024-11-21T08:55:10.460781Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653490573475273:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.460959Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpJPVYDR/pdisk_1.dat 2024-11-21T08:55:10.471146Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27932, node 4 2024-11-21T08:55:10.479510Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.479522Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.479525Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.479563Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.503282Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.504804Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13492, port: 13492 2024-11-21T08:55:10.504834Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.515551Z node 4 :LDAP_AUTH_PROVIDER 
DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T08:55:10.560358Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:10.560614Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:10.560638Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T08:55:10.561060Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.561095Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.562135Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.604409Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T08:55:10.652378Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T08:55:10.652682Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****txZA (B405A0E7) () has now valid token of ldapuser@ldap 2024-11-21T08:55:10.971107Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653491469023964:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpqcKqaS/pdisk_1.dat 2024-11-21T08:55:10.986640Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:10.987055Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28133, node 5 2024-11-21T08:55:10.998449Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.998464Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.998467Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.998512Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.038477Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.040512Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:61490, port: 61490 2024-11-21T08:55:11.040559Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:11.053034Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T08:55:11.068440Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.068474Z node 5 :HIVE WARN: 
HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.069547Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.100566Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****5RFQ (FBB5A98F) () has now valid token of ldapuser@ldap test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003324/r3tmp/tmpMCfkSU/pdisk_1.dat 2024-11-21T08:55:11.310496Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:11.310617Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 26686, node 6 2024-11-21T08:55:11.322209Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.322234Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.322236Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.322295Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.400714Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.400747Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.401877Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.478972Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.481409Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27022, port: 27022 2024-11-21T08:55:11.481458Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:11.494048Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:27022. 
Invalid credentials 2024-11-21T08:55:11.494182Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****egbQ (1263B18A) () has now permanent error message 'Could not login via LDAP' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2024-11-21T08:55:09.327576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653484217690076:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.327603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmpRUszkr/pdisk_1.dat 2024-11-21T08:55:09.374451Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12953, node 1 2024-11-21T08:55:09.386944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.386960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.386962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.387021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.429401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.429425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.430552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.445279Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.447277Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:11856, port: 11856 2024-11-21T08:55:09.447306Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:09.459741Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. 
Can't contact LDAP server 2024-11-21T08:55:09.460056Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****erfg (AD4D9F14) () has now retryable error message 'Could not login via LDAP' 2024-11-21T08:55:09.724422Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653485891581535:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.724671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmpe0ugCs/pdisk_1.dat 2024-11-21T08:55:09.731430Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6381, node 2 2024-11-21T08:55:09.741570Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.741583Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.741585Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.741619Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.826761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.826797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.827876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.996873Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.998944Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****CpHQ (E10D4FBD) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.181431Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653488814998910:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.181619Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmpQ6oVdy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23966, node 3 2024-11-21T08:55:10.197240Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:10.199701Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.199707Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.199712Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.199770Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.270358Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.271855Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****hP1w (7CA038E7) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.281963Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.282005Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.283089Z node 3 :HIVE WARN: HIVE#72057594037968897 
Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.543843Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653489466733289:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.543885Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmpvORL7D/pdisk_1.dat 2024-11-21T08:55:10.554375Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62806, node 4 2024-11-21T08:55:10.562935Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.562946Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.562949Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.562987Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.643938Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.643976Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.645026Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.673581Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.675461Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****28SQ (72140E97) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.931884Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653489426725039:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.931958Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmpF8NYsg/pdisk_1.dat 2024-11-21T08:55:10.949644Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19653, node 5 2024-11-21T08:55:10.964451Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.964465Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.964468Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.964511Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.014522Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.015616Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****wkyA (175432A3) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:11.034173Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.034202Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.035140Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.294828Z node 6 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653492856698297:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.295122Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003338/r3tmp/tmp07B5RO/pdisk_1.dat 2024-11-21T08:55:11.311027Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29491, node 6 2024-11-21T08:55:11.322375Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.322389Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.322390Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.322431Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.395075Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.395114Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.396137Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.426512Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:11.429214Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:22919, port: 22919 2024-11-21T08:55:11.429251Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:11.496435Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:11.544641Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****ETQg (6598C3CC) () has now valid token of ldapuser@ldap >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopSortExpr ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 15546, MsgBus: 4638 2024-11-21T08:55:10.673363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653488586149887:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.674908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f05/r3tmp/tmpovyjoL/pdisk_1.dat 2024-11-21T08:55:10.737227Z 
node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15546, node 1 2024-11-21T08:55:10.750910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.750921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.750923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.750957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4638 2024-11-21T08:55:10.776349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.776373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.777582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:10.812422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:10.815952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:10.982055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653488586150497:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:10.982090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.677848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.703700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492881117922:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.703734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.703872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492881117928:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.704578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.712288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653492881117930:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:11.869431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.930838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:11.996651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.053273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.095389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.139239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:12.194240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.393521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 62279, MsgBus: 6523 2024-11-21T08:55:10.674473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653487944465889:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.674520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ed3/r3tmp/tmpnYtkBm/pdisk_1.dat 2024-11-21T08:55:10.730964Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62279, node 1 2024-11-21T08:55:10.747280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.747291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.747294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.747342Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6523 TClient is connected to server localhost:6523 2024-11-21T08:55:10.776451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.776474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.777637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:10.791297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:10.793423Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:11.016854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492239433642:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.016880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.676739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.739441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492239433775:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.739464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492239433780:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.739466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.740287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.745284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653492239433782:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:55:11.876394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.934819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:11.991214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.036714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.082873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.130328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:12.137555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.342633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> KqpSqlIn::SimpleKey >> KqpNewEngine::KeyColumnOrder >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseFirstKeyOptimized |89.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopParameter >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::KeyColumnOrder2 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TTopicReaderTests::TestRun_ReadOneMessage |89.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |89.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SimpleKey_Negated >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> TGroupMapperTest::NonUniformCluster [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> KqpSort::TopParameter [GOOD] >> KqpSort::TopParameterFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T08:55:03.003301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:03.003326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.003330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:03.003333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:03.003342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:03.003344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:03.003351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:03.003407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:03.010447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:03.010463Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:03.012266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:03.012762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:03.012798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:03.013798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:03.013964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:03.014022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.014078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:03.014674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.014869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.014875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.014901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:03.014910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.014914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:03.014922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.015778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:03.025761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:03.025822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.025871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:03.025922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:03.025926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:03.026424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:03.026433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:03.026436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:03.026684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:03.026884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.026891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.026895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.027263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:03.027499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:03.027528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:03.027649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:03.027663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:03.027667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.027704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:03.027709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:03.027727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.027735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:03.027995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:03.028000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:03.028026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:03.028029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:03.028088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:03.028092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:03.028099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:03.028101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.028105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:03.028108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:03.028110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:03.028112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:03.028119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:03.028122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:03.028125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T08:55:03.028325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.028336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:03.028338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:03.028341Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:03.028344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:03.028351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 5186233409546 2024-11-21T08:55:13.082412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:13.082423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:55:13.082725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:55:13.082732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:55:13.083035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:13.083066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 378 RawX2: 4294969643 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:13.083077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2024-11-21T08:55:13.083151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:13.083170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T08:55:13.083204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:13.083216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:13.083753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:13.083771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:55:13.083913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:13.083921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:13.083959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: 
[OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:13.083968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:13.083991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:13.083995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T08:55:13.084003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T08:55:13.084007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T08:55:13.084018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:55:13.084024Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T08:55:13.084036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T08:55:13.084041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:55:13.084046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T08:55:13.084051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T08:55:13.084056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:55:13.084060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:55:13.084083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:13.084094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T08:55:13.084097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2024-11-21T08:55:13.084101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:55:13.084416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:13.084434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:13.084439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:55:13.084444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:55:13.084449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:13.084533Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:55:13.084538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:13.084547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:13.084595Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T08:55:13.084657Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T08:55:13.084671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:13.084682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T08:55:13.084686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T08:55:13.084690Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2024-11-21T08:55:13.084694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:13.084702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T08:55:13.084969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:13.085017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:13.085546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:13.085566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:55:13.085920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T08:55:13.085937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:55:13.085950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:55:13.086030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:55:13.086036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:55:13.086118Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, 
txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:55:13.086134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:55:13.086138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:714:2635] TestWaitNotification: OK eventTxId 103 2024-11-21T08:55:13.473802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:13.473880Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 99us result status StatusSuccess 2024-11-21T08:55:13.473957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::JoinWithParams >> BasicUsage::PropagateSessionClosed >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNotNullColumns::Describe [GOOD] >> KqpNewEngine::OrderedScalarContext >> KqpNotNullColumns::CreateTableWithNotNullColumns >> BasicUsage::WriteSessionWriteInHandlers >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::SelectNotAllElements >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command 
err: Trying to start YDB, gRPC: 65271, MsgBus: 28134 2024-11-21T08:55:10.740877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653490079749400:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.740993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001eea/r3tmp/tmphT75IY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65271, node 1 2024-11-21T08:55:10.819764Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:10.823465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.823474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.823476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.823504Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28134 2024-11-21T08:55:10.840524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.840553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.841683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:10.881900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.077745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494374717154:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.077777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.742721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.810630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494374717287:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.810676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.810774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494374717292:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.811701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.813779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653494374717294:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:11.961306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.007472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:12.068709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.122828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.175665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.218771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:12.228531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.559051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 16404, MsgBus: 7047 2024-11-21T08:55:12.798414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653500095678030:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.798452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001eea/r3tmp/tmpB0jhKf/pdisk_1.dat 2024-11-21T08:55:12.807038Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16404, node 2 2024-11-21T08:55:12.817354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.817369Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.817370Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.817404Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7047 TClient is connected to server localhost:7047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.898769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.898795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.899839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.900536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.035692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653504390645926:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.035716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.800926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:13.810418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653504390646057:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.810448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653504390646062:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.810451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.811234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:13.818006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653504390646064:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:13.935220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.986152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:14.033813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.091263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.137543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.197527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:14.210003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.614716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26743, MsgBus: 25945 2024-11-21T08:55:10.672798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653487407687228:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.672954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ef6/r3tmp/tmpHPh4zW/pdisk_1.dat 2024-11-21T08:55:10.750704Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26743, node 1 2024-11-21T08:55:10.759172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.759182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.759184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.759220Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25945 2024-11-21T08:55:10.775316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.775342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.776266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:10.812185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:10.814387Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:11.029515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653491702655126:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.029542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.676351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.740777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653491702655257:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.740825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.740985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653491702655262:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.741599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:11.744266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653491702655265:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:55:11.901383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.960518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:12.024473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.071553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.125551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.179644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:12.186787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.522833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710703:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.531126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.531434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.531579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2024-11-21T08:55:12.840299Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179312885, txId: 281474976710725] shutting down Trying to start YDB, gRPC: 20367, MsgBus: 27209 2024-11-21T08:55:12.938757Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653498336662510:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.938956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ef6/r3tmp/tmp4m51X5/pdisk_1.dat 2024-11-21T08:55:12.946662Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20367, node 2 2024-11-21T08:55:12.955265Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.955279Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.955280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.955316Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27209 TClient is connected to server localhost:27209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:13.040263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:13.040290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:13.041029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.041257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:55:13.208724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653502631630408:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.208752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.940458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:55:13.947767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653502631630539:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.947798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653502631630544:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.947804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.948265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T08:55:13.951718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653502631630546:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:14.055724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.114160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T08:55:14.171938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.230505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.290865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.352365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:55:14.364006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.779698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::LeftSemiJoin >> KqpSort::TopParameterFilter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 23953, MsgBus: 23665 2024-11-21T08:55:11.729646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653494141623823:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.729761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ebc/r3tmp/tmpemX01O/pdisk_1.dat 2024-11-21T08:55:11.784611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23953, node 1 2024-11-21T08:55:11.804199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.804225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.804229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.804287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:11.829254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.829290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.830353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23665 TClient is connected to server localhost:23665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:11.904880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.908362Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:12.069467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653498436591571:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.069513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.100831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653498436591601:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.100860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.103502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13863, MsgBus: 8656 2024-11-21T08:55:12.393045Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653496561576601:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.393267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ebc/r3tmp/tmpgidvL1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13863, node 2 2024-11-21T08:55:12.408410Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:12.416588Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.416599Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.416601Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.416633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8656 TClient is connected to server localhost:8656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.493258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.493292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.494362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.495621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.501993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:12.510778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.528956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.538600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.645733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653496561578137:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.645758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.650024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.656023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.661989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.668507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.675895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.683311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.691117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653496561578631:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.691141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653496561578636:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.691143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.691711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:12.696331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653496561578638:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:12.857806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5489, MsgBus: 14007 2024- ... h pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.357033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1837, MsgBus: 24417 2024-11-21T08:55:13.558829Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653501280659479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.559123Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ebc/r3tmp/tmp6Nwa25/pdisk_1.dat 2024-11-21T08:55:13.570989Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1837, node 4 2024-11-21T08:55:13.581681Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:13.581696Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:13.581713Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:13.581762Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24417 TClient is connected to server localhost:24417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:13.659117Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:13.659152Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:13.660220Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:13.661422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.662950Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:13.924509Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653501280660074:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.924540Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.982239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653501280660104:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.982262Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.985822Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18786, MsgBus: 14143 2024-11-21T08:55:14.255775Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653508464185280:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.255966Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ebc/r3tmp/tmpscL2QC/pdisk_1.dat 2024-11-21T08:55:14.270094Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18786, node 5 2024-11-21T08:55:14.277208Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:14.277221Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:14.277223Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:14.277269Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14143 TClient is connected to server localhost:14143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:14.356141Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.356175Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.357869Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:55:14.359458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.360780Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:14.592340Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653508464185879:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.592374Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.595550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20129, MsgBus: 31634 2024-11-21T08:55:14.759359Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653506641014197:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.759401Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ebc/r3tmp/tmp9O0eNL/pdisk_1.dat 2024-11-21T08:55:14.769647Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20129, node 6 2024-11-21T08:55:14.775959Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:14.775973Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:14.775975Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:14.776014Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31634 TClient is connected to server localhost:31634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:14.859625Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.859665Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.860846Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.861999Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:15.052365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> BasicUsage::BasicWriteSession >> BasicUsage::RetryDiscoveryWithCancel >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: Trying to start YDB, gRPC: 8500, MsgBus: 12332 2024-11-21T08:55:11.379658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653493508437516:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.379745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ed6/r3tmp/tmpRhEeuV/pdisk_1.dat 2024-11-21T08:55:11.424263Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8500, node 1 2024-11-21T08:55:11.445995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.446009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.446010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.446042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12332 2024-11-21T08:55:11.480415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.480440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.481557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:11.501069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.513155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.526408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.542534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.551785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.660700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493508439053:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.660729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.695682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.704085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.714239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.724899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.731363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.746694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.768931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493508439567:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.768964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.769041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493508439572:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.769958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:11.772856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:55:11.772930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653493508439574:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 19724, MsgBus: 27867 2024-11-21T08:55:12.250534Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653497657189979:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.250721Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ed6/r3tmp/tmpkt6jV1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19724, node 2 2024-11-21T08:55:12.267744Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:12.270134Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.270146Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.270148Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.270207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27867 TClient is connected to server localhost:27867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.350831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.350863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.351957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.353687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.358806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.369503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:12.385368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.397378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.541822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653497657191513:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.541857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.547104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... , NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.987375Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.991908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.999652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.006192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.013478Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.020081Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.027314Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.095443Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653507903194687:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.095475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.095620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653507903194692:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.096830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:14.100189Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:55:14.100273Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653507903194694:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 2445, MsgBus: 8453 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ed6/r3tmp/tmpVUq96J/pdisk_1.dat 2024-11-21T08:55:14.586463Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:14.586703Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2445, node 5 2024-11-21T08:55:14.597729Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:14.597743Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:14.597745Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:14.597792Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8453 TClient is connected to server localhost:8453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:14.676992Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.677032Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.677989Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.679017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.680427Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:14.684121Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.702183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.717853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:14.730118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.899797Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653506491989711:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.899844Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.904305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.910346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.923397Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.930420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.937403Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.944345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.953974Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653506491990212:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.954004Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.954021Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653506491990217:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.954591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:14.957282Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653506491990219:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '473) '('"_id" '"e731bcf0-f7dadb72-8c21e14b-fee350b3") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '486) '('"_id" '"1601672d-4f18e12b-3908150-767ccd48")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::JoinWithPrecompute >> Sharding::XXUsage |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] >> KqpNewEngine::PagingNoPredicateExtract [GOOD] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 2104849252515447450 2811238974699703223 12713943813578736131 18072534857661802687 16021066117351368243 9441082890911877522 16924667026806305658 1321373926680311200 6693921172490362669 13996998044590624775 5836407453717141263 677976561513142358 4146216106326601891 16055569441596771721 15571955585738576261 6447779810104057068 14723994416489349400 10160876694038397112 1910786897999046379 12201779184427045142 18175202628359182138 1615656189185161446 1374793863783357207 10172992151454684972 2615522978934483580 6423568722855346764 14799656893910665895 15942146739284483126 9594416712346637467 15915237879519647877 2249711912916349199 5794672843176066106 16606208338218374998 14238774718440263825 15236399091925016784 16003139228210243686 16962669682938577702 14856210561393404338 14593160102614941133 11571121311761234307 7972306151567229096 13393031532185573144 5017675040751338262 2605670623766448684 5123098650269992492 8470926905646888281 6357223840205194796 2399835886778194838 16395585207816314195 14010080443654727365 4684142311006257191 9765569350561524034 9453249821727341918 7844736920860668044 3207848502084601096 12744981401437211048 18245116930115526699 12187901658367315869 5878737326908939311 17101943647442805226 17099506110185566985 
9483703009202653735 17172646525429883035 9433516332584226502 8951864931597489104 3588547804965980650 7981377381177546457 12123585755513922440 2699706432152828030 2238459807833894465 1310192797669293303 724377362154279596 12026722729572785216 737734264873354238 1816436426672621050 4272845399615604609 15730564926415702066 11421367443134808305 1575352540195399868 974336528082147494 14377818822491027860 13846313137877024749 10396726784559327824 4664490360151283578 3046943175970370014 7655577754445179137 2021654080805675040 5610243187181348374 16857891355280235899 2616542421605736774 >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PagingNoPredicateExtract [GOOD] Test command err: Trying to start YDB, gRPC: 12690, MsgBus: 26821 2024-11-21T08:55:11.286982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653494787466123:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.287269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eeb/r3tmp/tmp8qeh5c/pdisk_1.dat 2024-11-21T08:55:11.335608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12690, node 1 2024-11-21T08:55:11.352734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.352749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.352751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.352784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26821 TClient is connected to server localhost:26821 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:55:11.388390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.388420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:55:11.389537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:11.415503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.417420Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:11.449497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.465694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.483760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.494183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.569473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494787467660:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.569499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.596422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.651995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.661716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.676023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.731366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.741217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.754301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494787468179:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.754328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.754330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653494787468184:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.754920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:11.760029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653494787468186:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12932, MsgBus: 26241 2024-11-21T08:55:12.178835Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653496937324777:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.178979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eeb/r3tmp/tmpITS9bZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12932, node 2 2024-11-21T08:55:12.192380Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:12.194493Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.194504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.194505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.194538Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26241 TClient is connected to server localhost:26241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.279275Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.279322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.280360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.281583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.293187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.302368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:12.321656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.332339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.444239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653496937326314:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.444266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.447959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... T08:55:14.873126Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.874399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.881962Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.890917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.951362Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.962280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.064711Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653510455905598:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.064743Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.069634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.075399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.084176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.091226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.098108Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.104700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.113738Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653510455906091:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.113763Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.113808Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653510455906096:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.114437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:15.118535Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653510455906098:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9792, MsgBus: 16842 2024-11-21T08:55:15.705863Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653511874548328:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.705918Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eeb/r3tmp/tmpsQzfO3/pdisk_1.dat 2024-11-21T08:55:15.718143Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9792, node 6 2024-11-21T08:55:15.726908Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:15.726941Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:15.726943Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:15.726991Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16842 TClient is connected to server localhost:16842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:15.806513Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.806559Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.807542Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:15.808954Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.816582Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:15.833040Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.845620Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:15.886378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.950303Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.084713Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653516169517169:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.084755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.088086Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.101256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.109456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.122831Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.135335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.149234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.168768Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653516169517673:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.168792Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.168806Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653516169517678:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.169767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.176747Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653516169517680:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] Test command err: Trying to start YDB, gRPC: 14397, MsgBus: 25952 2024-11-21T08:55:11.384548Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653493921763153:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.384833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002edb/r3tmp/tmp7JJ4Lr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14397, node 1 2024-11-21T08:55:11.442625Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:11.446876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.446889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.446921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.446954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25952 2024-11-21T08:55:11.484749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.484779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.485842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:11.510681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.515363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.577603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:11.594846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.605914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.659933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493921764687:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.659961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.695234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.701558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.713324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.724994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.733223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.745363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.755032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493921765184:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.755056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.755115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653493921765189:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.755755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:11.758959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653493921765191:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:11.975749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.983053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.990848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:66: Warning: At function: Filter, At function: Coalesce
:5:78: Warning: At function: SqlIn
:5:78: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:66: Warning: At function: Filter, At function: Coalesce
:5:78: Warning: At function: SqlIn
:5:78: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 30350, MsgBus: 13333 2024-11-21T08:55:12.449492Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653499226407568:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.449510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002edb/r3tmp/tmpg3HYye/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30350, node 2 2024-11-21T08:55:12.464988Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:12.467258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.467270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.467272Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.467306Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13333 TClient is connected to server localhost:13333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.549633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.549657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.550734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.552374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.553015Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at scheme ... CreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.113797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653512853772352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.113827Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653512853772357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.113831Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.114458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:15.118216Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653512853772359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:15.389681Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.397659Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.406462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:60: Warning: At function: Filter, At function: Coalesce
:7:33: Warning: At function: SqlIn
:7:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:60: Warning: At function: Filter, At function: Coalesce
:7:33: Warning: At function: SqlIn
:7:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 25413, MsgBus: 62744 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002edb/r3tmp/tmpR8uO2U/pdisk_1.dat 2024-11-21T08:55:15.823829Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653512858338717:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.826706Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:15.844904Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25413, node 5 2024-11-21T08:55:15.868752Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:15.868767Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:15.868769Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:15.868820Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62744 2024-11-21T08:55:15.925675Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.925720Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.927276Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:15.946315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.953785Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.981102Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:55:16.013630Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.029154Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.169674Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653517153307417:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.169699Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.179346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.191244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.253003Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.267331Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.286440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.298239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.314505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653517153307931:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.314541Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.314554Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653517153307936:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.315450Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.323546Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653517153307938:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:16.504445Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.518712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.526457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
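The repeated KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found or you don't have access permissions") are emitted while the default resource pool is still being created on demand: the log shows the follow-up ESchemeOpCreateResourcePool suboperation and the TPoolCreatorActor retry ("Transaction ... completed, doublechecking"), after which the pool resolves normally. For reference, a minimal sketch of how a resource pool could be declared explicitly, assuming the CREATE RESOURCE POOL statement and the CONCURRENT_QUERY_LIMIT / QUEUE_SIZE settings available in recent YDB versions; the pool name, setting names and values below are illustrative and are not taken from this test:

    -- hypothetical example; the test harness creates the 'default' pool automatically
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting: queries allowed to run in parallel
        QUEUE_SIZE = 100              -- assumed setting: queries allowed to wait beyond the limit
    );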
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
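The SqlIn warning above (code 1108) refers to YQL's legacy IN behaviour when the right-hand collection is empty or contains nullable items; the pragma named in the message switches IN to ANSI semantics for such collections. A minimal sketch of a query shape that would raise the warning, together with the suggested pragma; the table path and column names are placeholders and are not taken from this test:

    -- hypothetical query; `/Root/SomeTable` and its columns are illustrative only
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/SomeTable`
    WHERE MaybeValue IN (1, 2, NULL);  -- nullable items in the IN list trigger warning 1108 without the pragma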
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSort::ReverseEightShardOptimized [GOOD] Test command err: Trying to start YDB, gRPC: 14877, MsgBus: 6561 2024-11-21T08:55:11.415002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653492030180440:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:11.415026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ec4/r3tmp/tmpBVpEpq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14877, node 1 2024-11-21T08:55:11.472027Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:11.478591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:11.478604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:11.478605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:11.478643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6561 TClient is connected to server localhost:6561 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:55:11.515377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:11.515408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:55:11.516518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:11.526765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.535799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:11.549176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.565146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.577990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:11.702729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492030181976:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.702757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.737234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.744278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.752006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.760597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.780631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.842461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:11.853878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492030182493:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.853904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.854001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653492030182498:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:11.854700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:11.864419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653492030182500:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 8833, MsgBus: 27023 2024-11-21T08:55:12.308703Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653498684493937:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:12.308953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ec4/r3tmp/tmpvlrCCG/pdisk_1.dat 2024-11-21T08:55:12.316614Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8833, node 2 2024-11-21T08:55:12.325138Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:12.325153Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:12.325154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:12.325194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27023 TClient is connected to server localhost:27023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:12.408866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:12.408900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:12.410045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:12.411098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.421493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.429715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:12.448187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.460439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:12.597003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653498684495476:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.597024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:12.600322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:12.605521Z node 2 :FLAT_TX_SCHEMESHARD W ... 1T08:55:15.105083Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:15.106240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.110846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.120037Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.136303Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.148772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.326684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653512740709959:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.326725Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.329809Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.338526Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.351642Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.365315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.379164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.393171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.408253Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653512740710460:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.408276Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653512740710465:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.408284Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.408899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:15.412670Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653512740710467:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22549, MsgBus: 9780 2024-11-21T08:55:15.957736Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653508843222419:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.960944Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ec4/r3tmp/tmpTV0lYf/pdisk_1.dat 2024-11-21T08:55:15.978782Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22549, node 6 2024-11-21T08:55:15.998442Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:15.998456Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:15.998458Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:15.998505Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9780 TClient is connected to server localhost:9780 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:55:16.058488Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:16.058527Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:16.060859Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:16.062487Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.063880Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:16.071521Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.087042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:16.114781Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.126871Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.301376Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653513138191115:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.301412Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.308008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.315454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.323522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.379938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.397690Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.409781Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.467132Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653513138191638:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.467149Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653513138191643:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.467159Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.467762Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.476947Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653513138191645:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::JoinSameKey >> BasicUsage::GetAllStartPartitionSessions |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions |89.6%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbSdkSessionsPool::Get1Session >> KqpNewEngine::JoinSameKey [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> YdbSdkSessionsPool::StressTestSync10 >> YdbSdkSessionsPool::WaitQueue10 >> YdbSdkSessionsPool::StressTestAsync1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 6451, MsgBus: 6684 2024-11-21T08:55:13.077771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653501799268694:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.077791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea7/r3tmp/tmp9hqNsl/pdisk_1.dat 2024-11-21T08:55:13.141291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6451, node 1 2024-11-21T08:55:13.158113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:13.158130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:13.158132Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:13.158176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6684 2024-11-21T08:55:13.178135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:13.178176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:13.179242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6684 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:13.203150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.214692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:55:13.230485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.248306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.258545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.390386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501799270230:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.390423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.418611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.424561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.478917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.487947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.543032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.551290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.559865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501799270750:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.559896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.559957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501799270755:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.560666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:13.564733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653501799270757:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:55:13.772350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7249, MsgBus: 16267 2024-11-21T08:55:13.987203Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653502121862045:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.987361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea7/r3tmp/tmpGhFlbP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7249, node 2 2024-11-21T08:55:14.003102Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:14.003959Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:14.003971Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:14.003972Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:14.004001Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:14.087615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.087656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.088642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.089971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.096476Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:14.101169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:14.116564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.131209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.141313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:14.273162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653506416830870:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:14.273184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FO ... hard: 72057594046644480 waiting... 2024-11-21T08:55:16.520767Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:16.527783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.537543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.559161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.570166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.757743Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653516726362712:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.757780Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.763202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.771164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.778655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.835772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.849186Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.863148Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.879281Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653516726363217:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.879307Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.879329Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653516726363222:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.880068Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.882164Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653516726363224:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 11610, MsgBus: 14962 2024-11-21T08:55:17.648427Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653518713650821:2058];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ea7/r3tmp/tmpcIlnhu/pdisk_1.dat 2024-11-21T08:55:17.654508Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:17.665933Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11610, node 6 2024-11-21T08:55:17.677134Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:17.677152Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:17.677154Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:17.677201Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14962 TClient is connected to server localhost:14962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:17.749459Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:17.749496Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:17.750463Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:17.751762Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:17.753643Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:17.759692Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:17.776678Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:17.842159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:17.859444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.085589Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653523008619669:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.085646Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.088628Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.107140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.120854Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.132438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.144969Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.161180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.185622Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653523008620183:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.185655Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.186142Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653523008620188:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.187175Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:18.196944Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:55:18.197068Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653523008620190:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> YdbSdkSessionsPool::RunSmallPlan >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> YdbSdkSessionsPool::StressTestSync1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:18.438141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:18.438173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:18.438178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:18.438184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:18.438190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:18.438194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:18.438203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:18.438288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:18.450102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:18.450130Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:18.458602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:18.459497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:18.459541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:18.463329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:18.464552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:18.464684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.464804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:18.467274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.467615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:18.467626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.467669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2024-11-21T08:55:18.467677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:18.467683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:18.467698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.469500Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:18.490451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:18.490577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.490653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:18.490707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:18.490715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.492885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.492928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:18.493007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.493018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:18.493022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:18.493026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:18.493686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.493699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:18.493704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:18.494069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.494080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.494086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 
72057594046678944 2024-11-21T08:55:18.494094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.494748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:18.496226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:18.496284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:18.496440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.496470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:18.496494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:18.496559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:18.496567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:18.496599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:18.496613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:18.498389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:18.498402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:18.498454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.498460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:18.498562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.498594Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:18.498612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:18.498616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.498634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2024-11-21T08:55:18.498640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.498645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:18.498649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:18.498665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:18.498671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:18.498676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:18.499047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:18.499062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:18.499067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:18.499073Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:18.499079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:18.499095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
perationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:19.046467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:19.046475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:19.046543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:19.046548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:55:19.046552Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:19.078100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:19.078149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:19.078160Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T08:55:19.078167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:19.095470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:19.095530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T08:55:19.095542Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T08:55:19.095555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:19.095559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:55:19.095598Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T08:55:19.095636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:19.096111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:19.096198Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:19.096221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:19.096294Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:19.096301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:55:19.096384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:19.096392Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:55:19.096406Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:55:19.096411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:19.096417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T08:55:19.096423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:19.096428Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:55:19.096433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:55:19.096462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:19.096470Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T08:55:19.096475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:19.096755Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:19.096770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:19.096775Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:19.096780Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:19.096785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:19.096798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T08:55:19.096803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T08:55:19.097434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:19.097468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:19.097472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:683:2606] TestWaitNotification: OK eventTxId 105 2024-11-21T08:55:19.100157Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:55:19.100236Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 92us result status StatusSuccess 2024-11-21T08:55:19.100384Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: Trying to start YDB, gRPC: 30836, MsgBus: 8870 2024-11-21T08:55:13.049139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653501549571642:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.049311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb5/r3tmp/tmpcDAOt2/pdisk_1.dat 2024-11-21T08:55:13.098683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30836, node 1 2024-11-21T08:55:13.115375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:13.115384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:13.115386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:13.115410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8870 2024-11-21T08:55:13.149417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:13.149471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:8870 2024-11-21T08:55:13.150591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:13.176979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.182344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.245011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.265005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.275036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:13.319154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501549573178:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.319183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.346756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.352949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.361881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.369393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.376287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.383133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.391923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501549573671:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.391945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653501549573676:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.391947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:13.392726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:13.396628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653501549573678:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:13.585417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.600644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:13.609412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
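The repeated warning above names a concrete remedy. Below is a minimal, hypothetical YQL sketch of applying that PRAGMA; only the PRAGMA name is taken verbatim from the warning, while the table and column names (TestTable, Key, Value) are invented for illustration and do not come from the test:

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    -- With the pragma enabled, IN follows ANSI SQL semantics when the item
    -- collection is empty or contains nullable values, which is the behaviour
    -- warning code 1108 recommends opting into.
    SELECT Key, Value
    FROM TestTable
    WHERE Key IN (1, 2, 3)
    ORDER BY Key;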
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 3484, MsgBus: 8215 2024-11-21T08:55:14.150913Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653505619532560:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.150996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb5/r3tmp/tmpOIYQBc/pdisk_1.dat 2024-11-21T08:55:14.162452Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3484, node 2 2024-11-21T08:55:14.173326Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:14.173338Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:14.173341Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:14.173389Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8215 TClient is connected to server localhost:8215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } Dom ... 75Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.677874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.684920Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.694255Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.701274Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.707930Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.715136Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.950227Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653514056277476:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.950266Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.950298Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653514056277481:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.951344Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.953217Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653514056277483:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:17.191027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:17.203434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:17.215202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20330, MsgBus: 8959 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002eb5/r3tmp/tmp0M3KbA/pdisk_1.dat 2024-11-21T08:55:17.872392Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:17.897895Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20330, node 5 2024-11-21T08:55:17.913116Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:17.913141Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:17.913144Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:17.913196Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8959 2024-11-21T08:55:17.975515Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:17.975568Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:17.978035Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:18.064827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:18.072762Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:18.097079Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.133064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.201423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.245091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.324657Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653522904342778:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.324684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.341004Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.357149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.374939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.389632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.398964Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.420132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.443180Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653522904343279:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.443223Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.443351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653522904343284:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.444230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:55:18.446423Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653522904343286:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:55:18.674804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.743844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.760792Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.6%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> YdbSdkSessionsPool::CustomPlan |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> YdbSdkSessionsPool::RunSmallPlan [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> YdbSdkSessionsPool::PeriodicTask10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2024-11-21T08:55:19.468179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653528602325704:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.468475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d38/r3tmp/tmpzFVMSV/pdisk_1.dat 2024-11-21T08:55:19.516105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9619, node 1 2024-11-21T08:55:19.526666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.526678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:19.526680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.526707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:19.556581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.557342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.557364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.557997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.558054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.558065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T08:55:19.558436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.558459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.558461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:19.558701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.559322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319605, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.559336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:19.559399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:19.559836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.559874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.559887Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:19.559898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:19.559915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:19.559929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:19.560303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:19.560324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:19.560334Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.560352Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:55:19.568819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.568866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.570374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> YdbSdkSessionsPool::StressTestAsync10 >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2024-11-21T08:55:19.056376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653526089956864:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.056555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d54/r3tmp/tmpG5PUbj/pdisk_1.dat 2024-11-21T08:55:19.102924Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27712, node 1 2024-11-21T08:55:19.115061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.115086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:19.115087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.115118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7131 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:19.156913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.156950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.158577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:19.182441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.183541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.183560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.184321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.184393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.184405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:55:19.184818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.184830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:19.184901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.185174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.186033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319234, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.186048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:19.186103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:19.186501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.186545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.186560Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:19.186568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:19.186575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:19.186589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:55:19.187076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:55:19.187098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:19.187102Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.187116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:18.582197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2024-11-21T08:55:18.582230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:18.582236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:18.582240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:18.582247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:18.582251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:18.582258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:18.582350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:18.592538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:18.592565Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:18.595747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:18.596547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:18.596591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:18.600296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:18.600820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:18.600934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.601036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:18.604090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.604440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:18.604451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.604497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:18.604505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:18.604511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:18.604528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.606843Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:18.624351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2024-11-21T08:55:18.624463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.624536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:18.624584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:18.624592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.628776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.628828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:18.628926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.628943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:18.628949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:18.628954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:18.632003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.632027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:18.632035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:18.633720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.633736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.633742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:18.633750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.634464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:18.635049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:18.635103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:18.635294Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:18.635321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:18.635328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:18.635394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:18.635402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:18.635427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:18.635438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:18.635852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:18.635859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:18.635899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:18.635903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:18.635969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:18.635975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:18.635988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:18.635992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.635998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:18.636003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:18.636008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:18.636011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:18.636022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:18.636028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:18.636034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:18.636382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:18.636396Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:18.636401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:18.636406Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:18.636411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:18.636423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:20.606483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:55:20.606538Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T08:55:20.606603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:20.607480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:20.607577Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:20.607585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:20.607686Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:20.607692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:55:20.607813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:20.607823Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:55:20.607852Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:55:20.607857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:20.607864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T08:55:20.607870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:20.607877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:55:20.607881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:55:20.607918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:20.607924Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 
2024-11-21T08:55:20.607929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:20.608138Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:20.608151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:20.608156Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:20.608161Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:20.608167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:20.608182Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T08:55:20.625194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:20.643844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:20.643864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:20.643982Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:20.644006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:20.644012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:771:2684] TestWaitNotification: OK eventTxId 105 2024-11-21T08:55:21.163561Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:21.163689Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 128us result status StatusSuccess 2024-11-21T08:55:21.163872Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active 
} Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:21.225422Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:55:21.225552Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 155us result status StatusSuccess 2024-11-21T08:55:21.225747Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 
72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> YdbSdkSessionsPool::WaitQueue1 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2024-11-21T08:55:14.835833Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1732179314835826 2024-11-21T08:55:14.925759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653507386931971:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.925777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.929464Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653505535411908:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.949562Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/003340/r3tmp/tmpxxlON7/pdisk_1.dat 2024-11-21T08:55:14.958023Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:14.958919Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.977074Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24133, node 1 2024-11-21T08:55:14.991775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003340/r3tmp/yandexah4Ipn.tmp 2024-11-21T08:55:14.991786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003340/r3tmp/yandexah4Ipn.tmp 2024-11-21T08:55:14.991834Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003340/r3tmp/yandexah4Ipn.tmp 2024-11-21T08:55:14.991868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:14.996696Z INFO: TTestServer started on Port 14208 GrpcPort 24133 TClient is connected to server localhost:14208 PQClient connected to localhost:24133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:15.021050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:15.025934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.025960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:55:15.027250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2024-11-21T08:55:15.055059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.055087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.056537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:15.056799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:15.195151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653509830379270:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.195178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653509830379279:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.195184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.196710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:15.202570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653509830379284:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T08:55:15.240859Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653511681900236:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:15.241287Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjBlZTE5NTUtMmNiZjljZC01YTBjNjAxOC1iOTIwMzJlZg==, ActorId: [1:7439653511681900195:2299], ActorState: ExecuteState, TraceId: 01jd6yxjg7a584dj0rchps72r1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:15.241853Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:15.242730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.304796Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653509830379363:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:15.304982Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTkzZDQ0YWEtYzRjMDE3MTQtYWRhY2VlNGItMjVjNmVmYmQ=, ActorId: [2:7439653509830379268:2277], ActorState: ExecuteState, TraceId: 01jd6yxjft3q2vnb55wjegfyvj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:15.305349Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:15.316168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.383720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24133", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:15.420181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd6yxjpb4daa6dmm3n3jggmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhmOTdhMi04NDQyNGE0My0xODE0MWJjOS1kOTUzY2FjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653511681900609:2920] 2024-11-21T08:55:19.926262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653507386931971:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.926305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:19.928786Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653505535411908:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.928839Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:20.493962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:24133 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:20.520500Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24133 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... 2024-11-21T08:55:21.780428Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:55:21.781725Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:21.781747Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:21.781813Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:55:21.787376Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:55:21.792480Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:21.792511Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:21.792627Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 115 offset: -1 2024-11-21T08:55:21.792713Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 2024-11-21T08:55:21.792778Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2024-11-21T08:55:21.792849Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 177 WTime 1732179321792 2024-11-21T08:55:21.792890Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:55:21.802298Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 2024-11-21T08:55:21.802324Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:55:21.802342Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:55:21.802375Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:55:21.802378Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:55:21.802409Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:55:21.802448Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:55:21.802452Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:55:21.802457Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T08:55:21.802461Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:55:21.802482Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732179321792 queuesize 0 startOffset 0 2024-11-21T08:55:21.805350Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:55:21.808502Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:55:21.808682Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 9000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T08:55:21.808692Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2024-11-21T08:55:21.808696Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2024-11-21T08:55:21.808792Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler === AcksHandler has written a message, closing the session 2024-11-21T08:55:21.808945Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T08:55:21.808959Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2024-11-21T08:55:21.816273Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:55:21.816486Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:55:21.820465Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:21.820487Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:21.820538Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2024-11-21T08:55:21.820737Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:55:21.820959Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:21.820966Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:21.820986Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2024-11-21T08:55:21.821051Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2024-11-21T08:55:21.821084Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2024-11-21T08:55:21.821142Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1732179321821 2024-11-21T08:55:21.821177Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:55:21.824963Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 2024-11-21T08:55:21.824984Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:55:21.824998Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T08:55:21.825040Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2024-11-21T08:55:21.825702Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:55:21.825754Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 4000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T08:55:21.825760Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2024-11-21T08:55:21.825764Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler 2024-11-21T08:55:21.825325Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle === Inside SessionClosedHandler 2024-11-21T08:55:21.825948Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2024-11-21T08:55:21.825997Z :INFO: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2024-11-21T08:55:21.826001Z :INFO: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session will now close 2024-11-21T08:55:21.826016Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T08:55:21.826234Z :WARNING: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T08:55:21.826720Z :DEBUG: [/Root] SessionId [src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0] MessageGroupId [src_id] Write session: destroy 2024-11-21T08:55:21.826438Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0 grpc read done: success: 0 data: 2024-11-21T08:55:21.826445Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0 grpc read failed 2024-11-21T08:55:21.826450Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0 grpc closed 2024-11-21T08:55:21.826455Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|a9518f89-c5032fc7-a4bcfd2b-f963845d_0 is DEAD 2024-11-21T08:55:21.826631Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:21.833407Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:21.833428Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439653537451705276:2477] destroyed 2024-11-21T08:55:21.833443Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:23.014903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:23.014926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.014930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:23.014934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:23.014939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:23.014942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:23.014948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.015033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:23.024060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:23.024082Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:23.026802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:23.027383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:23.027408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:23.028769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:23.029023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:23.029142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.029272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:23.030547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.030836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.030849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:55:23.030901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:23.030908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.030913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:23.030928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.032398Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:23.049126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.049220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.049282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:23.049332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:23.049339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.050180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.050212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:23.050274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.050283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:23.050290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:23.050295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:23.050696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.050708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:23.050714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:23.051081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.051093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.051099Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.051107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.051673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:23.052078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:23.052127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:23.052325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.052347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.052355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.052411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:23.052417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.052452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.052465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:23.052851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.052857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.052900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.052905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:23.052989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.052996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:23.053007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:23.053011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T08:55:23.053017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:23.053022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.053027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:23.053031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:23.053041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:23.053047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:23.053050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:23.053315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.053327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.053332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:23.053337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:23.053341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.053353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.175803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:23.175859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:23.175867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:23.175942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:23.175957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:23.175962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:641:2564] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2024-11-21T08:55:23.176563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.176595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.176639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T08:55:23.176999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.177015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:55:23.177051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T08:55:23.177059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T08:55:23.177104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T08:55:23.177114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:55:23.177117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:648:2571] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2024-11-21T08:55:23.177604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.177633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.177665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T08:55:23.178088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.178111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T08:55:23.178161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T08:55:23.178166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T08:55:23.178210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T08:55:23.178220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:55:23.178223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:655:2578] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2024-11-21T08:55:23.178620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.178639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.178664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T08:55:23.179061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.179082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T08:55:23.179128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T08:55:23.179132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T08:55:23.179182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T08:55:23.179195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T08:55:23.179197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:662:2585] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2024-11-21T08:55:23.179549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.179566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.179597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T08:55:23.180097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.180145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2024-11-21T08:55:23.180201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2024-11-21T08:55:23.180220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2024-11-21T08:55:23.180277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2024-11-21T08:55:23.180291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T08:55:23.180295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:669:2592] TestWaitNotification: OK eventTxId 109 >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:23.103805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:23.103834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.103839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:23.103844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:23.103851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:23.103855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:23.103863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.103947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:23.116008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:23.116041Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2024-11-21T08:55:23.120171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:23.121153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:23.121196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:23.122871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:23.123049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:23.123148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.123235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:23.124392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.124635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.124642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.124671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:23.124676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.124680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:23.124691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.125929Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:23.138664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.138747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.138797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:23.138835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:23.138841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.139489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.139509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:23.139554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.139560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:23.139563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:23.139567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:23.140033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.140051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:23.140069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:23.140495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.140505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.140512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.140519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.141164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:23.141624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:23.141673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:23.141866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.141890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.141898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.141949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:23.141957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.142007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.142019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:23.142399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.142407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.142446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.142452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:23.142545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.142552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:23.142578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:23.142583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.142589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:23.142594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.142599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:23.142603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:23.142613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:23.142620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:23.142624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:23.142937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.142950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.142955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:23.142961Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:23.142966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.142978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tatus StatusSuccess 2024-11-21T08:55:23.332898Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.333103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:771:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:773:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:775:2058] recipient: [1:774:2681] Leader for TabletID 72057594046678944 is [1:776:2682] sender: [1:777:2058] recipient: [1:774:2681] 2024-11-21T08:55:23.339104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:23.339135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.339140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:23.339145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:23.339152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:23.339156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:23.339165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.339219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:23.340311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:23.340617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:23.340655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:23.340673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:23.340679Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:23.340735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:23.340804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:23.340830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:23.340839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read 
records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:23.340922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:23.340989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:23.340991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:23.340995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T08:55:23.340998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:23.341006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:23.341093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T08:55:23.341256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.341273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.343230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.343250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.343276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:23.343285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.343290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:23.343329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |89.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationModify >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> TSchemeShardTopicSplitMergeTest::Boot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:23.864544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:23.864574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.864579Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:23.864584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:23.864590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:23.864594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:23.864602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:23.864694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:23.875205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:23.875229Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:23.877966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:23.878730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:23.878764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:23.880199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:23.880440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:23.880563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.880667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:23.881676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.881950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.881961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.882000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:23.882008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.882014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:23.882032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.883194Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:23.900322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:23.900417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.900490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:23.900540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:23.900550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.901359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.901390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:23.901473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.901485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:23.901489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:23.901494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:23.901929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.901941Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:23.901946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:23.902285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.902297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.902303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.902311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.902909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:23.903291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:23.903347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:23.903535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:23.903561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:23.903569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.903626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:23.903634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:23.903668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.903681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:23.904072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:23.904082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:23.904125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:23.904130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:23.904239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:23.904246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:23.904257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:23.904262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.904268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:23.904273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:23.904278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:23.904281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:23.904291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:23.904297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:23.904301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:23.904568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.904581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:23.904586Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:23.904590Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:23.904595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:23.904607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... olution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:24.064039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:749:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:752:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:753:2058] recipient: [1:751:2665] Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:755:2058] recipient: [1:751:2665] 2024-11-21T08:55:24.070153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:24.070176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:24.070181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:24.070185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:24.070189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:24.070192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:24.070199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:24.070257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:24.071349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:24.071892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:24.071963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:24.072020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:24.072027Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:24.072054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:24.072166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, 
at schemeshard: 72057594046678944 2024-11-21T08:55:24.072189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:24.072196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:24.072225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:24.072352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:24.072440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:24.072443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T08:55:24.072446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:24.072464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:24.072603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at 
schemeshard: 72057594046678944 2024-11-21T08:55:24.072760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.072882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.074483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:24.074501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:24.074817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:24.074830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:24.074838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:24.075237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:812:2058] recipient: [1:15:2062] 2024-11-21T08:55:24.137509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:55:24.137642Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 163us result status StatusSuccess 2024-11-21T08:55:24.137840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 
72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2024-11-21T08:55:22.544679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653541563699071:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:22.544843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b5b/r3tmp/tmp6dn7lO/pdisk_1.dat 2024-11-21T08:55:22.611308Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9487, node 1 2024-11-21T08:55:22.632569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:22.632588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:22.632589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:22.632632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:55:22.645067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:22.645097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:22.648615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:22.674044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:22.675156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:22.675179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:22.677972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:22.678047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:22.678052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:55:22.678918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:22.678922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:22.680411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:22.685292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:22.687069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179322727, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:22.687081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:22.687183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:22.688763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:22.688831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:22.688847Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:22.688863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:22.688874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:22.688895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:22.689782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:22.689811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:22.689815Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:22.689837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationModify [GOOD] |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2024-11-21T08:55:08.930001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653479782822572:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:08.930504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a74/r3tmp/tmpIXTpIp/pdisk_1.dat 2024-11-21T08:55:08.980383Z node 1 :IMPORT 
WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27346, node 1 2024-11-21T08:55:08.993620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:08.993630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:08.993632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:08.993664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:09.031404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.031445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.032538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.059403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:09.062775Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:09.062816Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Connect to grpc://localhost:19597 2024-11-21T08:55:09.063502Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:09.065851Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Status 14 Service Unavailable 2024-11-21T08:55:09.065921Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T08:55:09.065937Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:09.065942Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:09.066011Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:09.066468Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Status 14 Service Unavailable 2024-11-21T08:55:09.066502Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T08:55:09.066514Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:09.930797Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T08:55:09.930820Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:09.930910Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:09.931806Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Status 14 Service Unavailable 2024-11-21T08:55:09.931881Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T08:55:09.931892Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:10.931294Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T08:55:10.931320Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:10.931474Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:10.933481Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Status 14 Service Unavailable 
2024-11-21T08:55:10.933567Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T08:55:10.933573Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:12.932124Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T08:55:12.932146Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:12.932301Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:12.933095Z node 1 :GRPC_CLIENT DEBUG: [7f6ff082690] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T08:55:12.933142Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2024-11-21T08:55:12.933171Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2024-11-21T08:55:13.930028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653479782822572:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.930079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:21.393058Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653537697800480:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:21.393280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a74/r3tmp/tmpoL82hT/pdisk_1.dat 2024-11-21T08:55:21.422260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15330, node 2 2024-11-21T08:55:21.446799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:21.446815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:21.446817Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:21.446872Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:21.497190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:21.497236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:21.497865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:21.501295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:21.501581Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:21.502642Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:21.502672Z node 2 :GRPC_CLIENT DEBUG: [7f6ff086a10] Connect to grpc://localhost:7765 2024-11-21T08:55:21.502879Z node 2 :GRPC_CLIENT DEBUG: [7f6ff086a10] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:21.505146Z node 2 :GRPC_CLIENT DEBUG: [7f6ff086a10] Status 14 Service Unavailable 2024-11-21T08:55:21.505259Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unava ... EnableGrpc on GrpcPort 21571, node 4 2024-11-21T08:55:24.024638Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:24.024655Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:24.024657Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:24.024699Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1028 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:24.105773Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:24.105828Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:24.106409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:24.106839Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:55:24.109464Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:24.109495Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Connect to grpc://localhost:8899 2024-11-21T08:55:24.109751Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.111754Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T08:55:24.111820Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T08:55:24.111833Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T08:55:24.112094Z node 4 :GRPC_CLIENT DEBUG: [7f6ff087310] Connect to grpc://localhost:27367 2024-11-21T08:55:24.112187Z node 4 :GRPC_CLIENT DEBUG: [7f6ff087310] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T08:55:24.114047Z node 4 :GRPC_CLIENT DEBUG: [7f6ff087310] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T08:55:24.114177Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T08:55:24.114357Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T08:55:24.114414Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.114912Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Status 16 Access Denied 2024-11-21T08:55:24.114957Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write 
now has a permanent error "Access Denied" retryable:0 2024-11-21T08:55:24.114969Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T08:55:24.115125Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:24.115137Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T08:55:24.115163Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.115299Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.115629Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T08:55:24.115675Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T08:55:24.115935Z node 4 :GRPC_CLIENT DEBUG: [7f6ff085c90] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T08:55:24.115970Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T08:55:24.115979Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T08:55:24.116011Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T08:55:24.429475Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653550573612883:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.429800Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a74/r3tmp/tmpW7T7MD/pdisk_1.dat 2024-11-21T08:55:24.440381Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24088, node 5 2024-11-21T08:55:24.449572Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:24.449587Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:24.449589Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:24.449633Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:24.529596Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:24.529621Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:24.530706Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:24.531873Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:24.533647Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:24.533666Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T08:55:24.533676Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Connect to grpc://localhost:31584 2024-11-21T08:55:24.533888Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.533989Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.535909Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Status 14 Service Unavailable 2024-11-21T08:55:24.535959Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T08:55:24.535967Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2024-11-21T08:55:24.535989Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T08:55:24.535999Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:24.536003Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T08:55:24.536012Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T08:55:24.536061Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: 
"something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.536193Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T08:55:24.536677Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Status 1 CANCELLED 2024-11-21T08:55:24.536701Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2024-11-21T08:55:24.536739Z node 5 :GRPC_CLIENT DEBUG: [7f6ff085a50] Status 1 CANCELLED 2024-11-21T08:55:24.536777Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2024-11-21T08:55:24.536788Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2024-11-21T08:55:08.956484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653479141061649:2180];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:08.956531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a50/r3tmp/tmpH5U5DM/pdisk_1.dat 2024-11-21T08:55:09.003911Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9627, node 1 2024-11-21T08:55:09.016799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.016811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.016812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.016842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:09.049151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:09.052711Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:09.052738Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Connect to grpc://localhost:7547 2024-11-21T08:55:09.053258Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2024-11-21T08:55:09.055149Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Status 14 Service Unavailable 2024-11-21T08:55:09.055213Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:09.055228Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:09.055238Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:09.055294Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2024-11-21T08:55:09.055771Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Status 14 Service Unavailable 2024-11-21T08:55:09.055802Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:09.055806Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:09.057153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.057175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.058197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.956564Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T08:55:09.956599Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:09.956720Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2024-11-21T08:55:09.957800Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Status 14 Service Unavailable 2024-11-21T08:55:09.957879Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read 
now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:09.957891Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:10.957245Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T08:55:10.957280Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:10.957420Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2024-11-21T08:55:10.963261Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Status 14 Service Unavailable 2024-11-21T08:55:10.963360Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:10.963371Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:12.958392Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T08:55:12.958426Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:12.958509Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2024-11-21T08:55:12.959448Z node 1 :GRPC_CLIENT DEBUG: [17333f081250] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2024-11-21T08:55:12.959523Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T08:55:13.955789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653479141061649:2180];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:13.955830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:21.359015Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653538194510081:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:21.359172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a50/r3tmp/tmp9mmAcb/pdisk_1.dat 2024-11-21T08:55:21.373401Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62052, node 2 2024-11-21T08:55:21.385406Z node 2 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:21.385419Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:21.385422Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:21.385479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:21.459312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:21.459366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:21.460484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:21.462678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain ... urce_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2024-11-21T08:55:23.766337Z node 3 :GRPC_CLIENT DEBUG: [17333f087550] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2024-11-21T08:55:23.766369Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T08:55:24.018784Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653550913628770:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.018904Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a50/r3tmp/tmpfAV3Ov/pdisk_1.dat 2024-11-21T08:55:24.027196Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7172, node 4 2024-11-21T08:55:24.037684Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:24.037698Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:24.037700Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:24.037746Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:24.119160Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:24.119200Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:24.120192Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:24.121528Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:24.123398Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T08:55:24.123419Z node 4 :GRPC_CLIENT DEBUG: [17333f087550] Connect to grpc://localhost:21526 2024-11-21T08:55:24.123744Z node 4 :GRPC_CLIENT DEBUG: [17333f087550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2024-11-21T08:55:24.129681Z node 4 :GRPC_CLIENT DEBUG: [17333f087550] Status 14 Service Unavailable 2024-11-21T08:55:24.129771Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:24.129783Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T08:55:24.129790Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T08:55:24.129808Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T08:55:24.129892Z node 4 :GRPC_CLIENT DEBUG: [17333f087550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { 
path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2024-11-21T08:55:24.130476Z node 4 :GRPC_CLIENT DEBUG: [17333f087550] Status 1 CANCELLED 2024-11-21T08:55:24.130523Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2024-11-21T08:55:24.130532Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2024-11-21T08:55:24.130535Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2024-11-21T08:55:24.453125Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653549924838962:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.453144Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a50/r3tmp/tmpA2ENtP/pdisk_1.dat 2024-11-21T08:55:24.464471Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26951, node 5 2024-11-21T08:55:24.474620Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:24.474633Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:24.474635Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:24.474675Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:24.553363Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:24.553398Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:24.554417Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:24.555607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:24.557359Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2024-11-21T08:55:24.557378Z node 5 :GRPC_CLIENT DEBUG: [17333f087550] Connect to grpc://localhost:24107 2024-11-21T08:55:24.557618Z node 5 :GRPC_CLIENT DEBUG: [17333f087550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2024-11-21T08:55:24.559438Z node 5 :GRPC_CLIENT DEBUG: [17333f087550] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2024-11-21T08:55:24.559535Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T08:55:24.559658Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T08:55:24.559708Z node 5 :GRPC_CLIENT DEBUG: [17333f087550] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2024-11-21T08:55:24.560157Z node 5 :GRPC_CLIENT DEBUG: [17333f087550] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2024-11-21T08:55:24.560222Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:24.746325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T08:55:24.746348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:24.746351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:24.746354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:24.746359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:24.746362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:24.746368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:24.746437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:24.753617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:24.753637Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:24.755934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:24.756584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:24.756617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:24.757996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:24.758166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:24.758262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:24.758344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:24.759083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:24.759290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:24.759296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:24.759324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:24.759328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:24.759332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:24.759344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.760170Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:24.771077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T08:55:24.771155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.771209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:24.771251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:24.771256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.771868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:24.771888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:24.771938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.771946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:24.771949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:24.771952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:24.772271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.772282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:24.772285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:24.772526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.772531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.772535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:24.772540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:24.773040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:24.773414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:24.773471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:24.773602Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:24.773621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:24.773626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:24.773668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:24.773673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:24.773700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:24.773710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:24.774020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:24.774028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:24.774066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:24.774072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:24.774146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:24.774150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:24.774161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:24.774164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:24.774168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:24.774171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:24.774174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:24.774176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:24.774184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:24.774188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:24.774191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:24.774389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:24.774397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:24.774400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:24.774404Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:24.774407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:24.774416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... es { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.086451Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:636:2058] recipient: [2:100:2135] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:639:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:640:2058] recipient: [2:638:2562] Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:642:2058] recipient: [2:638:2562] 2024-11-21T08:55:25.091167Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:25.091191Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.091196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:25.091206Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:25.091213Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:25.091216Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:25.091225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.091279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:25.092040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:25.092322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:25.092352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:25.092381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:25.092385Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:25.092403Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:25.092475Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:55:25.092493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:55:25.092499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092505Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:55:25.092562Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092593Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092601Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:55:25.092608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:55:25.092610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T08:55:25.092613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:55:25.092622Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:25.092710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092786Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092802Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092813Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092888Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092898Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.092905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.094337Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.094353Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.094506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:25.094515Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.094522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:25.094823Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:700:2058] recipient: [2:15:2062] 2024-11-21T08:55:25.146202Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:55:25.146303Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 127us result status StatusSuccess 2024-11-21T08:55:25.146463Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: 
Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition |89.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |89.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> BasicUsage::WriteSessionNoAvailableDatabase >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TSchemeShardTopicSplitMergeTest::MargePartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:25.829091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:25.829117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.829121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:25.829125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:25.829129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:25.829132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:25.829139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.829222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:25.836439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:25.836462Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:25.838532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:25.839045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:25.839074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:25.840191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:25.840361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:25.840450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.840529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:25.841440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.841690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.841699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.841727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:25.841732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.841736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:25.841749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.842665Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:25.854051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T08:55:25.854136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.854202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:25.854242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:25.854248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.854884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.854906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:25.854957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.854964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:25.854967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:25.854970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:25.855260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.855266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:25.855269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:25.855501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.855506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.855511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.855516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.855917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:25.856202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:25.856257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:25.856399Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.856417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.856425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.856465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:25.856470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.856493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.856502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:25.856790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.856795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.856826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.856829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:25.856912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.856917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:25.856926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:25.856929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.856932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:25.856936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.856939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:25.856941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:25.856948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:25.856953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:25.856956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:25.857140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.857148Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.857152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:25.857155Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:25.857158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.857170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... : 72075186233409548 TxId: 104 Status: OK 2024-11-21T08:55:25.887368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T08:55:25.887373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T08:55:25.887376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:25.887726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T08:55:25.887773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T08:55:25.887777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T08:55:25.887823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:55:25.887827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T08:55:25.887831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:55:25.919503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.919550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 378 RawX2: 4294969643 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.919563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T08:55:25.919593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:25.956567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:25.956630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T08:55:25.956641Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T08:55:25.956655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.956658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:55:25.956708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T08:55:25.956752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:25.956763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:25.957173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.957334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.957340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:25.957379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:25.957409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.957413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T08:55:25.957417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T08:55:25.957515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.957523Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:55:25.957533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T08:55:25.957537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:25.957542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T08:55:25.957548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:25.957552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:55:25.957555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:55:25.957579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:25.957584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, 
tx: 104, publications: 2, subscribers: 1 2024-11-21T08:55:25.957586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:55:25.957588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:55:25.957775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:25.957786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:25.957789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:25.957793Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:55:25.957796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:25.957881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:25.957887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:25.957890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:25.957892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:55:25.957894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:25.957899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T08:55:25.957902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:404:2371] 2024-11-21T08:55:25.958558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:25.958800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:25.958816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:55:25.958820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:541:2475] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2024-11-21T08:55:25.961506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: 
ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:25.961551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.961585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2024-11-21T08:55:25.962004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.962031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:25.962073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:25.962077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:25.962126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:25.962137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:25.962141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:641:2564] TestWaitNotification: OK eventTxId 105 >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:25.869442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:25.869487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.869493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:25.869500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:25.869507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T08:55:25.869511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:25.869522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.869604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:25.880705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:25.880731Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:25.883538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:25.884345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:25.884382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:25.885894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:25.886093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:25.886188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.886281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:25.887282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.887545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.887558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.887600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:25.887608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.887615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:25.887628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.888957Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:25.906448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:25.906545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.906613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:25.906662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:25.906672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.907593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.907622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:25.907682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.907694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:25.907699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:25.907704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:25.908106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.908120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:25.908126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:25.908542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.908553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.908560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.908567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.909162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:25.909604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:25.909656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:25.909850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.909876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.909884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:55:25.909933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:25.909940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.909977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.909989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:25.910381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.910390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.910433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.910439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:25.910526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.910534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:25.910547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:25.910552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.910557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:25.910562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.910567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:25.910571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:25.910583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:25.910590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:25.910595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:25.910914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.910928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.910934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:25.910941Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:25.910946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.910958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EvProposeTransactionResult> complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.419850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.419854Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.419866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T08:55:26.419891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2024-11-21T08:55:26.420420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2024-11-21T08:55:26.420445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2024-11-21T08:55:26.420462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2024-11-21T08:55:26.420508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:26.420535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T08:55:26.420544Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T08:55:26.420549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T08:55:26.420553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:26.420850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:26.420898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:26.420904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:26.420980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:26.420985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:55:26.420989Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at 
schemeshard: 72057594046678944 2024-11-21T08:55:26.452240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:26.452284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.452297Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T08:55:26.452304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:26.480342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:26.480423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T08:55:26.480438Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T08:55:26.480454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.480459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:55:26.480523Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T08:55:26.480571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:26.481500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.481794Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:26.481810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:26.481904Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:26.481910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:55:26.482017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.482029Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:55:26.482047Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:55:26.482052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:26.482060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 105, ready parts: 1/1, is published: false 2024-11-21T08:55:26.482068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:26.482074Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:55:26.482080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:55:26.482117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:26.482124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T08:55:26.482128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:26.482400Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:26.482417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:26.482426Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:26.482433Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:26.482438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:26.482455Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T08:55:26.482460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T08:55:26.483573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:26.483615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.483621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:675:2600] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2024-11-21T08:55:26.486944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:26.487031Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.487087Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 
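For reference, the request that draws the StatusInvalidParameter above is the scheme transaction printed verbatim in the TTxOperationPropose trace. Below is the same message laid out as an annotated text-format sketch; every field name and value is copied from the log, and only the comments (including the reading that partition 1 is no longer in a splittable state) are an interpretation:

# Scheme transaction proposed to schemeshard 72057594046678944 as txId 106.
# The split is refused with "Invalid partition status: 2", presumably because the
# target partition is no longer Active after the preceding operation (txId 105).
Transaction {
  WorkingDir: "/MyRoot/USER_1"                  # parent directory of the topic
  OperationType: ESchemeOpAlterPersQueueGroup   # alter of a persistent queue (topic) group
  AlterPersQueueGroup {
    Name: "Topic1"
    PQTabletConfig { PartitionConfig { } }      # tablet config left at defaults
    Split {
      Partition: 1                              # partition to split
      SplitBoundary: "W"                        # key that separates the two child partitions
    }
  }
}
TxId: 106
TabletId: 72057594046678944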
2024-11-21T08:55:26.487680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.487713Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:55:26.487862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T08:55:26.487868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T08:55:26.487947Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T08:55:26.487963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.487967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:770:2683] TestWaitNotification: OK eventTxId 106 >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:25.112963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:25.112986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.112990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:25.112995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:25.113001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:25.113003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:25.113010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:25.113090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:25.121509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:25.121538Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:25.124898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:25.125746Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:25.125786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:25.127558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:25.127815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:25.127931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.128032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:25.129405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.129688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.129697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.129728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:25.129733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.129737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:25.129749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.130929Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:25.147178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:25.147270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.147338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:25.147394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:25.147403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.148377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.148408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:25.148472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.148499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:25.148503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:25.148509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:25.148938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.148950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:25.148954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:25.149325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.149335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.149341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.149349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.149979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:25.150391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:25.150446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:25.150632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:25.150657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:25.150665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.150722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:25.150729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:25.150764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.150778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:25.151316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:25.151365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:25.151425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:25.151432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:25.151553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:25.151562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:25.151575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:25.151580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.151586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:25.151591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:25.151596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:25.151600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:25.151614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:25.151621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:25.151625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:25.152020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.152040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:25.152046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:25.152052Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:25.152058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:25.152074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:55:26.243004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.243011Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:55:26.243021Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:55:26.243024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:26.243028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T08:55:26.243033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:55:26.243037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:55:26.243040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:55:26.243067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:26.243072Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T08:55:26.243075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T08:55:26.243233Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:26.243244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:55:26.243251Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:55:26.243255Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:55:26.243258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:26.243269Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T08:55:26.243272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T08:55:26.243865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:55:26.243882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.243886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:714:2635] TestWaitNotification: OK eventTxId 105 2024-11-21T08:55:26.715825Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.715969Z node 2 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 142us result status StatusSuccess 2024-11-21T08:55:26.716187Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.788014Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:55:26.788172Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 181us result status StatusSuccess 2024-11-21T08:55:26.788396Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { 
Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2024-11-21T08:55:26.789227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:26.789291Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.789323Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
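The two DescribeScheme results above encode the partition tree left behind by the earlier split, and the alter that follows is refused because auto partitioning cannot be switched off again. A condensed, annotated view of the relevant part of that description (field names and values copied from the log; the comments, and the key-range reading of ToBound/FromBound, are assumptions):

PersQueueGroup {
  Name: "Topic1"
  TotalGroupCount: 3                # three partitions exist after the split
  PQTabletConfig {
    PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE }   # auto partitioning enabled
  }
  # split parent, now inactive, with two children
  Partitions { PartitionId: 0 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 }
  # children cover the key range on either side of the split boundary "\010"
  Partitions { PartitionId: 1 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } }
  Partitions { PartitionId: 2 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } }
  NextPartitionId: 3
}
# The subsequent AlterPersQueueGroup with PartitionStrategy { PartitionStrategyType: DISABLED }
# (txId 106) is rejected with "Can`t disable auto partitioning.", which is what
# TSchemeShardTopicSplitMergeTest::DisableSplitMerge appears to assert.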
2024-11-21T08:55:26.790413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.790454Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:55:26.790544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T08:55:26.790552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T08:55:26.790634Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T08:55:26.790657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.790662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:780:2694] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:55:26.355309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:26.355334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:26.355339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:26.355345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:26.355352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:26.355357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:26.355365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:26.355451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:26.363672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:26.363692Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:26.366000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:26.366506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:26.366531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:26.367700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:26.367849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:26.367919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:26.367982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:26.368967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:26.369175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:26.369181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:26.369210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:26.369215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:26.369219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:26.369228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.370206Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:55:26.380956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:26.381035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.381086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:26.381122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:26.381127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.381913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:26.381934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:26.381979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.381986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:26.381989Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:26.381993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:26.382319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.382328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:26.382330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:26.382591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.382596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.382600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:26.382604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:26.383000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:26.383328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:26.383366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:26.383525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:26.383545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.383552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:26.383592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:26.383597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:26.383619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:26.383627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:26.383922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:26.383926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:26.383957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:26.383960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:55:26.384023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.384028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:26.384038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:26.384041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:26.384045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:26.384048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:26.384051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:26.384053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:26.384059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:26.384063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:26.384066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:55:26.384266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:26.384279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:55:26.384283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:55:26.384286Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:55:26.384289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:26.384296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
548 TxId: 104 Status: OK 2024-11-21T08:55:26.885748Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T08:55:26.885753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T08:55:26.885758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:26.886108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T08:55:26.886171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T08:55:26.886179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T08:55:26.886245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:55:26.886253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T08:55:26.886259Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T08:55:26.919844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:26.919906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.919919Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T08:55:26.919939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T08:55:26.947241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T08:55:26.947321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T08:55:26.947333Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T08:55:26.947346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.947351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T08:55:26.947402Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 
2024-11-21T08:55:26.947444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:55:26.947455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:26.948224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.948298Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:26.948307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:55:26.948356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:26.948399Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:26.948407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T08:55:26.948414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T08:55:26.948500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.948509Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:55:26.948523Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T08:55:26.948528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:26.948534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T08:55:26.948540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T08:55:26.948546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:55:26.948551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:55:26.948581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:55:26.948589Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2024-11-21T08:55:26.948593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T08:55:26.948600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T08:55:26.948934Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:26.948954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2024-11-21T08:55:26.948960Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:26.948965Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:55:26.948971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:55:26.949059Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:26.949070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T08:55:26.949074Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T08:55:26.949078Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T08:55:26.949082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:55:26.949091Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T08:55:26.949096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T08:55:26.950391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:26.950425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T08:55:26.950443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.950450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:540:2477] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2024-11-21T08:55:26.953824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:26.953867Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:55:26.953901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2024-11-21T08:55:26.954267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:26.954290Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:55:26.954328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:55:26.954332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:55:26.954385Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:55:26.954395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:55:26.954398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:638:2564] TestWaitNotification: OK eventTxId 105 >> TTxLocatorTest::TestWithReboot >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> TTxLocatorTest::TestWithReboot [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> Cdc::DocApi[PqRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2024-11-21T08:55:27.395932Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:27.396012Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:27.396123Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:27.396501Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.396612Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:27.398082Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398096Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398102Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398114Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:27.398144Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398160Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:27.398173Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:27.398318Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T08:55:27.398388Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T08:55:27.398430Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T08:55:27.398470Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T08:55:27.398492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T08:55:27.398511Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398520Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T08:55:27.398533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398564Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398578Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398584Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2024-11-21T08:55:27.398596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398605Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2024-11-21T08:55:27.398623Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398634Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T08:55:27.398650Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398675Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398683Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T08:55:27.398696Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398705Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 
2024-11-21T08:55:27.398708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T08:55:27.398718Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398724Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T08:55:27.398726Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T08:55:27.398732Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T08:55:27.398734Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T08:55:27.398740Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398744Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398748Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T08:55:27.398750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T08:55:27.398757Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398762Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T08:55:27.398764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T08:55:27.398770Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398775Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398779Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T08:55:27.398781Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T08:55:27.398787Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T08:55:27.398789Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T08:55:27.398796Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398800Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398805Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T08:55:27.398807Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T08:55:27.398814Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T08:55:27.398819Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T08:55:27.398825Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.398829Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T08:55:27.398831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T08:55:27.399370Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2024-11-21T08:55:27.399541Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2024-11-21T08:55:27.399629Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2024-11-21T08:55:27.399636Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2024-11-21T08:55:27.399674Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2024-11-21T08:55:27.399680Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2024-11-21T08:55:27.399686Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2024-11-21T08:55:27.399692Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 
IN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566328Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T08:55:27.566331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T08:55:27.566339Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T08:55:27.566343Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T08:55:27.566354Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566364Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T08:55:27.566368Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T08:55:27.566378Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566389Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2024-11-21T08:55:27.566393Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T08:55:27.566402Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566409Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T08:55:27.566412Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T08:55:27.566420Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566431Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T08:55:27.566434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T08:55:27.566443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566453Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566465Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T08:55:27.566469Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T08:55:27.566500Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# 
[72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566509Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566518Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T08:55:27.566521Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T08:55:27.566533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566544Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.566553Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T08:55:27.566556Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T08:55:27.567022Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2024-11-21T08:55:27.567319Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2024-11-21T08:55:27.567458Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2024-11-21T08:55:27.567469Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2024-11-21T08:55:27.567498Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T08:55:27.567504Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T08:55:27.567510Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567516Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T08:55:27.567529Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567535Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567542Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567549Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2024-11-21T08:55:27.567555Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567561Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567594Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2024-11-21T08:55:27.567600Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T08:55:27.567605Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T08:55:27.567609Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T08:55:27.567613Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T08:55:27.567619Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2024-11-21T08:55:27.567624Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2024-11-21T08:55:27.567628Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2024-11-21T08:55:27.567632Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2024-11-21T08:55:27.567637Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2024-11-21T08:55:27.567641Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2024-11-21T08:55:27.567646Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2024-11-21T08:55:27.567687Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:27.567946Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.568556Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:27.568606Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:27.568732Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:27.568746Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.568762Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:27.568775Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> Cdc::KeysOnlyLog[PqRunner] |89.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::UuidExchange[PqRunner] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> YdbSdkSessionsPool::StressTestSync10 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2024-11-21T08:55:18.985758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653523827738626:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:18.985970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d75/r3tmp/tmpd0QGMW/pdisk_1.dat 2024-11-21T08:55:19.060016Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5779, node 1 2024-11-21T08:55:19.077222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.077238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:19.077239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.077274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:19.086750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.086782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.088248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TClient is connected to server localhost:61258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:19.126259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.127024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.127037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.127397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.127439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.127446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
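The YdbSdkSessionsPool::StressTest* cases whose output appears here hammer the SDK's table-session pool against a single-node server like the one bootstrapped in this log. A rough client-side equivalent is sketched below, assuming the in-tree C++ SDK header paths and the TTableClient session-pool API; the endpoint and database values are placeholders taken from the log:

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    // Placeholder endpoint/database; the test talks to the GRpc port it just started.
    auto driver = TDriver(TDriverConfig()
        .SetEndpoint("localhost:5779")
        .SetDatabase("/Root"));

    // TTableClient maintains an internal session pool; GetSession() hands out
    // an idle pooled session when one is available instead of creating a new one.
    TTableClient client(driver);

    auto sessionResult = client.GetSession().GetValueSync();
    if (sessionResult.IsSuccess()) {
        auto session = sessionResult.GetSession();
        auto queryResult = session.ExecuteDataQuery(
            "SELECT 1;",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
        // The session returns to the pool when it goes out of scope.
    }

    driver.Stop(true);
    return 0;
}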
2024-11-21T08:55:19.127696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.127708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:19.127910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.127975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.128608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319178, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.128617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:19.128657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:19.128966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.129002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.129013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:19.129025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:19.129035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:19.129043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:55:19.129344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:55:19.129359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:19.129365Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.129376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:55:23.985897Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653523827738626:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:23.985931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbSdkSessionsPool::StressTestSync1 [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2024-11-21T08:55:19.585644Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653527174812100:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.585792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cef/r3tmp/tmpEaEFZK/pdisk_1.dat 2024-11-21T08:55:19.629348Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61864, node 1 2024-11-21T08:55:19.642680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.642690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:19.642691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.642735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:19.664588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.665690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.665709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.666498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.666573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.666582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:55:19.666964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.667164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.667175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:19.667595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.668479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319717, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.668493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:19.668555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:19.668949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.669001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.669017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:19.669028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:19.669047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:19.669061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:19.669502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:19.669516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:19.669519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.669549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:55:19.686037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.686071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.687627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:24.586150Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653527174812100:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.586194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> 
PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> BasicUsage::CloseWriteSessionImmediately [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> YdbSdkSessionsPool::FailTest [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2024-11-21T08:55:15.692408Z :BasicWriteSession INFO: Random seed for debugging is 1732179315692399 2024-11-21T08:55:15.812777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653512597150375:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.812860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:15.816345Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653511350334930:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.843012Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003322/r3tmp/tmpBX78pH/pdisk_1.dat 2024-11-21T08:55:15.851287Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:15.852140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:15.892995Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:15.912406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.912439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.915029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26815, node 1 2024-11-21T08:55:15.925369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003322/r3tmp/yandexmhQ41Y.tmp 2024-11-21T08:55:15.925386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003322/r3tmp/yandexmhQ41Y.tmp 2024-11-21T08:55:15.925499Z node 1 :NET_CLASSIFIER WARN: 
successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003322/r3tmp/yandexmhQ41Y.tmp 2024-11-21T08:55:15.925547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:15.932447Z INFO: TTestServer started on Port 6422 GrpcPort 26815 TClient is connected to server localhost:6422 PQClient connected to localhost:26815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:55:15.963210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.963237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.965089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:15.966238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:15.972455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:55:16.121913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653515645302344:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.121931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653515645302367:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.121938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.123389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.130165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653515645302372:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:55:16.186509Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653516892118482:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:16.186632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.186643Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTAyNThiNDgtNDlmNzNjZWUtYTZhMjBiMWYtMzdlZDdiNjY=, ActorId: [1:7439653516892118441:2299], ActorState: ExecuteState, TraceId: 01jd6yxkdnag6fgfm2rd2att0p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:16.187404Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:16.229073Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653515645302452:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:16.232342Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M2YTlmMzYtOTAwMzMwNzQtYzhhMzkwNDAtZDcwODRjNzY=, ActorId: [2:7439653515645302340:2277], ActorState: ExecuteState, TraceId: 01jd6yxkcs53xpfprf4bskhngj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:16.232764Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:16.273469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.313836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:26815", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:16.404603Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd6yxkmw4dgt6a66y2m4hcch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQzZTM2MGMtOTE2MmE1ZTMtMzIwMmZiZTItZWRlZGI4Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653516892118850:2899] 2024-11-21T08:55:20.815635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653512597150375:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.815691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:20.816272Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653511350334930:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.817066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:21.511899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
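The records that follow (topic creation, then the "src" write session that is initialized and immediately closed with a 0 ms timeout) come from the federated-topic BasicUsage tests. A sketch of the equivalent client-side calls is given below; it uses the plain NYdb::NTopic write API rather than the federated client, and the header paths, endpoint, and topic name are assumptions for illustration:

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

using namespace NYdb;

int main() {
    auto driver = TDriver(TDriverConfig()
        .SetEndpoint("localhost:26815")   // placeholder: port from the test log
        .SetDatabase("/Root"));

    NTopic::TTopicClient client(driver);

    // Open a write session for message group "src" on the test topic...
    auto settings = NTopic::TWriteSessionSettings()
        .Path("test-topic")
        .MessageGroupId("src");
    auto session = client.CreateSimpleBlockingWriteSession(settings);

    // ...and close it immediately, mirroring "Write session: close. Timeout = 0 ms".
    session->Close(TDuration::Zero());

    driver.Stop(true);
    return 0;
}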
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:26815 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:21.556267Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:26815 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: ... Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:16476 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:30.168040Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16476 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:30.669421Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16476 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:31.171405Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T08:55:31.176615Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T08:55:31.176860Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T08:55:31.176867Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:16476 2024-11-21T08:55:31.177312Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:31.180547Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:55:31.180563Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T08:55:31.180755Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:31.180793Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:54582 2024-11-21T08:55:31.180797Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54582 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:55:31.180800Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:55:31.181277Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:55:31.181319Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:55:31.181326Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:55:31.181328Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:31.181334Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:31.181833Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:31.211083Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T08:55:31.211344Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653578379132860:2482] connected; active server actors: 1 2024-11-21T08:55:31.211410Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T08:55:31.211420Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T08:55:31.214294Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653578379132860:2482] disconnected; active server actors: 1 
2024-11-21T08:55:31.214309Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653578379132860:2482] disconnected no session 2024-11-21T08:55:31.228254Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:55:31.228271Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T08:55:31.228275Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653578379132830:2482] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T08:55:31.228284Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:55:31.229500Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T08:55:31.230324Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|252f1dc8-5b53cbee-92245486-7f403b13_0 2024-11-21T08:55:31.230709Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179331230 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:31.230748Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|252f1dc8-5b53cbee-92245486-7f403b13_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:31.230848Z :INFO: [] MessageGroupId [src] SessionId [src|252f1dc8-5b53cbee-92245486-7f403b13_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:31.230854Z :INFO: [] MessageGroupId [src] SessionId [src|252f1dc8-5b53cbee-92245486-7f403b13_0] Write session will now close 2024-11-21T08:55:31.230859Z :DEBUG: [] MessageGroupId [src] SessionId [src|252f1dc8-5b53cbee-92245486-7f403b13_0] Write session: aborting 2024-11-21T08:55:31.230944Z :INFO: [] MessageGroupId [src] SessionId [src|252f1dc8-5b53cbee-92245486-7f403b13_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:31.230948Z :DEBUG: [] MessageGroupId [src] SessionId [src|252f1dc8-5b53cbee-92245486-7f403b13_0] Write session: destroy 2024-11-21T08:55:31.229348Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:31.229365Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439653578379132877:2482], now have 1 active actors on pipe 2024-11-21T08:55:31.229611Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:31.229618Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:31.229643Z node 4 :PERSQUEUE INFO: new Cookie src|252f1dc8-5b53cbee-92245486-7f403b13_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:55:31.229693Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T08:55:31.229711Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:31.230161Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:31.230167Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:31.230200Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:31.232287Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|252f1dc8-5b53cbee-92245486-7f403b13_0 grpc read done: success: 0 data: 2024-11-21T08:55:31.232295Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|252f1dc8-5b53cbee-92245486-7f403b13_0 grpc read failed 2024-11-21T08:55:31.232300Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|252f1dc8-5b53cbee-92245486-7f403b13_0 grpc closed 2024-11-21T08:55:31.232305Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|252f1dc8-5b53cbee-92245486-7f403b13_0 is DEAD 2024-11-21T08:55:31.232525Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:31.232720Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:31.232735Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653578379132877:2482] destroyed 2024-11-21T08:55:31.232745Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2024-11-21T08:55:20.355387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653530712815596:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.355575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cca/r3tmp/tmpYpSaxq/pdisk_1.dat 2024-11-21T08:55:20.410315Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15371, node 1 2024-11-21T08:55:20.427096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:20.427123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:20.427125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:20.427161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14055 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:55:20.455815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:20.455854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:20.458814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:20.466856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:20.468087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:20.468114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:20.469155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:20.469269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:20.469280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:55:20.469877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:20.469888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:20.470155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:20.470808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:20.472316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179320515, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:20.472329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:20.472599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:20.473329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:20.473383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:20.473395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:20.473406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:20.473415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:20.473430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:20.474258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:20.474271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:20.474275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:20.474293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:55:25.355902Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653530712815596:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:25.355952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:31.036635Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653581226397340:2082];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:31.036810Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/002cca/r3tmp/tmppUqKcF/pdisk_1.dat 2024-11-21T08:55:31.058121Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7723, node 4 2024-11-21T08:55:31.079500Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:31.079519Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:31.079521Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:31.079566Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:31.137203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:31.137241Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:31.139637Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:31.140325Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:31.140415Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:31.140420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:31.140841Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:31.140888Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:31.140892Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:55:31.141355Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:31.141361Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:31.141722Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:55:31.142525Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179331190, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:31.142534Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:31.142596Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:31.142975Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:31.143021Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:31.143030Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:31.143042Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:31.143050Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:31.143 ... 2024-11-21T08:55:31.388717Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:31.388728Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T08:55:31.388764Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T08:55:31.388826Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:31.388837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389660Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T08:55:31.389715Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389806Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389829Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T08:55:31.389866Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389887Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389902Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:31.389935Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:55:31.390182Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.390202Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.390207Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:55:31.390272Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.390280Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.390282Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:55:31.390297Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.390304Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.390306Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:55:31.390319Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.390327Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.390329Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T08:55:31.390341Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.390347Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.390349Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T08:55:31.390989Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179331435, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:31.390999Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179331435, at schemeshard: 72057594046644480 2024-11-21T08:55:31.391018Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T08:55:31.391039Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179331435, at schemeshard: 72057594046644480 2024-11-21T08:55:31.391044Z node 4 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T08:55:31.391050Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179331435, at schemeshard: 72057594046644480 2024-11-21T08:55:31.391055Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T08:55:31.391061Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732179331435 2024-11-21T08:55:31.391064Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T08:55:31.391435Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:31.391564Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:31.391584Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T08:55:31.391593Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T08:55:31.391623Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T08:55:31.391630Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T08:55:31.391637Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T08:55:31.391640Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T08:55:31.391650Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T08:55:31.391657Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T08:55:31.391660Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T08:55:31.391674Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T08:55:31.391681Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T08:55:31.391684Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T08:55:31.391689Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T08:55:31.391950Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.391963Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.391965Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:55:31.391993Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.391999Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.392000Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:55:31.392009Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.392014Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.392015Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T08:55:31.392023Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.392028Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.392029Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T08:55:31.392037Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T08:55:31.392042Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T08:55:31.392043Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T08:55:31.392047Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T08:55:31.392560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653581226398230:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2024-11-21T08:55:09.262560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653486783802680:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.262725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpGHJxg0/pdisk_1.dat 2024-11-21T08:55:09.333565Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65142, node 1 2024-11-21T08:55:09.356997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.357014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.357016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.357048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.364241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.364267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.365374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.460379Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.462447Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25103, port: 25103 2024-11-21T08:55:09.462483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.512483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T08:55:09.556895Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****N_mw (1FBF546C) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.695594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653484725522029:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.695613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpSwgsRH/pdisk_1.dat 2024-11-21T08:55:09.706451Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7427, node 2 2024-11-21T08:55:09.712627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.712641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.712644Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.712680Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.747003Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 
2024-11-21T08:55:09.748494Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:11723, port: 11723 2024-11-21T08:55:09.748522Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.795872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.795908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.796972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.804447Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:11723. Invalid credentials 2024-11-21T08:55:09.804655Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****a8Dg (F27DED9D) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.087847Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653489361069906:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.088081Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpN4vzdo/pdisk_1.dat 2024-11-21T08:55:10.096331Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62657, node 3 2024-11-21T08:55:10.106616Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.106630Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.106631Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.106666Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.190163Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.190203Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.191263Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.196270Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.196528Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:18282, port: 18282 2024-11-21T08:55:10.196566Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.260471Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:18282. 
Invalid credentials 2024-11-21T08:55:10.260729Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****PCLw (C024EE87) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.483890Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653490549070346:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.483942Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpyy0lOd/pdisk_1.dat 2024-11-21T08:55:10.496584Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14240, node 4 2024-11-21T08:55:10.507478Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.507491Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.507493Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.507537Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.558855Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.560253Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:22655, port: 22655 2024-11-21T08:55:10.560286Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.584360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.584392Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.585476Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.612455Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:10.612641Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:22655 return no entries 2024-11-21T08:55:10.612830Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****hZ2A (0665ADA2) () has now permanent error message 'Could not login via LDAP' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpZ4njrh/pdisk_1.dat 2024-11-21T08:55:10.900687Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653488915052455:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.908690Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:10.910876Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4794, node 5 2024-11-21T08:55:10.923269Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.923280Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.923282Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.923320Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.983320Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.983544Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9699, port: 9699 2024-11-21T08:55:10.983575Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.999423Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.999450Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:11.000777Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:11.040908Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:11.084402Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:11.084640Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:11.084663Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:11.132380Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:11.176390Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:11.176784Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****GbjQ (5DAA4408) () has now valid token of ldapuser@ldap 2024-11-21T08:55:14.900394Z node 5 :TICKET_PARSER DEBUG: 
Refreshing ticket eyJh****GbjQ (5DAA4408) 2024-11-21T08:55:14.900475Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9699, port: 9699 2024-11-21T08:55:14.900512Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:14.960463Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:15.004407Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:15.004640Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:15.004658Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.052375Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.096411Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.096823Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****GbjQ (5DAA4408) () has now valid token of ldapuser@ldap 2024-11-21T08:55:15.901570Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439653488915052455:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.901597Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:19.903613Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****GbjQ (5DAA4408) 2024-11-21T08:55:19.903695Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9699, port: 9699 2024-11-21T08:55:19.903721Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:19.972444Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:20.020400Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:20.020578Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:20.020591Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:20.068401Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:20.116400Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:20.116863Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****GbjQ 
(5DAA4408) () has now valid token of ldapuser@ldap 2024-11-21T08:55:21.469428Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653536870597545:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:21.469480Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335e/r3tmp/tmpsZ0FFH/pdisk_1.dat 2024-11-21T08:55:21.487954Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6964, node 6 2024-11-21T08:55:21.526602Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:21.526614Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:21.526617Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:21.526663Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:21.554658Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:21.556613Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:63986, port: 63986 2024-11-21T08:55:21.556648Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:21.571890Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:21.571921Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:21.573500Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:21.668421Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:21.716624Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****lU-g (9EC82158) () has now valid token of ldapuser@ldap 2024-11-21T08:55:26.469536Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439653536870597545:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:26.469580Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:26.471516Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****lU-g (9EC82158) 2024-11-21T08:55:26.471541Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:63986, port: 63986 2024-11-21T08:55:26.471564Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:26.532480Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:26.576658Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****lU-g (9EC82158) () has now valid token of ldapuser@ldap 2024-11-21T08:55:30.473355Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****lU-g (9EC82158) 2024-11-21T08:55:30.473409Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:63986, port: 63986 2024-11-21T08:55:30.473457Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:30.532472Z 
node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:30.576659Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****lU-g (9EC82158) () has now valid token of ldapuser@ldap >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> BasicUsage::ReadMirrored [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice |89.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> TSubDomainTest::StartAndStopTenanNode >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] [GOOD] >> TSubDomainTest::Boot >> Cdc::DocApi[TopicRunner] >> Cdc::UpdatesLog[PqRunner] >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TSubDomainTest::Boot [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> Cdc::KeysOnlyLogDebezium [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2024-11-21T08:55:14.725160Z :PropagateSessionClosed INFO: Random seed for debugging is 1732179314725151 2024-11-21T08:55:14.811503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653508512548556:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.811800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.817139Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653508518011876:2265];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003365/r3tmp/tmpohvCde/pdisk_1.dat 2024-11-21T08:55:14.837049Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:14.838067Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:14.838370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.865837Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8642, node 1 2024-11-21T08:55:14.871606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003365/r3tmp/yandex7cu2xL.tmp 2024-11-21T08:55:14.871617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/jptk/003365/r3tmp/yandex7cu2xL.tmp 2024-11-21T08:55:14.871668Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003365/r3tmp/yandex7cu2xL.tmp 2024-11-21T08:55:14.871701Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:14.875436Z INFO: TTestServer started on Port 24865 GrpcPort 8642 TClient is connected to server localhost:24865 PQClient connected to localhost:8642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:14.911803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.911843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.913434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.934104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.934136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.934847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.935419Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:14.935684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:55:15.063605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653512812979250:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.063629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653512812979225:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.063646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:15.064963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:15.068577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653512812979254:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:55:15.091798Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653512807516808:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:15.091906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjkwYmRkYzgtMjI2ODQyYjgtZjhlNzU3Y2MtNjZlZmI3OWU=, ActorId: [1:7439653512807516766:2299], ActorState: ExecuteState, TraceId: 01jd6yxjbv2na34rdnrxjy3ecz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:15.091938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.092479Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:15.156610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:15.158370Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653512812979354:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:15.158449Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTQzOTE2MDEtOWRmY2QwNGItNzRjMWM5NjMtNWIyMWZlZTg=, ActorId: [2:7439653512812979223:2277], ActorState: ExecuteState, TraceId: 01jd6yxjbqb4kaw18gpt23cvkr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:15.158672Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:15.223847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8642", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:15.293221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd6yxjj27s12yzvyt7ftfr29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJkZTBiNS04ZDlmNzIxZS0yNGM4OWNjZC0xMWMxMjhjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653512807517184:2910] 2024-11-21T08:55:19.811969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653508512548556:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.812004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:19.814522Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653508518011876:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.814558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:20.329118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8642 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:20.364334Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8642 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 I ... e0-1df25d69-711f3d40] [] Got ReadResponse, serverBytesSize = 1445, now ReadSizeBudget = 0, ReadSizeServerDelta = 8387163 2024-11-21T08:55:32.042063Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 8387163 2024-11-21T08:55:32.042128Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T08:55:32.042136Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] Returning serverBytesSize = 1445 to budget 2024-11-21T08:55:32.042140Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] In ContinueReadingDataImpl, ReadSizeBudget = 1445, ReadSizeServerDelta = 8387163 2024-11-21T08:55:32.042177Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T08:55:32.042188Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2024-11-21T08:55:32.042192Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 1} (2-2) 2024-11-21T08:55:32.042195Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 2} (3-3) 2024-11-21T08:55:32.042197Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 3} (4-4) >>> event from dataHandler: DataReceived { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2024-11-21T08:55:32.038000Z WriteTime: 2024-11-21T08:55:32.039000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:34052", "server": "ipv6:[::1]:34052" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2024-11-21T08:55:32.038000Z WriteTime: 2024-11-21T08:55:32.039000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:34052", "server": "ipv6:[::1]:34052" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. 
Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2024-11-21T08:55:32.038000Z WriteTime: 2024-11-21T08:55:32.039000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:34052", "server": "ipv6:[::1]:34052" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2024-11-21T08:55:32.038000Z WriteTime: 2024-11-21T08:55:32.039000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:34052", "server": "ipv6:[::1]:34052" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2024-11-21T08:55:32.042262Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] The application data is transferred to the client. Number of messages 4, size 1180 bytes 2024-11-21T08:55:32.042252Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1445 } } 2024-11-21T08:55:32.042267Z :DEBUG: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] [] Returning serverBytesSize = 0 to budget 2024-11-21T08:55:32.042299Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 got read request: guid# 11798747-3f31c518-5ddfe4ef-228a3be 2024-11-21T08:55:32.138420Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|c8598932-33a0ccf5-56640b5d-427f994e_0] Write session will now close 2024-11-21T08:55:32.138445Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|c8598932-33a0ccf5-56640b5d-427f994e_0] Write session: aborting 2024-11-21T08:55:32.138634Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|c8598932-33a0ccf5-56640b5d-427f994e_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2024-11-21T08:55:32.138645Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|c8598932-33a0ccf5-56640b5d-427f994e_0] Write session: destroy 2024-11-21T08:55:32.138681Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Closing read session. Close timeout: 18446744073709.551615s 2024-11-21T08:55:32.138701Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 2024-11-21T08:55:32.138708Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Counters: { Errors: 0 CurrentSessionLifetimeMs: 325 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:32.138950Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Closing read session. 
Close timeout: 0.000000s 2024-11-21T08:55:32.138958Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 2024-11-21T08:55:32.138962Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Counters: { Errors: 0 CurrentSessionLifetimeMs: 325 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:32.138966Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Closing read session. Close timeout: 0.000000s 2024-11-21T08:55:32.138968Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:1:4:0 -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 2024-11-21T08:55:32.138970Z :INFO: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Counters: { Errors: 0 CurrentSessionLifetimeMs: 325 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:32.138913Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|c8598932-33a0ccf5-56640b5d-427f994e_0 grpc read done: success: 0 data: 2024-11-21T08:55:32.138983Z :NOTICE: [/Root] [/Root] [b9c0eda3-9c1e1be0-1df25d69-711f3d40] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:55:32.138932Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c8598932-33a0ccf5-56640b5d-427f994e_0 grpc read failed 2024-11-21T08:55:32.138940Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c8598932-33a0ccf5-56640b5d-427f994e_0 grpc closed 2024-11-21T08:55:32.138949Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c8598932-33a0ccf5-56640b5d-427f994e_0 is DEAD 2024-11-21T08:55:32.139008Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 grpc read done: success# 0, data# { } 2024-11-21T08:55:32.139012Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 grpc read failed 2024-11-21T08:55:32.139019Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 grpc closed 2024-11-21T08:55:32.139038Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_5565289030045509358_v1 is DEAD 2024-11-21T08:55:32.139222Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:32.139331Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439653581623467722:2552] disconnected; active server actors: 1 2024-11-21T08:55:32.139339Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653581623467723:2552] disconnected; active server actors: 1 2024-11-21T08:55:32.139340Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439653581623467722:2552] client user disconnected session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139342Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653581623467723:2552] client user disconnected session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139361Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439653581623467724:2552] disconnected; active server actors: 1 2024-11-21T08:55:32.139362Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439653581623467724:2552] client user disconnected session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139389Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:32.139389Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:32.139397Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139407Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7439653585918435135:2566] destroyed 2024-11-21T08:55:32.139409Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653581623467738:2560] destroyed 2024-11-21T08:55:32.139412Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:32.139415Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139418Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe 
[3:7439653581623467731:2558] destroyed 2024-11-21T08:55:32.139425Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:32.139425Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:55:32.139427Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139429Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7439653581623467733:2559] destroyed 2024-11-21T08:55:32.139444Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139448Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.139450Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_5565289030045509358_v1 2024-11-21T08:55:32.486353Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439653585918435175:2572], TxId: 281474976720703, task: 1, CA Id [3:7439653585918435173:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T08:55:32.517907Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439653585918435175:2572], TxId: 281474976720703, task: 1, CA Id [3:7439653585918435173:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2024-11-21T08:52:49.303093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652881881888068:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.303418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:49.309134Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652884462171863:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:49.336243Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f02/r3tmp/tmpWFq75y/pdisk_1.dat 2024-11-21T08:52:49.340157Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:49.340181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:49.370147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9618, node 1 2024-11-21T08:52:49.389882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003f02/r3tmp/yandexI6HRJB.tmp 2024-11-21T08:52:49.389895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003f02/r3tmp/yandexI6HRJB.tmp 2024-11-21T08:52:49.389954Z 
node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003f02/r3tmp/yandexI6HRJB.tmp 2024-11-21T08:52:49.389997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:49.394549Z INFO: TTestServer started on Port 27374 GrpcPort 9618 TClient is connected to server localhost:27374 PQClient connected to localhost:9618 === TenantModeEnabled() = 0 === Init PQ - start server on port 9618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:49.439396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.439422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.440882Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:49.441193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.443108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:49.443132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:49.444683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:52:49.449918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:52:49.449971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.450030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:52:49.450091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:52:49.450100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.450856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted 
TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:49.450873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:52:49.450911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.450921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:52:49.450922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T08:52:49.450925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:52:49.451392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:52:49.451485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T08:52:49.451498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.451814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:49.451818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:52:49.452586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:52:49.453011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T08:52:49.453048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:52:49.453589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179169497, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:52:49.453616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439652881881888635 RawX2: 4294969654 } } Step: 
1732179169497 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:52:49.453627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:49.453720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:52:49.453732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:52:49.453764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:52:49.453778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:52:49.454174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:52:49.454186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:52:49.454224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:52:49.454232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439652881881888658:2378], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T08:52:49.454240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:49.454250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:52:49.454263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:52:49.454270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:52:49.454276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T08:52:49.454280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T08:52:49.454289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:52:49.454291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T08:52:49.454302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:52:49.454311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:52:49.454320Z node 1 :FLAT_T ... 
SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:31.066844Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:31.067288Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:31.070091Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Update the table 2024-11-21T08:55:31.076982Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:55:31.077001Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) ReplyResult: Partition=7, SeqNo=0 2024-11-21T08:55:31.077003Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439653581306183088:2673] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Start idle 2024-11-21T08:55:31.077014Z node 27 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 7 expectedGeneration: (NULL) 2024-11-21T08:55:31.080684Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:31.080708Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [27:7439653581306183113:2673], now have 1 active actors on pipe 2024-11-21T08:55:31.080865Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037910, NodeId 28, Generation: 1 2024-11-21T08:55:31.081027Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T08:55:31.081041Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T08:55:31.081083Z node 28 :PERSQUEUE INFO: new Cookie test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2024-11-21T08:55:31.081124Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2024-11-21T08:55:31.081155Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:31.081442Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T08:55:31.081464Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T08:55:31.081509Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:31.081671Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 2024-11-21T08:55:31.082058Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179331082 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:31.082103Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. Init response: session_id: "test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:31.082896Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write 1 messages with Id from 1 to 1 2024-11-21T08:55:31.083096Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: try to update token 2024-11-21T08:55:31.083111Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T08:55:31.084150Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:55:31.084483Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T08:55:31.084496Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T08:55:31.084540Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2024-11-21T08:55:31.084301Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:55:31.084671Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::IEventHandle 2024-11-21T08:55:31.084803Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T08:55:31.084820Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T08:55:31.084840Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2024-11-21T08:55:31.084916Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 
'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2024-11-21T08:55:31.084961Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2024-11-21T08:55:31.085016Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1732179331084 2024-11-21T08:55:31.085045Z node 28 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:55:31.087268Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 2024-11-21T08:55:31.087290Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2024-11-21T08:55:31.087320Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2024-11-21T08:55:31.087331Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:55:31.087361Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:55:31.087371Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:55:31.087426Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:55:31.087436Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:55:31.087441Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T08:55:31.087446Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:55:31.087464Z node 28 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1732179331084 queuesize 0 startOffset 0 2024-11-21T08:55:31.087455Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::IEventHandle 2024-11-21T08:55:31.087792Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 2 } 2024-11-21T08:55:31.087805Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: acknoledged message 1 2024-11-21T08:55:31.087907Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:31.087915Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session will now close 2024-11-21T08:55:31.087920Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: aborting 2024-11-21T08:55:31.088163Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:31.088166Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session is aborting and will not restart 2024-11-21T08:55:31.088186Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0] Write session: destroy 2024-11-21T08:55:31.088270Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 grpc read done: success: 0 data: 2024-11-21T08:55:31.088281Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 grpc read failed 2024-11-21T08:55:31.088287Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 grpc closed 2024-11-21T08:55:31.088292Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|ec612bd3-81f9515b-a312534d-9a2d9af8_0 is DEAD 2024-11-21T08:55:31.088495Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:31.088717Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:31.088759Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7439653581306183113:2673] destroyed 2024-11-21T08:55:31.088772Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 
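For context on the TPartitionChooser entries in the SrcIdCompatibility log above ("Select from the table", "Update the table", "ReplyResult: Partition=7"): the chooser resolves a source id to a partition through a metadata table and refreshes an access timestamp on each write. The statement fragment visible in the log ("...SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition") is consistent with a parameterized UPDATE. The sketch below is purely illustrative: the table path is truncated in the log and is left as a placeholder, and the DECLARE types are assumptions; only the SET/WHERE shape mirrors the logged fragment.

-- Hypothetical parameter declarations (types assumed, not shown in the log).
DECLARE $Hash AS Uint64;
DECLARE $Topic AS Utf8;
DECLARE $SourceId AS Utf8;
DECLARE $Partition AS Uint32;
DECLARE $AccessTime AS Uint64;

-- `<sourceid-meta-table>` stands in for the SourceIdMeta2 table whose full path
-- is truncated in the log; the SET/WHERE clauses mirror the logged fragment.
UPDATE `<sourceid-meta-table>`
SET AccessTime = $AccessTime
WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition;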
>> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T08:55:14.099252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653505954306731:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.099390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.101679Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653508293147263:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.101876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:14.121421Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:14.124585Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003de0/r3tmp/tmpiy12eB/pdisk_1.dat 2024-11-21T08:55:14.152754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26136, node 1 2024-11-21T08:55:14.166842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003de0/r3tmp/yandexQif1mp.tmp 2024-11-21T08:55:14.166854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003de0/r3tmp/yandexQif1mp.tmp 2024-11-21T08:55:14.166903Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003de0/r3tmp/yandexQif1mp.tmp 2024-11-21T08:55:14.166934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:14.171431Z INFO: TTestServer started on Port 64145 GrpcPort 26136 TClient is connected to server localhost:64145 PQClient connected to localhost:26136 === TenantModeEnabled() = 0 === Init PQ - start server on port 26136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:14.199656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:14.199691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:14.201905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:14.202036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:55:14.202086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.202135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T08:55:14.202191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:14.202203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.202753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:14.202779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:14.202812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.202824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:14.202830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T08:55:14.202833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:55:14.203182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.203197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:14.203200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:14.203344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:14.203354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T08:55:14.203357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:14.204069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.204079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.204083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:14.204108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:55:14.204633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:14.205127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T08:55:14.205170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:55:14.206235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179314250, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:14.206265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439653505954307267 RawX2: 4294969643 } } Step: 1732179314250 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T08:55:14.206269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:14.206345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:14.206357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:14.206404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:55:14.206425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T08:55:14.207457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:14.207471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T08:55:14.207510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:14.207520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439653505954307311:2380], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T08:55:14.207527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:14.207531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:14.207543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:14.207545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:55:14.207550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T08:55:14.207554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T08:55:14.207556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:14.207559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T08:55:14.207568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T08:55:14.207572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:14.207574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T08:55:14.207973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:14.207989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057 ... 
[1 (0), ], RequireBalancing=0 [] 2024-11-21T08:55:33.614654Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_5608430687997054198_v1" (Sender=[5:7439653589045666961:2568], Pipe=[5:7439653589045666964:2568], Partitions=[], ActiveFamilyCount=0) 2024-11-21T08:55:33.614666Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_5608430687997054198_v1" sender [5:7439653589045666961:2568] lock partition 0 for ReadingSession "shared/cli_5_1_5608430687997054198_v1" (Sender=[5:7439653589045666961:2568], Pipe=[5:7439653589045666964:2568], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T08:55:33.614680Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T08:55:33.614690Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000045s 2024-11-21T08:55:33.614983Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_5608430687997054198_v1" ClientId: "cli" PipeClient { RawX1: 7439653589045666964 RawX2: 4503621102209544 } Path: "/Root/PQ/rt3.dc1--topic1" } 2024-11-21T08:55:33.615023Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T08:55:33.615116Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2024-11-21T08:55:33.615153Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_5608430687997054198_v1:1 with generation 1 2024-11-21T08:55:33.616682Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732179333612 } Cookie: 18446744073709551615 } 2024-11-21T08:55:33.616703Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T08:55:33.616737Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 sending to client partition status 2024-11-21T08:55:33.617049Z :INFO: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T08:55:33.617217Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T08:55:33.617278Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T08:55:33.617297Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T08:55:33.617305Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T08:55:33.617327Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T08:55:33.617337Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1TEvPartitionReady. Aval parts: 1 2024-11-21T08:55:33.617351Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 performing read request: guid# f133f87f-6a2c779b-8877e57d-6e461c29, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T08:55:33.617379Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid f133f87f-6a2c779b-8877e57d-6e461c29 2024-11-21T08:55:33.617674Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732179333512 CreateTimestampMS: 1732179333511 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732179333513 CreateTimestampMS: 1732179333512 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732179333513 CreateTimestampMS: 1732179333512 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T08:55:33.617716Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T08:55:33.617728Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid f133f87f-6a2c779b-8877e57d-6e461c29 has messages 1 2024-11-21T08:55:33.617761Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 read done: guid# f133f87f-6a2c779b-8877e57d-6e461c29, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2024-11-21T08:55:33.617775Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 response to read: guid# f133f87f-6a2c779b-8877e57d-6e461c29 2024-11-21T08:55:33.617848Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 Process answer. Aval parts: 0 2024-11-21T08:55:33.617935Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T08:55:33.617960Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T08:55:33.618052Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T08:55:33.618075Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] Returning serverBytesSize = 371 to budget 2024-11-21T08:55:33.618080Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2024-11-21T08:55:33.618247Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T08:55:33.618302Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T08:55:33.618311Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T08:55:33.618316Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T08:55:33.618325Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T08:55:33.618332Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] Returning serverBytesSize = 0 to budget 2024-11-21T08:55:33.618359Z :INFO: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] Closing read session. 
Close timeout: 0.000000s 2024-11-21T08:55:33.618334Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2024-11-21T08:55:33.618366Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T08:55:33.618375Z :INFO: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:33.618374Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 got read request: guid# 4709b634-9ee5989f-e6f8218f-4b03f3cf 2024-11-21T08:55:33.618394Z :NOTICE: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:55:33.618399Z :DEBUG: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] [] Abort session to cluster 2024-11-21T08:55:33.618522Z :NOTICE: [] [] [3bdd1977-54184ef9-998282ba-aaeb6342] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:55:33.618623Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 grpc read done: success# 0, data# { } 2024-11-21T08:55:33.618631Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 grpc read failed 2024-11-21T08:55:33.618635Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 grpc closed 2024-11-21T08:55:33.618649Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5608430687997054198_v1 is DEAD 2024-11-21T08:55:33.618720Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_5608430687997054198_v1 2024-11-21T08:55:33.618939Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439653589045666964:2568] disconnected; active server actors: 1 2024-11-21T08:55:33.618953Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439653589045666964:2568] client cli disconnected session shared/cli_5_1_5608430687997054198_v1 >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2024-11-21T08:55:33.157872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653588387967682:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:33.158004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e7f/r3tmp/tmprz9qmd/pdisk_1.dat 2024-11-21T08:55:33.196931Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8049 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:55:33.212450Z node 1 :TX_PROXY DEBUG: actor# [1:7439653588387967902:2136] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:33.212482Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653588387968312:2418] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:33.212517Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653588387967941:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:33.212534Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653588387967941:2157], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:33.212585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:33.212865Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967575:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588387968317:2419] 2024-11-21T08:55:33.212881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967578:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588387968318:2419] 2024-11-21T08:55:33.212885Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588387967575:2050] Subscribe: subscriber# [1:7439653588387968317:2419], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.212892Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967581:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588387968319:2419] 2024-11-21T08:55:33.212894Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588387967581:2056] Subscribe: subscriber# [1:7439653588387968319:2419], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.212896Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588387967578:2053] Subscribe: subscriber# [1:7439653588387968318:2419], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.212900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968317:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387967575:2050] 2024-11-21T08:55:33.212903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968319:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387967581:2056] 2024-11-21T08:55:33.212907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968318:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387967578:2053] 2024-11-21T08:55:33.212910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387968314:2419] 2024-11-21T08:55:33.212911Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967575:2050] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588387968317:2419] 2024-11-21T08:55:33.212913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387968316:2419] 2024-11-21T08:55:33.212919Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653588387968313:2419][/dc-1] Set up state: owner# [1:7439653588387967941:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:33.212921Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967581:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588387968319:2419] 2024-11-21T08:55:33.212924Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967578:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588387968318:2419] 2024-11-21T08:55:33.212933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588387968315:2419] 2024-11-21T08:55:33.212937Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653588387968313:2419][/dc-1] Path was already updated: owner# [1:7439653588387967941:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:33.212945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968317:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968314:2419], cookie# 1 2024-11-21T08:55:33.212947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968318:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968315:2419], cookie# 1 2024-11-21T08:55:33.212949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968319:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968316:2419], cookie# 1 2024-11-21T08:55:33.212953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967575:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968317:2419], cookie# 1 2024-11-21T08:55:33.212956Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967578:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968318:2419], cookie# 1 2024-11-21T08:55:33.212959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588387967581:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588387968319:2419], cookie# 1 2024-11-21T08:55:33.212969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968317:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387967575:2050], cookie# 1 2024-11-21T08:55:33.212974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439653588387968318:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387967578:2053], cookie# 1 2024-11-21T08:55:33.212993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588387968319:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387967581:2056], cookie# 1 2024-11-21T08:55:33.213000Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387968314:2419], cookie# 1 2024-11-21T08:55:33.213004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:33.213007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387968315:2419], cookie# 1 2024-11-21T08:55:33.213009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:33.213012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588387968316:2419], cookie# 1 2024-11-21T08:55:33.213013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588387968313:2419][/dc-1] Unexpected sync response: sender# [1:7439653588387968316:2419], cookie# 1 2024-11-21T08:55:33.218954Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653588387967941:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:33.219016Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653588387967941:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Ta ... 1142382115:2518], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:34.056414Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653586847413939:2050] Subscribe: subscriber# [3:7439653591142382113:2518], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:34.056414Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413945:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439653591142382116:2519] 2024-11-21T08:55:34.056415Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653586847413945:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T08:55:34.056418Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653586847413945:2056] Subscribe: subscriber# [3:7439653591142382116:2519], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:34.056419Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382114:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653586847413942:2053] 2024-11-21T08:55:34.056420Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413945:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439653591142382117:2518] 2024-11-21T08:55:34.056421Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653586847413945:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2024-11-21T08:55:34.056421Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382112:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653586847413939:2050] 2024-11-21T08:55:34.056424Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653586847413945:2056] Subscribe: subscriber# [3:7439653591142382117:2518], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:34.056424Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382116:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653586847413945:2056] 2024-11-21T08:55:34.056428Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413939:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: 
sender# [3:7439653591142382112:2519] 2024-11-21T08:55:34.056428Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382105:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653591142382110:2519] 2024-11-21T08:55:34.056430Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413945:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439653591142382116:2519] 2024-11-21T08:55:34.056433Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382115:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653586847413942:2053] 2024-11-21T08:55:34.056434Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382105:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653591142382109:2519] 2024-11-21T08:55:34.056436Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382113:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653586847413939:2050] 2024-11-21T08:55:34.056437Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439653591142382117:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653586847413945:2056] 2024-11-21T08:55:34.056438Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439653591142382105:2519][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7439653586847414282:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:34.056441Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382104:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653591142382107:2518] 2024-11-21T08:55:34.056442Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382105:2519][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653591142382111:2519] 2024-11-21T08:55:34.056443Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382104:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653591142382106:2518] 2024-11-21T08:55:34.056445Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439653591142382105:2519][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7439653586847414282:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:34.056446Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439653591142382104:2518][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7439653586847414282:2147], state# { Deleted: 1 Strong: 
1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:34.056447Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413939:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439653591142382113:2518] 2024-11-21T08:55:34.056448Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439653591142382104:2518][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653591142382108:2518] 2024-11-21T08:55:34.056449Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413945:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439653591142382117:2518] 2024-11-21T08:55:34.056451Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439653591142382104:2518][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7439653586847414282:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:34.056453Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439653586847414282:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T08:55:34.056453Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413942:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439653591142382114:2519] 2024-11-21T08:55:34.056454Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653586847413942:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439653591142382115:2518] 2024-11-21T08:55:34.056457Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439653586847414282:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7439653591142382105:2519] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:34.056462Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653586847414282:2147], cacheItem# { Subscriber: { Subscriber: [3:7439653591142382105:2519] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:34.056465Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439653586847414282:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T08:55:34.056467Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439653586847414282:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: 
[3:7439653591142382104:2518] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:34.056470Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653586847414282:2147], cacheItem# { Subscriber: { Subscriber: [3:7439653591142382104:2518] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:34.056480Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653591142382118:2520], recipient# [3:7439653591142382102:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TPQTestSlow::TestWriteVeryBigMessage >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TPQTestSlow::TestOnDiskStoredSourceIds >> TSubDomainTest::LsLs >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::FailIfAffectedSetNotInterior >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> TSubDomainTest::ConsistentCopyTable [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2024-11-21T08:55:33.231736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653588526762329:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:33.231800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e87/r3tmp/tmpay0aKO/pdisk_1.dat 2024-11-21T08:55:33.273444Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server 
localhost:19013 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:55:33.290920Z node 1 :TX_PROXY DEBUG: actor# [1:7439653588526762549:2100] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:33.290943Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653588526762812:2247] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:33.291007Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653588526762623:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:33.291026Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653588526762623:2131], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:33.291087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:33.291430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762271:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588526762817:2248] 2024-11-21T08:55:33.291437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762274:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588526762818:2248] 2024-11-21T08:55:33.291450Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588526762271:2049] Subscribe: subscriber# [1:7439653588526762817:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.291451Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588526762274:2052] Subscribe: subscriber# [1:7439653588526762818:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.291463Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762277:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653588526762819:2248] 2024-11-21T08:55:33.291466Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653588526762277:2055] Subscribe: subscriber# [1:7439653588526762819:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:33.291470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762817:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588526762271:2049] 2024-11-21T08:55:33.291474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762818:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588526762274:2052] 2024-11-21T08:55:33.291474Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762271:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588526762817:2248] 2024-11-21T08:55:33.291477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762274:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588526762818:2248] 2024-11-21T08:55:33.291478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762819:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] 
Version: 2 }: sender# [1:7439653588526762277:2055] 2024-11-21T08:55:33.291481Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762277:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653588526762819:2248] 2024-11-21T08:55:33.291483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588526762814:2248] 2024-11-21T08:55:33.291489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588526762815:2248] 2024-11-21T08:55:33.291499Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653588526762813:2248][/dc-1] Set up state: owner# [1:7439653588526762623:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:33.291531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653588526762816:2248] 2024-11-21T08:55:33.291541Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653588526762813:2248][/dc-1] Path was already updated: owner# [1:7439653588526762623:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:33.291547Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762817:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762814:2248], cookie# 1 2024-11-21T08:55:33.291550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762818:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762815:2248], cookie# 1 2024-11-21T08:55:33.291553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762819:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762816:2248], cookie# 1 2024-11-21T08:55:33.291557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762271:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762817:2248], cookie# 1 2024-11-21T08:55:33.291563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762274:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762818:2248], cookie# 1 2024-11-21T08:55:33.291565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653588526762277:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653588526762819:2248], cookie# 1 2024-11-21T08:55:33.291580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762817:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762271:2049], cookie# 1 2024-11-21T08:55:33.291588Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762818:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762274:2052], cookie# 1 2024-11-21T08:55:33.291590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653588526762819:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762277:2055], cookie# 1 2024-11-21T08:55:33.291593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762814:2248], cookie# 1 2024-11-21T08:55:33.291598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:33.291601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762815:2248], cookie# 1 2024-11-21T08:55:33.291605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:33.291609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653588526762816:2248], cookie# 1 2024-11-21T08:55:33.291613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653588526762813:2248][/dc-1] Unexpected sync response: sender# [1:7439653588526762816:2248], cookie# 1 2024-11-21T08:55:33.297391Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653588526762623:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:33.297461Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653588526762623:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/table TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2024-11-21T08:55:35.367109Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050168:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_1 }: sender# [5:7439653591315051200:2668], cookie# 6 2024-11-21T08:55:35.367111Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050171:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_1 }: sender# [5:7439653591315051201:2668], cookie# 6 2024-11-21T08:55:35.367113Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050174:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_1 }: sender# [5:7439653591315051202:2668], cookie# 6 2024-11-21T08:55:35.367114Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7439653591315050512:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 } 2024-11-21T08:55:35.367115Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7439653591315051200:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315050168:2051], cookie# 6 2024-11-21T08:55:35.367116Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7439653591315051201:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315050171:2054], cookie# 6 2024-11-21T08:55:35.367118Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7439653591315051202:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315050174:2057], cookie# 6 2024-11-21T08:55:35.367119Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315051197:2668], cookie# 6 2024-11-21T08:55:35.367120Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Sync is in progress: cookie# 6, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:35.367120Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7439653591315050512:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [5:7439653591315051196:2668] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179334949 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:35.367121Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315051198:2668], cookie# 6 2024-11-21T08:55:35.367122Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Sync is done: cookie# 6, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:35.367124Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439653591315051199:2668], cookie# 6 2024-11-21T08:55:35.367125Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439653591315051196:2668][/dc-1/USER_1] Unexpected sync response: sender# [5:7439653591315051199:2668], cookie# 6 2024-11-21T08:55:35.367128Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439653591315050512:2148], cacheItem# { Subscriber: { Subscriber: [5:7439653591315051196:2668] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179334949 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 5 IsSync: true Partial: 0 } 2024-11-21T08:55:35.367133Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7439653591315050512:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 } 2024-11-21T08:55:35.367138Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7439653591315050512:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [5:7439653591315051196:2668] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179334949 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:35.367146Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439653591315050512:2148], cacheItem# { Subscriber: { Subscriber: [5:7439653591315051196:2668] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179334949 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 6 IsSync: true Partial: 0 } 2024-11-21T08:55:35.367184Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7439653595610019488:3521], recipient# 
[5:7439653595610019487:3520], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/table TableId: [72057594046644480:6:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_1 TableId: [72057594046644480:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037892 Coordinators: 72075186224037893 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037894 Mediators: 72075186224037895 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_0/a/table TableId: [72057594046644480:7:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_1 TableId: [72057594046644480:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037892 Coordinators: 72075186224037893 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037894 Mediators: 72075186224037895 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:55:35.367199Z node 5 :TX_PROXY DEBUG: Actor# [5:7439653595610019487:3520] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:55:35.367204Z node 5 :TX_PROXY ERROR: Access denied for user1@builtin with access SelectRow to path dc-1/USER_0/table 2024-11-21T08:55:35.367208Z node 5 :TX_PROXY DEBUG: Actor# [5:7439653595610019487:3520] txid# 281474976715672 SEND to# [5:7439653595610019486:3519] Source {TEvProposeTransactionStatus Status# 5} 2024-11-21T08:55:35.377214Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050168:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439653591597809351:2099] 2024-11-21T08:55:35.377235Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050168:2051] Unsubscribe: subscriber# [6:7439653591597809351:2099], path# /dc-1/USER_1 2024-11-21T08:55:35.377242Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050174:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439653591597809353:2099] 2024-11-21T08:55:35.377245Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050174:2057] Unsubscribe: subscriber# 
[6:7439653591597809353:2099], path# /dc-1/USER_1 2024-11-21T08:55:35.377247Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050171:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439653591597809352:2099] 2024-11-21T08:55:35.377256Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050171:2054] Unsubscribe: subscriber# [6:7439653591597809352:2099], path# /dc-1/USER_1 2024-11-21T08:55:35.377297Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2024-11-21T08:55:35.377300Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050168:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7439653594561195676:2098] 2024-11-21T08:55:35.377303Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050168:2051] Unsubscribe: subscriber# [7:7439653594561195676:2098], path# /dc-1/USER_0 2024-11-21T08:55:35.377308Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050171:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7439653594561195680:2098] 2024-11-21T08:55:35.377311Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050171:2054] Unsubscribe: subscriber# [7:7439653594561195680:2098], path# /dc-1/USER_0 2024-11-21T08:55:35.377316Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439653591315050174:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7439653594561195682:2098] 2024-11-21T08:55:35.377319Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439653591315050174:2057] Unsubscribe: subscriber# [7:7439653594561195682:2098], path# /dc-1/USER_0 2024-11-21T08:55:35.377414Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:55:35.377440Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2024-11-21T08:55:35.377855Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TSubDomainTest::LsAltered [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2024-11-21T08:55:35.466766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653597650746491:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:35.466782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e6a/r3tmp/tmpdw1b0P/pdisk_1.dat 2024-11-21T08:55:35.514574Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20399 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:55:35.518313Z node 1 :TX_PROXY DEBUG: actor# [1:7439653597650746711:2100] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:35.518334Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653597650746978:2246] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:35.518364Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653597650746756:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:35.518376Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653597650746756:2123], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:35.518411Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:35.518592Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746436:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653597650746984:2247] 2024-11-21T08:55:35.518597Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746433:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653597650746983:2247] 2024-11-21T08:55:35.518605Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653597650746436:2052] Subscribe: subscriber# [1:7439653597650746984:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.518608Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653597650746433:2049] Subscribe: subscriber# [1:7439653597650746983:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.518614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746439:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653597650746985:2247] 2024-11-21T08:55:35.518621Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653597650746439:2055] Subscribe: subscriber# [1:7439653597650746985:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.518622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746984:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746436:2052] 2024-11-21T08:55:35.518625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746983:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746433:2049] 2024-11-21T08:55:35.518626Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746436:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653597650746984:2247] 2024-11-21T08:55:35.518628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746985:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746439:2055] 2024-11-21T08:55:35.518629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746433:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439653597650746983:2247] 2024-11-21T08:55:35.518631Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746439:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653597650746985:2247] 2024-11-21T08:55:35.518633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746981:2247] 2024-11-21T08:55:35.518637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746980:2247] 2024-11-21T08:55:35.518643Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653597650746979:2247][/dc-1] Set up state: owner# [1:7439653597650746756:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.518686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653597650746982:2247] 2024-11-21T08:55:35.518694Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653597650746979:2247][/dc-1] Path was already updated: owner# [1:7439653597650746756:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.518699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746983:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746980:2247], cookie# 1 2024-11-21T08:55:35.518705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746984:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746981:2247], cookie# 1 2024-11-21T08:55:35.518712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746985:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746982:2247], cookie# 1 2024-11-21T08:55:35.518722Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746436:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746984:2247], cookie# 1 2024-11-21T08:55:35.518729Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746439:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746985:2247], cookie# 1 2024-11-21T08:55:35.518733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746984:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746436:2052], cookie# 1 2024-11-21T08:55:35.518734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746985:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746439:2055], cookie# 1 2024-11-21T08:55:35.518737Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746981:2247], cookie# 1 2024-11-21T08:55:35.518740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:35.518756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746982:2247], cookie# 1 2024-11-21T08:55:35.518759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 TClient::Ls response: 2024-11-21T08:55:35.518762Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653597650746433:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653597650746983:2247], cookie# 1 2024-11-21T08:55:35.518764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653597650746983:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746433:2049], cookie# 1 2024-11-21T08:55:35.518766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653597650746980:2247], cookie# 1 2024-11-21T08:55:35.518767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653597650746979:2247][/dc-1] Unexpected sync response: sender# [1:7439653597650746980:2247], cookie# 1 2024-11-21T08:55:35.523778Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653597650746756:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:35.523826Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653597650746756:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: ... 22] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2024-11-21T08:55:36.682501Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653602756594301:2322] Handle TEvDescribeSchemeResult Forward to# [2:7439653602756594300:2321] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179336125 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179336125 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) TClient::Ls request: /dc-1 2024-11-21T08:55:36.687581Z node 2 :TX_PROXY DEBUG: actor# [2:7439653598461626544:2098] Handle TEvNavigate describe path /dc-1 2024-11-21T08:55:36.687596Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653602756594304:2325] HANDLE EvNavigateScheme /dc-1 2024-11-21T08:55:36.687632Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439653598461626617:2112], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:36.687667Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7439653598461626617:2112], cookie# 4 2024-11-21T08:55:36.687688Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626861:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626858:2246], cookie# 4 2024-11-21T08:55:36.687695Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626862:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626859:2246], cookie# 4 2024-11-21T08:55:36.687699Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626863:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626860:2246], cookie# 4 2024-11-21T08:55:36.687703Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653598461626317:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626861:2246], cookie# 4 2024-11-21T08:55:36.687705Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653598461626320:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626862:2246], cookie# 4 2024-11-21T08:55:36.687710Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653598461626323:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439653598461626863:2246], cookie# 4 2024-11-21T08:55:36.687717Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626862:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439653598461626320:2052], cookie# 4 2024-11-21T08:55:36.687719Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626861:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439653598461626317:2049], cookie# 4 2024-11-21T08:55:36.687721Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653598461626863:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439653598461626323:2055], cookie# 4 2024-11-21T08:55:36.687725Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439653598461626859:2246], cookie# 4 2024-11-21T08:55:36.687734Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:36.687737Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [2:7439653598461626858:2246], cookie# 4 2024-11-21T08:55:36.687740Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:36.687748Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439653598461626860:2246], cookie# 4 2024-11-21T08:55:36.687753Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439653598461626617:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T08:55:36.687753Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653598461626857:2246][/dc-1] Unexpected sync response: sender# [2:7439653598461626860:2246], cookie# 4 2024-11-21T08:55:36.687763Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439653598461626617:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7439653598461626857:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179336055 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:36.687780Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439653598461626617:2112], cacheItem# { Subscriber: { Subscriber: [2:7439653598461626857:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732179336055 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2024-11-21T08:55:36.687816Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653602756594305:2326], recipient# [2:7439653602756594304:2325], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:55:36.687828Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653602756594304:2325] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:55:36.687854Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653602756594304:2325] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2024-11-21T08:55:36.687961Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653602756594304:2325] Handle TEvDescribeSchemeResult Forward to# [2:7439653602756594303:2324] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732179336055 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732179336055 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179336125 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594... (TRUNCATED) >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TSubDomainTest::CreateTablet >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2024-11-21T08:55:35.206238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653596636416577:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:35.206360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e79/r3tmp/tmpn8SPxp/pdisk_1.dat 2024-11-21T08:55:35.246078Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20178 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:55:35.263265Z node 1 :TX_PROXY DEBUG: actor# [1:7439653596636416796:2136] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:35.263287Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653596636417154:2376] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:35.263318Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653596636416818:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:35.263329Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653596636416818:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:35.263380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:35.263654Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416472:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653596636417160:2377] 2024-11-21T08:55:35.263666Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416469:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653596636417159:2377] 2024-11-21T08:55:35.263674Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653596636416472:2053] Subscribe: subscriber# [1:7439653596636417160:2377], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.263678Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653596636416469:2050] Subscribe: subscriber# [1:7439653596636417159:2377], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.263688Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416475:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653596636417161:2377] 2024-11-21T08:55:35.263691Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653596636416475:2056] Subscribe: subscriber# [1:7439653596636417161:2377], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.263694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417160:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653596636416472:2053] 2024-11-21T08:55:35.263700Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416472:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653596636417160:2377] 2024-11-21T08:55:35.263699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417159:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653596636416469:2050] 2024-11-21T08:55:35.263702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416469:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653596636417159:2377] 2024-11-21T08:55:35.263702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417161:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7439653596636416475:2056] 2024-11-21T08:55:35.263705Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416475:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653596636417161:2377] 2024-11-21T08:55:35.263707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653596636417157:2377] 2024-11-21T08:55:35.263711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653596636417156:2377] 2024-11-21T08:55:35.263719Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653596636417155:2377][/dc-1] Set up state: owner# [1:7439653596636416818:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.263750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653596636417158:2377] 2024-11-21T08:55:35.263760Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653596636417155:2377][/dc-1] Path was already updated: owner# [1:7439653596636416818:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.263765Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417159:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417156:2377], cookie# 1 2024-11-21T08:55:35.263767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417160:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417157:2377], cookie# 1 2024-11-21T08:55:35.263769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417161:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417158:2377], cookie# 1 2024-11-21T08:55:35.263772Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416469:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417159:2377], cookie# 1 2024-11-21T08:55:35.263775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416472:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417160:2377], cookie# 1 2024-11-21T08:55:35.263778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653596636416475:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653596636417161:2377], cookie# 1 2024-11-21T08:55:35.263781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417159:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636416469:2050], cookie# 1 2024-11-21T08:55:35.263788Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439653596636417160:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636416472:2053], cookie# 1 2024-11-21T08:55:35.263790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653596636417161:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636416475:2056], cookie# 1 2024-11-21T08:55:35.263793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636417156:2377], cookie# 1 2024-11-21T08:55:35.263797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:35.263799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636417157:2377], cookie# 1 2024-11-21T08:55:35.263802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:35.263805Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653596636417158:2377], cookie# 1 2024-11-21T08:55:35.263809Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653596636417155:2377][/dc-1] Unexpected sync response: sender# [1:7439653596636417158:2377], cookie# 1 2024-11-21T08:55:35.268779Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653596636416818:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:35.268839Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653596636416818:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... fy { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439653600885136593:2056] 2024-11-21T08:55:37.032570Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653600885136593:2056] Subscribe: subscriber# [4:7439653603595824504:2341], path# /dc-1/USER_0/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.032572Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653600885136590:2053] Subscribe: subscriber# [4:7439653603595824509:2342], path# /dc-1/USER_0/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.032624Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439653603595824508:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653600885136587:2050] 2024-11-21T08:55:37.032628Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824497:2341][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439653603595824499:2341] 2024-11-21T08:55:37.032574Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136593:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [4:7439653603595824510:2342] 2024-11-21T08:55:37.032629Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439653603595824509:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653600885136590:2053] 2024-11-21T08:55:37.032575Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653600885136593:2056] Upsert description: path# /dc-1/USER_0/.metadata/workload_manager/running_requests 2024-11-21T08:55:37.032578Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653600885136593:2056] Subscribe: subscriber# [4:7439653603595824510:2342], path# /dc-1/USER_0/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.032586Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136587:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824491:2340] 2024-11-21T08:55:37.032590Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136593:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824493:2340] 2024-11-21T08:55:37.032594Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136590:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[4:7439653603595824492:2340] 2024-11-21T08:55:37.032633Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824497:2341][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439653603595824500:2341] 2024-11-21T08:55:37.032690Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136587:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824502:2341] 2024-11-21T08:55:37.032695Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136590:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824503:2341] 2024-11-21T08:55:37.032635Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439653603595824510:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439653600885136593:2056] 2024-11-21T08:55:37.032700Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136593:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824504:2341] 2024-11-21T08:55:37.032636Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439653603595824497:2341][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Set up state: owner# [4:7439653599300856732:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.032639Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824497:2341][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439653603595824501:2341] 2024-11-21T08:55:37.032641Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439653603595824497:2341][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7439653599300856732:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.032711Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136587:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824508:2342] 2024-11-21T08:55:37.032641Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824498:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439653603595824505:2342] 2024-11-21T08:55:37.032648Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824498:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439653603595824506:2342] 2024-11-21T08:55:37.032649Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439653603595824511:2343], recipient# [4:7439653603595824481:2299], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:37.032652Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439653603595824498:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Set up state: owner# [4:7439653599300856732:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.032723Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136590:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824509:2342] 2024-11-21T08:55:37.032654Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439653603595824498:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439653603595824507:2342] 2024-11-21T08:55:37.032658Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439653603595824498:2342][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7439653599300856732:2098], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.032662Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439653599300856732:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T08:55:37.032732Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653600885136593:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439653603595824510:2342] 2024-11-21T08:55:37.032666Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439653599300856732:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439653603595824497:2341] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:37.032673Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439653599300856732:2098], cacheItem# { Subscriber: { Subscriber: [4:7439653603595824497:2341] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:37.032675Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439653599300856732:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T08:55:37.032678Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439653599300856732:2098], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439653603595824498:2342] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:37.032684Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439653599300856732:2098], cacheItem# { Subscriber: { Subscriber: [4:7439653603595824498:2342] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:37.032692Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439653603595824512:2344], recipient# [4:7439653603595824496:2305], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T08:54:43.491479Z :TestReorderedExecutor INFO: Random seed for debugging is 1732179283491473 2024-11-21T08:54:43.586785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653374756026783:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.587212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.592448Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653373159385597:2262];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.607451Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.612196Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.613454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004649/r3tmp/tmpLyuTdJ/pdisk_1.dat 2024-11-21T08:54:43.638542Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15034, node 1 2024-11-21T08:54:43.653514Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004649/r3tmp/yandexU9C70i.tmp 2024-11-21T08:54:43.653526Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004649/r3tmp/yandexU9C70i.tmp 2024-11-21T08:54:43.653576Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004649/r3tmp/yandexU9C70i.tmp 2024-11-21T08:54:43.653607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:43.659006Z INFO: TTestServer started on Port 6145 GrpcPort 15034 TClient is connected to server localhost:6145 PQClient connected to localhost:15034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:54:43.686944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.686970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.688624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:43.714443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.714482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.715765Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:43.716035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:43.716039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:54:43.863396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374756027677:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.863422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.863554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374756027689:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.864152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:54:43.867954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653374756027691:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:54:43.889702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.914042Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653373159385728:2285], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.914126Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmQ0MDVkZWQtZTUwZjZjYzgtNWUxYjdhYTUtZjMwODI3YQ==, ActorId: [2:7439653373159385703:2279], ActorState: ExecuteState, TraceId: 01jd6ywky55h22rjrkgd67672c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.914542Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.950436Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653374756027873:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.950579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTZlYmI5ZjEtOTJhZjY1ZGMtZDg1YTcxZjMtYzU2OTQwMzU=, ActorId: [1:7439653374756027659:2299], ActorState: ExecuteState, TraceId: 01jd6ywkwm6pjhxf97r9wycfen, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.950841Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.953220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:44.017074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:15034", true, true, 1000); 2024-11-21T08:54:44.046601Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6ywm1zcs85wzy6g8bb94qy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhYjA1ODMtMTIyNDAyZDgtMmVkNTNhNzMtNjQ1NWZhN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653379050995463:2931] 2024-11-21T08:54:48.587170Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653374756026783:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.587213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:54:48.589215Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653373159385597:2262];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.589262Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok waiting... 
2024-11-21T08:54:49.102541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:15034 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:54:49.120933Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:15034 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" ... sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:47874 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:55:37.239413Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:55:37.239781Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:55:37.239834Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:55:37.239842Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:55:37.239844Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:37.239849Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:37.240291Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:37.255066Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T08:55:37.255152Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439653603630074882:2470] connected; active server actors: 1 2024-11-21T08:55:37.255168Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T08:55:37.255172Z node 13 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T08:55:37.255242Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439653603630074882:2470] disconnected; active server actors: 1 2024-11-21T08:55:37.255248Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439653603630074882:2470] disconnected no session 2024-11-21T08:55:37.269152Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:55:37.269171Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T08:55:37.269175Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653603630074852:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T08:55:37.269185Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:55:37.269528Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:37.269554Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7439653603630074900:2470], now have 1 active actors on pipe 2024-11-21T08:55:37.270026Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2024-11-21T08:55:37.270111Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:37.270139Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:37.270197Z node 14 :PERSQUEUE INFO: new Cookie src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:55:37.270262Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:55:37.270317Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:37.270577Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:37.270587Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:37.270612Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:37.270717Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 2024-11-21T08:55:37.271123Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179337271 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:37.271170Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|b45f71df-2a01266-80cb47d9-f0bb78ab_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:37.271302Z :INFO: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:37.271307Z :INFO: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session will now close 2024-11-21T08:55:37.271312Z :DEBUG: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session: aborting 2024-11-21T08:55:37.271564Z :INFO: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:37.271567Z :DEBUG: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session is aborting and will not restart 2024-11-21T08:55:37.271610Z :DEBUG: [] MessageGroupId [src] SessionId [src|b45f71df-2a01266-80cb47d9-f0bb78ab_0] Write session: destroy 2024-11-21T08:55:37.271631Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 grpc read done: success: 0 data: 2024-11-21T08:55:37.271647Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 grpc read failed 2024-11-21T08:55:37.271656Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 grpc closed 2024-11-21T08:55:37.271664Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b45f71df-2a01266-80cb47d9-f0bb78ab_0 is DEAD 2024-11-21T08:55:37.272009Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:37.272152Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:37.272189Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7439653603630074900:2470] destroyed 2024-11-21T08:55:37.272198Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:55:37.276868Z :INFO: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Starting read session 2024-11-21T08:55:37.276878Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Starting cluster discovery 2024-11-21T08:55:37.276910Z :INFO: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23085: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23085
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23085. " 2024-11-21T08:55:37.276914Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Restart cluster discovery in 0.009160s 2024-11-21T08:55:37.286325Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Starting cluster discovery 2024-11-21T08:55:37.286416Z :INFO: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23085: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23085
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23085. " 2024-11-21T08:55:37.286422Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Restart cluster discovery in 0.018958s 2024-11-21T08:55:37.306360Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Starting cluster discovery 2024-11-21T08:55:37.306433Z :INFO: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23085: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23085
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23085. " 2024-11-21T08:55:37.306441Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Restart cluster discovery in 0.023569s 2024-11-21T08:55:37.330356Z :DEBUG: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Starting cluster discovery 2024-11-21T08:55:37.330444Z :NOTICE: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23085: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23085
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23085. " } 2024-11-21T08:55:37.330499Z :NOTICE: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23085: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23085
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23085. " } 2024-11-21T08:55:37.330525Z :INFO: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Closing read session. Close timeout: 0.000000s 2024-11-21T08:55:37.330531Z :NOTICE: [/Root] [/Root] [893bffa3-f54edb4a-e2be0326-73cdc72c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> TSubDomainTest::UserAttributes >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::UserAttributesApplyIf [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2024-11-21T08:55:38.756006Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653607722261645:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:38.756192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e3e/r3tmp/tmpKlGvOr/pdisk_1.dat 2024-11-21T08:55:38.804372Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9817 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T08:55:38.819066Z node 1 :TX_PROXY DEBUG: actor# [1:7439653607722261882:2100] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:38.819106Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653607722262146:2247] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:38.819141Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653607722261953:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:38.819149Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653607722261953:2129], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:38.819236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:38.819588Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261604:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607722262151:2248] 2024-11-21T08:55:38.819604Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261607:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607722262152:2248] 2024-11-21T08:55:38.819616Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607722261604:2049] Subscribe: subscriber# [1:7439653607722262151:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.819626Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607722261607:2052] Subscribe: subscriber# [1:7439653607722262152:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.819629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261610:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607722262153:2248] 2024-11-21T08:55:38.819632Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607722261610:2055] Subscribe: subscriber# [1:7439653607722262153:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.819643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262151:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722261604:2049] 2024-11-21T08:55:38.819648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262153:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722261610:2055] 2024-11-21T08:55:38.819651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262152:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722261607:2052] 2024-11-21T08:55:38.819654Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261604:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607722262151:2248] 2024-11-21T08:55:38.819659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722262148:2248] 2024-11-21T08:55:38.819660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261610:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607722262153:2248] 2024-11-21T08:55:38.819665Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261607:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607722262152:2248] 2024-11-21T08:55:38.819667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722262150:2248] 2024-11-21T08:55:38.819676Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653607722262147:2248][/dc-1] Set up state: owner# [1:7439653607722261953:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:38.819717Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607722262149:2248] 2024-11-21T08:55:38.819731Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653607722262147:2248][/dc-1] Path was already updated: owner# [1:7439653607722261953:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:38.819737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262151:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262148:2248], cookie# 1 2024-11-21T08:55:38.819740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262152:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262149:2248], cookie# 1 2024-11-21T08:55:38.819743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262153:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262150:2248], cookie# 1 2024-11-21T08:55:38.819748Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261604:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262151:2248], cookie# 1 2024-11-21T08:55:38.819752Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261607:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262152:2248], cookie# 1 2024-11-21T08:55:38.819759Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607722261610:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607722262153:2248], cookie# 1 2024-11-21T08:55:38.819772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262151:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722261604:2049], cookie# 1 2024-11-21T08:55:38.819775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439653607722262152:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722261607:2052], cookie# 1 2024-11-21T08:55:38.819776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607722262153:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722261610:2055], cookie# 1 2024-11-21T08:55:38.819780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722262148:2248], cookie# 1 2024-11-21T08:55:38.819784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:38.819786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722262149:2248], cookie# 1 2024-11-21T08:55:38.819789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:38.819792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607722262150:2248], cookie# 1 2024-11-21T08:55:38.819794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607722262147:2248][/dc-1] Unexpected sync response: sender# [1:7439653607722262150:2248], cookie# 1 2024-11-21T08:55:38.829348Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653607722261953:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:38.829432Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653607722261953:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Ta ... actorId: [2:7439653615302860408:2274] 2024-11-21T08:55:39.205030Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439653615302860041:2112], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179339233 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [2:7439653615302860347:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179339233 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7439653615302860347:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179339233 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2024-11-21T08:55:39.205062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 TClient::Ls request: /dc-1/USER_0 2024-11-21T08:55:39.205297Z node 2 :TX_PROXY DEBUG: actor# [2:7439653615302860018:2098] Handle TEvNavigate describe path /dc-1/USER_0 2024-11-21T08:55:39.205307Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653615302860416:2345] HANDLE EvNavigateScheme /dc-1/USER_0 2024-11-21T08:55:39.205316Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439653615302860041:2112], request# { 
ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.205326Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7439653615302860041:2112], cookie# 10 2024-11-21T08:55:39.205334Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860351:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860348:2292], cookie# 10 2024-11-21T08:55:39.205336Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860352:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860349:2292], cookie# 10 2024-11-21T08:55:39.205338Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653615302859745:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860351:2292], cookie# 10 2024-11-21T08:55:39.205339Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860353:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860350:2292], cookie# 10 2024-11-21T08:55:39.205341Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653615302859748:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860352:2292], cookie# 10 2024-11-21T08:55:39.205342Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860351:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302859745:2049], cookie# 10 2024-11-21T08:55:39.205343Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439653615302859751:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439653615302860353:2292], cookie# 10 2024-11-21T08:55:39.205344Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860352:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302859748:2052], cookie# 10 2024-11-21T08:55:39.205346Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439653615302860353:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302859751:2055], cookie# 10 2024-11-21T08:55:39.205348Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302860348:2292], cookie# 10 2024-11-21T08:55:39.205350Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:39.205351Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302860349:2292], cookie# 10 2024-11-21T08:55:39.205354Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:39.205356Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][2:7439653615302860347:2292][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439653615302860350:2292], cookie# 10 2024-11-21T08:55:39.205357Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439653615302860347:2292][/dc-1/USER_0] Unexpected sync response: sender# [2:7439653615302860350:2292], cookie# 10 2024-11-21T08:55:39.205360Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439653615302860041:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2024-11-21T08:55:39.205365Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439653615302860041:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7439653615302860347:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179339233 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:39.205371Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439653615302860041:2112], cacheItem# { Subscriber: { Subscriber: [2:7439653615302860347:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732179339233 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2024-11-21T08:55:39.205387Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653615302860417:2346], recipient# [2:7439653615302860416:2345], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:55:39.205389Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653615302860416:2345] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:55:39.205396Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653615302860416:2345] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2024-11-21T08:55:39.205445Z node 2 :TX_PROXY DEBUG: Actor# [2:7439653615302860416:2345] Handle TEvDescribeSchemeResult Forward to# [2:7439653615302860415:2344] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179339233 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179339233 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... 
(TRUNCATED)
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD]
Test command err: 2024-11-21T08:55:37.446038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653607338798967:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:37.446068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:37.448963Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653606857399277:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:37.449275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:37.449654Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653604437779403:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:37.450273Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e4d/r3tmp/tmp182CGC/pdisk_1.dat 2024-11-21T08:55:37.492163Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:37.546264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:37.546308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:37.548454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31559 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2024-11-21T08:55:37.549594Z node 1 :TX_PROXY DEBUG: actor# [1:7439653607338799186:2139] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:37.549622Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653607338799574:2408] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:37.549657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653607338799295:2198], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:37.549673Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653607338799295:2198], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:37.549716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:37.550072Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798855:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607338799579:2409] 2024-11-21T08:55:37.550097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798858:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607338799580:2409] 2024-11-21T08:55:37.550098Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607338798855:2053] Subscribe: subscriber# [1:7439653607338799579:2409], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.550117Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798861:2059] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653607338799581:2409] 2024-11-21T08:55:37.550117Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607338798858:2056] Subscribe: subscriber# [1:7439653607338799580:2409], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.550129Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653607338798861:2059] Subscribe: subscriber# [1:7439653607338799581:2409], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:37.550137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799579:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607338798855:2053] 2024-11-21T08:55:37.550143Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798855:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607338799579:2409] 2024-11-21T08:55:37.550145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799580:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607338798858:2056] 2024-11-21T08:55:37.550148Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798858:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607338799580:2409] 2024-11-21T08:55:37.550150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799581:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7439653607338798861:2059] 2024-11-21T08:55:37.550155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798861:2059] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653607338799581:2409] 2024-11-21T08:55:37.550160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607338799576:2409] 2024-11-21T08:55:37.550166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607338799577:2409] 2024-11-21T08:55:37.550177Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653607338799575:2409][/dc-1] Set up state: owner# [1:7439653607338799295:2198], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.550213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653607338799578:2409] 2024-11-21T08:55:37.550228Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653607338799575:2409][/dc-1] Path was already updated: owner# [1:7439653607338799295:2198], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:37.550236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799579:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799576:2409], cookie# 1 2024-11-21T08:55:37.550239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799580:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799577:2409], cookie# 1 2024-11-21T08:55:37.550243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799581:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799578:2409], cookie# 1 2024-11-21T08:55:37.550249Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798855:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799579:2409], cookie# 1 2024-11-21T08:55:37.550266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798858:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799580:2409], cookie# 1 2024-11-21T08:55:37.550278Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653607338798861:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653607338799581:2409], cookie# 1 2024-11-21T08:55:37.550287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799579:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338798855:2053], cookie# 1 2024-11-21T08:55:37.550290Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439653607338799580:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338798858:2056], cookie# 1 2024-11-21T08:55:37.550294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653607338799581:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338798861:2059], cookie# 1 2024-11-21T08:55:37.550306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338799576:2409], cookie# 1 2024-11-21T08:55:37.550324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:37.550335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338799577:2409], cookie# 1 2024-11-21T08:55:37.550338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:37.550343Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653607338799578:2409], cookie# 1 2024-11-21T08:55:37.550345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653607338799575:2409][/dc-1] Unexpected sync response: sender# [1:7439653607338799578:2409], cookie# 1 2024-11-21T08:55:37.558046Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653607338799295:2198], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQParti ... 
7594046644480:3 tabletId 72075186224037890 2024-11-21T08:55:39.019790Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2024-11-21T08:55:39.019796Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:55:39.019807Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T08:55:39.019814Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T08:55:39.019822Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T08:55:39.020122Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:55:39.046939Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653611274087573:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.046974Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653611274087573:2100], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.046980Z node 7 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [7:7439653611274087573:2100], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2024-11-21T08:55:39.047024Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][7:7439653615569055262:2341][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:39.047129Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261561:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [7:7439653615569055270:2341] 2024-11-21T08:55:39.047130Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261558:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [7:7439653615569055269:2341] 2024-11-21T08:55:39.047157Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439653610013261561:2053] Subscribe: subscriber# [7:7439653615569055270:2341], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:39.047157Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439653610013261558:2050] Subscribe: subscriber# [7:7439653615569055269:2341], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:39.047168Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261564:2056] Handle 
NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [7:7439653615569055271:2341] 2024-11-21T08:55:39.047171Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439653610013261564:2056] Subscribe: subscriber# [7:7439653615569055271:2341], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:39.047207Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][7:7439653615569055270:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [6:7439653610013261561:2053] 2024-11-21T08:55:39.047216Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][7:7439653615569055269:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [6:7439653610013261558:2050] 2024-11-21T08:55:39.047220Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][7:7439653615569055271:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [6:7439653610013261564:2056] 2024-11-21T08:55:39.047226Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][7:7439653615569055262:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [7:7439653615569055267:2341] 2024-11-21T08:55:39.047235Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][7:7439653615569055262:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [7:7439653615569055266:2341] 2024-11-21T08:55:39.047242Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261561:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [7:7439653615569055270:2341] 2024-11-21T08:55:39.047247Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261558:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [7:7439653615569055269:2341] 2024-11-21T08:55:39.047242Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][7:7439653615569055262:2341][/dc-1/USER_0] Set up state: owner# [7:7439653611274087573:2100], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:39.047251Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439653610013261564:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [7:7439653615569055271:2341] 2024-11-21T08:55:39.047251Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][7:7439653615569055262:2341][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [7:7439653615569055268:2341] 2024-11-21T08:55:39.047257Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [7:7439653611274087573:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2024-11-21T08:55:39.047257Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: [main][7:7439653615569055262:2341][/dc-1/USER_0] Path was already updated: owner# [7:7439653611274087573:2100], state# { 
Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:39.047268Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [7:7439653611274087573:2100], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7439653615569055262:2341] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:39.047289Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7439653611274087573:2100], cacheItem# { Subscriber: { Subscriber: [7:7439653615569055262:2341] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:39.047311Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439653615569055274:2342], recipient# [7:7439653615569055261:2340], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.047335Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653611274087573:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.047351Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439653615569055277:2343], recipient# [7:7439653615569055258:2299], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.047431Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439653611274087573:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:39.047446Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439653615569055278:2344], recipient# [7:7439653615569055276:2305], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> TSubDomainTest::CreateTabletForUnknownDomain [GOOD]
>> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped
>> Cdc::HugeKey[TopicRunner] [GOOD]
>> Cdc::HugeKeyDebezium
>> Cdc::VirtualTimestamps[YdsRunner] [GOOD]
>> Cdc::VirtualTimestamps[TopicRunner]
>> Cdc::OldImageLogDebezium [GOOD]
>> Cdc::NewImageLogDebezium
|89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest
>> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
>> TSubDomainTest::GenericCases [GOOD]
>> YdbSdkSessionsPool::PeriodicTask1 [GOOD]
>> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD]
>> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild
|89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest
|89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest
|89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD]
Test command err: 2024-11-21T08:55:38.046652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653610779707749:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:38.046846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e47/r3tmp/tmpTi9vlj/pdisk_1.dat 2024-11-21T08:55:38.091417Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27247 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2024-11-21T08:55:38.105098Z node 1 :TX_PROXY DEBUG: actor# [1:7439653610779707967:2136] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:38.105119Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653610779708337:2389] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:38.105162Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653610779708068:2190], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:38.105181Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653610779708068:2190], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:38.105254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:38.105557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707640:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653610779708342:2390] 2024-11-21T08:55:38.105572Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707643:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653610779708343:2390] 2024-11-21T08:55:38.105589Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653610779707640:2050] Subscribe: subscriber# [1:7439653610779708342:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.105590Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653610779707643:2053] Subscribe: subscriber# [1:7439653610779708343:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.105602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707646:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653610779708344:2390] 2024-11-21T08:55:38.105604Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653610779707646:2056] Subscribe: subscriber# [1:7439653610779708344:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:38.105608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708343:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779707643:2053] 2024-11-21T08:55:38.105616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708342:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779707640:2050] 2024-11-21T08:55:38.105618Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707643:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653610779708343:2390] 2024-11-21T08:55:38.105618Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708344:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779707646:2056] 2024-11-21T08:55:38.105620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707640:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439653610779708342:2390] 2024-11-21T08:55:38.105622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779708340:2390] 2024-11-21T08:55:38.105622Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707646:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653610779708344:2390] 2024-11-21T08:55:38.105628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779708339:2390] 2024-11-21T08:55:38.105639Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653610779708338:2390][/dc-1] Set up state: owner# [1:7439653610779708068:2190], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:38.105669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653610779708341:2390] 2024-11-21T08:55:38.105677Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653610779708338:2390][/dc-1] Path was already updated: owner# [1:7439653610779708068:2190], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:38.105682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708342:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708339:2390], cookie# 1 2024-11-21T08:55:38.105684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708343:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708340:2390], cookie# 1 2024-11-21T08:55:38.105686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708344:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708341:2390], cookie# 1 2024-11-21T08:55:38.105690Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707640:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708342:2390], cookie# 1 2024-11-21T08:55:38.105697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707643:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708343:2390], cookie# 1 2024-11-21T08:55:38.105699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653610779707646:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653610779708344:2390], cookie# 1 2024-11-21T08:55:38.105703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708342:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779707640:2050], cookie# 1 2024-11-21T08:55:38.105709Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439653610779708343:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779707643:2053], cookie# 1 2024-11-21T08:55:38.105726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653610779708344:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779707646:2056], cookie# 1 2024-11-21T08:55:38.105736Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779708339:2390], cookie# 1 2024-11-21T08:55:38.105741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:38.105744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779708340:2390], cookie# 1 2024-11-21T08:55:38.105746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:38.105749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653610779708341:2390], cookie# 1 2024-11-21T08:55:38.105751Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653610779708338:2390][/dc-1] Unexpected sync response: sender# [1:7439653610779708341:2390], cookie# 1 2024-11-21T08:55:38.111407Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653610779708068:2190], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:38.111465Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653610779708068:2190], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [3:7439653613921073290:2732] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:39.862067Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439653609626105190:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/table PathId: Partial: 0 } 2024-11-21T08:55:39.862077Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439653609626105190:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/table PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7439653613921073290:2732] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179339900 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T08:55:39.862086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653609626105190:2148], cacheItem# { Subscriber: { Subscriber: [3:7439653613921073290:2732] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179339900 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/table TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T08:55:39.862126Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653613921073297:2733], recipient# [3:7439653613921073289:2731], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/table TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: 
KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T08:55:39.862145Z node 3 :TX_PROXY DEBUG: Actor# [3:7439653613921073289:2731] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T08:55:39.862167Z node 3 :TX_PROXY DEBUG: Actor# [3:7439653613921073289:2731] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/table" Options { ShowPrivateTable: true } 2024-11-21T08:55:39.862328Z node 3 :TX_PROXY DEBUG: Actor# [3:7439653613921073289:2731] Handle TEvDescribeSchemeResult Forward to# [3:7439653613921073288:2730] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732179339900 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732179339900 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... 
(TRUNCATED) IsActive: /dc-1/USER_0 -- 1 -- 2 -- 1 2024-11-21T08:55:39.863124Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104849:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439653612922911259:2100] 2024-11-21T08:55:39.863129Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104846:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439653612922911258:2100] 2024-11-21T08:55:39.863134Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104849:2054] Unsubscribe: subscriber# [4:7439653612922911259:2100], path# /dc-1/USER_0 2024-11-21T08:55:39.863135Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104846:2051] Unsubscribe: subscriber# [4:7439653612922911258:2100], path# /dc-1/USER_0 2024-11-21T08:55:39.863138Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104852:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439653612922911260:2100] 2024-11-21T08:55:39.863140Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104852:2057] Unsubscribe: subscriber# [4:7439653612922911260:2100], path# /dc-1/USER_0 2024-11-21T08:55:39.863162Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2024-11-21T08:55:39.863316Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected IsActive: /dc-1/USER_0 -- 2 -- 2 2024-11-21T08:55:39.879835Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104846:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612308356460:2102] 2024-11-21T08:55:39.879858Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104846:2051] Unsubscribe: subscriber# [5:7439653612308356460:2102], path# /dc-1/USER_0 2024-11-21T08:55:39.879864Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104849:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612308356461:2102] 2024-11-21T08:55:39.879863Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439653609626104852:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612308356462:2102] 2024-11-21T08:55:39.879867Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104849:2054] Unsubscribe: subscriber# [5:7439653612308356461:2102], path# /dc-1/USER_0 2024-11-21T08:55:39.879877Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439653609626104852:2057] Unsubscribe: subscriber# [5:7439653612308356462:2102], path# /dc-1/USER_0 2024-11-21T08:55:39.879936Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2024-11-21T08:55:39.880223Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2024-11-21T08:55:18.686408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653522773380035:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:18.686596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d77/r3tmp/tmpjsHM5A/pdisk_1.dat 2024-11-21T08:55:18.755585Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 64119, node 1 2024-11-21T08:55:18.773729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:18.773740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:18.773742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:18.773780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12468 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:55:18.788539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:18.788574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:55:18.790688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:18.798600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.799404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:18.799418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.800100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:18.800160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:18.800169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T08:55:18.800522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:18.800534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:18.800888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:18.800953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.801900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179318849, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:18.801910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:18.801953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:18.802283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:18.802336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:18.802352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:18.802368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:18.802381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:18.802404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:18.802754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:18.802773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:18.802782Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:18.802795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:55:19.511515Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653528751628056:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.511787Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d77/r3tmp/tmp214oM3/pdisk_1.dat 2024-11-21T08:55:19.524958Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2122, node 4 2024-11-21T08:55:19.540158Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.540172Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T08:55:19.540173Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.540231Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:19.611731Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.611760Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.613323Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:19.614605Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.614701Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.614711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.615061Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.615115Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.615123Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T08:55:19.615440Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.615452Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:19.615621Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.615721Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.616398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319661, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.616408Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:19.616456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:19.616746Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.616792Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.616806Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:19.616821Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:19.616833Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:19.616847Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:55:19.616974Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:55:19.617000Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:19.617004Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.617019Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:55:24.511923Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653528751628056:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.511963Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:34.520451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:34.520467Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2024-11-21T08:55:35.878042Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653598692835718:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:35.878234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e5b/r3tmp/tmpjpc5lt/pdisk_1.dat 2024-11-21T08:55:35.919172Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22678 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:55:35.937399Z node 1 :TX_PROXY DEBUG: actor# [1:7439653598692835938:2137] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:35.937427Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653598692836299:2380] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:35.937483Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653598692835961:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:35.937501Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653598692835961:2151], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:35.937565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:35.937924Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835610:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653598692836304:2381] 2024-11-21T08:55:35.937933Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835613:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653598692836305:2381] 2024-11-21T08:55:35.937946Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653598692835610:2051] Subscribe: subscriber# [1:7439653598692836304:2381], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.937951Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653598692835613:2054] Subscribe: subscriber# [1:7439653598692836305:2381], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.937960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835616:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653598692836306:2381] 2024-11-21T08:55:35.937963Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653598692835616:2057] Subscribe: subscriber# [1:7439653598692836306:2381], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.937971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836305:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692835613:2054] 2024-11-21T08:55:35.937985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836304:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692835610:2051] 
2024-11-21T08:55:35.937986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835613:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653598692836305:2381] 2024-11-21T08:55:35.937989Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835610:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653598692836304:2381] 2024-11-21T08:55:35.937990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836306:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692835616:2057] 2024-11-21T08:55:35.937994Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835616:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653598692836306:2381] 2024-11-21T08:55:35.937998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692836302:2381] 2024-11-21T08:55:35.938004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692836301:2381] 2024-11-21T08:55:35.938015Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653598692836300:2381][/dc-1] Set up state: owner# [1:7439653598692835961:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.938051Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653598692836303:2381] 2024-11-21T08:55:35.938063Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653598692836300:2381][/dc-1] Path was already updated: owner# [1:7439653598692835961:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.938076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836304:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836301:2381], cookie# 1 2024-11-21T08:55:35.938084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836305:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836302:2381], cookie# 1 2024-11-21T08:55:35.938098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836306:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836303:2381], cookie# 1 2024-11-21T08:55:35.938113Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835610:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836304:2381], cookie# 1 2024-11-21T08:55:35.938124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835613:2054] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836305:2381], cookie# 1 2024-11-21T08:55:35.938133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653598692835616:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653598692836306:2381], cookie# 1 2024-11-21T08:55:35.938142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836304:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692835610:2051], cookie# 1 2024-11-21T08:55:35.938148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836305:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692835613:2054], cookie# 1 2024-11-21T08:55:35.938150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653598692836306:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692835616:2057], cookie# 1 TClient::Ls response: 2024-11-21T08:55:35.938153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692836301:2381], cookie# 1 2024-11-21T08:55:35.938156Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:35.938159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692836302:2381], cookie# 1 2024-11-21T08:55:35.938161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:35.938164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653598692836303:2381], cookie# 1 2024-11-21T08:55:35.938166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653598692836300:2381][/dc-1] Unexpected sync response: sender# [1:7439653598692836303:2381], cookie# 1 2024-11-21T08:55:35.944712Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653598692835961:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 
0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:35.944787Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653598692835961:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: ... 1T08:55:40.056105Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [4:7439653610676956124:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) }] } 2024-11-21T08:55:40.056113Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [4:7439653610676956124:2147], cacheItem# { Subscriber: { Subscriber: [4:7439653619266891830:2996] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179340000 PathId: [OwnerId: 72057594046644480, LocalPathId: 8] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:40.056120Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [4:7439653610676956124:2147], cacheItem# { Subscriber: { Subscriber: [4:7439653614971924413:2886] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732179339900 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:40.056142Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439653619266891876:3011], recipient# [4:7439653619266891874:3009], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false 
Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) }] } 2024-11-21T08:55:40.056161Z node 4 :TX_PROXY TRACE: StateWaitResolve, received event# 269746178, Sender [4:7439653619266891876:3011], Recipient [4:7439653619266891874:3009]: NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T08:55:40.056164Z node 4 :TX_PROXY TRACE: StateWaitResolve, processing event TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T08:55:40.056168Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T08:55:40.056331Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037892 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T08:55:40.056373Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037894 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T08:55:40.057850Z node 4 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [5:7439653612710762159:2293], Recipient [4:7439653619266891874:3009] 2024-11-21T08:55:40.057852Z node 4 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:55:40.057859Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037892 read size 0 out readset size 0 marker# P6 2024-11-21T08:55:40.057861Z node 4 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [5:7439653612710762332:2306], Recipient [4:7439653619266891874:3009] 2024-11-21T08:55:40.057862Z node 4 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:55:40.057864Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037894 read size 0 out readset size 0 marker# P6 2024-11-21T08:55:40.057868Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 SEND EvProposeTransaction to# 72075186224037888 Coordinator marker# P7 2024-11-21T08:55:40.058021Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [5:7439653612710761944:2269], Recipient [4:7439653619266891874:3009] 2024-11-21T08:55:40.058023Z node 
4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T08:55:40.058027Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2024-11-21T08:55:40.101260Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955782:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [5:7439653617005729820:2505] 2024-11-21T08:55:40.101263Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955785:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [5:7439653617005729821:2505] 2024-11-21T08:55:40.101265Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955782:2050] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:55:40.101266Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955785:2053] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:55:40.101285Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955785:2053] Subscribe: subscriber# [5:7439653617005729821:2505], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:40.101285Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955782:2050] Subscribe: subscriber# [5:7439653617005729820:2505], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:40.101298Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955788:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [5:7439653617005729822:2505] 2024-11-21T08:55:40.101300Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955788:2056] Upsert description: path# /dc-1/USER_0/.metadata/initialization/migrations 2024-11-21T08:55:40.101303Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955788:2056] Subscribe: subscriber# [5:7439653617005729822:2505], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:40.101370Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955785:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7439653617005729821:2505] 2024-11-21T08:55:40.101373Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955782:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7439653617005729820:2505] 2024-11-21T08:55:40.101377Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955788:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7439653617005729822:2505] 2024-11-21T08:55:40.101586Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [5:7439653612710761944:2269], Recipient [4:7439653619266891874:3009] 2024-11-21T08:55:40.101590Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T08:55:40.101595Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2024-11-21T08:55:40.103313Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [5:7439653612710762159:2293], Recipient 
[4:7439653619266891874:3009] 2024-11-21T08:55:40.103316Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:55:40.103327Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2024-11-21T08:55:40.103336Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [5:7439653612710762332:2306], Recipient [4:7439653619266891874:3009] 2024-11-21T08:55:40.103337Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:55:40.103339Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2024-11-21T08:55:40.103417Z node 4 :TX_PROXY DEBUG: Actor# [4:7439653619266891874:3009] txid# 281474976715668 MergeResult ExecComplete TDataReq marker# P17 2024-11-21T08:55:40.103447Z node 4 :TX_PROXY INFO: Actor# [4:7439653619266891874:3009] txid# 281474976715668 RESPONSE Status# ExecComplete prepare time: 0.001833s execute time: 0.045576s total time: 0.047409s marker# P13 2024-11-21T08:55:40.110037Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955785:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612710761899:2099] 2024-11-21T08:55:40.110037Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955782:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612710761898:2099] 2024-11-21T08:55:40.110048Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955782:2050] Unsubscribe: subscriber# [5:7439653612710761898:2099], path# /dc-1/USER_0 2024-11-21T08:55:40.110055Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439653610676955788:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439653612710761900:2099] 2024-11-21T08:55:40.110060Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955788:2056] Unsubscribe: subscriber# [5:7439653612710761900:2099], path# /dc-1/USER_0 2024-11-21T08:55:40.110063Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439653610676955785:2053] Unsubscribe: subscriber# [5:7439653612710761899:2099], path# /dc-1/USER_0 2024-11-21T08:55:40.110100Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2024-11-21T08:55:40.110384Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> VDiskRestart::Simple [GOOD] >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] Test command err: 2024-11-21T08:55:09.243131Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653486047233793:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.243148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003346/r3tmp/tmpaRxXoG/pdisk_1.dat 2024-11-21T08:55:09.313059Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17938, node 1 2024-11-21T08:55:09.328410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.328427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.328429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.328464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.345745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.345778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.346892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.369909Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.372264Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12575, port: 12575 2024-11-21T08:55:09.372570Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.385894Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:12575. 
Invalid credentials 2024-11-21T08:55:09.386232Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****-IcA (8DED2498) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:09.546524Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653483346964890:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.546685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003346/r3tmp/tmp67QSNp/pdisk_1.dat 2024-11-21T08:55:09.553886Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21812, node 2 2024-11-21T08:55:09.563610Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.563621Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.563623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.563658Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.618667Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.620059Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25858, port: 25858 2024-11-21T08:55:09.620092Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.627048Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.627209Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:25858 return no entries 2024-11-21T08:55:09.627303Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****49DA (C01C7BE9) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:09.648391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.648427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.649413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.811735Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653485577924571:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.811851Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003346/r3tmp/tmp7EYHww/pdisk_1.dat 2024-11-21T08:55:09.825176Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26779, node 3 2024-11-21T08:55:09.832324Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.832338Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.832340Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.832392Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.912530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.912564Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.913661Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.915237Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.916835Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18545, port: 18545 2024-11-21T08:55:09.916865Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.941402Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.989550Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:09.989807Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:09.989824Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.032355Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2024-11-21T08:55:10.076358Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.076648Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****o-mQ (E094F568) () has now valid token of ldapuser@ldap 2024-11-21T08:55:14.812201Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439653485577924571:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:14.812267Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:15.815072Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****o-mQ (E094F568) 2024-11-21T08:55:15.815134Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18545, port: 18545 2024-11-21T08:55:15.815163Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:15.840382Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:15.886297Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:15.887004Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:15.887020Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.928405Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.972563Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:15.972910Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****o-mQ (E094F568) () has now valid token of ldapuser@ldap 2024-11-21T08:55:18.816689Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****o-mQ (E094F568) 2024-11-21T08:55:18.816730Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18545, port: 18545 2024-11-21T08:55:18.816773Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:18.835195Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:18.884692Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:18.884946Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:18.884960Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:18.928623Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: 
subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:18.976509Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:18.976934Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****o-mQ (E094F568) () has now valid token of ldapuser@ldap 2024-11-21T08:55:20.420003Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653534295249591:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.420263Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003346/r3tmp/tmpwHC2St/pdisk_1.dat 2024-11-21T08:55:20.435848Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22008, node 4 2024-11-21T08:55:20.447865Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:20.447881Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:20.447884Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:20.447943Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:20.522706Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:20.522739Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:20.523782Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:20.713757Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:20.716086Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8242, port: 8242 2024-11-21T08:55:20.716129Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:20.718216Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:20.764623Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****P2VQ (65FB2B15) () has now valid token of ldapuser@ldap 2024-11-21T08:55:25.420308Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653534295249591:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:25.420367Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:25.422362Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****P2VQ (65FB2B15) 2024-11-21T08:55:25.422397Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8242, port: 8242 2024-11-21T08:55:25.422427Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:25.432520Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:25.476601Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****P2VQ (65FB2B15) () has now 
valid token of ldapuser@ldap 2024-11-21T08:55:29.424167Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****P2VQ (65FB2B15) 2024-11-21T08:55:29.424220Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8242, port: 8242 2024-11-21T08:55:29.424245Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:29.425989Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:29.468581Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****P2VQ (65FB2B15) () has now valid token of ldapuser@ldap 2024-11-21T08:55:30.940248Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653575179933483:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:30.940409Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003346/r3tmp/tmpO1PREj/pdisk_1.dat 2024-11-21T08:55:30.957044Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9967, node 5 2024-11-21T08:55:30.965954Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:30.965972Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:30.965974Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:30.966021Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:31.044002Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:31.044045Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:31.044915Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:31.135459Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:31.137522Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24950, port: 24950 2024-11-21T08:55:31.137563Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:31.149119Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:31.192422Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:31.192678Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:31.192691Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:31.240376Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), 
attributes: memberOf 2024-11-21T08:55:31.284357Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:31.284698Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****HoOQ (EB72F2A7) () has now valid token of ldapuser@ldap 2024-11-21T08:55:34.942474Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****HoOQ (EB72F2A7) 2024-11-21T08:55:34.942508Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24950, port: 24950 2024-11-21T08:55:34.942547Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:34.955132Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:34.955256Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:24950 return no entries 2024-11-21T08:55:34.955331Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****HoOQ (EB72F2A7) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:35.940423Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439653575179933483:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:35.940473Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:38.944273Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****HoOQ (EB72F2A7) >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] Test command err: 2024-11-21T08:55:09.029137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653485194121428:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.029156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmplaMHaC/pdisk_1.dat 2024-11-21T08:55:09.076567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64013, node 1 2024-11-21T08:55:09.093376Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.093389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.093390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.093429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.131007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.131038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.132961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.184031Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root 
keys 1 2024-11-21T08:55:09.186624Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24111, port: 24111 2024-11-21T08:55:09.186972Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.200059Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.244390Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:09.288815Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****3rCg (81542CE2) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.456609Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653486120678483:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.456671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmp505iJq/pdisk_1.dat 2024-11-21T08:55:09.464006Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8188, node 2 2024-11-21T08:55:09.475153Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.475169Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.475171Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.475214Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.557542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.557573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.558639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.567946Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.569400Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4235, port: 4235 2024-11-21T08:55:09.569431Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.578864Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:09.624481Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****VAsA (9B8DE2FE) () has now valid token of ldapuser@ldap 2024-11-21T08:55:09.758207Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653484365861066:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:09.758342Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmpWQvLaz/pdisk_1.dat 2024-11-21T08:55:09.769226Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31136, node 3 2024-11-21T08:55:09.779751Z node 3 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:09.779766Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:09.779768Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:09.779807Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:09.858589Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:09.858628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:09.859702Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:09.955848Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:09.958511Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7034, port: 7034 2024-11-21T08:55:09.958547Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:09.963659Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T08:55:09.963687Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:7034. Bad search filter 2024-11-21T08:55:09.963797Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****SYnw (DC170A59) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:10.235746Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653488069767960:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:10.235766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmpP8f9HX/pdisk_1.dat 2024-11-21T08:55:10.249580Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26613, node 4 2024-11-21T08:55:10.260102Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:10.260117Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:10.260119Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:10.260156Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:10.315826Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:10.317995Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1321, port: 1321 2024-11-21T08:55:10.318040Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:10.334444Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:10.335759Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:10.335796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:10.336921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:10.380490Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: 
baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:10.424423Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:10.424645Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:10.424676Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.472441Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.516370Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:10.516795Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****nZhQ (A0961BAE) () has now valid token of ldapuser@ldap 2024-11-21T08:55:14.237618Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nZhQ (A0961BAE) 2024-11-21T08:55:14.237672Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1321, port: 1321 2024-11-21T08:55:14.237698Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:14.259993Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:14.304827Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:14.348441Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:14.348627Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:14.348648Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:14.396402Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:14.444513Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:14.444980Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****nZhQ (A0961BAE) () has now valid token of ldapuser@ldap 2024-11-21T08:55:15.235967Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439653488069767960:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.236007Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:19.239818Z node 4 :TICKET_PARSER DEBUG: Refreshing 
ticket eyJh****nZhQ (A0961BAE) 2024-11-21T08:55:19.239862Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1321, port: 1321 2024-11-21T08:55:19.239880Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:19.255639Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:19.300445Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:19.348416Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:19.348620Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:19.348631Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:19.392390Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:19.436429Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:19.436853Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****nZhQ (A0961BAE) () has now valid token of ldapuser@ldap test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmpxpzHUV/pdisk_1.dat 2024-11-21T08:55:20.811775Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:20.812016Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 63739, node 5 2024-11-21T08:55:20.836438Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:20.836453Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:20.836455Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:20.836507Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:20.898845Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:20.898886Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:20.899900Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:20.911236Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:20.913388Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10439, port: 10439 2024-11-21T08:55:20.913714Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:20.929572Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:20.973289Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:21.016674Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ZXgA 
(7EDDB25C) () has now valid token of ldapuser@ldap 2024-11-21T08:55:24.810449Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****ZXgA (7EDDB25C) 2024-11-21T08:55:24.810514Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10439, port: 10439 2024-11-21T08:55:24.810538Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:24.874354Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:24.920462Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:24.964695Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ZXgA (7EDDB25C) () has now valid token of ldapuser@ldap 2024-11-21T08:55:27.811772Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****ZXgA (7EDDB25C) 2024-11-21T08:55:27.811828Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10439, port: 10439 2024-11-21T08:55:27.811860Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:27.847598Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:27.888464Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:27.936626Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ZXgA (7EDDB25C) () has now valid token of ldapuser@ldap 2024-11-21T08:55:31.179121Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653578693843724:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033a6/r3tmp/tmp1BzDju/pdisk_1.dat 2024-11-21T08:55:31.185363Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:31.207034Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21717, node 6 2024-11-21T08:55:31.224812Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:31.224825Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:31.224827Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:31.224867Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:31.258675Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T08:55:31.260296Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18528, port: 18528 2024-11-21T08:55:31.260334Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:31.284793Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:31.284844Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:31.285837Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:31.302928Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:31.348461Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:31.396380Z node 6 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T08:55:31.396571Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T08:55:31.396584Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:31.440423Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:31.484403Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T08:55:31.484813Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****3XUQ (17D31228) () has now valid token of ldapuser@ldap 2024-11-21T08:55:36.177846Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439653578693843724:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:36.177891Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:37.181405Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****3XUQ (17D31228) 2024-11-21T08:55:37.181500Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18528, port: 18528 2024-11-21T08:55:37.181544Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T08:55:37.195632Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T08:55:37.236471Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T08:55:37.236663Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:18528 return no entries 2024-11-21T08:55:37.236821Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****3XUQ (17D31228) () has now permanent error message 'Could not login via LDAP' 2024-11-21T08:55:41.183144Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****3XUQ (17D31228) >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> Cdc::DescribeStream [GOOD] >> Cdc::DropColumn |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2024-11-21T08:55:20.983690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653533157068501:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.983881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cb4/r3tmp/tmpTTnKc9/pdisk_1.dat 2024-11-21T08:55:21.132330Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16433, node 1 2024-11-21T08:55:21.145847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:21.145863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:21.145865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:21.145906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:21.170126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:21.171168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:21.171182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:21.172036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:21.172101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:21.172110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T08:55:21.172518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:21.172526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:55:21.172566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:55:21.172849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:21.173732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179321222, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:21.173744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:55:21.173829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:55:21.174190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:21.174251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:21.174265Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:55:21.174281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:55:21.174295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:55:21.174310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:55:21.174740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:55:21.174761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:55:21.174765Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:21.174779Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:55:21.296047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:21.296081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:21.297767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:25.984134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653533157068501:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:25.984170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:36.132004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:36.132021Z node 1 :IMPORT WARN: Table profiles were not loaded >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> KqpUserConstraint::KqpReadNull-UploadNull |89.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> KqpUserConstraint::KqpReadNull+UploadNull >> StatisticsSaveLoad::Simple |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2024-11-21T08:55:42.868650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:42.869004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:42.869021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00437a/r3tmp/tmp6LeHcg/pdisk_1.dat 2024-11-21T08:55:42.962391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:42.978504Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:43.020694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:43.020725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:43.031149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:43.134599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:43.391803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.391831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.391839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.392500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:55:43.558749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:55:43.626959Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yye0zewqrwb646697jwv3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg5YzYzNGQtZmRiOTY4NjEtNTZmNWIxN2UtNjFmNmE4NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2024-11-21T08:55:43.358605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:43.358995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:43.359016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004368/r3tmp/tmpBI8L82/pdisk_1.dat 2024-11-21T08:55:43.450545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:43.465830Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:43.507861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:43.507888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:43.518314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:43.621800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:43.878938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.878958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.878965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:43.879588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:55:44.045539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:55:44.122322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yyeg6az75ss0qr7h1hqr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:44.123202Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=. TraceId : 01jd6yyeg6az75ss0qr7h1hqr2. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2024-11-21T08:55:44.123601Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=. TraceId : 01jd6yyeg6az75ss0qr7h1hqr2. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2024-11-21T08:55:44.125395Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:951:2757], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=. CustomerSuppliedId : . TraceId : 01jd6yyeg6az75ss0qr7h1hqr2. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T08:55:44.126468Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=, ActorId: [1:820:2673], ActorState: ExecuteState, TraceId: 01jd6yyeg6az75ss0qr7h1hqr2, Create QueryResponse for error on request, msg: 2024-11-21T08:55:44.126749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yyeg6az75ss0qr7h1hqr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJkZmRhMzctMmVlZmRiZDItNzA2YTU3YmYtODM2M2RiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTxLocatorTest::Boot |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |89.8%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxLocatorTest::Boot [GOOD] >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::TestImposibleSize [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2024-11-21T08:55:44.697845Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:44.697905Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:44.697993Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:44.698304Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.698384Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:44.699654Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.699664Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.699670Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.699679Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:44.699696Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.699705Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:44.699716Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |89.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |89.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |89.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.8%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2024-11-21T08:55:44.974940Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:44.975006Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:44.975109Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:44.975455Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.975560Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:44.976955Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.976967Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.976973Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.976982Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:44.977005Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.977015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:44.977027Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:44.977112Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710656 2024-11-21T08:55:44.977134Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2024-11-21T08:55:44.977493Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T08:55:44.977531Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2106] requested range size#123456 2024-11-21T08:55:44.977585Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.977590Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.977597Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2024-11-21T08:55:44.977600Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2106] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2024-11-21T08:55:44.977625Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#281474976587200 2024-11-21T08:55:44.977647Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2024-11-21T08:55:44.977649Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T08:55:44.977670Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#246912 2024-11-21T08:55:44.977690Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.977693Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:44.977700Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2024-11-21T08:55:44.977703Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2024-11-21T08:55:44.977726Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#281474976340288 2024-11-21T08:55:44.977730Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2024-11-21T08:55:44.977732Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2024-11-21T08:55:45.404557Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 
LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:45.404616Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:45.404702Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:45.405007Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.405103Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:45.406618Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.406633Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.406639Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.406648Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:45.406684Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.406695Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:45.406709Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:45.406861Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T08:55:45.406928Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T08:55:45.406962Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T08:55:45.406990Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T08:55:45.407005Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T08:55:45.407023Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407032Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T08:55:45.407042Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407061Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407071Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407077Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 
2024-11-21T08:55:45.407089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407096Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2024-11-21T08:55:45.407110Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407118Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T08:55:45.407130Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407138Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407148Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407154Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T08:55:45.407164Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407173Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2024-11-21T08:55:45.407176Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T08:55:45.407185Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407192Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T08:55:45.407194Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T08:55:45.407201Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T08:55:45.407203Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T08:55:45.407209Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407213Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407218Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T08:55:45.407220Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T08:55:45.407228Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407233Z node 1 
:TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T08:55:45.407235Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T08:55:45.407252Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407258Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407262Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T08:55:45.407264Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T08:55:45.407272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T08:55:45.407274Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T08:55:45.407280Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407286Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407290Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T08:55:45.407292Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T08:55:45.407300Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407303Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T08:55:45.407305Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T08:55:45.407312Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407316Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T08:55:45.407318Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T08:55:45.407795Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#100000 2024-11-21T08:55:45.407845Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#100000 2024-11-21T08:55:45.407854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#100000 
2024-11-21T08:55:45.407864Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407875Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407895Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#100000 2024-11-21T08:55:45.407914Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2024-11-21T08:55:45.407931Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407935Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.407951Z node 1 :TX_ALLOCATOR DEBUG: tablet# ... from# 8200000 Reserved to# 8300000 2024-11-21T08:55:45.412416Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:393:2427] TEvAllocateResult from# 8200000 to# 8300000 2024-11-21T08:55:45.412423Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412429Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2024-11-21T08:55:45.412431Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:395:2429] TEvAllocateResult from# 8300000 to# 8400000 2024-11-21T08:55:45.412437Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412443Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2024-11-21T08:55:45.412445Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8400000 to# 8500000 2024-11-21T08:55:45.412450Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412455Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2024-11-21T08:55:45.412459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8500000 to# 8600000 2024-11-21T08:55:45.412466Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2024-11-21T08:55:45.412468Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8600000 to# 8700000 2024-11-21T08:55:45.412474Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412479Z node 1 :TABLET_MAIN DEBUG: Put Result: 
TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412483Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2024-11-21T08:55:45.412485Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8700000 to# 8800000 2024-11-21T08:55:45.412492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2024-11-21T08:55:45.412494Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8800000 to# 8900000 2024-11-21T08:55:45.412498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2024-11-21T08:55:45.412500Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T08:55:45.412703Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:429:2463] requested range size#100000 2024-11-21T08:55:45.412726Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2024-11-21T08:55:45.412746Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2024-11-21T08:55:45.412754Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412770Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412780Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2024-11-21T08:55:45.412800Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412816Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2024-11-21T08:55:45.412824Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412835Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2024-11-21T08:55:45.412851Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412866Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2024-11-21T08:55:45.412881Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412891Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2024-11-21T08:55:45.412898Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412902Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412911Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412919Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2024-11-21T08:55:45.412926Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412935Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412945Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2024-11-21T08:55:45.412947Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:429:2463] TEvAllocateResult from# 9000000 to# 9100000 2024-11-21T08:55:45.412952Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412960Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2024-11-21T08:55:45.412967Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412975Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T08:55:45.412977Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T08:55:45.412981Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.412988Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T08:55:45.412990Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T08:55:45.413003Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T08:55:45.413005Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T08:55:45.413009Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.413020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 
9500000 2024-11-21T08:55:45.413022Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T08:55:45.413027Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.413033Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T08:55:45.413035Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T08:55:45.413039Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.413046Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.413052Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T08:55:45.413054Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T08:55:45.413061Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T08:55:45.413063Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T08:55:45.413067Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.413074Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T08:55:45.413076Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T08:55:45.413082Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T08:55:45.413084Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::DisableStream >> TTxLocatorTest::TestZeroRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2024-11-21T08:55:45.688193Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:45.688278Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:45.688360Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:45.688664Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.688750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:45.690072Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690084Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690098Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:45.690117Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690126Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:45.690137Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:45.690223Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#8796093022207 2024-11-21T08:55:45.690283Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690287Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690294Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2024-11-21T08:55:45.690297Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2024-11-21T08:55:45.690673Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#8796093022207 2024-11-21T08:55:45.690724Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690732Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690741Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2024-11-21T08:55:45.690743Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2024-11-21T08:55:45.690770Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2024-11-21T08:55:45.690796Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2024-11-21T08:55:45.690800Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690806Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2024-11-21T08:55:45.690809Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2024-11-21T08:55:45.690831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2024-11-21T08:55:45.690845Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690849Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2024-11-21T08:55:45.690857Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2024-11-21T08:55:45.690877Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2024-11-21T08:55:45.690894Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690898Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690902Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2024-11-21T08:55:45.690904Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2024-11-21T08:55:45.690927Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2024-11-21T08:55:45.690941Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690945Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690949Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2024-11-21T08:55:45.690951Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2024-11-21T08:55:45.690972Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2024-11-21T08:55:45.690987Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2024-11-21T08:55:45.690991Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.690995Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2024-11-21T08:55:45.690997Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2024-11-21T08:55:45.691023Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2024-11-21T08:55:45.691041Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691046Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691051Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2024-11-21T08:55:45.691053Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2024-11-21T08:55:45.691075Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2024-11-21T08:55:45.691091Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691095Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691100Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2024-11-21T08:55:45.691102Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2024-11-21T08:55:45.691128Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2024-11-21T08:55:45.691145Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691150Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691154Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2024-11-21T08:55:45.691156Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2024-11-21T08:55:45.691180Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2024-11-21T08:55:45.691195Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2024-11-21T08:55:45.691200Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691205Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2024-11-21T08:55:45.691207Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:109:2143] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2024-11-21T08:55:45.691245Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#8796093022207 2024-11-21T08:55:45.691267Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:149:2183] requested range size#8796093022207 2024-11-21T08:55:45.691812Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691816Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691821Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2024-11-21T08:55:45.691824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:149:2183] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2024-11-21T08:55:45.691854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:153:2187] requested range size#8796093022207 2024-11-21T08:55:45.691868Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691873Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691877Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2024-11-21T08:55:45.691880Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:153:2187] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2024-11-21T08:55:45.691908Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:157:2191] requested range size#8796093022207 2024-11-21T08:55:45.691922Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691928Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691932Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2024-11-21T08:55:45.691934Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2024-11-21T08:55:45.691964Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2024-11-21T08:55:45.691982Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691987Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.691991Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2024-11-21T08:55:45.691993Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2024-11-21T08:55:45.692024Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2024-11-21T08:55:45.692039Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692043Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2024-11-21T08:55:45.692050Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2024-11-21T08:55:45.692081Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2024-11-21T08:55:45.692095Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692101Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692107Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2024-11-21T08:55:45.692109Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2024-11-21T08:55:45.692140Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2024-11-21T08:55:45.692157Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692161Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692166Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2024-11-21T08:55:45.692168Z node 1 :TX_ALLOCATOR DEBUG: 
tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2024-11-21T08:55:45.692200Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2024-11-21T08:55:45.692245Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692254Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692261Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2024-11-21T08:55:45.692263Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2024-11-21T08:55:45.692303Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2024-11-21T08:55:45.692322Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692326Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2024-11-21T08:55:45.692332Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2024-11-21T08:55:45.692367Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2024-11-21T08:55:45.692390Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692400Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2024-11-21T08:55:45.692403Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2024-11-21T08:55:45.692438Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2024-11-21T08:55:45.692457Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692461Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692466Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2024-11-21T08:55:45.692468Z node 1 :TX_ALLOCATOR 
DEBUG: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2024-11-21T08:55:45.692508Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2024-11-21T08:55:45.692522Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692527Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692531Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2024-11-21T08:55:45.692533Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2024-11-21T08:55:45.692568Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#31 2024-11-21T08:55:45.692582Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692587Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:45.692591Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2024-11-21T08:55:45.692593Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2024-11-21T08:55:45.692628Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#1 2024-11-21T08:55:45.692633Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T08:55:45.692639Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> TTxLocatorTest::TestZeroRange [GOOD] >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> TTxLocatorTest::TestAllocateAll ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2024-11-21T08:55:46.137893Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:46.137955Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:46.138046Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:46.138404Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.138494Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:46.140070Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140083Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140098Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:46.140119Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:46.140149Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:46.140256Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#0 2024-11-21T08:55:46.140322Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.140335Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2024-11-21T08:55:46.140338Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 0 expected SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2024-11-21T08:55:26.667171Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1732179326667164 2024-11-21T08:55:26.766882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653557912506443:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:26.767139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:26.770721Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653559379409611:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032c0/r3tmp/tmpOdnXwb/pdisk_1.dat 2024-11-21T08:55:26.801899Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:26.803200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:26.804666Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:26.825482Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1586, node 1 2024-11-21T08:55:26.840957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0032c0/r3tmp/yandexarZNqv.tmp 2024-11-21T08:55:26.840971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0032c0/r3tmp/yandexarZNqv.tmp 2024-11-21T08:55:26.841043Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0032c0/r3tmp/yandexarZNqv.tmp 2024-11-21T08:55:26.841097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:26.845896Z INFO: TTestServer started on Port 4413 GrpcPort 1586 TClient is connected to server localhost:4413 PQClient connected to localhost:1586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:55:26.867329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:26.867363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:26.868888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:26.897247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:26.897282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:26.898225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:26.899052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:26.899396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:55:27.067350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653563674377195:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:27.067372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653563674377190:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:27.067389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:27.068737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:27.073596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653563674377204:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:55:27.100557Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653562207474687:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:27.100641Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDgzZDI0ODctNzEzMmMzNzgtY2U5MDg1NDQtNGZlODQ1MzE=, ActorId: [1:7439653562207474639:2299], ActorState: ExecuteState, TraceId: 01jd6yxy2z3p6187pk51xgwg3f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:27.101207Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:27.101379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:27.161559Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653563674377294:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:27.161676Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODJlMTQ0MDctNGYwYTYyOGQtOWIzZWRjNWQtYjdiYjBlMzI=, ActorId: [2:7439653563674377188:2277], ActorState: ExecuteState, TraceId: 01jd6yxy2t0kj6t361mt5awpz9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:27.161981Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:27.166002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:27.228770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:1586", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:27.308147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd6yxy9tfqkbded2yb4bptca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU2NDNlNjEtYjIyZmQwMTQtOTdlNWJhOWEtNTUyYzg4OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653562207475095:2926] 2024-11-21T08:55:31.767412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653557912506443:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:31.767449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:31.770981Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653559379409611:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:31.771007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:33.291550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:1586 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:33.304079Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:1586 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... tId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:20257 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:41.628951Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:20257 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:42.130121Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:20257 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:42.631830Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T08:55:42.633153Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T08:55:42.633377Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T08:55:42.633382Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:20257 2024-11-21T08:55:42.633758Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:42.633938Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:55:42.633955Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T08:55:42.634079Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T08:55:42.634112Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:49236 2024-11-21T08:55:42.634118Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49236 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:55:42.634121Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:55:42.634540Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T08:55:42.634573Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:55:42.634581Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:55:42.634584Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:42.634590Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:42.635063Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:42.649966Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T08:55:42.650025Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653628761519480:2481] connected; active server actors: 1 2024-11-21T08:55:42.650037Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T08:55:42.650047Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T08:55:42.650101Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653628761519480:2481] disconnected; active server actors: 1 
2024-11-21T08:55:42.650109Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653628761519480:2481] disconnected no session 2024-11-21T08:55:42.660996Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T08:55:42.661011Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T08:55:42.661014Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439653628761519450:2481] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T08:55:42.661023Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:55:42.661302Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:42.661335Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T08:55:42.661324Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439653628761519498:2481], now have 1 active actors on pipe 2024-11-21T08:55:42.661401Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:42.661413Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:42.661447Z node 4 :PERSQUEUE INFO: new Cookie src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:55:42.661487Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:55:42.661513Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:42.661678Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:42.661687Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:42.661707Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:42.661792Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 2024-11-21T08:55:42.662214Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179342662 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:42.662264Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|3918ec9b-38ceff28-e6225dbd-8d82f755_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:42.662427Z :INFO: [] MessageGroupId [src] SessionId [src|3918ec9b-38ceff28-e6225dbd-8d82f755_0] Write session: close. 
Timeout = 0 ms 2024-11-21T08:55:42.662436Z :INFO: [] MessageGroupId [src] SessionId [src|3918ec9b-38ceff28-e6225dbd-8d82f755_0] Write session will now close 2024-11-21T08:55:42.662442Z :DEBUG: [] MessageGroupId [src] SessionId [src|3918ec9b-38ceff28-e6225dbd-8d82f755_0] Write session: aborting 2024-11-21T08:55:42.662600Z :INFO: [] MessageGroupId [src] SessionId [src|3918ec9b-38ceff28-e6225dbd-8d82f755_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:42.662605Z :DEBUG: [] MessageGroupId [src] SessionId [src|3918ec9b-38ceff28-e6225dbd-8d82f755_0] Write session: destroy 2024-11-21T08:55:42.662772Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 grpc read done: success: 0 data: 2024-11-21T08:55:42.662780Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 grpc read failed 2024-11-21T08:55:42.663068Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected Session was created 2024-11-21T08:55:42.662783Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 grpc closed 2024-11-21T08:55:42.663088Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653628761519498:2481] destroyed 2024-11-21T08:55:42.662787Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|3918ec9b-38ceff28-e6225dbd-8d82f755_0 is DEAD 2024-11-21T08:55:42.662993Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:42.663092Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >>> Ready to answer: ok 2024-11-21T08:55:42.733667Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: >> TTxLocatorTest::TestAllocateAll [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> StatisticsSaveLoad::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2024-11-21T08:55:46.493952Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:55:46.494016Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:55:46.494113Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:55:46.494498Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.494600Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:55:46.495919Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.495930Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.495936Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.495945Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:55:46.495967Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.495978Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:55:46.495991Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:55:46.496069Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710655 2024-11-21T08:55:46.496131Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.496135Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:55:46.496142Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2024-11-21T08:55:46.496146Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2024-11-21T08:55:46.496530Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#1 2024-11-21T08:55:46.496559Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T08:55:46.496561Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2024-11-21T08:55:43.581483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:43.581522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:43.581530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f3/r3tmp/tmpWOjOAA/pdisk_1.dat 2024-11-21T08:55:43.654670Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62661, node 1 2024-11-21T08:55:43.746337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:43.746351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:43.746354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:43.746416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:43.750690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:43.826368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:43.826406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:43.837932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26542 2024-11-21T08:55:44.236532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:44.976111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:44.976146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.008757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:45.009728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.053862Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:45.060180Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:55:45.060202Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:55:45.065012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:55:45.065134Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:55:45.065154Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:55:45.065159Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:55:45.065165Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:55:45.065171Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:55:45.065176Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:55:45.065182Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:55:45.065302Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:55:45.239520Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.239542Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.240454Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:55:45.241864Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:55:45.241943Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:55:45.242491Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:55:45.245939Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:55:45.245950Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:55:45.245958Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:55:45.247406Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:45.247449Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.248801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:55:45.250225Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:55:45.250249Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:55:45.252440Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:55:45.264019Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.286089Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:55:45.398904Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:55:45.565777Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:55:46.258609Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:55:46.258726Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:55:46.261253Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:55:46.262175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2159:3034], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.262191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2175:3040], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.262200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.263443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2024-11-21T08:55:46.272280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2180:3043], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:46.495947Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2290:3084]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.495994Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:55:46.496003Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2292:3086] 2024-11-21T08:55:46.496013Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2292:3086] 2024-11-21T08:55:46.496174Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2293:2800] 2024-11-21T08:55:46.496277Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2292:3086], server id = [2:2293:2800], tablet id = 72075186224037897, status = OK 2024-11-21T08:55:46.496344Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2293:2800], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:55:46.496365Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:55:46.496414Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:46.496427Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2290:3084], StatRequests.size() = 1 2024-11-21T08:55:46.533014Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=N2RjN2M0NTAtODg4ZmU3NTEtYzBiY2I3NDEtNDQzYTk3OGI=, TxId: 2024-11-21T08:55:46.533042Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=N2RjN2M0NTAtODg4ZmU3NTEtYzBiY2I3NDEtNDQzYTk3OGI=, TxId: 2024-11-21T08:55:46.533258Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:55:46.533689Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T08:55:46.536080Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2321:3107]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.536112Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:55:46.536116Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2321:3107], StatRequests.size() = 1 2024-11-21T08:55:46.568360Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZWFhM2MzYzEtNGY2NzUxYS02Y2RlOTU4Ni05ODE5ZWEyYw==, TxId: 01jd6yyh3h7t9wd4hj970nq2n8 2024-11-21T08:55:46.568402Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZWFhM2MzYzEtNGY2NzUxYS02Y2RlOTU4Ni05ODE5ZWEyYw==, TxId: 01jd6yyh3h7t9wd4hj970nq2n8 2024-11-21T08:55:46.569253Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:55:46.569800Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T08:55:46.582020Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NjYxOWFmZDktM2IwNjc4MjMtY2NiMzE3MTgtNzQzODE1YmY=, TxId: 01jd6yyh4a57e5cvxqgt64gfg6 2024-11-21T08:55:46.582050Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NjYxOWFmZDktM2IwNjc4MjMtY2NiMzE3MTgtNzQzODE1YmY=, TxId: 01jd6yyh4a57e5cvxqgt64gfg6 |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> StatisticsSaveLoad::Delete [GOOD] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.8%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSolomonReboots::AdoptDropSolomonWithReboots |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2024-11-21T08:55:43.877798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:43.877837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:43.877845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046de/r3tmp/tmpxF0pSm/pdisk_1.dat 2024-11-21T08:55:43.949716Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2430, node 1 2024-11-21T08:55:44.044810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:44.044839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:44.044845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:44.044979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:44.052183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:44.127918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:44.127948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:44.139201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17191 2024-11-21T08:55:44.539511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:45.294823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:45.294853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.328791Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:45.329581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.376632Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:45.383693Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:55:45.383714Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:55:45.389192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:55:45.389296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:55:45.389311Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:55:45.389314Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:55:45.389318Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:55:45.389323Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:55:45.389326Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:55:45.389330Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:55:45.389424Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:55:45.563947Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.563972Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.564895Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:55:45.566477Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:55:45.566566Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:55:45.567148Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:55:45.570584Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:55:45.570594Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:55:45.570602Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:55:45.571988Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:45.572023Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.573074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:55:45.574288Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:55:45.574310Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:55:45.576147Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:55:45.587918Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.609586Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:55:45.719770Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:55:45.875063Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:55:46.580399Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:55:46.580503Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:55:46.582296Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:55:46.582992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2159:3034], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.583009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2175:3040], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.583017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.584232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2024-11-21T08:55:46.591281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2180:3043], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:55:46.841444Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2291:3087]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.841512Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:55:46.841521Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2293:3089] 2024-11-21T08:55:46.841530Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2293:3089] 2024-11-21T08:55:46.841674Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2294:2798] 2024-11-21T08:55:46.841725Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2293:3089], server id = [2:2294:2798], tablet id = 72075186224037897, status = OK 2024-11-21T08:55:46.841765Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2294:2798], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:55:46.841779Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:55:46.841815Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:46.841820Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2291:3087], StatRequests.size() = 1 2024-11-21T08:55:46.876887Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YWViZDI0ZmEtY2E2Y2M2MzQtYjViNWQxMjItOTY3ZDUzZjY=, TxId: 2024-11-21T08:55:46.876910Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YWViZDI0ZmEtY2E2Y2M2MzQtYjViNWQxMjItOTY3ZDUzZjY=, TxId: 2024-11-21T08:55:46.877152Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:55:46.877533Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:55:46.879519Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2322:3110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.879562Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:55:46.879567Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2322:3110], StatRequests.size() = 1 2024-11-21T08:55:46.902552Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjIzMzQ1N2MtODI3ODNhZTItYzhmNTEyMWEtYjMwZTZiZmQ=, TxId: 2024-11-21T08:55:46.902574Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MjIzMzQ1N2MtODI3ODNhZTItYzhmNTEyMWEtYjMwZTZiZmQ=, TxId: 2024-11-21T08:55:46.902814Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:55:46.903172Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T08:55:46.905269Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2354:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.905308Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:55:46.905313Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2354:3126], StatRequests.size() = 1 2024-11-21T08:55:46.925481Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NzVlMmUyZjAtOTZkOGQxNDEtNTdhODc2MzItYWJiOTg5Nzc=, TxId: 01jd6yyhf117tdg3k1q8f9p3cc 2024-11-21T08:55:46.925523Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=NzVlMmUyZjAtOTZkOGQxNDEtNTdhODc2MzItYWJiOTg5Nzc=, TxId: 01jd6yyhf117tdg3k1q8f9p3cc >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2024-11-21T08:55:32.806997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653583242251004:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:32.807180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ec5/r3tmp/tmpADillT/pdisk_1.dat 2024-11-21T08:55:32.851488Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25488 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:55:32.876056Z node 1 :TX_PROXY DEBUG: actor# [1:7439653583242251225:2137] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:32.876080Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653583242251592:2388] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:32.876127Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653583242251323:2192], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:32.876141Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653583242251323:2192], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:32.876198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:32.876549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250897:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653583242251597:2389] 2024-11-21T08:55:32.876553Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250900:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653583242251598:2389] 2024-11-21T08:55:32.876573Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653583242250900:2054] Subscribe: subscriber# [1:7439653583242251598:2389], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:32.876573Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653583242250897:2051] Subscribe: subscriber# [1:7439653583242251597:2389], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:32.876584Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7439653583242250903:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653583242251599:2389] 2024-11-21T08:55:32.876587Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653583242250903:2057] Subscribe: subscriber# [1:7439653583242251599:2389], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:32.876604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251597:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242250897:2051] 2024-11-21T08:55:32.876612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251598:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242250900:2054] 2024-11-21T08:55:32.876614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251599:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242250903:2057] 2024-11-21T08:55:32.876618Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250897:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653583242251597:2389] 2024-11-21T08:55:32.876619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242251594:2389] 2024-11-21T08:55:32.876624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242251595:2389] 2024-11-21T08:55:32.876630Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250900:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653583242251598:2389] 2024-11-21T08:55:32.876632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250903:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653583242251599:2389] 2024-11-21T08:55:32.876633Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653583242251593:2389][/dc-1] Set up state: owner# [1:7439653583242251323:2192], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:32.876655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653583242251596:2389] 2024-11-21T08:55:32.876675Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653583242251593:2389][/dc-1] Path was already updated: owner# [1:7439653583242251323:2192], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:32.876684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439653583242251597:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251594:2389], cookie# 1 2024-11-21T08:55:32.876688Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251598:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251595:2389], cookie# 1 2024-11-21T08:55:32.876691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251599:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251596:2389], cookie# 1 2024-11-21T08:55:32.876797Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250897:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251597:2389], cookie# 1 2024-11-21T08:55:32.876806Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250900:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251598:2389], cookie# 1 2024-11-21T08:55:32.876808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653583242250903:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653583242251599:2389], cookie# 1 2024-11-21T08:55:32.876817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251597:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242250897:2051], cookie# 1 2024-11-21T08:55:32.876819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251598:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242250900:2054], cookie# 1 2024-11-21T08:55:32.876820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653583242251599:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242250903:2057], cookie# 1 2024-11-21T08:55:32.876824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242251594:2389], cookie# 1 2024-11-21T08:55:32.876828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:32.876830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242251595:2389], cookie# 1 2024-11-21T08:55:32.876832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:32.876835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653583242251596:2389], cookie# 1 2024-11-21T08:55:32.876837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653583242251593:2389][/dc-1] Unexpected sync response: sender# [1:7439653583242251596:2389], cookie# 1 2024-11-21T08:55:32.883466Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653583242251323:2192], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 
72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:32.883521Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653583242251323:2192], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 
0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.257056Z node 13 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [13:7439653644333456509:2102], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2024-11-21T08:55:47.257112Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:47.257212Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439653648628423891:2129] 2024-11-21T08:55:47.257228Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439653648628423892:2129] 2024-11-21T08:55:47.257241Z node 13 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [13:7439653644333456509:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:47.257250Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439653648628423893:2129] 2024-11-21T08:55:47.257259Z node 13 :SCHEME_BOARD_SUBSCRIBER INFO: [main][13:7439653648628423890:2129][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [13:7439653644333456509:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:47.257274Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [13:7439653644333456509:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 } 2024-11-21T08:55:47.257296Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [13:7439653644333456509:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [13:7439653648628423890:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T08:55:47.257320Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7439653644333456509:2102], cacheItem# { Subscriber: { Subscriber: [13:7439653648628423890:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:47.257345Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7439653648628423897:2130], recipient# [13:7439653648628423889:2278], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.257402Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:47.260074Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7439653645008725673:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.260122Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [14:7439653645008725673:2102], cacheItem# { Subscriber: { Subscriber: [14:7439653645008725778:2158] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:47.260152Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [14:7439653649303693254:2253], recipient# [14:7439653649303693253:2297], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.540668Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7439653645008725673:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.540709Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [14:7439653645008725673:2102], cacheItem# { Subscriber: { Subscriber: [14:7439653645008725835:2194] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:47.540733Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [14:7439653649303693256:2254], recipient# [14:7439653649303693255:2298], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.541158Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7439653644333456509:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.541191Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7439653644333456509:2102], cacheItem# { Subscriber: { Subscriber: [13:7439653644333456556:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:47.541210Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7439653648628423899:2131], recipient# [13:7439653648628423898:2279], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.571893Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7439653643097896849:2104], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:47.571935Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7439653643097896849:2104], cacheItem# { Subscriber: { Subscriber: [11:7439653643097896884:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 
} Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:55:47.571961Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7439653647392864294:2177], recipient# [11:7439653647392864293:2281], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> StatisticsSaveLoad::ForbidAccess [GOOD] |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2024-11-21T08:55:44.087836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:44.087880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:44.087892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046ce/r3tmp/tmpeIkn4B/pdisk_1.dat 2024-11-21T08:55:44.164592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18017, node 1 2024-11-21T08:55:44.254758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:44.254776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:44.254779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:44.254840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:44.258857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:44.334122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:44.334159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:44.345479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6824 2024-11-21T08:55:44.742947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:45.520943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:45.520973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.553468Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:45.554138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.601252Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:45.609600Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:55:45.609618Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:55:45.614726Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:55:45.614846Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:55:45.614859Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:55:45.614862Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:55:45.614866Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:55:45.614870Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:55:45.614873Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:55:45.614877Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:55:45.614944Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:55:45.786512Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.786538Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:55:45.787289Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T08:55:45.788647Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T08:55:45.788945Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T08:55:45.789504Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:55:45.793336Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:55:45.793348Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:55:45.793356Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:55:45.794116Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:45.794158Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:45.795802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:55:45.796992Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:55:45.797014Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:55:45.799104Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:55:45.810660Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:45.832246Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:55:45.942714Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:55:46.128819Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:55:46.674927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2141:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.674955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.677549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:55:46.815439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2429:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.815469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.815887Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2434:3073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:46.815927Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:55:46.815934Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2436:3075] 2024-11-21T08:55:46.815941Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2436:3075] 2024-11-21T08:55:46.816065Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2437:2943] 2024-11-21T08:55:46.816127Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2436:3075], server id = [2:2437:2943], tablet id = 72075186224037897, status = OK 2024-11-21T08:55:46.816155Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2437:2943], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:55:46.816168Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:55:46.816202Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:46.816223Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2434:3073], StatRequests.size() = 1 2024-11-21T08:55:46.817913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2441:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.817929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.817989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2446:3084], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:46.818987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:55:46.978983Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:55:46.979010Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:55:47.050891Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2436:3075], schemeshard count = 1 2024-11-21T08:55:47.295368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2448:3086], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:55:47.393937Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2589:3173]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:47.393974Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:55:47.393979Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2589:3173], StatRequests.size() = 1 2024-11-21T08:55:47.401093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yyhby13t2722gqv507qx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNhZTY5YTctZDRhYmQ2YjMtY2I3YTMzZDctZmYwNGRiMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:47.441073Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2664:3204], for# user@builtin, access# DescribeSchema 2024-11-21T08:55:47.441091Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2664:3204], for# user@builtin, access# DescribeSchema 2024-11-21T08:55:47.442153Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2654:3200], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:47.442393Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJjZGNmNy1lNGNkOGU3My1lYzYyZmQ5Yi03NmM0N2IyNA==, ActorId: [1:2645:3192], ActorState: ExecuteState, TraceId: 01jd6yyhze3k7wwvb7nscp0gye, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight |89.8%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TConsistentOpsWithReboots::Fake [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon |89.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::Fake [GOOD] >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.8%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T08:54:43.001147Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732179283001142 2024-11-21T08:54:43.103533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653374282781801:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.103789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.106007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653375331543608:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:43.106250Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:54:43.126716Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:54:43.129865Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00468e/r3tmp/tmpSVXfK0/pdisk_1.dat 2024-11-21T08:54:43.152845Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10367, node 1 2024-11-21T08:54:43.162863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00468e/r3tmp/yandexbHYiA8.tmp 2024-11-21T08:54:43.162874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00468e/r3tmp/yandexbHYiA8.tmp 2024-11-21T08:54:43.166470Z INFO: TTestServer started on Port 13905 GrpcPort 10367 2024-11-21T08:54:43.169524Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00468e/r3tmp/yandexbHYiA8.tmp 2024-11-21T08:54:43.169590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13905 PQClient connected to localhost:10367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:54:43.203681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.203710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.205422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:43.232049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:43.232081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:43.232669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.233513Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:43.233781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:54:43.363696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374282782697:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.363730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.363774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653374282782709:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:43.364424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:54:43.371879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653374282782711:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:54:43.396588Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653375331543919:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.396717Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGY0MDk0MTUtYTRmYjE5ZjQtYTY1ZjYxMWMtNzE1ZjE2NmM=, ActorId: [2:7439653375331543879:2277], ActorState: ExecuteState, TraceId: 01jd6ywkdg397fe8yqvfake22b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.396736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.397389Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.443624Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653374282782875:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:54:43.443698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJiMmY4ZDEtYTM2MDNiMjgtM2JkZGI1NWYtYTlhM2RlNTI=, ActorId: [1:7439653374282782679:2299], ActorState: ExecuteState, TraceId: 01jd6ywkd2dtn1h9jvef3zg3g5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:54:43.443935Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:54:43.461716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:54:43.526609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10367", true, true, 1000); 2024-11-21T08:54:43.606725Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6ywkm4chcygcaebb3assc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk3ZTBkYjQtZWViYjUxOGItNmViY2UxNWEtMWM1MWJmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653374282783181:2925] 2024-11-21T08:54:48.103801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653374282781801:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.103869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:54:48.106344Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653375331543608:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:54:48.106372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:54:49.606795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720681:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:10367 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:54:49.620030Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:10367 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" ... sionId: ydb://session/3?node_id=13&id=M2E3YzNlMGUtZGYyMzBkMzMtNThlOWMyMTUtOWNlNjZiNGM=, ActorId: [13:7439653647873514547:2536], ActorState: ExecuteState, TraceId: 01jd6yyj308se2nr6v28absdth, Create QueryResponse for error on request, msg: 2024-11-21T08:55:47.734916Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yyj3pehweks105tf3jam6" } } YdbStatus: UNAVAILABLE ConsumedRu: 14 } 2024-11-21T08:55:48.377339Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2024-11-21T08:55:48.378456Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:14584" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2024-11-21T08:55:48.378467Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Start write session. 
Will connect to endpoint: localhost:14584 2024-11-21T08:55:48.378853Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:55:48.378911Z node 13 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:55:48.378944Z node 13 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T08:55:48.379115Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T08:55:48.379138Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:33930 2024-11-21T08:55:48.379142Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:33930 proto=v1 topic=test-topic durationSec=0 2024-11-21T08:55:48.379146Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:55:48.379630Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T08:55:48.379679Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T08:55:48.379687Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:55:48.379688Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T08:55:48.379694Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653652168481954:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T08:55:48.380156Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439653652168481954:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T08:55:48.536846Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715702. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:55:48.536898Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439653652168481967:2548] TxId: 281474976715702. Ctx: { TraceId: 01jd6yyjww7nawns8ef9hvb16a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NmNlNjFiYzktZjFhNGI3MmItZjNmZThjMTAtOWIyOWNlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T08:55:48.537004Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NmNlNjFiYzktZjFhNGI3MmItZjNmZThjMTAtOWIyOWNlY2Y=, ActorId: [13:7439653652168481955:2548], ActorState: ExecuteState, TraceId: 01jd6yyjww7nawns8ef9hvb16a, Create QueryResponse for error on request, msg: 2024-11-21T08:55:48.537272Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7439653652168481954:2546] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NmNlNjFiYzktZjFhNGI3MmItZjNmZThjMTAtOWIyOWNlY2Y=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yyjww7nawns8efaqvt8ft" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T08:55:48.537343Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NmNlNjFiYzktZjFhNGI3MmItZjNmZThjMTAtOWIyOWNlY2Y=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yyjww7nawns8efaqvt8ft" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T08:55:48.537641Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T08:55:48.537827Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NmNlNjFiYzktZjFhNGI3MmItZjNmZThjMTAtOWIyOWNlY2Y=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd6yyjww7nawns8efaqvt8ft" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T08:55:48.537838Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session will restart in 2.000000s 2024-11-21T08:55:48.537861Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: Do CDS request 2024-11-21T08:55:48.537867Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Do schedule cds request after 2000 ms 2024-11-21T08:55:48.917427Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720685. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:55:48.917496Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7439653651488979811:2467] TxId: 281474976720685. Ctx: { TraceId: 01jd6yyk7x8frk3qetkty0p71t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=OTMxM2NjZWQtZTI4ZTljMDAtNThjOGNkNjUtZGQ5YTg2MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:55:48.917654Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OTMxM2NjZWQtZTI4ZTljMDAtNThjOGNkNjUtZGQ5YTg2MWI=, ActorId: [14:7439653651488979798:2467], ActorState: ExecuteState, TraceId: 01jd6yyk7x8frk3qetkty0p71t, Create QueryResponse for error on request, msg: 2024-11-21T08:55:48.918026Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd6yyk8na2cwakaf1512fpvd" } } YdbStatus: UNAVAILABLE ConsumedRu: 15 } 2024-11-21T08:55:48.918781Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715704. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:55:48.918820Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439653652168482026:2550] TxId: 281474976715704. Ctx: { TraceId: 01jd6yyk7z1vrq7a8xc2g8asdg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTAyYjhkN2QtOGQxNjE4MWQtOTFjNDM4ZWUtODMyNTkzMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T08:55:48.918912Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NTAyYjhkN2QtOGQxNjE4MWQtOTFjNDM4ZWUtODMyNTkzMDg=, ActorId: [13:7439653652168482013:2550], ActorState: ExecuteState, TraceId: 01jd6yyk7z1vrq7a8xc2g8asdg, Create QueryResponse for error on request, msg: 2024-11-21T08:55:48.919256Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd6yyk8rft273bpgyqkvq2wm" } } YdbStatus: UNAVAILABLE ConsumedRu: 16 } 2024-11-21T08:55:49.378124Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:49.378156Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session will now close 2024-11-21T08:55:49.378168Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: aborting 2024-11-21T08:55:49.378323Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T08:55:49.378328Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7a3ec134-ca41b99b-11860257-cfc772c1_0] Write session: destroy 2024-11-21T08:55:49.528636Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715706. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:55:49.528688Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439653656463449425:2559] TxId: 281474976715706. Ctx: { TraceId: 01jd6yykvs1qee6fxmztt80a1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGI4ODlkMTktMTZhYzlhZjAtMWY3YzFkZTMtYTRhOGMxYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T08:55:49.528808Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZGI4ODlkMTktMTZhYzlhZjAtMWY3YzFkZTMtYTRhOGMxYjM=, ActorId: [13:7439653656463449422:2559], ActorState: ExecuteState, TraceId: 01jd6yykvs1qee6fxmztt80a1m, Create QueryResponse for error on request, msg: 2024-11-21T08:55:49.529164Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd6yykvs1qee6fxmzvptvxxz" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableWithReboots |89.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateDirWithIntermediateDirs |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> IntermediateDirsReboots::CreateWithIntermediateDirs >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |89.9%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:55:34.945223Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:34.945245Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: 
[1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:34.948886Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:34.950951Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:55:34.951208Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:55:34.951620Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:55:34.951930Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:55:34.952188Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:34.954069Z node 1 :PERSQUEUE INFO: new Cookie default|35a85c0c-cbff2866-2369f7fd-edf6d16_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.006825Z node 1 :PERSQUEUE INFO: new Cookie default|28076455-3534e264-d667a32f-7fb7eda2_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.078133Z node 1 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.085815Z node 1 :PERSQUEUE INFO: new Cookie default|201173c9-8802bc02-4c03cd85-95714837_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.149609Z node 1 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:286:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:289:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:290:2057] recipient: [1:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:291:2285] sender: [1:292:2057] recipient: [1:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.156503Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.156521Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:55:35.156592Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:35.156935Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:35.159819Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:55:35.160334Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:291:2285] sender: [1:373:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is 
[0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T08:55:35.380111Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.380134Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.383819Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.384017Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T08:55:35.384131Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T08:55:35.384729Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T08:55:35.385058Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T08:55:35.385470Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.386898Z node 2 :PERSQUEUE INFO: new Cookie default|e1dd026a-d2ce60b9-e46e2701-808ca96a_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.433994Z node 2 :PERSQUEUE INFO: new Cookie default|47d50f43-f9459943-90eca80a-67998a64_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.494705Z node 2 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.501560Z node 2 :PERSQUEUE INFO: new Cookie default|3465d193-8f66fac3-3ab917fc-278f0606_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::T ... up to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.110672Z node 54 :PERSQUEUE INFO: new Cookie default|b5763bf8-60e7ca48-8daed7d7-664acb02_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.122172Z node 54 :PERSQUEUE INFO: new Cookie default|c14dbe27-f76bbd42-672719cd-202e0bc0_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.158910Z node 54 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.168061Z node 54 :PERSQUEUE INFO: new Cookie default|7cae19f9-9eaa20ba-a7d3d362-aeee0763_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.208978Z node 54 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:286:2057] recipient: [54:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:289:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:290:2057] recipient: [54:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:291:2285] sender: [54:292:2057] recipient: [54:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.218349Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:51.218374Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:55:51.218508Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:51.219214Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:51.223230Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:55:51.223972Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:291:2285] sender: [54:375:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] 2024-11-21T08:55:51.343854Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:51.343878Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] Leader for TabletID 72057594037927938 is [55:151:2172] sender: [55:152:2057] recipient: [55:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is 
[55:105:2137] sender: [55:177:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.347173Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:51.347338Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2024-11-21T08:55:51.347425Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:184:2197] 2024-11-21T08:55:51.347844Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:51.348104Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:185:2198] 2024-11-21T08:55:51.348410Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.349423Z node 55 :PERSQUEUE INFO: new Cookie default|3acf7b6c-35fbfc45-938c676-f0cde57f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.361271Z node 55 :PERSQUEUE INFO: new Cookie default|1f562d8d-e6613fa4-243e786c-4913a3bf_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.394936Z node 55 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.403055Z node 55 :PERSQUEUE INFO: new Cookie default|9c726775-b8af7d5e-f07465b-20b1f06e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.445182Z node 55 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:286:2057] recipient: [55:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:289:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:290:2057] recipient: [55:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:291:2285] sender: [55:292:2057] recipient: [55:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:51.454476Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:51.454500Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:55:51.454666Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:51.455425Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:55:51.459608Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:55:51.460427Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:291:2285] sender: [55:373:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> TConsistentOpsWithReboots::CopyWithData |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_reboots/unittest |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |89.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanUpdatedRows |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TSolomonReboots::CreateAlterSolomonWithReboots >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> Cdc::Alter [GOOD] >> Cdc::AddColumn >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable >> IntermediateDirsReboots::CreateTableWithIntermediateDirs |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |89.9%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |89.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:48.379740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:48.379757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:48.379761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:48.379764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:48.379769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:48.379771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:48.379777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:48.379833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:48.389003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:48.389020Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:48.390695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:48.390761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:48.390790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:48.392448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:48.392520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:48.392604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.392773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:48.393365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.393611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:48.393621Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.393634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:48.393640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:48.393645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:48.393685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:48.394688Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:48.405418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:48.405477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.405519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:48.405562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:48.405567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:48.406089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:48.406098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:48.406102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:48.406331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 
-> 128 2024-11-21T08:55:48.406522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.406531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.406535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.406859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:48.407103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:48.407135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:48.407272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.407286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:48.407290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.407331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:48.407335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.407353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:48.407361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:48.407592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:48.407597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:48.407622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.407625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:48.407683Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.407687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:48.407697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:48.407700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.407703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:48.407706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.407709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:48.407711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:48.407717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:48.407721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:48.407724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... mentPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:57.858070Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:55:57.858112Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:55:57.858133Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:55:57.858414Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:55:57.858426Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:55:57.858433Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.858439Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:55:57.858442Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [40:300:2292] 2024-11-21T08:55:57.858674Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:55:57.858709Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:57.858712Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:57.858729Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:55:57.858739Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:55:57.858744Z node 40 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:55:57.858752Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:57.858755Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:55:57.858757Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T08:55:57.858759Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:55:57.858761Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:55:57.858790Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:55:57.858794Z node 40 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:55:57.858806Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:55:57.858808Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:55:57.858811Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:55:57.858814Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:55:57.858818Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:55:57.858821Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:55:57.858829Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:55:57.858832Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T08:55:57.858834Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T08:55:57.858836Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:55:57.858838Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:55:57.858839Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T08:55:57.858882Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.858888Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.858890Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, 
count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:55:57.858893Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:55:57.858895Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:55:57.858965Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.858970Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.858972Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:55:57.858974Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:55:57.858976Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:55:57.859041Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859046Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859048Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:55:57.859050Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:55:57.859055Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:55:57.859233Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859240Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859242Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:55:57.859244Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:55:57.859246Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:55:57.859251Z node 40 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:55:57.859254Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:299:2291] 2024-11-21T08:55:57.859391Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859567Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859591Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859816Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:55:57.859839Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:55:57.859843Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:55:57.859927Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:55:57.859951Z node 40 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 42us result status StatusPathDoesNotExist 2024-11-21T08:55:57.859977Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 1003, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 3 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicStatistics::TwoNodes [GOOD] >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2024-11-21T08:53:15.587149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:15.587216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:15.587230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0038ff/r3tmp/tmpLpimXG/pdisk_1.dat 2024-11-21T08:53:15.670654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27111, node 1 2024-11-21T08:53:15.761045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:15.761064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:15.761067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:15.761101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:15.765304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:15.840342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:15.840370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:15.851704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26720 2024-11-21T08:53:16.260970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:17.250140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:17.250167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:17.250249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:17.250262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:17.283414Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:17.283503Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:53:17.284267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:17.284576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:17.330934Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:17.331362Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:17.331385Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:17.336320Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:17.336382Z node 2 :STATISTICS 
DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:17.336396Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:17.336400Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:17.336404Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:17.336408Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2024-11-21T08:53:17.336411Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:17.336416Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:17.336573Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:17.515991Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2142:2417] 2024-11-21T08:53:17.516580Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:17.518296Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:17.518307Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:17.518314Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:17.519078Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:17.519091Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2186:2429], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:17.522257Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2254:2434] 2024-11-21T08:53:17.522315Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2254:2434], schemeshard id = 72075186224037889 2024-11-21T08:53:17.525983Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:17.526016Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:17.526212Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:17.526224Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:17.526775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:17.528006Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976725657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:17.528028Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Subscribe on create table tx: 281474976725657 2024-11-21T08:53:17.530499Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:17.542518Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:17.542891Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:17.543324Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:17.565826Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:17.696240Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976725657. Doublechecking... 2024-11-21T08:53:17.843397Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:18.445400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2568:3039], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.445459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.448812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:18.559140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2728:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.559199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.559867Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2733:3080]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:53:18.559910Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:53:18.559920Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2735:3082] 2024-11-21T08:53:18.559930Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2735:3082] 2024-11-21T08:53:18.560091Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2736:2568] 2024-11-21T08:53:18.560151Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2735:3082], server id = [2:2736:2568], tablet id = 72075186224037897, status = OK 2024-11-21T08:53:18.560193Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2736:2568], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:53:18.560239Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:53:18.560296Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:53:18.560318Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2733:3080], StatRequests.size() = 1 2024-11-21T08:53:18.562674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2740:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.562696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.562766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2745:3091], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:18.564055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:53:18.728152Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:53:18.728183Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:53:18.884904Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2735:3082], schemeshard count = 1 2024-11-21T08:53:19.099455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2747:3093], DatabaseId: /Root, PoolId: d ... StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:47.869126Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T08:55:47.869131Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7330:3765], StatRequests.size() = 1 2024-11-21T08:55:48.858375Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:55:48.858404Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:55:48.858410Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:55:48.858414Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:55:49.706453Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7377:3776]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:49.706555Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T08:55:49.706560Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7377:3776], StatRequests.size() = 1 2024-11-21T08:55:50.299806Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:55:50.299936Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:50.300006Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:55:50.467730Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T08:55:50.467755Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 211.000000s, at schemeshard: 72075186224037889 2024-11-21T08:55:50.467848Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2024-11-21T08:55:50.479347Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:55:51.395520Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7414:3788]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:51.395637Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T08:55:51.395644Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7414:3788], StatRequests.size() = 1 2024-11-21T08:55:51.988914Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:55:51.988948Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
No force traversals. 2024-11-21T08:55:51.988960Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:55:51.988966Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:55:51.989140Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:55:51.992904Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:55:51.994366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439:3801], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:51.994397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7450:3806], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:51.994493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:51.997833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725658:2, at schemeshard: 72075186224037889 2024-11-21T08:55:52.009207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7453:3809], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725658 completed, doublechecking } 2024-11-21T08:55:52.236161Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7570:3867]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:52.236251Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T08:55:52.236259Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7570:3867], StatRequests.size() = 1 2024-11-21T08:55:52.253305Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWU2YjY5MzMtZGE3ZjI4M2UtMzUyYWYwMTYtZjcwZjdiZjA=, TxId: 2024-11-21T08:55:52.253334Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWU2YjY5MzMtZGE3ZjI4M2UtMzUyYWYwMTYtZjcwZjdiZjA=, TxId: 2024-11-21T08:55:52.253496Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:55:52.275563Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:55:52.275586Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:55:52.859791Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7614:3889]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:52.859883Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T08:55:52.859887Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7614:3889], StatRequests.size() = 1 2024-11-21T08:55:54.022735Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7659:3905]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:54.022830Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T08:55:54.022835Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7659:3905], StatRequests.size() = 1 2024-11-21T08:55:54.408599Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:54.439909Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:55:54.439939Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:55:54.439948Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T08:55:54.439952Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:55:54.440048Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:55:54.440915Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:55:54.444651Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTk1ZjA3MWMtNzMxMjk4NC0yMjIwYzA2NS1jMmEyMTljYg==, TxId: 2024-11-21T08:55:54.444672Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTk1ZjA3MWMtNzMxMjk4NC0yMjIwYzA2NS1jMmEyMTljYg==, TxId: 2024-11-21T08:55:54.444791Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:55:54.467592Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:55:54.467619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:55:55.247834Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7733:3943]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:55.247954Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T08:55:55.247960Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7733:3943], StatRequests.size() = 1 2024-11-21T08:55:56.521113Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7778:3959]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:56.521218Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T08:55:56.521226Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7778:3959], StatRequests.size() = 1 2024-11-21T08:55:56.912820Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:55:56.912907Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:55:56.913049Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:56.944406Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:55:56.944439Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T08:55:57.708901Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7817:3969]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:57.709024Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2024-11-21T08:55:57.709034Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7817:3969], StatRequests.size() = 1 2024-11-21T08:55:57.709196Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7819:3774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:55:57.710169Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:55:57.710194Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7829:3778] 2024-11-21T08:55:57.710206Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7829:3778] 2024-11-21T08:55:57.711059Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7837:3971] 2024-11-21T08:55:57.711166Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7829:3778], server id = [2:7837:3971], tablet id = 72075186224037897, status = OK 2024-11-21T08:55:57.711319Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7837:3971], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:55:57.711333Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2024-11-21T08:55:57.711416Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T08:55:57.711434Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7819:3774], StatRequests.size() = 1 >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |89.9%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop [GOOD] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] 
sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:48.325273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:48.325290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:48.325294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:48.325297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:48.325301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:48.325304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:48.325309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:48.325362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:48.332245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:48.332264Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:48.334213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:48.334299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:48.334335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:48.336061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:48.336114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:48.336169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.336347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:48.336806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.336976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:48.336982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.336990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:48.336994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:48.336998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T08:55:48.337030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:48.338077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:48.354800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:48.354874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.354938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:48.355006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:48.355012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.355733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.355748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:48.355794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.355801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:48.355804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:48.355808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:48.356356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.356370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:48.356373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:48.356625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.356630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.356634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.356638Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.357032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:48.357300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:48.357338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:48.357502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:48.357518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:48.357523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.357575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:48.357579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:48.357600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:48.357609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:48.357888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:48.357893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:48.357927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:48.357930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:48.357994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:48.357998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:48.358006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:48.358009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.358012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:48.358015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:48.358018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:48.358020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:48.358027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:48.358031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:48.358033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.141982Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:00.141985Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:56:00.141990Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T08:56:00.141993Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:56:00.141997Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:56:00.142048Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:56:00.142055Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:56:00.142066Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:56:00.142072Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:56:00.142077Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:56:00.142080Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:56:00.142084Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:56:00.142087Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:56:00.142109Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:00.142114Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T08:56:00.142117Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T08:56:00.142121Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:56:00.142124Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:56:00.142127Z node 49 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T08:56:00.142203Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142213Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142217Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:00.142221Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:56:00.142225Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:56:00.142357Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142368Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142372Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:00.142375Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:56:00.142379Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:00.142640Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142653Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142661Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:00.142664Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:56:00.142668Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:00.142741Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142750Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.142753Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:00.142756Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:56:00.142759Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:00.142767Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:56:00.142771Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:301:2293] 2024-11-21T08:56:00.142950Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [49:213:2213] sender: [49:339:2058] recipient: [49:15:2062] 2024-11-21T08:56:00.143291Z node 49 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:56:00.189510Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:00.189623Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:56:00.189788Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:00.189795Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:56:00.189811Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:00.189817Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:56:00.189824Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:00.189829Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:56:00.189835Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:00.189959Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.190080Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.190680Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
2024-11-21T08:56:00.190722Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:00.190738Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:00.190743Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:302:2294] 2024-11-21T08:56:00.191355Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:00.191404Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:00.191506Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:00.191550Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 50us result status StatusPathDoesNotExist 2024-11-21T08:56:00.191590Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |89.9%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |89.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> TS3WrapperTests::GetUnknownObject |89.9%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> TS3WrapperTests::PutObject |90.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] >> IntermediateDirsReboots::CreateDirWithIntermediateDirs [GOOD] >> TS3WrapperTests::PutObject [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 
2024-11-21T08:56:02.000422Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0006C976-C01C-4B74-9596-DC32D0D43716, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:8391 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 83CF8D81-3184-4B08-A089-379997019852 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T08:56:02.001739Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0006C976-C01C-4B74-9596-DC32D0D43716, response# No response body. >> TGroupMapperTest::MonteCarlo [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2024-11-21T08:56:02.115418Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F70FBB2E-6E06-4557-9DAA-2E9206C8418E, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:4143 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8A0E6549-8F19-488F-99E9-AC4216285E75 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T08:56:02.116600Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F70FBB2E-6E06-4557-9DAA-2E9206C8418E, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateDirWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:51.343931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:51.343948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:51.343952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:51.343955Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:51.343960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:51.343962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:51.343968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:51.344046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:51.351206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:51.351221Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:51.352757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:51.352822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:51.352854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:51.355310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:51.355392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:51.355486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.355657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:51.356161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.356392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:51.356401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.356409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:51.356414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:51.356418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:51.356455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:51.357449Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:51.369184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T08:55:51.369252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.369300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:51.369356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:51.369365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.369870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.369887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:51.369921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.369928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:51.369931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:51.369934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:51.370183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.370190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:51.370192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:51.370422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.370429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.370433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.370437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.370949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:51.371243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:51.371280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:51.371424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.371446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:51.371452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.371504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:51.371509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.371531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:51.371539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:51.371834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:51.371840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:51.371870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.371875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:51.371936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.371941Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:51.371948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:51.371950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.371954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:51.371956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.371959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:51.371962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:51.371968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:51.371972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:51.371975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:56:02.004062Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:02.004065Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:56:02.004067Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:56:02.004069Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:02.004071Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:56:02.004073Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:56:02.004075Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:56:02.004077Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:56:02.004079Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:56:02.004081Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:56:02.004084Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T08:56:02.004087Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:56:02.004089Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T08:56:02.004091Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T08:56:02.004092Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T08:56:02.004094Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T08:56:02.004433Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004445Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004448Z node 44 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.004451Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:56:02.004456Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:02.004558Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004565Z node 44 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004568Z node 44 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.004570Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:56:02.004572Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:02.004666Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004672Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004675Z node 44 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.004677Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:56:02.004679Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:02.004936Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004951Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.004954Z node 44 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.004957Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:56:02.004960Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:56:02.005047Z node 44 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.005055Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.005059Z node 44 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.005062Z node 44 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:56:02.005068Z node 44 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:56:02.005076Z node 44 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:56:02.005731Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.005755Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.005784Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.005793Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.006092Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:56:02.006145Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:56:02.006151Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:56:02.006212Z node 44 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:56:02.006229Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:02.006234Z node 44 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [44:342:2334] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:02.006293Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:02.006319Z node 44 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 36us result status StatusSuccess 2024-11-21T08:56:02.006383Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T08:56:02.006425Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:02.039336Z node 44 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 32.9ms result status StatusPathDoesNotExist 2024-11-21T08:56:02.039398Z node 44 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:51.531765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:51.531780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:51.531783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:51.531786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:51.531790Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:51.531792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:51.531798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:51.531848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:51.538749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:51.538760Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:51.540177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:51.540286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:51.540318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:51.542598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:51.542670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:51.542726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.542857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:51.543432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.543628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:51.543637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.543647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:51.543653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:51.543659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:51.543692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:51.544604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:51.557078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:51.557140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
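[editor's note on the IntermediateDirsReboots scenarios] These tests exercise SchemeShard's ability to create a leaf object together with its missing parent directories (/MyRoot/Valid/x/y/z) and to answer DescribePath for both existing and non-existent paths, surviving tablet reboots in between. Outside the unit-test harness the same surface is reachable through the public scheme API; a rough sketch with the YDB Python SDK, assuming its scheme client exposes make_directory/describe_path as in the SDK's documented examples (endpoint and database below are placeholders, not values from this run):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    # Create the intermediate directories level by level, then describe the leaf.
    for path in ("/Root/Valid", "/Root/Valid/x", "/Root/Valid/x/y", "/Root/Valid/x/y/z"):
        driver.scheme_client.make_directory(path)

    entry = driver.scheme_client.describe_path("/Root/Valid/x/y/z")
    print(entry.name, entry.type)

    # Describing a path that was never created fails, mirroring the
    # StatusPathDoesNotExist result for /MyRoot/Invalid in the log.
    try:
        driver.scheme_client.describe_path("/Root/Invalid")
    except ydb.Error as e:
        print("not found:", e)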
2024-11-21T08:55:51.557199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:51.557246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:51.557251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.557817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.557837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:51.557881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.557888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:51.557890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:51.557893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:51.558180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.558188Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:51.558191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:51.558403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.558410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.558414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.558419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.558812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:51.559099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:51.559139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:51.559297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:51.559317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:51.559322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.559363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:51.559367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:51.559390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:51.559398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:51.559668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:51.559675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:51.559704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:51.559708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:51.559774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:51.559778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:51.559786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:51.559788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.559792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:51.559795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:51.559799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:51.559801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:51.559808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:51.559812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:51.559814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:02.614910Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:56:02.614913Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:56:02.614918Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:02.614921Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:56:02.614924Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:56:02.614929Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:56:02.614932Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:56:02.614935Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:56:02.614939Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:56:02.614944Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T08:56:02.614948Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:56:02.614953Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T08:56:02.614957Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T08:56:02.614960Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T08:56:02.614963Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2024-11-21T08:56:02.615666Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.615685Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.615690Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.615694Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:56:02.615700Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:02.615846Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.615856Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.615861Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.615865Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:56:02.615869Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:02.615997Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616007Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616011Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.616014Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:56:02.616017Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:02.616582Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616607Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616616Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.616622Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:56:02.616626Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:56:02.616752Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616766Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.616769Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:02.616773Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2024-11-21T08:56:02.616777Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:56:02.616787Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:56:02.617803Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.618037Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.618066Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.618101Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:02.618167Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:56:02.618235Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:56:02.618243Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:56:02.618321Z node 46 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:56:02.618341Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:02.618346Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [46:351:2343] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:02.618512Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:02.618569Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 69us result status StatusSuccess 2024-11-21T08:56:02.618661Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "z" PathId: 6 PartitionsCount: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T08:56:02.656475Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:02.656545Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 87us result status StatusPathDoesNotExist 2024-11-21T08:56:02.656590Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::ResolvedTimestamps >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry |90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |90.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |90.0%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 18122728117732569627 Reassign# 5 -- VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 5 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green } Status: "READY" Ready: true Put# [1:1:1:0:0:22:0] Put# [1:1:2:0:0:59:0] 2024-11-21T08:53:01.183714Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T08:53:01.183976Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: TVDiskGuidRecoveryActor: FINISH: 
[Decision# LostData Guid# 1341265745400324471] 2024-11-21T08:53:01.184662Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: RESURRECT: id# [1:1:1:0:0:22:1] 2024-11-21T08:53:01.184674Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: RESURRECT: id# [1:1:2:0:0:59:2] 2024-11-21T08:53:01.184699Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: FINISH: BlobsResurrected# 2 PartsResurrected# 2 Put# [1:1:3:0:0:100:0] Put# [1:1:4:0:0:6:0] Put# [1:1:5:0:0:38:0] Put# [1:1:6:0:0:42:0] Put# [1:1:7:0:0:38:0] Put# [1:1:8:0:0:99:0] Put# [1:1:9:0:0:4:0] Put# [1:1:10:0:0:28:0] Put# [1:1:11:0:0:95:0] Put# [1:1:12:0:0:47:0] Put# [1:1:13:0:0:61:0] Put# [1:1:14:0:0:23:0] Put# [1:1:15:0:0:32:0] Put# [1:1:16:0:0:33:0] Put# [1:1:17:0:0:96:0] Put# [1:1:18:0:0:51:0] Put# [1:1:19:0:0:20:0] Put# [1:1:20:0:0:92:0] Put# [1:1:21:0:0:76:0] Put# [1:1:22:0:0:7:0] Put# [1:1:23:0:0:83:0] Put# [1:1:24:0:0:1:0] Put# [1:1:25:0:0:43:0] Put# [1:1:26:0:0:29:0] Put# [1:1:27:0:0:37:0] Put# [1:1:28:0:0:23:0] Put# [1:1:29:0:0:60:0] Put# [1:1:30:0:0:71:0] Put# [1:1:31:0:0:75:0] Put# [1:1:32:0:0:38:0] Put# [1:1:33:0:0:36:0] Put# [1:1:34:0:0:8:0] Put# [1:1:35:0:0:93:0] Put# [1:1:36:0:0:89:0] Put# [1:1:37:0:0:41:0] Put# [1:1:38:0:0:47:0] Put# [1:1:39:0:0:7:0] Put# [1:1:40:0:0:37:0] Put# [1:1:41:0:0:57:0] Put# [1:1:42:0:0:99:0] Put# [1:1:43:0:0:95:0] Put# [1:1:44:0:0:75:0] Put# [1:1:45:0:0:34:0] Put# [1:1:46:0:0:17:0] Put# [1:1:47:0:0:41:0] Put# [1:1:48:0:0:1:0] Put# [1:1:49:0:0:45:0] Put# [1:1:50:0:0:44:0] Put# [1:1:51:0:0:12:0] Put# [1:1:52:0:0:54:0] Put# [1:1:53:0:0:21:0] Put# [1:1:54:0:0:74:0] Put# [1:1:55:0:0:75:0] Put# [1:1:56:0:0:66:0] Put# [1:1:57:0:0:14:0] Put# [1:1:58:0:0:3:0] Put# [1:1:59:0:0:38:0] Put# [1:1:60:0:0:19:0] Put# [1:1:61:0:0:35:0] Put# [1:1:62:0:0:84:0] Put# [1:1:63:0:0:74:0] Put# [1:1:64:0:0:73:0] Put# [1:1:65:0:0:84:0] Put# [1:1:66:0:0:81:0] Put# [1:1:67:0:0:81:0] Put# [1:1:68:0:0:99:0] Put# [1:1:69:0:0:74:0] Put# [1:1:70:0:0:24:0] Put# [1:1:71:0:0:90:0] Put# [1:1:72:0:0:72:0] Put# [1:1:73:0:0:9:0] Put# [1:1:74:0:0:1:0] Put# [1:1:75:0:0:13:0] Put# [1:1:76:0:0:62:0] Put# [1:1:77:0:0:70:0] Put# [1:1:78:0:0:14:0] Put# [1:1:79:0:0:48:0] Put# [1:1:80:0:0:2:0] Put# [1:1:81:0:0:2:0] Put# [1:1:82:0:0:16:0] Put# [1:1:83:0:0:41:0] Put# [1:1:84:0:0:80:0] Put# [1:1:85:0:0:6:0] Put# [1:1:86:0:0:79:0] Put# [1:1:87:0:0:18:0] Put# [1:1:88:0:0:29:0] Put# [1:1:89:0:0:83:0] Put# [1:1:90:0:0:2:0] Put# [1:1:91:0:0:14:0] Put# [1:1:92:0:0:9:0] Put# [1:1:93:0:0:82:0] Put# [1:1:94:0:0:45:0] Put# [1:1:95:0:0:78:0] Put# [1:1:96:0:0:6:0] Put# [1:1:97:0:0:96:0] Put# [1:1:98:0:0:18:0] Put# [1:1:99:0:0:16:0] Put# [1:1:100:0:0:46:0] Put# [1:1:101:0:0:32:0] Put# [1:1:102:0:0:92:0] Put# [1:1:103:0:0:73:0] Put# [1:1:104:0:0:72:0] Put# [1:1:105:0:0:22:0] Put# [1:1:106:0:0:46:0] Put# [1:1:107:0:0:69:0] Put# [1:1:108:0:0:52:0] Put# [1:1:109:0:0:86:0] Put# [1:1:110:0:0:41:0] Put# [1:1:111:0:0:49:0] Put# [1:1:112:0:0:98:0] Put# [1:1:113:0:0:73:0] Put# [1:1:114:0:0:62:0] Put# [1:1:115:0:0:53:0] Put# [1:1:116:0:0:51:0] Put# [1:1:117:0:0:39:0] Put# [1:1:118:0:0:59:0] Put# [1:1:119:0:0:91:0] Put# [1:1:120:0:0:69:0] Put# [1:1:121:0:0:20:0] Put# [1:1:122:0:0:29:0] Put# [1:1:123:0:0:75:0] Put# [1:1:124:0:0:34:0] Put# [1:1:125:0:0:98:0] Put# [1:1:126:0:0:92:0] Put# [1:1:127:0:0:61:0] Put# [1:1:128:0:0:61:0] Put# [1:1:129:0:0:78:0] Put# [1:1:130:0:0:26:0] Put# [1:1:131:0:0:20:0] Put# [1:1:132:0:0:15:0] Put# [1:1:133:0:0:79:0] Put# [1:1:134:0:0:38:0] Put# [1:1:135:0:0:2:0] 
Put# [1:1:136:0:0:55:0] Put# [1:1:137:0:0:99:0] Put# [1:1:138:0:0:75:0] Put# [1:1:139:0:0:30:0] Put# [1:1:140:0:0:98:0] Put# [1:1:141:0:0:36:0] Put# [1:1:142:0:0:54:0] Put# [1:1:143:0:0:45:0] Put# [1:1:144:0:0:21:0] Put# [1:1:145:0:0:50:0] Put# [1:1:146:0:0:42:0] Put# [1:1:147:0:0:77:0] Put# [1:1:148:0:0:32:0] Put# [1:1:149:0:0:63:0] Put# [1:1:150:0:0:65:0] Put# [1:1:151:0:0:79:0] Put# [1:1:152:0:0:32:0] Put# [1:1:153:0:0:12:0] Put# [1:1:154:0:0:94:0] Put# [1:1:155:0:0:53:0] Put# [1:1:156:0:0:87:0] Put# [1:1:157:0:0:36:0] Put# [1:1:158:0:0:20:0] Put# [1:1:159:0:0:5:0] Put# [1:1:160:0:0:83:0] Put# [1:1:161:0:0:35:0] Put# [1:1:162:0:0:31:0] Put# [1:1:163:0:0:46:0] Put# [1:1:164:0:0:77:0] Put# [1:1:165:0:0:60:0] Put# [1:1:166:0:0:61:0] Put# [1:1:167:0:0:68:0] Put# [1:1:168:0:0:90:0] Put# [1:1:169:0:0:27:0] Put# [1:1:170:0:0:76:0] Put# [1:1:171:0:0:96:0] Put# [1:1:172:0:0:15:0] Put# [1:1:173:0:0:74:0] Put# [1:1:174:0:0:61:0] Put# [1:1:175:0:0:92:0] Put# [1:1:176:0:0:76:0] Put# [1:1:177:0:0:27:0] Put# [1:1:178:0:0:87:0] Put# [1:1:179:0:0:8:0] Put# [1:1:180:0:0:41:0] Put# [1:1:181:0:0:40:0] Put# [1:1:182:0:0:16:0] Put# [1:1:183:0:0:64:0] Put# [1:1:184:0:0:98:0] Put# [1:1:185:0:0:32:0] Put# [1:1:186:0:0:85:0] Put# [1:1:187:0:0:100:0] Put# [1:1:188:0:0:42:0] Put# [1:1:189:0:0:19:0] Put# [1:1:190:0:0:17:0] Put# [1:1:191:0:0:37:0] Put# [1:1:192:0:0:33:0] Put# [1:1:193:0:0:66:0] Put# [1:1:194:0:0:90:0] Put# [1:1:195:0:0:10:0] Put# [1:1:196:0:0:99:0] Put# [1:1:197:0:0:44:0] Put# [1:1:198:0:0:99:0] Put# [1:1:199:0:0:76:0] Put# [1:1:200:0:0:10:0] Put# [1:1:201:0:0:73:0] Put# [1:1:202:0:0:81:0] Put# [1:1:203:0:0:31:0] Put# [1:1:204:0:0:72:0] Put# [1:1:205:0:0:19:0] Put# [1:1:206:0:0:92:0] Put# [1:1:207:0:0:19:0] Put# [1:1:208:0:0:85:0] Put# [1:1:209:0:0:71:0] Put# [1:1:210:0:0:87:0] Put# [1:1:211:0:0:79:0] Put# [1:1:212:0:0:53:0] Put# [1:1:213:0:0:43:0] Put# [1:1:214:0:0:6:0] Put# [1:1:215:0:0:75:0] Put# [1:1:216:0:0:87:0] Put# [1:1:217:0:0:38:0] Put# [1:1:218:0:0:85:0] Put# [1:1:219:0:0:15:0] Put# [1:1:220:0:0:37:0] Put# [1:1:221:0:0:80:0] Put# [1:1:222:0:0:5:0] Put# [1:1:223:0:0:78:0] Put# [1:1:224:0:0:43:0] Put# [1:1:225:0:0:80:0] Put# [1:1:226:0:0:55:0] Put# [1:1:227:0:0:29:0] Put# [1:1:228:0:0:37:0] Put# [1:1:229:0:0:60:0] Put# [1:1:230:0:0:63:0] Put# [1:1:231:0:0:66:0] Put# [1:1:232:0:0:15:0] Put# [1:1:233:0:0:19:0] Put# [1:1:234:0:0:100:0] Put# [1:1:235:0:0:68:0] Put# [1:1:236:0:0:61:0] Put# [1:1:237:0:0:34:0] Put# [1:1:238:0:0:26:0] Put# [1:1:239:0:0:13:0] Put# [1:1:240:0:0:32:0] Put# [1:1:241:0:0:89:0] Put# [1:1:242:0:0:6:0] Put# [1:1:243:0:0:70:0] Put# [1:1:244:0:0:58:0] Put# [1:1:245:0:0:53:0] Put# [1:1:246:0:0:71:0] Put# [1:1:247:0:0:88:0] Put# [1:1:248:0:0:34:0] Put# [1:1:249:0:0:95:0] Put# [1:1:250:0:0:15:0] Put# [1:1:251:0:0:95:0] Put# [1:1:252:0:0:70:0] Put# [1:1:253:0:0:40:0] Put# [1:1:254:0:0:96:0] Put# [1:1:255:0:0:93:0] Put# [1:1:256:0:0:75:0] Put# [1:1:257:0:0:57:0] Put# [1:1:258:0:0:96:0] Put# [1:1:259:0:0:17:0] Put# [1:1:260:0:0:43:0] Put# [1:1:261:0:0:85:0] Put# [1:1:262:0:0:73:0] Put# [1:1:263:0:0:87:0] Put# [1:1:264:0:0:4:0] Put# [1:1:265:0:0:89:0] Put# [1:1:266:0:0:40:0] Put# [1:1:267:0:0:88:0] Put# [1:1:268:0:0:26:0] Put# [1:1:269:0:0:54:0] Put# [1:1:270:0:0:44:0] Put# [1:1:271:0:0:22:0] Put# [1:1:272:0:0:65:0] Put# [1:1:273:0:0:42:0] Put# [1:1:274:0:0:3:0] Put# [1:1:275:0:0:29:0] Put# [1:1:276:0:0:63:0] Put# [1:1:277:0:0:41:0] Put# [1:1:278:0:0:81:0] Put# [1:1:279:0:0:49:0] Put# [1:1:280:0:0:26:0] Put# [1:1:281:0:0:90:0] Put# [1:1:282:0:0:84:0] Put# [1:1:283:0:0:62:0] Put# 
[1:1:284:0:0:64:0] Put# ... Put# [1:30:10000:0:0:46:0] >> Cdc::AwsRegion [GOOD] >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2024-11-21T08:55:28.735425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:28.735828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:55:28.735849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034b6/r3tmp/tmpl8REN5/pdisk_1.dat 2024-11-21T08:55:28.837717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:28.854879Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:28.897248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:28.897283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:28.907842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:29.011809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:29.026327Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:55:29.026398Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:29.033086Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:29.033131Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:55:29.033285Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:55:29.033295Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:55:29.033301Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:55:29.033350Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:55:29.036693Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:55:29.036768Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:55:29.036806Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:55:29.036813Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:29.036818Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:55:29.036824Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.037085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:55:29.037108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:55:29.037123Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T08:55:29.037130Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.037138Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:29.037147Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:55:29.037152Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.037187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:29.037238Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:55:29.037255Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:55:29.037518Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:29.047771Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:29.047806Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:55:29.221600Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:55:29.222196Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:55:29.222209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.222298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.222306Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:55:29.222315Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T08:55:29.222368Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T08:55:29.222400Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:55:29.222520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.222532Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:55:29.222797Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:55:29.222893Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:29.223145Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T08:55:29.223163Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.223257Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T08:55:29.223263Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T08:55:29.223271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.223420Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.223428Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:29.223434Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:55:29.223445Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:55:29.223452Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T08:55:29.223459Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.223615Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:648:2546][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2024-11-21T08:55:29.224033Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:29.224346Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T08:55:29.224385Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T08:55:29.224390Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:55:29.699212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:29.699254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:55:29.699283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034b6/r3tmp/tmpXCpu2I/pdisk_1.dat 2024-11-21T08:55:29.777925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:29.793174Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:29.835590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:29.835630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:29.846317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:29.950536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:29.963650Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:642:2544] 2024-11-21T08:55:29.963713Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:29.970752Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [2:644:2546] 2024-11-21T08:55:29.970807Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:29.971834Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:29.971872Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:55:29.972013Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:55:29.972020Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:55:29.972025Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:55:29.972059Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:55:29.972066Z node 2 :TX_DATASHARD INFO: Switched to work ... 
Id: 2024-11-21T08:56:04.381305Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2024-11-21T08:56:04.381484Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.381493Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2024-11-21T08:56:04.381518Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:56:04.381544Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:56:04.381577Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.381580Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2024-11-21T08:56:04.381593Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream1/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 1 partNo : 0 messageNo: 1 size 324 offset: -1 2024-11-21T08:56:04.381639Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 1 partNo 0 2024-11-21T08:56:04.381676Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 439 count 1 nextOffset 1 batches 1 2024-11-21T08:56:04.381722Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream1/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 427 WTime 2526 2024-11-21T08:56:04.381740Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.381742Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2024-11-21T08:56:04.381747Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2024-11-21T08:56:04.381760Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2024-11-21T08:56:04.381768Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2024-11-21T08:56:04.381784Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2526 2024-11-21T08:56:04.381799Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:04.381816Z node 21 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2024-11-21T08:56:04.382399Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.382408Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2024-11-21T08:56:04.382502Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:04.382510Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2024-11-21T08:56:04.382517Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2024-11-21T08:56:04.382531Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:04.382535Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2024-11-21T08:56:04.382545Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:56:04.392783Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 2024-11-21T08:56:04.392824Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:04.392846Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:56:04.392880Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2024-11-21T08:56:04.392885Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T08:56:04.392919Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 2024-11-21T08:56:04.392925Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:04.392931Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:56:04.392983Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:56:04.393052Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:56:04.393132Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1148:2771] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2526 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2024-11-21T08:56:04.393146Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:56:04.393151Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:04.393156Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2024-11-21T08:56:04.393160Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:04.393180Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1147:2675] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2526 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2024-11-21T08:56:04.484500Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2526 queuesize 0 startOffset 0 2024-11-21T08:56:04.484570Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:846:2675] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:04.484592Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1009:2771] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:04.484637Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 2, at tablet# 72075186224037888 2024-11-21T08:56:04.484643Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2024-11-21T08:56:04.484682Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2024-11-21T08:56:04.495158Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 2, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2024-11-21T08:56:04.957393Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.957418Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message 
batch for topic 'Table/Stream1/streamImpl' partition 0 2024-11-21T08:56:04.957472Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:04.957485Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:56:04.957493Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:04.957503Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2024-11-21T08:56:04.957512Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:04.957618Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2024-11-21T08:56:04.957735Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:04.957740Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2024-11-21T08:56:04.957840Z node 21 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:04.957847Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:56:04.957851Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:04.957855Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T08:56:04.957858Z node 21 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:04.957887Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:47.788761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:47.788780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:47.788783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:47.788787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:47.788791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:47.788793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:47.788799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:47.788853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:47.796189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:47.796222Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:47.797899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:47.797981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:47.798007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:47.799981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:47.800079Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:47.802230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:47.802472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:47.803028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:47.803238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:47.803246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:47.803256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:47.803261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:47.803265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:47.803297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:47.804194Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:47.814106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:47.814170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.814230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:47.814282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:47.814288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:47.815267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation 
type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:47.815278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:47.815282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:47.815580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:47.815813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.815822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:47.815827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:47.816199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:47.816478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:47.816516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:47.816656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:47.816675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:47.816679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:47.816720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:47.816724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:47.816743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:47.816750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:47.817023Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:47.817029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:47.817056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:47.817059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:47.817120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:47.817125Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:47.817132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:47.817135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:47.817139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:47.817142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:47.817146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:47.817148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:47.817155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:47.817159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:47.817162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
rd: 72057594046678944 2024-11-21T08:56:05.566032Z node 73 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId#1004:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:56:05.566041Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 4] name: Solomon type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 1004 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:05.566045Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:05.566071Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:56:05.566088Z node 73 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 130 2024-11-21T08:56:05.566106Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:05.566113Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:05.566306Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.566440Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:56:05.566531Z node 73 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:05.566536Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:05.566559Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:05.566577Z node 73 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:05.566582Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [73:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:56:05.566585Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [73:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T08:56:05.566636Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:56:05.566642Z node 73 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T08:56:05.566650Z node 73 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:56:05.566654Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:56:05.566659Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:56:05.566662Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:56:05.566667Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:56:05.566670Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx 
for txid 1004:0 2024-11-21T08:56:05.566700Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:56:05.566705Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T08:56:05.566712Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T08:56:05.566715Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:56:05.566772Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.566781Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.566785Z node 73 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:56:05.566789Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:56:05.566792Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:05.566844Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.566850Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.566853Z node 73 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:56:05.566856Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T08:56:05.566859Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:56:05.566867Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:56:05.567199Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:05.567211Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:05.567533Z node 73 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2024-11-21T08:56:05.567606Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 
2024-11-21T08:56:05.567648Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409546 2024-11-21T08:56:05.567921Z node 73 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409547 2024-11-21T08:56:05.567987Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:56:05.568010Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:56:05.568175Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.568195Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:56:05.568240Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:05.568245Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:56:05.568256Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:05.568686Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:56:05.568697Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T08:56:05.568906Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:56:05.568914Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409547 2024-11-21T08:56:05.568958Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:56:05.569010Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:56:05.569017Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:56:05.569074Z node 73 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:56:05.569089Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:56:05.569094Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [73:479:2452] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:56:05.569143Z node 73 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:05.569155Z node 73 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 
72075186233409547 2024-11-21T08:56:05.569216Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:05.569242Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 37us result status StatusPathDoesNotExist 2024-11-21T08:56:05.569281Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:50.527435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:50.527452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.527455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:50.527458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:50.527462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:50.527464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T08:55:50.527470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.527520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:50.537357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:50.537373Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.539384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:50.539477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:50.539508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:50.541868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:50.541940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:50.542021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.542179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:50.542779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.543009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.543020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.543034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:50.543041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.543047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:50.543083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:50.544224Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.560854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:50.560901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.560949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T08:55:50.561002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:50.561008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.561613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.561634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:50.561679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.561692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:50.561696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:50.561701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:50.562063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.562071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:50.562074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:50.562279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.562284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.562287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.562292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.562654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:50.562923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:50.562950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:50.563063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.563078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:50.563082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.563120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:50.563124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.563139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:50.563147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:50.563390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.563394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.563413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.563418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:50.563461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.563465Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:50.563472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:50.563474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.563478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:50.563481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.563484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:50.563486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:50.563492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:50.563495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:50.563497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
44073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622599Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622602Z node 62 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:05.622604Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:56:05.622606Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Leader for TabletID 72057594037968897 is [62:213:2213] sender: [62:343:2058] recipient: [62:15:2062] 2024-11-21T08:56:05.622711Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622718Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622720Z node 62 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:05.622722Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:56:05.622725Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:56:05.622731Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:56:05.622736Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [62:300:2292] 2024-11-21T08:56:05.622791Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622811Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:56:05.622842Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.622852Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T08:56:05.622875Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:05.622922Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:05.622976Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 
2024-11-21T08:56:05.623000Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:56:05.623019Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T08:56:05.623053Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:56:05.623071Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:56:05.623101Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:05.623106Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623118Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:56:05.623123Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623129Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:05.623133Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623139Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:05.623142Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623147Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:05.623151Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623156Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:56:05.623162Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:56:05.623166Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:05.623682Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.623702Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.623716Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.623726Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.624229Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:05.624251Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:05.624257Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:301:2293] 2024-11-21T08:56:05.624284Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:05.624304Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:56:05.624320Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:56:05.624344Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 6 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T08:56:05.624470Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:05.624478Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:56:05.624484Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:56:05.624489Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:56:05.624494Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:56:05.624500Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T08:56:05.624505Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T08:56:05.624510Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T08:56:05.624518Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T08:56:05.624525Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T08:56:05.624615Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:05.624644Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 34us result status 
StatusSuccess 2024-11-21T08:56:05.624722Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |90.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TSchemeShardMoveTest::Reject >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2024-11-21T08:54:22.449844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:22.450251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:22.450275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d13/r3tmp/tmpaBIunp/pdisk_1.dat 2024-11-21T08:54:22.543144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.559011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:22.601015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:22.601039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:22.611448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:22.714712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:22.727688Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:54:22.727833Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:54:22.727879Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T08:54:22.727916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:54:22.732984Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:54:22.733078Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:54:22.733092Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:54:22.733181Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:54:22.733186Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:54:22.733190Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:54:22.733219Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:54:22.735485Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:54:22.735526Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:54:22.735540Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T08:54:22.735543Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:54:22.735546Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T08:54:22.735549Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:22.735622Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:22.735626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:22.735710Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:54:22.735720Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:54:22.735728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:22.735731Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:22.735735Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T08:54:22.735740Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:22.735743Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:54:22.735747Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T08:54:22.735750Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T08:54:22.735753Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T08:54:22.735756Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:54:22.735759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:54:22.735776Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T08:54:22.735780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:54:22.735798Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:54:22.735838Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T08:54:22.735844Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:54:22.735855Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T08:54:22.735860Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T08:54:22.735863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T08:54:22.735866Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T08:54:22.735869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:22.735898Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T08:54:22.735900Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T08:54:22.735902Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T08:54:22.735904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:22.735911Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T08:54:22.735913Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T08:54:22.735915Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T08:54:22.735917Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:22.735920Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T08:54:22.736089Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T08:54:22.736093Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:54:22.746313Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:54:22.746334Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T08:54:22.746338Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T08:54:22.746348Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T08:54:22.746357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:54:22.920091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:22.920116Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:54:22.920136Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T08:54:22.920158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T08:54:22.920162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:54:22.920185Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T08:54:22.920193Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T08:54:22.920198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T08:54:22.920225Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T08:54:22.920977Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:54:22.920995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:54:22.921128Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:22.921134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:54:22.921141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:54:22.921148Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:54:22.921153Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T08:54:22.921160Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:54.347690Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:54.389420Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:54.389450Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:54.399975Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:54.503776Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:54.712038Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:54.712064Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:716:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:54.712073Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:54.712861Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:55:54.899844Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:55:54.945567Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yys2q8r8g94y0b6a51ry1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=MzU5ZGNkODctYzkzMDA2MDktZmQ3OTMwY2YtYzdhMjc5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:55.332646Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:55:55.332676Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:55:55.332684Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d13/r3tmp/tmpV7YB5b/pdisk_1.dat 2024-11-21T08:55:55.407704Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:55.421398Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:55.463064Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:55.463093Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:55.473571Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:55.576756Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:55.784503Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:703:2587], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:55.784528Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:714:2592], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:55.784538Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:55.785137Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:55:55.972250Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:717:2595], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:55:56.096045Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd6yyt4870xjxq1sfpv1sn70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTcxYmY4NTgtOTY3YzhlMDAtYTMzMWM5OGMtZWIzOThlMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.196132Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6yyten6dae8ttq19tfge6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YjM4MzFmZjYtYTM0ZjczMDYtMWY5ODJiMDEtY2MzNWYwNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.279348Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yythq8j5v5b63vh4rdend, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NGIxMmVlNTgtOTk1MDYxNTAtYzZhMGZkNTItMjExMzQwZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.364032Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6yytma21xmxasbez7490c3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OWM2NWYxMzctYTZlMTBlY2QtM2JjMDkwMjgtZmVkYmY2Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.453276Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6yytpye1syq342atdte981, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OGFmYzhkMzMtZDYzMzU1Y2EtNTc0NWFlN2EtOTFlY2I1MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.542498Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6yytsqa649vk42958jyvs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OTJlODJiODAtOWFjNmFjMzYtYzdkYjI5NDUtMWU2NGI3ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.641898Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yytwednmx2hqkb0ea6j87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OTBlYTI5YzctNjAzOTU2ZTAtYzA1Njc2YWYtYzNmMzM5MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.744279Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6yytzm483n3awpdchrpxaf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YmQzM2RmN2MtODNkMjU2MjUtMTRhYzcxMDgtMjZmYmNiNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.833589Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yyv2tbsqfpybhp5bmtzr2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ODA2ODJhMWItOTQ4MzVmMWMtZWI3ZDMzZTctOWY2M2UxMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:55:56.935685Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yyv5n5p4p8mxkkj8v6xd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZDZhYzg4OTEtYjVjOTYxNjItYjc3NGJkYjgtNWNiNzMzYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2024-11-21T08:56:00.009376Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:56:00.009398Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1518 LastUpdateTime: 1518 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2920 Memory: 17425112 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1518 LastUpdateTime: 1518 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1289 Memory: 124596 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1518 LastUpdateTime: 1518 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1289 Memory: 124596 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2024-11-21T08:56:06.060714Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd6yz44v8rddmq1n7q6ypsy1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YmUxYmNiNjUtNDE5OTcyZjAtZTBlZjVkZGUtMTZiYTVmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2024-11-21T08:55:17.781430Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1732179317781421 2024-11-21T08:55:18.056344Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653524778369860:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:18.056363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:18.056678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653524835980301:2073];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f3/r3tmp/tmpAjLPmL/pdisk_1.dat 2024-11-21T08:55:18.084794Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:18.084522Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:18.085628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:18.140630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6268, node 1 2024-11-21T08:55:18.154140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:18.154168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:18.159482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:18.170107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0032f3/r3tmp/yandexZGJOiD.tmp 2024-11-21T08:55:18.170121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0032f3/r3tmp/yandexZGJOiD.tmp 2024-11-21T08:55:18.175169Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0032f3/r3tmp/yandexZGJOiD.tmp 2024-11-21T08:55:18.175238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:18.190396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:18.190422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:18.192480Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:18.196385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:18.210285Z INFO: TTestServer started on Port 15457 GrpcPort 6268 TClient is connected to server localhost:15457 PQClient connected to localhost:6268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:18.295851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.304531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:18.316421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:55:18.573348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653524778370139:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.573400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.573820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653524778370168:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:18.575454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:18.582590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653524778370170:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:55:18.672269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.672586Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653524835981224:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:18.672918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQyYmNkNTYtOTliYTNiZTgtYzEyNTc2OWEtZDA2MDQ1ZDQ=, ActorId: [1:7439653524835981173:2297], ActorState: ExecuteState, TraceId: 01jd6yxnvc1ga4kg49b9dcp9by, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:18.672641Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653524778370205:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:18.673067Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjk5MmExN2YtYmRiNzU0Y2MtYTRmNWU2OWQtZjVlMTg1NzE=, ActorId: [2:7439653524778370137:2277], ActorState: ExecuteState, TraceId: 01jd6yxnsac2re5c843kgsn6a0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:18.673511Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:18.673576Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:18.752057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:18.827031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:6268", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:18.939003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd6yxp3w466j4rm1wtepfr96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY5OTYwNC1kMTEyYTUwOS0xYWY1ZWI5MC0zYTE1ZGExNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439653524835981600:2906] 2024-11-21T08:55:23.056587Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653524835980301:2073];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:23.056594Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653524778369860:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:23.056627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:23.056630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:24.967608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 5 partitions CallPersQueueGRPC request to localhost:6268 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:24.979895Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:6268 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifeti ... 815Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439653593904889259:2468], now have 1 active actors on pipe 2024-11-21T08:55:34.265836Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T08:55:34.265919Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:34.265932Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:34.265997Z node 4 :PERSQUEUE INFO: new Cookie src|190245e4-f6e98ebb-d009881b-cc9f4093_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T08:55:34.266047Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T08:55:34.266100Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:34.266375Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:55:34.266386Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:55:34.266419Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:55:34.266530Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|190245e4-f6e98ebb-d009881b-cc9f4093_0 2024-11-21T08:55:34.267005Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179334266 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:55:34.267045Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|190245e4-f6e98ebb-d009881b-cc9f4093_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T08:55:34.267151Z :INFO: [] MessageGroupId [src] SessionId [src|190245e4-f6e98ebb-d009881b-cc9f4093_0] Write session: close. Timeout = 0 ms 2024-11-21T08:55:34.267160Z :INFO: [] MessageGroupId [src] SessionId [src|190245e4-f6e98ebb-d009881b-cc9f4093_0] Write session will now close 2024-11-21T08:55:34.267165Z :DEBUG: [] MessageGroupId [src] SessionId [src|190245e4-f6e98ebb-d009881b-cc9f4093_0] Write session: aborting 2024-11-21T08:55:34.267315Z :INFO: [] MessageGroupId [src] SessionId [src|190245e4-f6e98ebb-d009881b-cc9f4093_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:55:34.267320Z :DEBUG: [] MessageGroupId [src] SessionId [src|190245e4-f6e98ebb-d009881b-cc9f4093_0] Write session: destroy 2024-11-21T08:55:34.267559Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|190245e4-f6e98ebb-d009881b-cc9f4093_0 grpc read done: success: 0 data: 2024-11-21T08:55:34.267572Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|190245e4-f6e98ebb-d009881b-cc9f4093_0 grpc read failed 2024-11-21T08:55:34.267579Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|190245e4-f6e98ebb-d009881b-cc9f4093_0 grpc closed 2024-11-21T08:55:34.267585Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|190245e4-f6e98ebb-d009881b-cc9f4093_0 is DEAD 2024-11-21T08:55:34.267896Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:55:34.268013Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:55:34.268030Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653593904889259:2468] destroyed 2024-11-21T08:55:34.268046Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
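Note: the write-session teardown above follows a fixed order on the client side — close with a zero timeout, abort the gRPC stream, wait until in-flight writes are acknowledged ("gracefully shut down, all writes complete"), then destroy — after which the proxy marks the session DEAD and the partition drops the owner. The following is only a sketch of that ordering, using a hypothetical session type rather than the real SDK class.

import threading

class SketchWriteSession:
    # Hypothetical stand-in for the SDK write session; only the shutdown ordering
    # visible in the log is modeled here.
    def __init__(self):
        self._cv = threading.Condition()
        self._inflight = 0
        self._closing = False

    def write(self, payload):
        with self._cv:
            if self._closing:
                raise RuntimeError("write after close")
            self._inflight += 1
        # ... hand payload to the transport; the ack path calls _on_ack() ...

    def _on_ack(self):
        with self._cv:
            self._inflight -= 1
            self._cv.notify_all()

    def close(self, timeout_s=0.0):
        # "close. Timeout = 0 ms" -> refuse new writes, then wait for outstanding
        # acks before "destroy".
        with self._cv:
            self._closing = True
            self._cv.wait_for(lambda: self._inflight == 0,
                              timeout=timeout_s if timeout_s > 0 else None)

session = SketchWriteSession()
session.close()  # no pending writes -> "gracefully shut down, all writes complete"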
====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2024-11-21T08:55:34.320855Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:36.321411Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:55:38.267705Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:55:38.322396Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available Test retry state: get retry delay 2024-11-21T08:55:40.323381Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:42.324361Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:55:42.647240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:42.647271Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:43.267852Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:55:44.325404Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:46.326392Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:55:48.268030Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:55:48.327440Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:50.328365Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:52.329401Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:55:53.268268Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:55:54.330392Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:55:56.331408Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:55:58.268392Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:55:58.332394Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:56:00.333418Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T08:56:02.334374Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T08:56:03.214824Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T08:56:03.214857Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2024-11-21T08:56:03.215281Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T08:56:03.215438Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T08:56:03.215749Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T08:56:03.215839Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2024-11-21T08:56:03.268610Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T08:56:04.335403Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair>>> Ready to answer: ok === Closing the session 2024-11-21T08:56:06.336469Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:22655" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:22655" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:22655" location: "dc3" status: AVAILABLE weight: 500 } ] } 2024-11-21T08:56:06.338907Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: try to update token 2024-11-21T08:56:06.339158Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2024-11-21T08:56:06.340517Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2024-11-21T08:56:06.340567Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2024-11-21T08:56:06.340715Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnWriteDone gRpcStatusCode: 0 2024-11-21T08:56:06.341201Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2024-11-21T08:56:06.341225Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session will now close 2024-11-21T08:56:06.341160Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T08:56:06.341181Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T08:56:06.341241Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: aborting 2024-11-21T08:56:06.341325Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T08:56:06.341367Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179366341 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:06.341371Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session is aborting and will not restart 2024-11-21T08:56:06.341361Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { path: "test-topic" message_group_id: "src_id" } 2024-11-21T08:56:06.341398Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 path: "test-topic" message_group_id: "src_id" from ipv6:[::1]:57656 2024-11-21T08:56:06.341410Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="topic server" ip=ipv6:[::1]:57656 proto=topic topic=test-topic durationSec=0 2024-11-21T08:56:06.341414Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:56:06.341536Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: grpc closed 2024-11-21T08:56:06.341545Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: is DEAD 2024-11-21T08:56:06.355122Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2024-11-21T08:56:06.355144Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: destroy |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TSchemeShardMoveTest::Replace >> TServiceAccountServiceTest::IssueToken [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] |90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2024-11-21T08:56:06.909519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653727862897680:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:06.909693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00318c/r3tmp/tmpGw94zH/pdisk_1.dat 
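Note: from this point the raw test traces are interleaved with ya make progress markers ("|90.0%| [TM] ...") and per-test verdicts (">> Suite::Test [GOOD]"). A small helper for pulling the verdicts out of such mixed output is sketched below; the marker format is taken from the lines above, and the script itself is an illustration, not part of ya make.

import re
from collections import Counter

VERDICT_RE = re.compile(r">>\s+(?P<name>[\w:.]+)\s+\[(?P<verdict>[A-Z]+)\]")

def count_verdicts(log_text):
    # Count GOOD/FAIL/... markers emitted between progress lines.
    return Counter(m.group("verdict") for m in VERDICT_RE.finditer(log_text))

sample = ">> TSchemeShardMoveTest::OneTable [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD]"
print(count_verdicts(sample))  # Counter({'GOOD': 2})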
2024-11-21T08:56:06.969859Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:06.980958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:06.983585Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:56:07.010680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:07.010710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:07.011740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:07.251630Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653733486260890:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:07.251917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00318c/r3tmp/tmpx2yAiD/pdisk_1.dat 2024-11-21T08:56:07.260598Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
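Note: the TClient::Ls and DescribeScheme responses above are protobuf text format (and are truncated by the harness). For quick triage a regex over the Self { ... } block is often enough; a real tool would parse the dump with the schemeshard .proto definitions instead. A sketch, with field names taken from the response above:

import re

def extract_self_fields(describe_text):
    # Pull key/value pairs out of the first Self { ... } block of a DescribeScheme dump.
    match = re.search(r"Self\s*{(?P<body>[^}]*)}", describe_text)
    if not match:
        return {}
    return dict(re.findall(r'(\w+): "?([^"\s]+)"?', match.group("body")))

sample = 'Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true }'
fields = extract_self_fields(sample)
print(fields["Name"], fields["PathType"], fields["PathId"])  # Root EPathTypeDir 1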
2024-11-21T08:56:07.352179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:07.352237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:07.353305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:07.354516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:56:06.752595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:06.752616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:06.752619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:06.752623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:06.752628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:06.752630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:06.752637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:06.752706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:06.760143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:06.760160Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:06.762470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:06.763001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:06.763029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:06.764100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:06.764272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:06.764347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:06.764397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:06.765126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:06.765336Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:06.765342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:06.765369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:06.765374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:06.765378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:06.765387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.766309Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:56:06.779932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:06.780003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.780056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:06.780090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:06.780095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.780790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:06.780809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:06.780847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.780854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:06.780858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:06.780861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:06.781131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.781138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:06.781141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:06.781354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.781359Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.781363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:06.781367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:06.781729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:06.781976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:06.782015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:06.782140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:06.782155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:06.782162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:06.782197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:06.782201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:06.782245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:06.782253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:06.782532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:06.782536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:06.782566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:06.782569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:06.782622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:06.782626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:06.782634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#1:0 progress is 1/1 2024-11-21T08:56:06.782637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:06.782641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:06.782644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:06.782646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:06.782649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:06.782656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:06.782660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:06.782663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:06.782837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:06.782846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:06.782849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:06.782852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:06.782855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:06.782864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
HARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.531869Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:56:07.531874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:56:07.531878Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2024-11-21T08:56:07.531886Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:56:07.531906Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2024-11-21T08:56:07.531921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:07.531930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:07.532190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532487Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:07.532531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:07.532550Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, path id: 1 2024-11-21T08:56:07.532559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, path id: 4 2024-11-21T08:56:07.532632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532636Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:56:07.532649Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:56:07.532656Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2024-11-21T08:56:07.532755Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, 
cookie: 108 2024-11-21T08:56:07.532762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T08:56:07.532765Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T08:56:07.532768Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2024-11-21T08:56:07.532772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:07.532874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T08:56:07.532882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T08:56:07.532884Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T08:56:07.532886Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:56:07.532888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:56:07.532894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2024-11-21T08:56:07.533456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.533476Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:07.533544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:07.533563Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T08:56:07.533566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T08:56:07.533569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2024-11-21T08:56:07.533580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 108 2024-11-21T08:56:07.533583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T08:56:07.533586Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T08:56:07.533588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T08:56:07.533602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:07.533658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 108 2024-11-21T08:56:07.533938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T08:56:07.534311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T08:56:07.534325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:817:2778] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:56:07.534452Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:07.534461Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2024-11-21T08:56:07.556221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2024-11-21T08:56:07.556251Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:56:07.556611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:07.556703Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:56:07.556746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:07.556802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:56:07.557230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:07.557236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:56:07.557248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:07.557812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:07.557823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:56:07.558004Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2024-11-21T08:56:07.558120Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:07.558167Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 53us result status StatusSuccess 2024-11-21T08:56:07.558245Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::Replace [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestCreateSameExternalTable >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:56:07.391251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:07.391273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:07.391278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:07.391282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:07.391288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:07.391292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:07.391300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:07.391384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:07.400297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:07.400316Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:07.403024Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:07.403578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:07.403603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:07.404815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:07.404988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:07.405093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:07.405164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:07.405949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:07.406165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:07.406172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:07.406198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:07.406203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:07.406208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:07.406218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.407121Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:56:07.419836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:07.419913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.419963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:07.420002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:07.420010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.420827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:07.420858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:07.420919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:56:07.420929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:07.420934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:07.420939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:07.421413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.421426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:07.421430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:07.421795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.421804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.421809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:07.421816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:07.422428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:07.422888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:07.422955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:07.423169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:07.423197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:07.423206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:07.423274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:07.423280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:07.423314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:07.423329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T08:56:07.423753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:07.423761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:07.423809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:07.423814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:07.423892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:07.423898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:07.423909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:07.423914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:07.423919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:07.423924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:07.423929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:07.423932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:07.423943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:07.423949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:07.423952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:07.424268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:07.424283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:07.424288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:07.424292Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:07.424297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:07.424310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:56:07.839129Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:56:07.839173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:56:07.839232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 2024-11-21T08:56:07.839400Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2024-11-21T08:56:07.839637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:56:07.839679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:56:07.840010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:07.840023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2024-11-21T08:56:07.840036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2024-11-21T08:56:07.840042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2024-11-21T08:56:07.840048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2024-11-21T08:56:07.840052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2024-11-21T08:56:07.840057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T08:56:07.840061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2024-11-21T08:56:07.840066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 Forgetting tablet 72075186233409548 2024-11-21T08:56:07.840162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:07.840195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2024-11-21T08:56:07.840649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:56:07.840663Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T08:56:07.841076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:56:07.841088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:56:07.841106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 4 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:56:07.841120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:07.841125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2024-11-21T08:56:07.841138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:07.841172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:07.841178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2024-11-21T08:56:07.841493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:56:07.841587Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:07.841597Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:56:07.841605Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T08:56:07.841688Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:07.841725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 43us result status StatusPathDoesNotExist 2024-11-21T08:56:07.841758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:56:07.841815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:07.841850Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 36us result status StatusSuccess 2024-11-21T08:56:07.841946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:07.842046Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:07.842066Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2024-11-21T08:56:07.842124Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 19 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:53.262615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:53.262639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:53.262645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:53.262650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2024-11-21T08:55:53.262656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:53.262661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:53.262670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:53.262749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:53.274372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:53.274392Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:53.276505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:53.276596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:53.276627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:53.278945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:53.278999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:53.279059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:53.279190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:53.279609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:53.279766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:53.279772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:53.279779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:53.279783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:53.279786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:53.279809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:53.280702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:53.290909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:53.290964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain 
Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:53.291047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:53.291052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:53.291619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:53.291628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:53.291631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:53.291862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:53.291870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:53.292093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.292099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.292103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:53.292107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:53.292502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:53.292777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:53.292809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:53.292926Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:53.292942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:53.292946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:53.292985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:53.292989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:53.293006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:53.293014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:53.293263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:53.293268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:53.293293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:53.293296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:53.293347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:53.293351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:53.293358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:53.293360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:53.293363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:53.293366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:53.293369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:53.293371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:53.293377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:53.293381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:53.293383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:56:08.150676Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:56:08.150678Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:56:08.150684Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:08.150687Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:56:08.150689Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:56:08.150692Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:08.150694Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:56:08.150695Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:56:08.150697Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:56:08.150700Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:56:08.150701Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:56:08.150712Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:56:08.150716Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 1 2024-11-21T08:56:08.150718Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:56:08.150720Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T08:56:08.150722Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T08:56:08.150724Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T08:56:08.150727Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T08:56:08.151021Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151032Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151034Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:08.151038Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:56:08.151041Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:08.151105Z node 61 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151110Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151112Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:08.151114Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:56:08.151116Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:08.151227Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151233Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151235Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:08.151237Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T08:56:08.151239Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:08.151317Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151324Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.151327Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:08.151329Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:56:08.151331Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:56:08.191635Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.191660Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.191664Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:08.191669Z 
node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:56:08.191673Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:56:08.191681Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:56:08.191685Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [61:365:2346] 2024-11-21T08:56:08.192353Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.192383Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.192419Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.192619Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.192630Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:08.192641Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:08.192644Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:366:2347] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:08.192736Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:08.192786Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 57us result status StatusSuccess 2024-11-21T08:56:08.192855Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } Kesus { Name: "z" PathId: 6 KesusTabletId: 72075186233409546 Config { } Version: 2 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T08:56:08.192908Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:08.192922Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 15us result status StatusPathDoesNotExist 2024-11-21T08:56:08.192934Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> KikimrIcGateway::TestLoadExternalTable >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning |90.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::InitialScanAndResolvedTimestamps [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] >> KikimrIcGateway::TestDropExternalDataSource [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2024-11-21T08:55:27.926608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653563597004897:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:27.926641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003579/r3tmp/tmpHccqRc/pdisk_1.dat 2024-11-21T08:55:27.969757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9159, node 1 2024-11-21T08:55:27.986295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:27.986307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:27.986309Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:27.986347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:27.996911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:27.999309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:28.026885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:28.026920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:28.028092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:28.035310Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7439653567891972760:2282] 2024-11-21T08:55:28.035402Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:28.036603Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:28.036629Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:55:28.036771Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:55:28.036784Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:55:28.036789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:55:28.036828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:55:28.041493Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 
2024-11-21T08:55:28.041573Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:55:28.041597Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7439653567891972774:2283] 2024-11-21T08:55:28.041607Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:28.041611Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:55:28.041615Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.041656Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:55:28.041682Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:55:28.041691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.041698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:28.041712Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:55:28.041720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.052454Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653567891972749:2284], serverId# [1:7439653567891972777:2299], sessionId# [0:0:0] 2024-11-21T08:55:28.052522Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:28.052633Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:55:28.052683Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2024-11-21T08:55:28.053011Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:28.053931Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:28.053968Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:55:28.054720Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653567891972790:2306], serverId# [1:7439653567891972792:2308], sessionId# [0:0:0] 2024-11-21T08:55:28.055172Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:28.056296Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1732179328103 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7439653563597005342 RawX2: 4294969517 } } Step: 1732179328103 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:55:28.056319Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.056356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.056368Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:55:28.056380Z node 1 
:TX_DATASHARD DEBUG: Found ready operation [1732179328103:281474976710657] in PlanQueue unit at 72075186224037888 2024-11-21T08:55:28.056458Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1732179328103:281474976710657 keys extracted: 0 2024-11-21T08:55:28.056499Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:55:28.056520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.056539Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:55:28.056970Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:55:28.057093Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:28.057263Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1732179328102 2024-11-21T08:55:28.057271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.057282Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1732179328103 txid# 281474976710657} 2024-11-21T08:55:28.057291Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1732179328103} 2024-11-21T08:55:28.057300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.057315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.057323Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:28.057328Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:55:28.057346Z node 1 :TX_DATASHARD DEBUG: Complete [1732179328103 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7439653563597005324:2206], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:55:28.057358Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2024-11-21T08:55:28.057371Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.057389Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1732179328103 2024-11-21T08:55:28.058165Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7439653567891972774:2283][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2024-11-21T08:55:28.058379Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2024-11-21T08:55:28.058391Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:55:28.060295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:28.060331Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:55:28.060346Z node 1 :TX_DATASHARD 
INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2024-11-21T08:55:28.060353Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2024-11-21T08:55:28.061025Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:28.064527Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:55:28.064605Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2024-11-21T08:55:28.064776Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:55:28.064815Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2024-11-21T08:55:28.064824Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:55:28.064826Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2024-11-21T08:55:28.064829Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:55:28.064834Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:28.064843Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2024-11-21T08:55:28.064850Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2024-11-21T08:55:28.073943Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:28.073976Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7439653567891972872:2285], now have 1 active actors on pipe 2024-11-21T08:55:28.112305Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:28.112325Z node 1 :PERSQUEUE DEBUG: [PQ: 7207518622403788 ... 
-11-21T08:56:08.552734Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2024-11-21T08:56:08.563243Z node 27 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:56:08.563303Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2024-11-21T08:56:08.749075Z node 27 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715662 at step 7500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 516 RawX2: 115964119446 } } Step: 7500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:56:08.749106Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:08.749149Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:56:08.749159Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:56:08.749170Z node 27 :TX_DATASHARD DEBUG: Found ready operation [7500:281474976715662] in PlanQueue unit at 72075186224037888 2024-11-21T08:56:08.749234Z node 27 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 7500:281474976715662 keys extracted: 0 2024-11-21T08:56:08.749269Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:56:08.749291Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:56:08.749425Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2024-11-21T08:56:08.749539Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:56:08.760771Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 7500 txid# 281474976715662} 2024-11-21T08:56:08.760795Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2024-11-21T08:56:08.760808Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:56:08.760814Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:56:08.760823Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:08.760867Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:56:08.760886Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2024-11-21T08:56:08.760904Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:08.760985Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2024-11-21T08:56:08.761043Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 
}, at tablet: 72075186224037888 2024-11-21T08:56:08.761595Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2024-11-21T08:56:08.762050Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2024-11-21T08:56:08.762071Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:56:08.772500Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2024-11-21T08:56:08.772532Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2024-11-21T08:56:08.772541Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:08.772556Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2024-11-21T08:56:08.772563Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2024-11-21T08:56:08.772566Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:08.772604Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:647:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2024-11-21T08:56:08.772621Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:947:2741] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2024-11-21T08:56:08.772730Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2024-11-21T08:56:08.772779Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:947:2741], at tablet# 72075186224037888 2024-11-21T08:56:08.772783Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2024-11-21T08:56:08.772803Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:947:2741] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:56:08.772823Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1029:2741] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:56:08.772909Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:08.772919Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:08.772954Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 
2 requestId: cookie: 2 2024-11-21T08:56:08.772977Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:08.772979Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:08.772989Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2024-11-21T08:56:08.773025Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2024-11-21T08:56:08.773038Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2024-11-21T08:56:08.773057Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2024-11-21T08:56:08.773097Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2024-11-21T08:56:08.773131Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2024-11-21T08:56:08.773155Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:08.783564Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 2024-11-21T08:56:08.783617Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:08.783634Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T08:56:08.783689Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2024-11-21T08:56:08.783773Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1029:2741] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2024-11-21T08:56:08.783791Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:947:2741] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:08.783842Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2024-11-21T08:56:08.783847Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2024-11-21T08:56:08.794227Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2024-11-21T08:56:08.978336Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:08.978361Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:08.978406Z node 27 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:08.978419Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T08:56:08.978425Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:08.978469Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2024-11-21T08:56:08.978476Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:08.978598Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 26048, MsgBus: 17222 2024-11-21T08:56:08.248026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653739130844977:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.248222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004502/r3tmp/tmpnSOAeq/pdisk_1.dat 2024-11-21T08:56:08.300772Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26048, node 1 2024-11-21T08:56:08.309721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.309741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.309743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.309779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17222 TClient is connected to server localhost:17222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:08.349937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:08.349977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:08.351075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:08.378439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:08.380257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:08.388460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:56:08.390767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5857, MsgBus: 21438 2024-11-21T08:56:08.645657Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653738007154295:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.645718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004502/r3tmp/tmpyZ7Vrt/pdisk_1.dat 2024-11-21T08:56:08.656185Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5857, node 2 2024-11-21T08:56:08.664661Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.664675Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.664676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.664709Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21438 TClient is connected to server localhost:21438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:08.745667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:08.745707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:08.746755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:08.747944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:08.771831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65000, MsgBus: 16852 2024-11-21T08:56:08.976940Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653737091534913:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.977102Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004502/r3tmp/tmpzfq4El/pdisk_1.dat 2024-11-21T08:56:08.985115Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65000, node 3 2024-11-21T08:56:08.996476Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.996490Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.996492Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.996539Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16852 TClient is connected to server localhost:16852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:09.078476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:09.078505Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:09.079302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.079527Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:56:09.084995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:09.089934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 15700, MsgBus: 30157 2024-11-21T08:56:08.266734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653739322985468:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.266926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e5/r3tmp/tmpXRqToT/pdisk_1.dat 2024-11-21T08:56:08.309559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15700, node 1 2024-11-21T08:56:08.320490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.320501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.320503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.320540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30157 TClient is connected to server localhost:30157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:08.362045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:08.367931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:08.367955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:08.369078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:08.374893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T08:56:08.376683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:08.381747Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131 Trying to start YDB, gRPC: 4939, MsgBus: 1151 2024-11-21T08:56:08.612166Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653737844237708:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.612517Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e5/r3tmp/tmpjV77lU/pdisk_1.dat 2024-11-21T08:56:08.620655Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4939, node 2 2024-11-21T08:56:08.629810Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.629822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.629824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.629863Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1151 TClient is connected to server localhost:1151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:08.713652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:08.713684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:08.714370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:08.714647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:56:08.715239Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:08.725415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T08:56:08.727602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9322, MsgBus: 13263 2024-11-21T08:56:08.933153Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653737268240990:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.933170Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e5/r3tmp/tmpzpOQVV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9322, node 3 2024-11-21T08:56:08.947222Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:08.950428Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:08.950441Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:08.950444Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:08.950484Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13263 TClient is connected to server localhost:13263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:09.033628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:09.033668Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:09.034727Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:09.035455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:09.045790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480
|90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest
>> KqpScheme::DropKeyColumn
>> KqpScheme::UseUnauthorizedTable
|90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
>> KqpScheme::CreateTableWithReadReplicasUncompat
>> TSequence::CreateSequence
>> TSequence::CreateSequenceParallel
>> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD]
>> KqpScheme::CreateFamilyWithCompressionLevel
|90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
>> TSequence::CreateSequence [GOOD]
>> KqpConstraints::SerialTypeSmallSerial
>> KqpScheme::DoubleCreateExternalDataSource
>> TSequence::CreateSequenceParallel [GOOD]
>> KqpScheme::DropKeyColumn [GOOD]
>> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon [GOOD]
>> KqpScheme::UseUnauthorizedTable [GOOD]
|90.0%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
>> TSequence::CreateDropRecreate
>> KqpScheme::DropIndexDataColumn
>> KqpScheme::UseNonexistentTable
>> TSequence::CreateSequenceSequential
>> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD]
>> KqpScheme::CreateTableWithReadReplicasUncompat [GOOD]
>> KqpScheme::CreateFamilyWithCompressionLevel [GOOD]
>> Cdc::MustNotLoseSchemaSnapshot [GOOD]
>> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx
>> KqpScheme::CreateExternalTableWithUpperCaseSettings
>> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata
>> KqpScheme::DropIndexDataColumn [GOOD]
>> KqpScheme::CreateTableWithReadReplicasCompat
>> TSequence::CreateDropRecreate [GOOD]
>> KqpScheme::DoubleCreateExternalDataSource [GOOD]
>> KqpConstraints::SerialTypeSmallSerial [GOOD]
>> KqpScheme::UseNonexistentTable [GOOD]
>> TSequence::CreateSequenceSequential [GOOD]
>> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK
>> KqpScheme::CreateTableWithReadReplicasCompat [GOOD]
>> TSequence::CreateSequenceInsideSequenceNotAllowed
>> KqpScheme::DoubleCreateExternalTable
>> KqpConstraints::SerialTypeSerial4
>> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD]
>> TSequence::CreateSequenceInsideTableThenDropSequence
>> KqpScheme::DropExternalDataSource
>> KqpScheme::UseDroppedTable
>> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat
>> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD]
>> KqpScheme::CreateResourcePool
>> KqpScheme::DoubleCreateExternalTable [GOOD]
>> KqpConstraints::SerialTypeSerial4 [GOOD]
>> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD]
>> KqpScheme::DropExternalDataSource [GOOD]
>> KqpScheme::UseDroppedTable [GOOD]
>> KqpScheme::UnknownFamilyTest
>> KqpScheme::DisableResourcePoolsOnServerless
>> TSequence::CreateSequenceInsideIndexTableNotAllowed
>> KqpScheme::CreateResourcePool [GOOD]
>> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat [GOOD]
>> KqpConstraints::SerialTypeSerial8
>> TSequence::CreateSequenceInsideTableThenDropTable
>> KqpScheme::CreateResourcePoolClassifier
>> KqpScheme::DropExternalTable
>> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD]
>> KqpScheme::CreateTableWithPartitionAtKeysUuid
>> TSequence::CopyTableWithSequence
>>
TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TSequence::CreateSequencesWithIndexedTable >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TSequence::AlterTableSetDefaultFromSequence >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> KqpScheme::DropExternalTable [GOOD] |90.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInSolomon [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:49.873041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:49.873064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:49.873067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:49.873071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:49.873075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:49.873078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:49.873083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:49.873147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:49.882002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:49.882019Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:49.883963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:49.884034Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:49.884065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:49.886051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:49.886108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:49.886168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:49.886312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:49.886774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:49.886969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:49.886976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:49.886986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:49.886991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:49.886995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:49.887029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:49.887979Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:49.900488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:49.900559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.900610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:49.900661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:49.900666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.901362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:49.901389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:49.901443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.901453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:49.901468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:49.901472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:49.901840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.901849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:49.901854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:49.902131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.902138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.902144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:49.902151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:49.902582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:49.902867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:49.902905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:49.903045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:49.903062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:49.903067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:49.903109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:49.903113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:49.903135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:49.903144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:49.903429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:49.903435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:49.903462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:49.903466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:49.903526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:49.903531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:49.903540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:49.903543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:49.903546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:49.903550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:49.903553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:49.903555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:49.903563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:49.903567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:49.903570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
hId: 6] was 3 2024-11-21T08:56:10.972426Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:56:10.972428Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:56:10.972432Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T08:56:10.972434Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:56:10.972436Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:56:10.972438Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T08:56:10.972440Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:56:10.972442Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:56:10.972453Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 5 2024-11-21T08:56:10.972457Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T08:56:10.972459Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:56:10.972461Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 5 2024-11-21T08:56:10.972464Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 7], 5 2024-11-21T08:56:10.972467Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 8], 5 2024-11-21T08:56:10.972470Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 9], 2 2024-11-21T08:56:10.973083Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973097Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973101Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:10.973104Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:56:10.973108Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:10.973344Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973354Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973357Z 
node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:10.973360Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 5 2024-11-21T08:56:10.973362Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:56:10.973437Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973443Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973446Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:10.973448Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 5 2024-11-21T08:56:10.973450Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T08:56:10.973943Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973966Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.973971Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:10.973976Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 5 2024-11-21T08:56:10.973981Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T08:56:10.974136Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.974147Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.974151Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:10.974155Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 2 2024-11-21T08:56:10.974161Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2024-11-21T08:56:10.974171Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:56:10.976359Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.976436Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.976633Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.976667Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:10.976809Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:56:10.977588Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:56:10.977597Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:56:10.977646Z node 86 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:56:10.977662Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:10.977665Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [86:426:2399] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:10.977720Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:10.977785Z node 86 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 75us result status StatusSuccess 2024-11-21T08:56:10.977872Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSolomonVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SolomonVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SolomonDescription { Name: "z" PathId: 9 PartitionCount: 2 Partitions { PartitionId: 0 TabletId: 72075186233409546 ShardIdx: 1 } Partitions { PartitionId: 1 TabletId: 72075186233409547 ShardIdx: 2 } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T08:56:10.977917Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:10.977938Z node 86 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 22us result status StatusPathDoesNotExist 2024-11-21T08:56:10.977957Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:56:10.761044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:10.761073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:10.761078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:10.761083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:10.761099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:10.761103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:10.761112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:10.761185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:10.771982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:10.771999Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:10.774638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:10.775424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:10.775460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:10.776794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:10.777003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:10.777118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.777194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:10.778214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.778469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:10.778479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.778518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:10.778525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:10.778531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:10.778544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.779689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:56:10.796514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:10.796597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.796654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:10.796701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:10.796710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.797401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.797431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:10.797482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.797493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:10.797497Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:10.797502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:10.797964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.797977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:10.797982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:10.798391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.798402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.798408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.798414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.798920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:10.799354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:10.799406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:10.799603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.799629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:10.799639Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.799696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:10.799702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.799730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:10.799743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:10.800190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:10.800197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:10.800261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.800267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:10.800356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.800363Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:10.800375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:10.800379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.800384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:10.800390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.800394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:10.800398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:10.800408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:10.800415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:10.800418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:10.800698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:10.800715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:10.800719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:10.800724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:10.800729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:10.800744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:56:12.611327Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T08:56:12.611330Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T08:56:12.611334Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:56:12.611338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:56:12.611349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2024-11-21T08:56:12.611352Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:12.611724Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.611732Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2024-11-21T08:56:12.611746Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:335:2315] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T08:56:12.611809Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:12.611814Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:12.611819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T08:56:12.611824Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:12.611881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:12.611902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:12.611906Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2024-11-21T08:56:12.611910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2024-11-21T08:56:12.611914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2024-11-21T08:56:12.611990Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.611997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T08:56:12.612009Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:338:2317] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T08:56:12.612055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612059Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.612082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612085Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.612124Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:12.612129Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:12.612134Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:56:12.612138Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:12.612173Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T08:56:12.612187Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:12.612191Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2024-11-21T08:56:12.612195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T08:56:12.612200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2024-11-21T08:56:12.612228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:419:2374] message: TxId: 102 2024-11-21T08:56:12.612234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T08:56:12.612240Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:56:12.612244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:56:12.612260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:56:12.612265Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T08:56:12.612268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T08:56:12.612273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:12.612276Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T08:56:12.612279Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T08:56:12.612285Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:12.612289Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2024-11-21T08:56:12.612292Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2024-11-21T08:56:12.612298Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:56:12.612510Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 
2024-11-21T08:56:12.612520Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T08:56:12.612527Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:12.612532Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:56:12.612542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:56:12.612638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612644Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.612649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612652Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.612658Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612661Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.612945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T08:56:12.612952Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.613303Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.613323Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:12.613335Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:419:2374] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T08:56:12.613364Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:56:12.613374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:56:12.613379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:513:2467] 2024-11-21T08:56:12.613420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:515:2469], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:12.613426Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:12.613430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T08:56:12.613521Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:592:2546], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T08:56:12.613527Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:56:12.613538Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:12.613567Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 25us result status StatusPathDoesNotExist 2024-11-21T08:56:12.613601Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2024-11-21T08:55:29.252031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653570275976043:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:29.252248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034a2/r3tmp/tmpVbP0Sc/pdisk_1.dat 2024-11-21T08:55:29.294573Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27163, node 1 2024-11-21T08:55:29.310854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:29.310870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:29.310872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:29.310918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:29.323861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:29.326805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:29.352370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:29.352403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:29.353487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:29.360368Z node 1 :TX_DATASHARD INFO: 
TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7439653570275976609:2282] 2024-11-21T08:55:29.360455Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:29.361786Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:29.361812Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:55:29.361962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:55:29.361976Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:55:29.361984Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:55:29.362022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:55:29.366170Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:55:29.366239Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:55:29.366265Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7439653570275976623:2283] 2024-11-21T08:55:29.366273Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:29.366276Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:55:29.366279Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.366321Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:55:29.366348Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:55:29.366356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.366360Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:29.366368Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:55:29.366376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.378466Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653570275976598:2284], serverId# [1:7439653570275976626:2299], sessionId# [0:0:0] 2024-11-21T08:55:29.378537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:29.378643Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:55:29.378695Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2024-11-21T08:55:29.378894Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:29.379966Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:29.380029Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:55:29.380625Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653570275976639:2306], serverId# [1:7439653570275976640:2307], sessionId# [0:0:0] 2024-11-21T08:55:29.381639Z node 1 
:TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1732179329426 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7439653570275976504 RawX2: 4294969526 } } Step: 1732179329426 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:55:29.381674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.381709Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:29.381729Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.381744Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:55:29.381752Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732179329426:281474976710657] in PlanQueue unit at 72075186224037888 2024-11-21T08:55:29.381833Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1732179329426:281474976710657 keys extracted: 0 2024-11-21T08:55:29.381885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:55:29.381928Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:29.381959Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:55:29.382453Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:55:29.382602Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:29.382850Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1732179329425 2024-11-21T08:55:29.382867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.382874Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1732179329426 2024-11-21T08:55:29.382903Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1732179329426 txid# 281474976710657} 2024-11-21T08:55:29.382923Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1732179329426} 2024-11-21T08:55:29.382945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.382963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:29.382967Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:29.382978Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:55:29.382995Z node 1 :TX_DATASHARD DEBUG: Complete [1732179329426 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7439653570275976460:2203], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:55:29.383010Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2024-11-21T08:55:29.383022Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:29.383756Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7439653570275976623:2283][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2024-11-21T08:55:29.383971Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2024-11-21T08:55:29.383986Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:55:29.385743Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:29.385787Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:55:29.385803Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2024-11-21T08:55:29.385811Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2024-11-21T08:55:29.386607Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:29.389700Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:55:29.389796Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2024-11-21T08:55:29.389975Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:55:29.390037Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2024-11-21T08:55:29.390051Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:55:29.390053Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2024-11-21T08:55:29.390056Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:55:29.390061Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:29.390075Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2024-11-21T08:55:29.390082Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2024-11-21T08:55:29.398988Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:29.399017Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7439653570275976721:2285], now have 1 active actors on pipe 2024-11-21T08:55:29.437327Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:29.437347Z node 1 :PERSQUEUE DEBUG: [PQ: 720751862240378 ... 
: 2024-11-21T08:56:09.661702Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.661714Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:56:09.739422Z node 23 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037891 2024-11-21T08:56:09.739825Z node 23 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037892 2024-11-21T08:56:09.740126Z node 23 :TX_DATASHARD DEBUG: 72075186224037891 ack split partitioning changed to schemeshard 281474976715660 2024-11-21T08:56:09.740144Z node 23 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T08:56:09.740277Z node 23 :TX_DATASHARD DEBUG: 72075186224037892 ack split partitioning changed to schemeshard 281474976715660 2024-11-21T08:56:09.740285Z node 23 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T08:56:09.740446Z node 23 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2024-11-21T08:56:09.740684Z node 23 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2024-11-21T08:56:09.740985Z node 23 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T08:56:09.741044Z node 23 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T08:56:09.741236Z node 23 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2024-11-21T08:56:09.741305Z node 23 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2024-11-21T08:56:09.752031Z node 23 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2024-11-21T08:56:09.752267Z node 23 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2024-11-21T08:56:09.752637Z node 23 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2024-11-21T08:56:09.752918Z node 23 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2024-11-21T08:56:09.753537Z node 23 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 23, TabletId: 72075186224037891 not found 2024-11-21T08:56:09.753622Z node 23 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 23, TabletId: 72075186224037892 not found 2024-11-21T08:56:09.774607Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:09.774643Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [23:1230:2607], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2024-11-21T08:56:09.795198Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:09.795226Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.795342Z node 23 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T08:56:09.795621Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2024-11-21T08:56:09.795925Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:09.795932Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.796017Z node 23 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:09.796039Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2024-11-21T08:56:09.796051Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:09.796060Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T08:56:09.796066Z node 23 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:09.796149Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:56:09.806427Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:56:09.806524Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:56:09.806591Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:09.806601Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.806631Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:56:09.806731Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1281:2999] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|728a097-ac226671-8638d97b-4ee5e1c1_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2024-11-21T08:56:09.806760Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][23:1278:2999] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:09.806792Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1281:2999] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:56:09.806857Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:09.806863Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.806887Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 
messageNo: 0 requestId: cookie: 1 2024-11-21T08:56:09.806910Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:09.806913Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:09.806927Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2024-11-21T08:56:09.806960Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2024-11-21T08:56:09.806974Z node 23 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2024-11-21T08:56:09.806994Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2024-11-21T08:56:09.829728Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2024-11-21T08:56:09.829819Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6504 2024-11-21T08:56:09.829931Z node 23 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:09.840650Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 2024-11-21T08:56:09.840709Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:09.840746Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2024-11-21T08:56:09.840830Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:56:09.840948Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][23:1281:2999] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6504 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2024-11-21T08:56:09.840973Z node 23 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][23:1278:2999] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:09.841029Z node 23 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2024-11-21T08:56:09.841038Z node 23 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2024-11-21T08:56:09.853127Z node 23 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2024-11-21T08:56:10.334524Z node 23 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:10.334558Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:10.334615Z node 23 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:10.334641Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2024-11-21T08:56:10.334650Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:10.334667Z node 23 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2024-11-21T08:56:10.334676Z node 23 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:10.334732Z node 23 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpConstraints::SerialTypeSerial8 [GOOD] >> KqpConstraints::Utf8AndDefault >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> KqpScheme::UnknownFamilyTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropExternalTable [GOOD] Test command err: Trying to start YDB, gRPC: 21302, MsgBus: 4484 2024-11-21T08:56:10.335487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653747048500490:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.335741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004130/r3tmp/tmpIYaUyq/pdisk_1.dat 2024-11-21T08:56:10.402371Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21302, node 1 2024-11-21T08:56:10.412835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.412851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.412852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.412884Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4484 2024-11-21T08:56:10.435692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:10.435727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:10.436805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:10.462604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:10.479886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.539892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.554299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.611128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.647710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653747048502024:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.647760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.670034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.676360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.685066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.692238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.699382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.706224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.715235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653747048502517:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.715270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.715287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653747048502522:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.716232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:10.719795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653747048502524:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking }
<main>: Error: Type annotation, code: 1030
<main>:3:47: Error: At function: KiAlterTable!
:3:47: Error: AlterTable : db.[/Root/KeyValue] Column: "Key" is a key column. Key column drop is not supported Trying to start YDB, gRPC: 18021, MsgBus: 9687 2024-11-21T08:56:10.990857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653747250419834:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.991047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004130/r3tmp/tmpR5uQYc/pdisk_1.dat 2024-11-21T08:56:10.997472Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18021, node 2 2024-11-21T08:56:11.006134Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.006148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.006151Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.006205Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9687 TClient is connected to server localhost:9687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.090974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.091000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.092093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.093295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.101968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.109292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:11.126997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.135459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.254884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653751545388664:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.254911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.258579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Ope ... schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.983582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.002649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.013055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.165368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757601193224:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.165395Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.171137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.177997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.191339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.204635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.212141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.225774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.233696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757601193736:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.233706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757601193741:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.233727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.234342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:12.237916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653757601193743:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:12.412770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26263, MsgBus: 4316 2024-11-21T08:56:12.544069Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653754560414035:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:12.544226Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004130/r3tmp/tmpuL0JcH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26263, node 4 2024-11-21T08:56:12.557590Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:12.559879Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:12.559888Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:12.559889Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:12.559913Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4316 TClient is connected to server localhost:4316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:12.644334Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:12.644358Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:12.645372Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:12.646111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.653770Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:12.661188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.675109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.687657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.776674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653754560415568:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.776696Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.781299Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.786544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.798997Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.806199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.860995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.868968Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.878154Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653754560416084:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.878169Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653754560416089:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.878177Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.878708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:12.882106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653754560416091:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:13.079276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.085490Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 19173, MsgBus: 28141 2024-11-21T08:56:09.083715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653741703210833:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:09.083878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044cd/r3tmp/tmp0weE4B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19173, node 1 2024-11-21T08:56:09.138607Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:09.138937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:09.138946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:09.138947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:09.138973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28141 TClient is connected to server localhost:28141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:09.184604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:09.184638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:09.185726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:09.209655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:09.232465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:09.245689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:09.260068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:09.272191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:09.328292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653741703212388:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:09.328319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:09.350322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.355923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.410091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.417871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.425105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.431527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.440562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653741703212895:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:09.440586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653741703212900:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:09.440587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:09.441201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:09.445251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653741703212902:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:09.608968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.610442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27185, MsgBus: 10558 2024-11-21T08:56:09.741300Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653741961334127:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044cd/r3tmp/tmpsqOCxB/pdisk_1.dat 2024-11-21T08:56:09.745978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:56:09.750536Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27185, node 2 2024-11-21T08:56:09.760560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:09.760572Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:09.760573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:09.760605Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10558 TClient is connected to server localhost:10558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:09.843018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:09.843061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:09.843519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:09.844051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:09.855219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:56:09.867741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.890986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:09.957830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.044697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653746256302810:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.044724Z node 2 :KQP_WORKLOAD_SERVICE WAR ... afe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:10.095830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653746256303316:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:10.747450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:56:10.800086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.845635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:56:10.895282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.944046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.988065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.035036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:56:11.042324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.233309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.237471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14452, MsgBus: 26483 2024-11-21T08:56:11.456531Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653752932629211:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.456716Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044cd/r3tmp/tmp1BCFj7/pdisk_1.dat 2024-11-21T08:56:11.465854Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14452, node 3 2024-11-21T08:56:11.474970Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.474983Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.474985Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.475018Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26483 
TClient is connected to server localhost:26483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.557177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.557212Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.558255Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.558940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.563578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.575438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.591139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.602810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.759286Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653752932630751:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.759312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.763198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.769850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.776941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.831412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.840385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.846988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.855143Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653752932631256:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.855161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653752932631261:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.855162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.855673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:11.860399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653752932631263:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:12.458483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:56:12.521278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.564646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:56:12.615171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.666495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.710412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.754473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:56:12.764152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.078623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715714:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:56:10.756135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:10.756157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:10.756160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:10.756163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:10.756176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:10.756179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:10.756185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:10.756262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:10.763546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:10.763563Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:10.765930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:10.766422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:10.766448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:10.767652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:10.767852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:10.767928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.767983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:10.769023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.769289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:10.769298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.769340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:10.769347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:10.769353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:10.769368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.770593Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:56:10.782962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:10.783040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.783097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:10.783135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:10.783140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:56:10.783774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.783803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:10.783847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.783856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:10.783860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:10.783864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:10.784247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.784257Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:10.784262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:10.784582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.784588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.784592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.784598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.784967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:10.785289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:10.785331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:10.785529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:10.785551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:10.785559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.785609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T08:56:10.785614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:10.785649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:10.785662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:10.785990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:10.785995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:10.786029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:10.786036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:10.786108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:10.786113Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:10.786121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:10.786124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.786128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:10.786131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:10.786134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:10.786137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:10.786143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:10.786147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:10.786149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:10.786324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:10.786333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:10.786336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:10.786339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:10.786342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T08:56:10.786350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... xId: 114 2024-11-21T08:56:13.370872Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 114, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:56:13.370876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:56:13.370892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2024-11-21T08:56:13.370895Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:13.371167Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 287 } } 2024-11-21T08:56:13.371177Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T08:56:13.371186Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 287 } } 2024-11-21T08:56:13.371191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T08:56:13.371207Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 287 } } 2024-11-21T08:56:13.371219Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 287 } } 2024-11-21T08:56:13.371222Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:13.371414Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1038:2980], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:13.371424Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:13.371429Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T08:56:13.376741Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T08:56:13.376775Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvDataShard::TEvSchemaChanged 2024-11-21T08:56:13.376794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T08:56:13.376804Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T08:56:13.376844Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T08:56:13.376853Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:56:13.376869Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T08:56:13.376888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:13.376895Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T08:56:13.376902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:56:13.376913Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2024-11-21T08:56:13.376970Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:13.377497Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.377562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T08:56:13.377566Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.377583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T08:56:13.377585Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.378219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T08:56:13.378230Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.378260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T08:56:13.378263Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.378268Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2024-11-21T08:56:13.378290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2927] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 
2024-11-21T08:56:13.378391Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:13.378399Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:13.378406Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T08:56:13.378415Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2024-11-21T08:56:13.378433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:13.378439Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2024-11-21T08:56:13.378446Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T08:56:13.378456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2024-11-21T08:56:13.378466Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:392:2357] message: TxId: 114 2024-11-21T08:56:13.378493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T08:56:13.378499Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2024-11-21T08:56:13.378504Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2024-11-21T08:56:13.378536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:56:13.379035Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:13.379055Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:392:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2024-11-21T08:56:13.379091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2024-11-21T08:56:13.379096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1007:2949] 2024-11-21T08:56:13.379140Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1009:2951], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:13.379146Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:13.379150Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2024-11-21T08:56:13.379353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1046:2988], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2024-11-21T08:56:13.379358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:13.379871Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T08:56:13.379915Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2024-11-21T08:56:13.379995Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2024-11-21T08:56:13.380036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:13.380480Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:13.380513Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2024-11-21T08:56:13.380518Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 >> KqpScheme::CreateTableWithPartitionAtKeysUuid [GOOD] >> KqpScheme::CreateTableWithPgColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::UnknownFamilyTest [GOOD] Test command err: Trying to start YDB, gRPC: 23096, MsgBus: 17471 2024-11-21T08:56:10.374605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653745882779609:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.374650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004129/r3tmp/tmpXlnnBX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23096, node 1 2024-11-21T08:56:10.430795Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:10.442224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.442238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.442240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.442276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17471 2024-11-21T08:56:10.474825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:10.474871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:17471 2024-11-21T08:56:10.475854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:10.487510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.497445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.513026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.529409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.539531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.674329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653745882781147:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.674361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.696799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.751542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.761835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.769096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.775814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.830485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.840957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653745882781669:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.840972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653745882781674:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.840977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.841583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:10.845140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653745882781676:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:10.983218Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439653745882781968:3483], for# test_user@builtin, access# DescribeSchema 2024-11-21T08:56:10.983238Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439653745882781968:3483], for# test_user@builtin, access# DescribeSchema 2024-11-21T08:56:10.984701Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653745882781965:2460], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:10.984981Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzA5ZTBhNDgtNGQzMDM2MjEtNDIzZTJlYjYtMjQ2YmYzMmE=, ActorId: [1:7439653745882781956:2455], ActorState: ExecuteState, TraceId: 01jd6yz8z52dcv5rdc18cr6a3n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:10.987746Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653745882781972:2463], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:10.987810Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzA5ZTBhNDgtNGQzMDM2MjEtNDIzZTJlYjYtMjQ2YmYzMmE=, ActorId: [1:7439653745882781956:2455], ActorState: ExecuteState, TraceId: 01jd6yz8z973b9bnz8q7a296wg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Trying to start YDB, gRPC: 29816, MsgBus: 12376 2024-11-21T08:56:11.244449Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653752327478321:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.244631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004129/r3tmp/tmpkWvb6m/pdisk_1.dat 2024-11-21T08:56:11.255214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29816, node 2 2024-11-21T08:56:11.260364Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.260378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.260380Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.260414Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12376 TClient is connected to server localhost:12376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) W ... SchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.247335Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757209212294:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.247364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.252873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.258502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.267209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.321771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.376431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.385902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:12.394851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757209212809:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.394879Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.394941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653757209212814:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.395546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:12.399001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653757209212816:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:12.591405Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found 2024-11-21T08:56:12.592903Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439653757209213179:2472], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:12.592981Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmJkMTE3MjctNWE3MjA3MGEtNGMwMGVkYjQtMTM2NTk5ZA==, ActorId: [3:7439653757209213103:2454], ActorState: ExecuteState, TraceId: 01jd6yzaheazh4h6w7aehewgh7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Trying to start YDB, gRPC: 20699, MsgBus: 28883 2024-11-21T08:56:12.833290Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653757584774053:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:12.833313Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004129/r3tmp/tmpCBMsHC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20699, node 4 2024-11-21T08:56:12.848986Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:12.850433Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:12.850440Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:12.850442Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:12.850472Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28883 TClient is connected to server localhost:28883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:12.933374Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:12.933400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:12.934494Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:12.936162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.937022Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:12.941103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:12.949939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.968614Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.978766Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:13.072994Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653761879742888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.073021Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.077993Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.084747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.093438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.100258Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.106928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.114835Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.122660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653761879743380:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.122679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.122688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653761879743385:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.123204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:13.127125Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653761879743387:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert |90.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> KqpScheme::DisableResourcePoolsOnServerless [GOOD] >> KqpScheme::DoubleCreateResourcePool >> KqpScheme::DisableExternalDataSourcesOnServerless >> KqpScheme::CreateAndAlterTableComplex >> KqpConstraints::Utf8AndDefault [GOOD] >> KqpOlapScheme::AddColumn >> KqpOlapScheme::TtlRunInterval |90.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |90.1%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2024-11-21T08:53:15.175445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:15.175485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:15.175493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003906/r3tmp/tmp9MtXPT/pdisk_1.dat 2024-11-21T08:53:15.248597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28752, node 1 2024-11-21T08:53:15.346481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:15.346499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:15.346503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:15.346583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:15.352389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:15.429532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:15.429563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:15.441422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3937 2024-11-21T08:53:15.842167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:16.639213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:16.639238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:16.672085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:16.673010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:16.722047Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:16.730998Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:16.731020Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:16.736705Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:16.736820Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:16.736834Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:16.736838Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:16.736842Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:16.736846Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:16.736849Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:16.736854Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:16.736937Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:16.910879Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:16.910902Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:16.911730Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:16.913147Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:16.913238Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:16.913747Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:16.917486Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:16.917501Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:16.917510Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:16.918810Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:16.918832Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:16.919772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:16.921176Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:16.921205Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:16.923358Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:16.935179Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:16.957041Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:17.067819Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:17.234163Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:17.947116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:17.947148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:17.949718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:17.971771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:17.971814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:17.971838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:17.971851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:17.971863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:17.971874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:17.971888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:17.971900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:17.971911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:17.971929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:17.971940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:17.971952Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:17.976027Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:53:17.976045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:53:17.976057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:53:17.976062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:53:17.976073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:53:17.976078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:53:17.976084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... e pool default not found or you don't have access permissions } 2024-11-21T08:53:19.347151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:19.348259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000012s 2024-11-21T08:53:21.154283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;task_id=e9c57ba-a7e611ef-b0129bfd-d55f2392;fline=with_appended.cpp:80;portions=1,;task_id=e9c57ba-a7e611ef-b0129bfd-d55f2392; 2024-11-21T08:53:29.275992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:53:29.276025Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:30.409108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:53:30.409134Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:07.186615Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:56:07.186654Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:07.186658Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:07.186662Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:56:08.691379Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T08:56:08.691418Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 198.000000s, at schemeshard: 72075186224037889 2024-11-21T08:56:08.691542Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2024-11-21T08:56:08.702987Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:10.237982Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:10.238013Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:10.238043Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T08:56:10.238050Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:10.238170Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:10.239600Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:10.240662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7273:5508], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.240689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7283:5513], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.240709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.243727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:10.258202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7287:5516], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:10.444873Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7404:5578]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:10.444942Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:10.444959Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7406:5580] 2024-11-21T08:56:10.444972Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7406:5580] 2024-11-21T08:56:10.445089Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7407:5581] 2024-11-21T08:56:10.445128Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7406:5580], server id = [2:7407:5581], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:10.445143Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7407:5581], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:56:10.445165Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:10.445194Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:10.445210Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7404:5578], StatRequests.size() = 1 2024-11-21T08:56:10.469866Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWExNzE5OTQtNjRmOGJhNDQtMjFjMzNkNzUtZmI2N2QxNjA=, TxId: 2024-11-21T08:56:10.469893Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWExNzE5OTQtNjRmOGJhNDQtMjFjMzNkNzUtZmI2N2QxNjA=, TxId: 2024-11-21T08:56:10.470003Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:10.481332Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:10.481356Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:10.563854Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:10.563891Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:10.667721Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7406:5580], schemeshard count = 1 2024-11-21T08:56:13.490512Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:13.490538Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:13.490546Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T08:56:13.490549Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:13.491328Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:13.502973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:13.503135Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:13.503154Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:13.503401Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:13.525119Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:13.525182Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:13.525342Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7537:5654], server id = [2:7538:5655], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:13.525493Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7537:5654], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:13.525737Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:13.525751Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:13.525792Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:13.525842Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:13.525899Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:13.525945Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7537:5654], server id = [2:7538:5655], tablet id = 72075186224037899 2024-11-21T08:56:13.525948Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:13.526491Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:13.529858Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7555:5672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:13.529919Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:13.529926Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7555:5672], StatRequests.size() = 1 2024-11-21T08:56:13.552529Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTJhNDRmNTUtNWIwOWVlZDYtZDdhZGVmYmYtY2I4NzQ4YzQ=, TxId: 2024-11-21T08:56:13.552554Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTJhNDRmNTUtNWIwOWVlZDYtZDdhZGVmYmYtY2I4NzQ4YzQ=, TxId: 2024-11-21T08:56:13.552890Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:13.553126Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7563:5397]], 
StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:13.553178Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:13.553184Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:13.553644Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:13.553654Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:13.553661Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:13.555746Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects >> KqpScheme::CreateTableWithWrongPartitionAtKeys >> KqpScheme::CreateTableWithPgColumn [GOOD] >> KqpOlapScheme::DropTable >> KqpScheme::CreateResourcePoolClassifier [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> KqpScheme::DoubleCreateResourcePool [GOOD] >> KqpScheme::DoubleCreateResourcePoolClassifier >> KqpScheme::CreateAndAlterTableComplex [GOOD] >> KqpScheme::ChangefeedTopicPartitions >> KqpScheme::CreateTableWithDefaultSettings |90.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapScheme::AddColumn [GOOD] >> KqpOlapScheme::TtlRunInterval [GOOD] >> KqpOlapScheme::TenThousandColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithPgColumn [GOOD] Test command err: Trying to start YDB, gRPC: 24974, MsgBus: 25162 2024-11-21T08:56:10.685451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653745504601508:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.685678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411c/r3tmp/tmpToXHqD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24974, node 1 2024-11-21T08:56:10.742250Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:10.747897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.747908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.747909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.747941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25162 TClient is connected to server localhost:25162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:10.785583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:10.785613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:10.786726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:10.791495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.802730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.862454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.876311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.889520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.930426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653745504603037:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.930462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.955668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:10.961418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.015437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.028256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.034613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.041969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.051568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653749799570836:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.051582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.051585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653749799570841:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.052125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:11.054852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653749799570843:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:11.217439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.227290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.228441Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found Trying to start YDB, gRPC: 15027, MsgBus: 25727 2024-11-21T08:56:11.349662Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653751706500293:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.349904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411c/r3tmp/tmp9lfWwI/pdisk_1.dat 2024-11-21T08:56:11.358069Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15027, node 2 2024-11-21T08:56:11.366313Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.366329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.366331Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.366372Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25727 TClient is connected to server localhost:25727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.450186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.450215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.451356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.451961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:11.460475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.468197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.481695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.490769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.618439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653751706501831:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: ... 11-21T08:56:13.261148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.268314Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.274920Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:13.370314Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653760742403235:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.370338Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653760742403240:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.370346Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:13.371226Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:13.373242Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653760742403242:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:13.550262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12676, MsgBus: 23264 2024-11-21T08:56:13.855588Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653758810033862:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:13.855884Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411c/r3tmp/tmpYZ804X/pdisk_1.dat 2024-11-21T08:56:13.862835Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12676, node 5 2024-11-21T08:56:13.872155Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:13.872170Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:13.872172Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:13.872233Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23264 TClient is connected to server localhost:23264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:13.955904Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:13.955953Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:13.956926Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:13.957715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:13.991097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:14.002168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.020416Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.032901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.153599Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653763105002685:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.153626Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.159201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.166796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.178899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.185212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.240009Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.248261Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.256435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653763105003201:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.256452Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.256457Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653763105003206:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.256968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:14.261046Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653763105003208:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:14.407632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.418283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.427060Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.441616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.455124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.469210Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.483029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.497988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.511621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.526410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.540331Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 |90.1%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpConstraints::SerialTypeNegative1 >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects [GOOD] >> KqpScheme::CreateTableWithWrongPartitionAtKeys [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects >> KqpScheme::CreateTableWithVectorIndexPublicApi |90.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> KqpOlapScheme::DropTable [GOOD] >> KqpOlapScheme::DropThenAddColumn >> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumn [GOOD] Test command err: Trying to start YDB, gRPC: 27681, MsgBus: 16063 2024-11-21T08:56:10.837495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653747591314251:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.837529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004102/r3tmp/tmpMZ8PSG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27681, node 1 2024-11-21T08:56:10.887638Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:10.895391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.895404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.895407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.895439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16063 TClient is connected to server localhost:16063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:10.937592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:10.937619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:10.938684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:10.938871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.941370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.002292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.015611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.028830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.089926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653751886283078:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.089959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.114111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.119647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.125521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.132762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.187951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.243059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.253395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653751886283597:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.253416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653751886283602:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.253427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.254023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:11.258202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653751886283604:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:11.428120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22085, MsgBus: 5185 2024-11-21T08:56:11.706987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653751194079378:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.707155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004102/r3tmp/tmpVJx7HL/pdisk_1.dat 2024-11-21T08:56:11.717620Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22085, node 2 2024-11-21T08:56:11.727727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.727738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.727739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.727768Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5185 TClient is connected to server localhost:5185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.807011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.807046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.808084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.809749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.810499Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:11.817314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:11.826866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.843828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.855287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:12.022765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653755489048210:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.022803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NO ... ent=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:14.485950Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:14.485971Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:14.485989Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:14.486006Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:14.486027Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:14.486047Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:14.486065Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:14.486082Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:14.486101Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653765491582816:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:56:14.486528Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:14.486544Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:14.486554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:14.486559Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:14.486574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:14.486584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:14.486593Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:14.486598Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:56:14.486607Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:56:14.486611Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:56:14.486618Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:56:14.486626Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:56:14.486685Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:56:14.486696Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:56:14.486707Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:56:14.486716Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:56:14.486724Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:14.486732Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:14.486744Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T08:56:14.486753Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:14.486760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:14.486768Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=320;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=320;columns=3; 2024-11-21T08:56:14.549080Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653765491582961:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.549117Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.549146Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653765491582966:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.549761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:14.551095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653765491582968:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:56:14.653265Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374604, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.656623Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.694771Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374709, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.734178Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374730, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.769424Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374765, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=352;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=4; 2024-11-21T08:56:14.821960Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374807, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.859518Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374856, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.896594Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374891, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.933419Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179374933, txId: 18446744073709551615] shutting down 2024-11-21T08:56:14.964255Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179375000, txId: 18446744073709551615] shutting down >> KqpScheme::CreateTableWithDefaultSettings [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat >> KqpScheme::ChangefeedTopicPartitions [GOOD] >> KqpScheme::DisableExternalDataSourcesOnServerless [GOOD] >> KqpConstraints::SerialTypeNegative1 [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning >> KqpScheme::DisableResourcePools >> KqpConstraints::SerialTypeForNonKeyColumn >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects [GOOD] >> KqpScheme::ModifyPermissions |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:54.809455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:54.809488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:54.809492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:54.809496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:54.809501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:54.809504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:54.809511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:54.809576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:54.817911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:54.817931Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:54.819916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:54.819990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:54.820018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:54.821747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:54.821809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:54.821874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:54.822024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T08:55:54.823666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:54.823888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:54.823895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:54.823905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:54.823911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:54.823915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:54.823959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:54.825353Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:54.838669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:54.838734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.838789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:54.838847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:54.838854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:54.839458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:54.839470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:54.839474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:54.839742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:54.839951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.839960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:54.839965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:54.840361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:54.840694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:54.840739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:54.840888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:54.840909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:54.840914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:54.840969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:54.840974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:54.840995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:54.841004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:54.841305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:54.841311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:54.841339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T08:55:54.841345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:54.841421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:54.841427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:54.841437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:54.841441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:54.841444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:54.841448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:54.841451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:54.841453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:54.841472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:54.841476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:54.841479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 8944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.512082Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.512084Z node 84 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:15.512086Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T08:56:15.512089Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:56:15.512272Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:56:15.512278Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 3 2024-11-21T08:56:15.512289Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:56:15.512298Z node 84 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:56:15.512303Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 
TxId: 1003 Step: 0 Generation: 2 2024-11-21T08:56:15.512314Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:3, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:15.512317Z node 84 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:56:15.512320Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:3, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:56:15.512324Z node 84 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 129 -> 240 2024-11-21T08:56:15.512439Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.512448Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.512450Z node 84 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:56:15.512453Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T08:56:15.512456Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T08:56:15.512464Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T08:56:15.514144Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.514183Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.514204Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:56:15.514226Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.514238Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:56:15.514250Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:56:15.514361Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T08:56:15.514371Z node 84 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2024-11-21T08:56:15.514393Z node 84 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 4/4 2024-11-21T08:56:15.514397Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T08:56:15.514405Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T08:56:15.514411Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready 
parts: 4/4 2024-11-21T08:56:15.514418Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:56:15.514424Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:56:15.514440Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:15.514446Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:56:15.514449Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:56:15.514454Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:15.514458Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:56:15.514461Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:56:15.514465Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:56:15.514469Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T08:56:15.514472Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T08:56:15.514488Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:56:15.514584Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:56:15.515139Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:56:15.515152Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:56:15.515220Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:56:15.515240Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:56:15.515249Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [84:416:2391] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:56:15.515319Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:15.515374Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 72us result status StatusSuccess 2024-11-21T08:56:15.515488Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "z" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:15.515546Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:15.515563Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 19us result status StatusPathDoesNotExist 2024-11-21T08:56:15.515581Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScheme::MoveTableWithSerialTypes >> KqpScheme::CreateTableWithVectorIndexPublicApi [GOOD] >> KqpOlapScheme::DropThenAddColumn [GOOD] >> KqpScheme::CreateUserWithPassword >> KqpOlapScheme::DropThenAddColumnIndexation >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSigned >> KqpScheme::DisableResourcePools [GOOD] >> KqpScheme::DisableResourcePoolClassifiers >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning [GOOD] >> TKeyValueTracingTest::ReadHuge >> KqpScheme::CreateAlterDropTableStore >> KqpScheme::CreateUserWithPassword [GOOD] >> KqpScheme::CreateUserWithoutPassword >> KqpConstraints::SerialTypeForNonKeyColumn [GOOD] >> 
KqpConstraints::SerialTypeSerial2 >> KqpScheme::ModifyPermissions [GOOD] >> KqpScheme::ModifyPermissionsByIncorrectPaths >> TKeyValueTracingTest::WriteHuge >> KqpScheme::MoveTableWithSerialTypes [GOOD] >> KqpScheme::PathWithNoRoot >> KqpScheme::CreateTableWithPartitionAtKeysSigned [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 13421, MsgBus: 12136 2024-11-21T08:56:10.922493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653748736545834:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.922655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040fc/r3tmp/tmpyoZ5j2/pdisk_1.dat 2024-11-21T08:56:10.973824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13421, node 1 2024-11-21T08:56:10.990026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.990038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.990039Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.990078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12136 2024-11-21T08:56:11.022842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.022883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.023976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.035718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.047927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:11.108494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.124533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.133608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.176510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653753031514663:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.176539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.205217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.211416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.223949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.231000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.238075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.245175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.253478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653753031515154:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.253504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.253511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653753031515159:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.254006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:11.258178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653753031515161:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:11.422751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.426300Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710672, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/ExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100 Trying to start YDB, gRPC: 31915, MsgBus: 26565 2024-11-21T08:56:11.581742Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653753440340704:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.581979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040fc/r3tmp/tmpSKksPw/pdisk_1.dat 2024-11-21T08:56:11.589526Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31915, node 2 2024-11-21T08:56:11.598163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.598175Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.598177Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.598203Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26565 TClient is connected to server localhost:26565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:11.682325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.682354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.683336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.684024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.692316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.701522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.717672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.728574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.830042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653753440342233:2374], Da ... adata/workload_manager/pools/MyResourcePool', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91 Trying to start YDB, gRPC: 27123, MsgBus: 9255 2024-11-21T08:56:14.895836Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653762840736108:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.895854Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040fc/r3tmp/tmpfg15x9/pdisk_1.dat 2024-11-21T08:56:14.911245Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27123, node 7 2024-11-21T08:56:14.918759Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.918771Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.918775Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.918807Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9255 TClient is connected to server localhost:9255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:14.995935Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.995959Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.996992Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.998663Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.009764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.017587Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.035161Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.047312Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.177682Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653767135704946:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.177701Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.179568Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.184678Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.192955Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.199962Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.207293Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.222142Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.236758Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653767135705460:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.236788Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653767135705465:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.236793Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.237266Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:15.241580Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653767135705467:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:15.896946Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:56:15.955607Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.998779Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:56:16.101322Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.149540Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.196823Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.453868Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7439653771430673991:2689], TxId: 281474976715703, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=YWYxNjA3MDEtODRjODU3NGMtYzA4NWVlZjYtM2YxZWFkODI=. TraceId : 01jd6yze9da6bgyhxjy2pa555e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T08:56:16.453937Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7439653771430673993:2690], TxId: 281474976715703, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd6yze9da6bgyhxjy2pa555e. SessionId : ydb://session/3?node_id=7&id=YWYxNjA3MDEtODRjODU3NGMtYzA4NWVlZjYtM2YxZWFkODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7439653771430673988:2664], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T08:56:16.454914Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YWYxNjA3MDEtODRjODU3NGMtYzA4NWVlZjYtM2YxZWFkODI=, ActorId: [7:7439653771430673909:2664], ActorState: ExecuteState, TraceId: 01jd6yze9da6bgyhxjy2pa555e, Create QueryResponse for error on request, msg: 2024-11-21T08:56:16.455751Z node 7 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd6yze99dcpkb8mfdvscmsa9" } } } } ;request=session_id: "ydb://session/3?node_id=7&id=YWYxNjA3MDEtODRjODU3NGMtYzA4NWVlZjYtM2YxZWFkODI=" tx_control { tx_id: "01jd6yze99dcpkb8mfdvscmsa9" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`\nSELECT database,name,config,rank FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "database" type { type_id: UTF8 } } members { name: "name" type { type_id: UTF8 } } members { name: "config" type { type_id: JSON_DOCUMENT } } members { name: "rank" type { type_id: INT64 } } } } } } value { items { items { text_value: "/Root" } items { text_value: "MyResourcePoolClassifier" } items { text_value: "{\"resource_pool\":\"test_pool\"}" } items { int64_value: 1 } } } } } ; >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore |90.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] >> KqpConstraints::SerialTypeSerial2 [GOOD] >> KqpScheme::PathWithNoRoot [GOOD] >> KqpScheme::ModifyPermissionsByIncorrectPaths [GOOD] >> KqpScheme::CreateUserWithoutPassword [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> TKeyValueTracingTest::WriteSmall >> KqpScheme::DisableResourcePoolClassifiers [GOOD] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> BasicStatistics::Serverless [GOOD] >> KqpOlapScheme::TenThousandColumns [FAIL] >> KqpScheme::CreatedAt >> KqpConstraints::SerialTypeSerial >> KqpScheme::ModifyUnknownPermissions >> KqpScheme::CreateTableWithFamiliesRegular >> KqpScheme::DisableResourcePoolClassifiersOnServerless >> KqpOlapScheme::NullKeySchema >> KqpScheme::CreatedAt [GOOD] >> KqpConstraints::SerialTypeSerial [GOOD] >> KqpConstraints::SerialTypeBigSerial >> KqpScheme::ModifyUnknownPermissions [GOOD] >> KqpScheme::OlapSharding_KeyOnly >> KqpScheme::CreateTableWithFamiliesRegular [GOOD] >> KqpOlapScheme::NullKeySchema [GOOD] >> TKeyValueTracingTest::WriteSmall [FAIL] >> KqpScheme::OlapSharding_KeyOnly [GOOD] >> KqpConstraints::SerialTypeBigSerial [GOOD] >> KqpOlapTypes::Decimal >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> KqpOlapTypes::Decimal [GOOD] >> 
TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreatedAt [GOOD] Test command err: Trying to start YDB, gRPC: 7804, MsgBus: 28111 2024-11-21T08:56:14.801257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653762614458131:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.801477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f4/r3tmp/tmpJRAIQx/pdisk_1.dat 2024-11-21T08:56:14.853649Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7804, node 1 2024-11-21T08:56:14.869601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.869614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.869616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.869670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28111 2024-11-21T08:56:14.901421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.901449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.902543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:14.932055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.935850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.996985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.016055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.025573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.063649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653766909426965:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.063672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.096947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.103337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.109096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.115807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.123494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.130570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.186291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653766909427470:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.186317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653766909427475:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.186328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.186890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:15.192301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653766909427477:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 28009, MsgBus: 14004 2024-11-21T08:56:15.500781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653769107392343:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:15.501875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f4/r3tmp/tmpE3ncAK/pdisk_1.dat 2024-11-21T08:56:15.512902Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28009, node 2 2024-11-21T08:56:15.519747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.519760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.519762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.519799Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14004 TClient is connected to server localhost:14004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.599784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.599808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:15.600850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:15.603045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.605715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.614158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.630944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.642059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.733872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653769107393746:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.733903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.737906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.792750Z node 2 :FLAT_TX_SCHEMESHAR ... (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:16.986568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.998418Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.008689Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.028692Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.041294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.216908Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775546645647:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.216969Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.222381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.230471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.285425Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.293531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.300753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.307510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.558079Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775546646195:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.558111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775546646200:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.558114Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.559080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:17.561304Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653775546646202:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 31258, MsgBus: 22936 2024-11-21T08:56:17.988576Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653777475506085:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:17.988642Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f4/r3tmp/tmpxA3qgf/pdisk_1.dat 2024-11-21T08:56:17.998011Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31258, node 5 2024-11-21T08:56:18.008284Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:18.008298Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:18.008299Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:18.008335Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22936 TClient is connected to server localhost:22936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:18.089141Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:18.089183Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:18.090197Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:18.091413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.095408Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.105437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:18.121257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.135735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.255449Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653781770474917:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.255472Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.260917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.265734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.272554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.279976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.335345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.343242Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.351950Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653781770475434:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.351975Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.351979Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653781770475439:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.352503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:18.356054Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653781770475441:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:18.511707Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 >> KqpOlapTypes::AttributeNegative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBED1B19) TestOneRead(TBasicString>, TBasicString>)+2032 (0xBC5B6A0) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+157 (0xBC5E73D) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBC656A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBED3ACE) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBC64E4A) NUnitTest::TTestFactory::Execute()+803 (0xBED4243) NUnitTest::RunMain(int, char**)+3005 (0xBEE3B5D) ??+0 (0x7F056918FD90) __libc_start_main+128 (0x7F056918FE40) _start+41 (0xB0C8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBED1B19) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+1945 (0xBC59369) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+138 (0xBC5E42A) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBC656A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBED3ACE) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBC64E4A) NUnitTest::TTestFactory::Execute()+803 (0xBED4243) NUnitTest::RunMain(int, char**)+3005 (0xBEE3B5D) ??+0 (0x7FD02582FD90) __libc_start_main+128 (0x7FD02582FE40) _start+41 (0xB0C8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::OlapSharding_KeyOnly [GOOD] Test command err: Trying to start YDB, gRPC: 28264, MsgBus: 63663 2024-11-21T08:56:16.293443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653773905764555:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:16.293699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ef/r3tmp/tmpzOK840/pdisk_1.dat 2024-11-21T08:56:16.334178Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28264, node 1 2024-11-21T08:56:16.350574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:16.350589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:16.350591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T08:56:16.350627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63663 TClient is connected to server localhost:63663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:16.393577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:16.393601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:16.394647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:16.415506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.424613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.485232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.504458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.517549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.541773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653773905766094:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.541804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.572449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.578318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.586346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.640577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.694965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.704678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.713663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653773905766611:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.713688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653773905766616:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.713694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.714279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:16.718455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653773905766618:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:16.912394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12755, MsgBus: 19356 2024-11-21T08:56:17.164230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653776404934568:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:17.164486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ef/r3tmp/tmpGQzYSB/pdisk_1.dat 2024-11-21T08:56:17.172248Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12755, node 2 2024-11-21T08:56:17.181135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:17.181147Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:17.181149Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:17.181176Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19356 TClient is connected to server localhost:19356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:17.264487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:17.264537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:17.265615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:17.266326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.278387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:17.286609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.302587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.316081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.418368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653776404936101:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.418389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.424224Z node 2 :FLAT_TX_SCHEMESH ... 2:7439653776404936621:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.542030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:17.544309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653776404936623:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Execution, code: 1060
:6:30: Error: Executing CREATE TABLE
: Error: Table path not in database, path: /TablePathWithNoRoot, database: /Root Trying to start YDB, gRPC: 11252, MsgBus: 23177 2024-11-21T08:56:17.859396Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653775764330556:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:17.859438Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ef/r3tmp/tmpitCfnV/pdisk_1.dat 2024-11-21T08:56:17.871214Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11252, node 3 2024-11-21T08:56:17.881754Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:17.881766Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:17.881768Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:17.881800Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23177 TClient is connected to server localhost:23177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:17.959758Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:17.959792Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:17.960876Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:17.962664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.969069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.980160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:17.996045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.008756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.119290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653780059299386:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.119327Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.125518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.180381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.189029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.195710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.203153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.258424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.267881Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653780059299904:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.267912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653780059299909:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.267916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.268538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:18.272385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653780059299911:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:18.451009Z node 3 :KQP_YQL ERROR: TraceId: 01jd6yzg8h0qzczyamv4ks5v30, SessionId: ydb://session/3?node_id=3&id=YThjMDk4YmQtZjVkYzE0N2ItNzE3ZTViMGQtOTQ3YWU2ZTU= 2024-11-21 08:56:18.450 ERROR ydb-core-kqp-ut-scheme(pid=678770, tid=0x00007F71E3A83640) [common provider] yql_provider_gateway.cpp:28: Unknown permission name: Trying to start YDB, gRPC: 26106, MsgBus: 22706 2024-11-21T08:56:18.739589Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653780006596047:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:18.739852Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ef/r3tmp/tmpFbUZ73/pdisk_1.dat 2024-11-21T08:56:18.750498Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26106, node 4 2024-11-21T08:56:18.760463Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:18.760474Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:18.760476Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:18.760518Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22706 TClient is connected to server localhost:22706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:18.839909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:18.839940Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:18.841045Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:18.842209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:19.004645Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653784301563941:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.004674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::SerialTypeBigSerial [GOOD] Test command err: Trying to start YDB, gRPC: 13963, MsgBus: 7787 2024-11-21T08:56:15.376256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653768194355345:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:15.376433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f0/r3tmp/tmp6UGjjk/pdisk_1.dat 2024-11-21T08:56:15.429160Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13963, node 1 2024-11-21T08:56:15.443151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.443169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.443171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.443214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7787 2024-11-21T08:56:15.476291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.476331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:15.477536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.505378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.517536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.577101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.591651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.599326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.671355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653768194356878:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.671390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.698850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.705580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.718268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.725215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.731989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.738705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.747765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653768194357370:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.747795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.747842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653768194357375:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.748366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:15.752588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653768194357377:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22242, MsgBus: 27331 2024-11-21T08:56:16.033946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653772313983512:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:16.034307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f0/r3tmp/tmpkYdWuZ/pdisk_1.dat 2024-11-21T08:56:16.041696Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22242, node 2 2024-11-21T08:56:16.050810Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:16.050823Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:16.050824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:16.050856Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27331 TClient is connected to server localhost:27331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:16.134241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:16.134267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:16.135307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:16.136512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.146709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.154511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:16.172718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.180777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.305176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653772313985047:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.305196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.310084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.365173Z node 2 :FLAT_TX_SCHEMESHARD ... ransaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:17.966361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.976480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.997437Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.006852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.174422Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653779387525177:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.174460Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.180117Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.235375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.245611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.252061Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.259205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.266338Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.274681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653779387525681:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.274701Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653779387525686:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.274706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.275325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:18.279325Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653779387525688:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:18.613249Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6070, MsgBus: 7045 2024-11-21T08:56:18.963606Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653780529347613:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:18.963776Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f0/r3tmp/tmpKtS6az/pdisk_1.dat 2024-11-21T08:56:18.972566Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6070, node 5 2024-11-21T08:56:18.984023Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:18.984037Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:18.984039Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:18.984069Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7045 TClient is connected to server localhost:7045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:19.063899Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:19.063929Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:19.065007Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:19.066788Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:19.071947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:19.081451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:19.099116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:19.106735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:19.224728Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653784824316440:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.224750Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.229734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.236682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.292828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.302372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.309336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.316901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:19.332293Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653784824316955:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.332322Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.332342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653784824316960:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.332996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:19.336566Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653784824316962:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:19.519942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2024-11-21T08:55:28.367678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653565951539111:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:28.367694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034c2/r3tmp/tmpu6WlY2/pdisk_1.dat 2024-11-21T08:55:28.412012Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:28.470690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:28.470717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24255, node 1 2024-11-21T08:55:28.471923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:28.481346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:28.481359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:28.481361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:28.481406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:28.493050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:55:28.496579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:28.501610Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7439653565951539678:2282] 2024-11-21T08:55:28.501690Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:28.512363Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:28.512418Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:55:28.512606Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T08:55:28.512624Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T08:55:28.512629Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T08:55:28.512685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:55:28.516069Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T08:55:28.516137Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request 
in state WaitScheme: missing processing params 2024-11-21T08:55:28.516158Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7439653565951539694:2283] 2024-11-21T08:55:28.516161Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:28.516164Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T08:55:28.516168Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.519659Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T08:55:28.519702Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T08:55:28.519708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.519713Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:28.519720Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T08:55:28.519724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.519735Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653565951539677:2290], serverId# [1:7439653565951539684:2293], sessionId# [0:0:0] 2024-11-21T08:55:28.519773Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:28.519853Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2024-11-21T08:55:28.519884Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2024-11-21T08:55:28.520536Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:28.520648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:28.520695Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T08:55:28.521324Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7439653565951539708:2306], serverId# [1:7439653565951539709:2307], sessionId# [0:0:0] 2024-11-21T08:55:28.522074Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1732179328565 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 7439653565951539554 RawX2: 4294969514 } } Step: 1732179328565 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T08:55:28.522088Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.522120Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T08:55:28.522131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.522136Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:55:28.522145Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732179328565:281474976710657] in PlanQueue unit at 72075186224037888 
2024-11-21T08:55:28.522216Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1732179328565:281474976710657 keys extracted: 0 2024-11-21T08:55:28.522272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T08:55:28.522286Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T08:55:28.522296Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T08:55:28.522642Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T08:55:28.522773Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:55:28.522961Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1732179328564 2024-11-21T08:55:28.522964Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.522973Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1732179328565 txid# 281474976710657} 2024-11-21T08:55:28.522988Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1732179328565} 2024-11-21T08:55:28.522997Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.523004Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1732179328572 2024-11-21T08:55:28.523023Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T08:55:28.523027Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T08:55:28.523031Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T08:55:28.523046Z node 1 :TX_DATASHARD DEBUG: Complete [1732179328565 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7439653565951539506:2192], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T08:55:28.523052Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2024-11-21T08:55:28.523057Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:55:28.534708Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7439653565951539694:2283][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2024-11-21T08:55:28.535212Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2024-11-21T08:55:28.535228Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T08:55:28.537170Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T08:55:28.537229Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2024-11-21T08:55:28.537243Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 
72075186224037888 txId# 281474976710658 2024-11-21T08:55:28.537246Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2024-11-21T08:55:28.537660Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T08:55:28.544811Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:55:28.544917Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2024-11-21T08:55:28.545106Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:55:28.545142Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2024-11-21T08:55:28.545150Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:55:28.545153Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2024-11-21T08:55:28.545168Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:55:28.545181Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:28.545189Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] empty tx queue 2024-11-21T08:55:28.545191Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2024-11-21T08:55:28.545310Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:28.545329Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7439653565951539801:2285], now have 1 active actors on pipe 2024-11-21T08:55:28.588725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:55:28.588751Z node 1 :PERSQUEUE DEBUG: [PQ: 720751862240378 ... : 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 9 size 52 offset: -1 2024-11-21T08:56:17.870178Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 5 partNo 0 2024-11-21T08:56:17.870222Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 5 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 167 count 1 nextOffset 5 batches 1 2024-11-21T08:56:17.870281Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 0 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000| size 155 WTime 8969 2024-11-21T08:56:17.870311Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:17.880910Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 70 2024-11-21T08:56:17.880963Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:17.880985Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T08:56:17.881057Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2024-11-21T08:56:17.881162Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:906:2673] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2024-11-21T08:56:17.881181Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:844:2673] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:17.881223Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2024-11-21T08:56:17.881231Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2024-11-21T08:56:17.881442Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T08:56:17.985288Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2024-11-21T08:56:17.985331Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:17.985363Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2024-11-21T08:56:17.985452Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T08:56:17.986682Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2024-11-21T08:56:17.986709Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2024-11-21T08:56:17.986719Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T08:56:17.986740Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2024-11-21T08:56:17.986772Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:647:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2024-11-21T08:56:17.986787Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:844:2673] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2024-11-21T08:56:17.987514Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2024-11-21T08:56:17.987594Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:844:2673], at tablet# 72075186224037888 2024-11-21T08:56:17.987603Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2024-11-21T08:56:17.987638Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:844:2673] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:56:17.987670Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:906:2673] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:56:17.987750Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:17.987763Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client 
message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:17.987806Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2024-11-21T08:56:17.987842Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:17.987845Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:17.987859Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2024-11-21T08:56:17.987899Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2024-11-21T08:56:17.987918Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2024-11-21T08:56:17.987938Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2024-11-21T08:56:17.987980Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2024-11-21T08:56:17.988025Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 2024-11-21T08:56:17.988060Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:17.998651Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 2024-11-21T08:56:17.998699Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:17.998717Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T08:56:17.998777Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2024-11-21T08:56:17.998872Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:906:2673] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2024-11-21T08:56:17.998890Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:844:2673] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2024-11-21T08:56:17.998933Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2024-11-21T08:56:17.998938Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2024-11-21T08:56:17.999108Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2024-11-21T08:56:18.102603Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T08:56:18.102631Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2024-11-21T08:56:18.102697Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2024-11-21T08:56:18.102707Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:18.102745Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 
2024-11-21T08:56:18.102754Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:18.102831Z node 27 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T08:56:18.102859Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] Test command err: Trying to start YDB, gRPC: 19223, MsgBus: 28228 2024-11-21T08:56:10.735731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653749129023509:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:10.735862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410b/r3tmp/tmpqfGlrr/pdisk_1.dat 2024-11-21T08:56:10.790397Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19223, node 1 2024-11-21T08:56:10.801148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:10.801163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:10.801165Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:10.801216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28228 TClient is connected to server localhost:28228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:10.835778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:10.835815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:10.836917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:10.841458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.854529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:10.916155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.929927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.939332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:10.974859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653749129025037:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.974895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:10.999440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.005655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.014503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.021317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.028267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.034881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:11.043601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653753423992824:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.043627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.043629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653753423992829:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.044243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:11.048468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653753423992831:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 28548, MsgBus: 8204 2024-11-21T08:56:11.396292Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653752546794743:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:11.396308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410b/r3tmp/tmpHBvs4W/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28548, node 2 2024-11-21T08:56:11.410874Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:11.411269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:11.411281Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:11.411282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:11.411315Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8204 TClient is connected to server localhost:8204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:11.496761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:11.496797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:11.497829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:11.498447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.499124Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:11.509230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.518223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:11.532673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.540573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:11.627236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653752546796272:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.627267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:11.631905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... 1-21T08:56:16.948995Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ExecuteState, TraceId: 01jd6yzes6c0tkzqxma4yxmf6d, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:16.949002Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ExecuteState, TraceId: 01jd6yzes6c0tkzqxma4yxmf6d, EndCleanup, isFinal: 0 2024-11-21T08:56:16.949009Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ExecuteState, TraceId: 01jd6yzes6c0tkzqxma4yxmf6d, Sent query response back to proxy, proxyRequestId: 29, proxyId: [6:7439653768798878713:2217] 2024-11-21T08:56:16.949190Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:56:16.949201Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T08:56:16.949209Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ReadyState, Created new KQP executer: [6:7439653773093847954:2581] isRollback: 1 2024-11-21T08:56:16.949213Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:16.949220Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: test_pool 2024-11-21T08:56:16.949232Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653773093847955:2590], DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: test_pool, Start pool fetching 2024-11-21T08:56:16.949337Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T08:56:16.949347Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:56:16.949362Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZWIzMWZlNjEtZmUyYzI3ZjYtNWIwN2IwMjAtMWIzYzRmODA=, ActorId: [6:7439653773093847931:2581], ActorState: unknown state, Session actor destroyed 2024-11-21T08:56:16.949565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653773093847955:2590], DatabaseId: 
72057594046644480:4:/Root/test-serverless, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:56:16.949579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: 72057594046644480:4:/Root/test-serverless, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:56:16.949934Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2024-11-21T08:56:16.949725Z node 7 :HIVE WARN: HIVE#72075186224037888 THive::TTxStatus(status=2 node=Connected) - killing node 7 2024-11-21T08:56:16.949779Z node 7 :HIVE WARN: HIVE#72075186224037888 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:56:16.950080Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDNlMjM4YWEtNGVkN2U3Yy1lZWE2MzZhYS03M2RjZmYyZA==, ActorId: [5:7439653769628361046:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:56:16.950100Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZDNlMjM4YWEtNGVkN2U3Yy1lZWE2MzZhYS03M2RjZmYyZA==, ActorId: [5:7439653769628361046:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:16.950104Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDNlMjM4YWEtNGVkN2U3Yy1lZWE2MzZhYS03M2RjZmYyZA==, ActorId: [5:7439653769628361046:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:56:16.950110Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDNlMjM4YWEtNGVkN2U3Yy1lZWE2MzZhYS03M2RjZmYyZA==, ActorId: [5:7439653769628361046:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:56:16.950118Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:56:16.950132Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZDNlMjM4YWEtNGVkN2U3Yy1lZWE2MzZhYS03M2RjZmYyZA==, ActorId: [5:7439653769628361046:2299], ActorState: unknown state, Session actor destroyed 2024-11-21T08:56:16.950153Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2024-11-21T08:56:16.950253Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:56:17.365319Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU= 2024-11-21T08:56:17.365449Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:56:17.365516Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, received request, proxyRequestId: 31 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`; rpcActor: [6:7439653777388815343:2594] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T08:56:17.365525Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, request placed into pool from cache: default 2024-11-21T08:56:17.365536Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, Sending CompileQuery request 2024-11-21T08:56:17.366815Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7439653773093846951:2616][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 26, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T08:56:17.366840Z node 6 :SCHEME_BOARD_SUBSCRIBER WARN: [main][6:7439653773093846951:2616][/Root/test-shared/.metadata/initialization/migrations] Sync is done: cookie# 27, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T08:56:17.367010Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653777388815345:2595], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2024-11-21T08:56:17.367410Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ExecuteState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2024-11-21T08:56:17.367429Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ExecuteState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:17.367433Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ExecuteState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, EndCleanup, isFinal: 0 2024-11-21T08:56:17.367477Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ExecuteState, TraceId: 01jd6yzf6n6yqtnx5fadze5dym, Sent query response back to proxy, proxyRequestId: 31, proxyId: [6:7439653768798878713:2217] 2024-11-21T08:56:17.368173Z node 6 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2024-11-21T08:56:17.368278Z node 6 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2024-11-21T08:56:17.368314Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:56:17.368327Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:17.368329Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:56:17.368332Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:56:17.368355Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NGVlODBjMjgtOTI3NzkxZjgtZWMzY2QyN2EtMzY2YTQ2ZDU=, ActorId: [6:7439653777388815342:2593], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBED1B19) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+1945 (0xBC59369) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+138 (0xBC5E2BA) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBC656A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBED3ACE) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBC64E4A) NUnitTest::TTestFactory::Execute()+803 (0xBED4243) NUnitTest::RunMain(int, char**)+3005 (0xBEE3B5D) ??+0 (0x7F3309ECBD90) __libc_start_main+128 (0x7F3309ECBE40) _start+41 (0xB0C8029) >> KqpOlapTypes::AttributeNegative [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] Test command err: Trying to start YDB, gRPC: 7486, MsgBus: 21727 2024-11-21T08:56:14.289875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653762445490721:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.289889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f9/r3tmp/tmpsGJyHx/pdisk_1.dat 2024-11-21T08:56:14.343365Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7486, node 1 2024-11-21T08:56:14.354204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.354213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.354214Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.354238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21727 TClient is connected to server localhost:21727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:14.390027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.390047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.391104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:14.417632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.427119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.442605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.459150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.468228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.536868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653762445492255:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.536901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.562313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.568557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.576817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.584569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.591396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.598038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.606555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653762445492747:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.606576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.606623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653762445492752:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.607246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:14.611345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653762445492754:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:14.773484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.783863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.790881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20967, MsgBus: 17030 2024-11-21T08:56:14.951503Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653763836753787:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.951700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f9/r3tmp/tmpZ61yvI/pdisk_1.dat 2024-11-21T08:56:14.960488Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20967, node 2 2024-11-21T08:56:14.970548Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.970560Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.970562Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.970589Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17030 TClient is connected to server localhost:17030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.052127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.052161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:15.053160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:15.054302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.055032Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:15.055989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.067865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.084992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.096529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-2 ... ract.cpp:45;event=normalization_finished; 2024-11-21T08:56:17.803028Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:17.803060Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:17.803115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:17.803142Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:17.803173Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:17.803268Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:17.803297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:17.803318Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:17.803341Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:17.803363Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:17.803385Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:17.803403Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[5:7439653777922044076:2311];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:56:17.804073Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:17.804084Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:17.804098Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:17.804103Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:17.804123Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:17.804127Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:17.804137Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:17.804144Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:56:17.804154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:56:17.804159Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:56:17.804167Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:56:17.804171Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:56:17.804357Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:56:17.804372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:56:17.804391Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:56:17.804396Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:56:17.804408Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:17.804412Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:17.804432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:56:17.804438Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:17.804451Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:17.804461Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:56:17.827515Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653777922044307:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.827542Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.830676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.846169Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653777922044452:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.846205Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.848525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.856655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653777922044532:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.856682Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.858647Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.868755Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037897 not found 2024-11-21T08:56:17.868774Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2024-11-21T08:56:17.868776Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037896 not found 2024-11-21T08:56:17.868778Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037893 not found 2024-11-21T08:56:17.868780Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found 2024-11-21T08:56:17.868781Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037891 not found 2024-11-21T08:56:17.868783Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found 2024-11-21T08:56:17.868784Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2024-11-21T08:56:17.868786Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037894 not found 2024-11-21T08:56:17.869501Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037892 not found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ModifyPermissionsByIncorrectPaths [GOOD] Test command err: Trying to start YDB, gRPC: 6884, MsgBus: 10254 2024-11-21T08:56:14.515052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653764279480843:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.515306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f5/r3tmp/tmprlMePE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6884, node 1 2024-11-21T08:56:14.563813Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:14.573329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.573342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.573344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.573376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10254 TClient is connected to server localhost:10254 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:14.614801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.615042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.615063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:56:14.616168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.628367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.642541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.657776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.669109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.754803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764279482376:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.754825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.780258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.787161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.794219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.808799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.822376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.829255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.837667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764279482880:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.837682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764279482885:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.837688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.838346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:14.842322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653764279482887:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:15.022684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.022847Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found Trying to start YDB, gRPC: 27612, MsgBus: 26043 2024-11-21T08:56:15.384382Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653767099961387:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:15.384440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f5/r3tmp/tmpc2ww70/pdisk_1.dat 2024-11-21T08:56:15.394615Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27612, node 2 2024-11-21T08:56:15.404858Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.404889Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.404891Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.404930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26043 TClient is connected to server localhost:26043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.484512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.484539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T08:56:15.487609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:15.487948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.489179Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:15.493661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.503393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.518466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.531130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.655387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653767099962922:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissio ... 2024-11-21T08:56:16.448663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:16.452711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653772536945877:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:16.605266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.616269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.624859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.630941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.635748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.635859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.640794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.647067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.653134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.659123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.664243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.669768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.674836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.680202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.685308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.690008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.695312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:56:16.695503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12441, MsgBus: 30860 2024-11-21T08:56:17.026228Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653775685540547:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:17.026249Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f5/r3tmp/tmpVCp0LM/pdisk_1.dat 2024-11-21T08:56:17.042725Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12441, node 4 2024-11-21T08:56:17.048019Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:17.048032Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:17.048034Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:17.048077Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30860 TClient is connected to server localhost:30860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:17.126522Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:17.126563Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:17.127620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:17.129965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:17.136361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.147230Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.165863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.175912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.330109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775685542087:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.330154Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.334575Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.340984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.349287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.355590Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.362802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.370206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.590284Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775685542630:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.590311Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.590322Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653775685542635:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.591178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:17.593273Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653775685542637:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:17.776769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpScheme::DisableResourcePoolClassifiersOnServerless [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithFamiliesRegular [GOOD] Test command err: Trying to start YDB, gRPC: 15000, MsgBus: 26176 2024-11-21T08:56:14.948568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653765360618998:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.948706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f3/r3tmp/tmpkpGUKq/pdisk_1.dat 2024-11-21T08:56:14.995699Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15000, node 1 2024-11-21T08:56:15.015801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.015816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.015817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.015855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26176 2024-11-21T08:56:15.048708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TClient is connected to server localhost:2024-11-21T08:56:15.048745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 26176 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:56:15.049759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.076381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.080742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.141905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.156646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.165350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.214305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653769655587830:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.214333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.247300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.252924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.307957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.319003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.373888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.382305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.390685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653769655588347:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.390703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.390765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653769655588352:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.391271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:15.394893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653769655588354:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:15.552273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13037, MsgBus: 15718 2024-11-21T08:56:15.805249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653769410592504:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:15.805430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f3/r3tmp/tmpCxOJdR/pdisk_1.dat 2024-11-21T08:56:15.813989Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13037, node 2 2024-11-21T08:56:15.824708Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.824721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.824723Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.824763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15718 TClient is connected to server localhost:15718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.905805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.905836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:15.906878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:15.908111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.918604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:15.926867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.942189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:15.951701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:16.051665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653773705561332:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.051694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:16.057505Z node 2 :FLAT_TX_SCHEMESH ... 81474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.287376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.295743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.313493Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.322860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:17.448556Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653778404413097:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.448599Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.452987Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.458787Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.468064Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.475129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.482540Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.499591Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:17.512697Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653778404413611:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.512726Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.512829Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653778404413616:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:17.513582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:17.516581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653778404413618:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:17.963207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22450, MsgBus: 10667 2024-11-21T08:56:18.280064Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653782023223616:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:18.280225Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f3/r3tmp/tmpNPKltL/pdisk_1.dat 2024-11-21T08:56:18.293409Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22450, node 5 2024-11-21T08:56:18.298411Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:18.298420Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:18.298422Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:18.298455Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10667 TClient is connected to server localhost:10667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:18.380301Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:18.380328Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:18.381525Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:18.382580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.384695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:18.393178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.412443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.423447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:18.527790Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653782023225158:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.527819Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.532275Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.538769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.593229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.602036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.609061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.616255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:18.624271Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653782023225673:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.624302Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.624314Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653782023225678:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:18.624795Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:18.629301Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653782023225680:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:18.780778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2024-11-21T08:53:32.372942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:32.372994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:32.373006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003840/r3tmp/tmpmcjvfB/pdisk_1.dat 2024-11-21T08:53:32.466856Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63499, node 1 2024-11-21T08:53:32.561146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:32.561165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:32.561168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:32.561229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:32.565820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:32.642373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:32.642410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:32.654033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11667 2024-11-21T08:53:33.060902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:33.889396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:33.889437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:33.922746Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:33.923781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:33.984857Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:33.999064Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:33.999093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:34.006103Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:34.006268Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:34.006285Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:34.006289Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:34.006294Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:34.006298Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:34.006302Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:34.006308Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:34.006422Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:34.186632Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.186664Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.187891Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:34.190201Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:34.190339Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:34.191221Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:53:34.197014Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:34.197035Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:34.197045Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:53:34.199170Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.199213Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.200772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:34.202315Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:34.202342Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:34.204634Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:34.216802Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.239040Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:34.370431Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:34.527807Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:53:35.268142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:35.866819Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:35.978566Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:53:35.978599Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:35.978618Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2484:2899], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:35.978992Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2486:2901] 2024-11-21T08:53:35.979046Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2486:2901], schemeshard id = 72075186224037899 2024-11-21T08:53:36.703804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2617:3192], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.703865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.707736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T08:53:36.763176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2765:3227], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.763260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.763675Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2770:3231]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:53:36.763711Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:53:36.763736Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T08:53:36.763743Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2773:3234] 2024-11-21T08:53:36.763751Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2773:3234] 2024-11-21T08:53:36.763924Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2774:3030] 2024-11-21T08:53:36.764007Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2773:3234], server id = [2:2774:3030], tablet id = 72075186224037897, status = OK 2024-11-21T08:53:36.764055Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2774:3030], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:53:36.764078Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:53:36.764144Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:53:36.764158Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2770:3231], StatRequests.size() = 1 2024-11-21T08:53:36.766609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2778:3238], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.766665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.766776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2783:3243], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:36.768018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:53:36.867759Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:53:36.867804Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:53:36.920168Z node 1 :STATISTICS DE ... 688403Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:7445:5366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:07.688509Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2024-11-21T08:56:07.688516Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:7445:5366], StatRequests.size() = 1 2024-11-21T08:56:09.169907Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7484:5384]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:09.170040Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2024-11-21T08:56:09.170050Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7484:5384], StatRequests.size() = 1 2024-11-21T08:56:09.275773Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:56:09.275812Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:09.275817Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:09.275821Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:56:10.718376Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:56:10.718597Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:10.718681Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:10.803241Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2024-11-21T08:56:10.803273Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 204.000000s, at schemeshard: 72075186224037889 2024-11-21T08:56:10.803413Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2024-11-21T08:56:10.814942Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:10.835941Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7526:5405]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:10.836065Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T08:56:10.836075Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7526:5405], StatRequests.size() = 1 2024-11-21T08:56:12.162439Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:12.162476Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:12.162490Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. 
Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T08:56:12.162496Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:12.162645Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T08:56:12.166027Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:12.167336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7569:5431], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.167365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7579:5436], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.167387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:12.170453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:12.186915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7583:5439], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:12.292388Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7671:5486]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:12.292512Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T08:56:12.292523Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7671:5486], StatRequests.size() = 1 2024-11-21T08:56:12.405255Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7706:5507]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:12.405313Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T08:56:12.405361Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T08:56:12.405366Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:12.405387Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:12.405397Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7706:5507], StatRequests.size() = 1 2024-11-21T08:56:12.418052Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZThlNzYzNTQtNTg4ZTYyMjUtZGMxY2M5NmYtYjNkNTc4MWY=, TxId: 2024-11-21T08:56:12.418078Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZThlNzYzNTQtNTg4ZTYyMjUtZGMxY2M5NmYtYjNkNTc4MWY=, TxId: 2024-11-21T08:56:12.418188Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:12.439857Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:12.439879Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T08:56:12.512418Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:12.512456Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:12.605830Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:2942:3063], schemeshard count = 1 2024-11-21T08:56:13.064563Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T08:56:13.064591Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 197.000000s, at schemeshard: 72075186224037899 2024-11-21T08:56:13.064677Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T08:56:13.075883Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:13.850013Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7773:5547]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:13.850117Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T08:56:13.850125Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7773:5547], StatRequests.size() = 1 2024-11-21T08:56:15.263683Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:15.263743Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:15.263747Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:15.263755Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T08:56:15.263757Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:15.263832Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T08:56:15.264429Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:15.268012Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTY0ZDU2ODItOThkYTczYWUtNjZlOTIxNjctNThhN2M5MTY=, TxId: 2024-11-21T08:56:15.268030Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTY0ZDU2ODItOThkYTczYWUtNjZlOTIxNjctNThhN2M5MTY=, TxId: 2024-11-21T08:56:15.268154Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:15.279762Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:15.279787Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T08:56:15.417453Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7850:5593]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:15.417589Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T08:56:15.417598Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7850:5593], StatRequests.size() = 1 2024-11-21T08:56:16.975731Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7904:5627]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:16.975869Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T08:56:16.975879Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7904:5627], StatRequests.size() = 1 2024-11-21T08:56:18.268632Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2024-11-21T08:56:18.268709Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:18.268811Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:18.268817Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:18.268936Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:18.395538Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7942:5647]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:18.395624Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T08:56:18.395630Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7942:5647], StatRequests.size() = 1 >> TKeyValueTracingTest::ReadSmall >> KqpScheme::QueryWithAlter >> KqpOlapScheme::AddColumnLongPk |90.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |90.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> TCacheTest::List >> TCacheTest::TableSchemaVersion >> TCacheTest::MigrationLostMessage >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] |90.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> TCacheTest::TableSchemaVersion [GOOD] >> TKeyValueTracingTest::ReadSmall [FAIL] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> KqpOlapScheme::AddColumnLongPk [GOOD] >> TCacheTest::List [GOOD] >> KqpScheme::QueryWithAlter [GOOD] >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::WatchRoot >> KqpOlapScheme::AddColumnWithTtl >> TCacheTest::CheckSystemViewAccess >> KqpScheme::RenameTable >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::WatchRoot [GOOD] >> KqpOlapScheme::AddColumnWithTtl [GOOD] >> TCacheTest::CheckSystemViewAccess [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::AddColumnWithStore >> KqpOlapScheme::AddColumnWithStore [GOOD] >> KqpOlapScheme::AddColumnErrors |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest 
|90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.1%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |90.1%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBED1B19) TestOneRead(TBasicString>, TBasicString>)+2032 (0xBC5B6A0) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+157 (0xBC5E5AD) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBC656A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBED3ACE) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBC64E4A) NUnitTest::TTestFactory::Execute()+803 (0xBED4243) NUnitTest::RunMain(int, char**)+3005 (0xBEE3B5D) ??+0 (0x7FB0329D9D90) __libc_start_main+128 (0x7FB0329D9E40) _start+41 (0xB0C8029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2024-11-21T08:56:22.195870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.195898Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.233665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 
FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2024-11-21T08:56:22.236183Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T08:56:22.236232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T08:56:22.236242Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T08:56:22.388979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.389009Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.403608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T08:56:22.404413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:22.404939Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:56:22.404996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T08:56:22.405339Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:202:2193], for# user1@builtin, access# DescribeSchema 2024-11-21T08:56:22.405387Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:208:2199], for# user1@builtin, access# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::WatchRoot [GOOD] Test command err: 2024-11-21T08:56:22.300857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.300876Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.338749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T08:56:22.340702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:56:22.364872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 2024-11-21T08:56:22.503719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.503739Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.519028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:22.520912Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:56:22.521165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until 
txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:56:22.521721Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> KqpOlapScheme::AddColumnErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2024-11-21T08:55:19.262702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653529496797656:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:19.262798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1d/r3tmp/tmpIL2fgB/pdisk_1.dat 2024-11-21T08:55:19.309553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17656, node 1 2024-11-21T08:55:19.324024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:19.324035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:19.324036Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:19.324074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:19.363049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:19.363090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:19.364620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:19.388651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.389727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:19.389749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.390301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:19.390344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:19.390351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T08:55:19.390666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:19.390672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:19.390871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:19.390975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:19.391930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179319437, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:19.391940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:19.391986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:19.392487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:19.392536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:19.392550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:19.392565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:19.392576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:19.392590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:55:19.393114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T08:55:19.393132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:19.393135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:19.393150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:55:24.263212Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653529496797656:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:24.263248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:34.307583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:34.307606Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:19.312915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T08:56:19.312982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T08:56:19.313018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2024-11-21T08:54:08.977911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:08.977946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:08.977955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00279c/r3tmp/tmpFVYhHx/pdisk_1.dat 2024-11-21T08:54:09.043790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23456, node 1 2024-11-21T08:54:09.133854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.133872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.133875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.133934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.137975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.212631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.212653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.223871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25832 2024-11-21T08:54:09.619476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.379629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.379660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.413346Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.414060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.458660Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.465260Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.465283Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.470225Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.470326Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.470339Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.470342Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.470346Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.470350Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.470353Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.470356Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.470428Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.644706Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.644735Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.645892Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:10.647996Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:10.648121Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:10.648694Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:10.653168Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:10.653186Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:10.653197Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:10.655397Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.655426Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.656954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:10.658682Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:10.658714Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:10.661842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:10.674075Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.696088Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:10.816707Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:10.983154Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:11.702291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.702321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.704865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:11.741250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.741308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.741363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.741391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.741413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.741450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:11.741472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:11.741494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:11.741516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:11.741537Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:11.741564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:11.741590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:11.749595Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.749632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.749682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.749706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.749735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.749759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:23.474742Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:23.476086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:23.488262Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:23.488435Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:23.488456Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:23.488752Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:23.499784Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:23.499844Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:23.500028Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8423:6328], server id = [2:8428:6333], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:23.500151Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8423:6328], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.500225Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8424:6329], server id = [2:8429:6334], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:23.500231Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8424:6329], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.500479Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8425:6330], server id = [2:8430:6335], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:23.500486Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8425:6330], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.500570Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:23.500667Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:23.500699Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8426:6331], server id = [2:8431:6336], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:23.500704Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8426:6331], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.500743Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8427:6332], server id = [2:8432:6337], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:23.500747Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8427:6332], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.500830Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:23.500921Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8423:6328], server id = [2:8428:6333], tablet id = 72075186224037899 2024-11-21T08:56:23.500925Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.500949Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8424:6329], server id = [2:8429:6334], tablet id = 72075186224037900 2024-11-21T08:56:23.500951Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.500970Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:23.500981Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8433:6338], server id = [2:8435:6340], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:23.500989Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8433:6338], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.501068Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:23.501089Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8425:6330], server id = [2:8430:6335], tablet id = 72075186224037901 2024-11-21T08:56:23.501091Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.501112Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8434:6339], server id = [2:8436:6341], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:23.501116Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8434:6339], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.501203Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8426:6331], server id = [2:8431:6336], tablet id = 72075186224037902 2024-11-21T08:56:23.501205Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.501229Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8437:6342], server id = [2:8438:6343], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:23.501233Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8437:6342], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.501248Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:23.501258Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8427:6332], server id = [2:8432:6337], tablet id = 72075186224037903 2024-11-21T08:56:23.501260Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.501329Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:23.501346Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8439:6344], server id = [2:8441:6346], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:23.501350Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8439:6344], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.501359Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8440:6345], server id = [2:8442:6347], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:23.501362Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8440:6345], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.501423Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8433:6338], server id = [2:8435:6340], tablet id = 72075186224037904 2024-11-21T08:56:23.501425Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.501430Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:23.501519Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8434:6339], server id = [2:8436:6341], tablet id = 72075186224037905 2024-11-21T08:56:23.501522Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.501532Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:23.501548Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:23.501551Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:23.501575Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:23.501599Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:23.501657Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:23.501686Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8437:6342], server id = [2:8438:6343], tablet id = 72075186224037906 2024-11-21T08:56:23.501690Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.502034Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8439:6344], server id = [2:8441:6346], tablet id = 72075186224037907 2024-11-21T08:56:23.502038Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.502126Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:23.502185Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8440:6345], server id = [2:8442:6347], tablet id = 72075186224037908 2024-11-21T08:56:23.502187Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.505111Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8459:6364]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:23.505151Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:23.505156Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8459:6364], StatRequests.size() = 1 2024-11-21T08:56:23.547789Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWVlNWM4NjQtZjRiYTNiNTQtYTU5OTE4YzktNTMwZWRiODU=, TxId: 2024-11-21T08:56:23.547811Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWVlNWM4NjQtZjRiYTNiNTQtYTU5OTE4YzktNTMwZWRiODU=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:23.547970Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:23.573190Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:23.573211Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T08:56:23.582445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8476:6374];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:56:23.584078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:56:23.584637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8476:6374];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:56:23.819663Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8529:6409]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:23.819770Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:23.819775Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:23.820319Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:23.820333Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:23.820344Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:23.821715Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapTypes::AttributeNegative [GOOD] Test command err: Trying to start YDB, gRPC: 64898, MsgBus: 26976 2024-11-21T08:56:14.327033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653764623442347:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.327058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f7/r3tmp/tmpKlmnXX/pdisk_1.dat 2024-11-21T08:56:14.368170Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64898, node 1 2024-11-21T08:56:14.383523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.383536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.383537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.383566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26976 TClient is connected to server localhost:26976 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:14.427254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.427274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.428365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:14.430703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.441176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.501775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.514623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.523710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.626694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764623443882:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.626739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.655214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.660839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.668059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.722795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.731442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.746143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.760694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764623444399:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.760712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653764623444404:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.760723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:14.761254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:14.765479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653764623444406:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:14.915846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.923966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.935337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9423, MsgBus: 12425 2024-11-21T08:56:15.232718Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653768264882599:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:15.232771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f7/r3tmp/tmpBv5VVC/pdisk_1.dat 2024-11-21T08:56:15.239874Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9423, node 2 2024-11-21T08:56:15.249618Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:15.249630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:15.249632Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:15.249666Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12425 TClient is connected to server localhost:12425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.333044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:15.333069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:15.334144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:15.334806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
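The repeated "Resource pool default not found or you don't have access permissions" warnings above appear while the workload service is still provisioning the default pool (note the ESchemeOpCreateResourcePool operation and the TPoolCreatorActor retry that follow them). A minimal YQL sketch of declaring a pool explicitly is shown below; it is illustrative only and not part of this log, and the pool name, parameter names, and values are assumptions rather than anything the test uses.

CREATE RESOURCE POOL sample_pool WITH (   -- hypothetical pool name
    CONCURRENT_QUERY_LIMIT = 10,          -- assumed setting
    QUEUE_SIZE = 100                      -- assumed setting
);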
CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, column0 Int32, column1 Int32, column2 Int32, column3 Int32, column4 Int32, column5 Int32, column6 Int32, column7 Int32, column8 Int32, column9 Int32, column10 Int32, column11 Int32, column12 Int32, column13 Int32, column14 Int32, column15 Int32, column16 Int32, column17 Int32, column18 Int32, column19 Int32, column20 Int32, column21 Int32, column22 Int32, column23 Int32, column24 Int32, column25 Int32, column26 Int32, column27 Int32, column28 Int32, column29 Int32, column30 Int32, column31 Int32, column32 Int32, column33 Int32, column34 Int32, column35 Int32, column36 Int32, column37 Int32, column38 Int32, column39 Int32, column40 Int32, column41 Int32, column42 Int32, column43 Int32, column44 Int32, column45 Int32, column46 Int32, column47 Int32, column48 Int32, column49 Int32, column50 Int32, column51 Int32, column52 Int32, column53 Int32, column54 Int32, column55 Int32, column56 Int32, column57 Int32, column58 Int32, column59 Int32, column60 Int32, column61 Int32, column62 Int32, column63 Int32, column64 Int32, column65 Int32, column66 Int3 ... 42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:56:19.758666Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:19.758675Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:19.758692Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:56:19.758703Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:19.758714Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:19.758725Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:56:19.813310Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653786331995882:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.813312Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653786331995887:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.813334Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:19.814082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:19.816075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653786331995889:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:56:19.988096Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380000, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.025874Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380015, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.056325Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380057, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.088092Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380092, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.122836Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380127, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.156551Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380162, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.198221Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380190, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.232483Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380232, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.262337Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380267, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.288011Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380302, txId: 18446744073709551615] shutting down 2024-11-21T08:56:20.327175Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179380330, txId: 18446744073709551615] shutting down Trying to start YDB, gRPC: 10812, MsgBus: 24920 2024-11-21T08:56:20.568696Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653791796749358:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:20.568735Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f7/r3tmp/tmpl8pB29/pdisk_1.dat 2024-11-21T08:56:20.579282Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10812, node 5 2024-11-21T08:56:20.587774Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:20.587789Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:20.587792Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:20.587835Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24920 TClient is connected to server localhost:24920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:20.669162Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:20.669190Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:20.670301Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:20.671443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:20.675682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:20.684344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:20.701881Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:20.710884Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:20.839126Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653791796750895:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:20.839151Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:20.845022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.851040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.906263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.920025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.934322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.948068Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:20.956665Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653791796751412:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:20.956694Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:20.956711Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653791796751417:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:20.957339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:20.960245Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653791796751419:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestSetClientOffset >> KqpScheme::RenameTable [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] >> KqpScheme::ResourcePoolClassifiersValidation >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnErrors [GOOD] Test command err: Trying to start YDB, gRPC: 27921, MsgBus: 64162 2024-11-21T08:56:21.913327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653795232739449:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:21.913356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ea/r3tmp/tmpdHUp4U/pdisk_1.dat 2024-11-21T08:56:21.954156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27921, node 1 2024-11-21T08:56:21.971085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:21.971095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:21.971097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:21.971124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64162 TClient is connected to server localhost:64162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:22.013509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:22.013539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:22.014625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:22.020525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
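The KqpOlapScheme::AddColumnErrors output continues below with the CREATE TABLE statement for the column-store table under test. For orientation only, a schema change of the kind this test presumably exercises would be issued as an ALTER TABLE ... ADD COLUMN statement; the sketch below is not taken from the log, and the column name extra_column is hypothetical.

ALTER TABLE `/Root/ColumnTableTest` ADD COLUMN extra_column Uint64;  -- hypothetical added column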
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:56:22.174732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653799527707339:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.174757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.209332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.216108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:22.216146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:22.216169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:22.216187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:22.216236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:22.216253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:22.216267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:22.216287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:22.216303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:22.216320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:22.216335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:22.216353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653799527707416:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:56:22.216710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:22.216721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:22.216731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:22.216738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:22.216751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:22.216757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:22.216764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:22.216773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:56:22.216783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:56:22.216789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:56:22.216794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:56:22.216800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:56:22.216860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:56:22.216872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:56:22.216886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:56:22.216895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:56:22.216905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:22.216911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:22.216922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:56:22.216928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:22.216935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:22.216940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abs ... 1.dat 2024-11-21T08:56:24.432597Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1769, node 5 2024-11-21T08:56:24.447053Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:24.447080Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:24.447082Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:24.447135Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30032 TClient is connected to server localhost:30032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:24.524878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:24.524909Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:24.525981Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:24.526654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:56:24.646603Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653805566785852:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:24.646624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:24.649062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:24.654886Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:24.654910Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:24.654943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:24.654963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:24.654979Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:24.654997Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:24.655013Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:24.655027Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:24.655044Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:24.655062Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:24.655079Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:24.655097Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653805566785898:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:56:24.655493Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:24.655504Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:24.655513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:24.655516Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:24.655526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:24.655532Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:24.655538Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:24.655546Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:56:24.655552Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:56:24.655558Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:56:24.655563Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:56:24.655569Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:56:24.655607Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:56:24.655615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:56:24.655625Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:56:24.655632Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:56:24.655638Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:24.655645Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:24.655656Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:56:24.655662Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:24.655669Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:24.655675Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:56:24.705588Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653805566785978:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:24.705609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> TCacheTest::Attributes >> TCacheTest::MigrationCommon >> TCacheTest::Navigate >> TCacheTest::SystemView >> TCacheTest::Recreate >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TCacheTest::Recreate [GOOD] >> TCacheTest::RacyRecreateAndSync >> TCacheTest::Navigate [GOOD] >> TCacheTest::MigrationUndo >> TCacheTest::SystemView [GOOD] >> TCacheTest::SysLocks |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] Test command err: 2024-11-21T08:56:14.282971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653764062271560:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.283205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f6/r3tmp/tmp9hYCSs/pdisk_1.dat 2024-11-21T08:56:14.334024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25557, node 1 2024-11-21T08:56:14.350723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.350735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.350736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.350770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:14.383700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.383743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.385505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.415469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:14.422922Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T08:56:14.522991Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:56:14.523051Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:56:14.523055Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:56:14.523611Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2JiYmM1ZDktZmE2MGRmZTktMjU5OTVjODgtYThhODA1YmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2JiYmM1ZDktZmE2MGRmZTktMjU5OTVjODgtYThhODA1YmM= 2024-11-21T08:56:14.523731Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653764062272308:2298], Start check tables existence, number paths: 2 2024-11-21T08:56:14.523756Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2JiYmM1ZDktZmE2MGRmZTktMjU5OTVjODgtYThhODA1YmM=, ActorId: [1:7439653764062272309:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:56:14.523775Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:56:14.525164Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653764062272308:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:56:14.525183Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653764062272308:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:56:14.525187Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653764062272308:2298], Successfully finished 2024-11-21T08:56:14.525196Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:56:14.532106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.535586Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653763255016425:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.535602Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:56:14.537825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.537854Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.538680Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:56:14.538875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.551384Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. EnableColumnStatistics=false 2024-11-21T08:56:14.551861Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:14.592755Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.592788Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.593961Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.643190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.647395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653764072954988:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.647637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:56:14.649346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.649369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.650207Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:56:14.650445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.660737Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. 
EnableColumnStatistics=false 2024-11-21T08:56:14.661041Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:14.704137Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.704156Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:14.705499Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:14.751159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:14.755177Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.755209Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.755215Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.755227Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.755232Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.755236Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:56:14.764964Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:14.829069Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:56:14.829096Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439653763255017098:2313], Start check tables existence, number paths: 2 2024-11-21T08:56:14.829213Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:56:14.829221Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T08:56:14.829224Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:56:14.829402Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439653763255017098:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T08:56:14.829424Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439653763255017098:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T08:56:14.829429Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439653763255017098:2313], Successfully finished 2024-11-21T08:56:14.829441Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T08:56:14.924237Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T08:56:14.924342Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439653764072955923:2340], Start check tables existence, number paths: 2 2024-11-21T08:56:14.924439Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T08:56:14.924447Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] 
[Service] Resource pools was enanbled 2024-11-21T08:56:14.924456Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439653764072955932:2342], Database: /Root/test-dedicated, Start database fetching 2024-11-21T08:56:14.924462Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T08:56:14.924631Z node 2 :KQP_WORKLOAD_SERVICE DEBU ... yN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, received request, proxyRequestId: 64 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/initialization/migrations`; rpcActor: [10:7439653808000199166:2921] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T08:56:24.891437Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, request placed into pool from cache: default 2024-11-21T08:56:24.891450Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Sending CompileQuery request 2024-11-21T08:56:24.903914Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, acquire mvcc snapshot 2024-11-21T08:56:24.968741Z node 10 :KQP_RESOURCE_MANAGER ERROR: KqpSnapshotManager: CreateSnapshot got unexpected status=UNAVAILABLE, issues:
: Error: Database coordinators are unavailable 2024-11-21T08:56:24.968781Z node 10 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, read snapshot result: UNAVAILABLE, step: 0, tx id: 0 2024-11-21T08:56:24.968786Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Create QueryResponse for error on request, msg: 2024-11-21T08:56:24.968834Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, txInfo Status: Aborted Kind: ReadOnly TotalDuration: 0 ServerDuration: 64.92 QueriesCount: 2 2024-11-21T08:56:24.968867Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Sending to Executer TraceId: 0 8 2024-11-21T08:56:24.968881Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Created new KQP executer: [10:7439653808000199219:2920] isRollback: 1 2024-11-21T08:56:24.968891Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ExecuteState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:24.968999Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: CleanupState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, EndCleanup, isFinal: 0 2024-11-21T08:56:24.969042Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: CleanupState, TraceId: 01jd6yzphvd2rpzb0h6fmwkjgn, Sent query response back to proxy, proxyRequestId: 64, proxyId: [10:7439653795115294039:2235] 2024-11-21T08:56:24.969270Z node 10 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Database coordinators are unavailable" severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd6yzpj74gf8v7k0gcb4tef6" } } } } ; 2024-11-21T08:56:24.969307Z node 10 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Database coordinators are unavailable 2024-11-21T08:56:24.969344Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:56:24.969353Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:24.969355Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:56:24.969358Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:56:24.969379Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MmE0ZmIyN2MtYjY0MzZkMDctNTlmOWQzMzMtNDk3ZmJjNGM=, ActorId: [10:7439653808000199165:2920], ActorState: unknown state, Session actor destroyed 2024-11-21T08:56:25.411583Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI= 2024-11-21T08:56:25.411627Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: unknown state, session actor bootstrapped 2024-11-21T08:56:25.411741Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, received request, proxyRequestId: 66 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/secrets/values`; rpcActor: [10:7439653812295166536:2931] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T08:56:25.411749Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, request placed into pool from cache: default 2024-11-21T08:56:25.411758Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, Sending CompileQuery request 2024-11-21T08:56:25.413230Z node 10 :SCHEME_BOARD_SUBSCRIBER WARN: [main][10:7439653803705230190:2980][/Root/test-shared/.metadata/secrets/values] Sync is done: cookie# 38, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2024-11-21T08:56:25.413255Z node 10 :SCHEME_BOARD_SUBSCRIBER WARN: [main][10:7439653803705230190:2980][/Root/test-shared/.metadata/secrets/values] Sync is done: cookie# 39, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2024-11-21T08:56:25.413489Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439653812295166538:2932], status: 
UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2024-11-21T08:56:25.413559Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ExecuteState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2024-11-21T08:56:25.413575Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ExecuteState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:25.413578Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ExecuteState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, EndCleanup, isFinal: 0 2024-11-21T08:56:25.413623Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ExecuteState, TraceId: 01jd6yzq23514j5qqe4mmyaw8w, Sent query response back to proxy, proxyRequestId: 66, proxyId: [10:7439653795115294039:2235] 2024-11-21T08:56:25.413805Z node 10 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2024-11-21T08:56:25.413855Z node 10 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2024-11-21T08:56:25.413871Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T08:56:25.413880Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T08:56:25.413884Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T08:56:25.413886Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T08:56:25.413905Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=YWRlYzY2ZTgtMmMwNDlkYTktMjBkNDUyZDAtNjZlMjg0YWI=, ActorId: [10:7439653812295166535:2930], ActorState: unknown state, Session actor destroyed |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::SysLocks [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> KqpScheme::ResourcePoolClassifiersValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::MigrationUndo [GOOD] Test command err: 2024-11-21T08:56:25.787081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.787103Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.822243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:25.824062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T08:56:25.985364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.985391Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.999602Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T08:56:26.000690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T08:56:26.000997Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:194:2185], for# user1@builtin, access# DescribeSchema 2024-11-21T08:56:26.001035Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:198:2189], for# user1@builtin, access# DescribeSchema >> TCacheTest::MigrationCommit [GOOD] >> TConsoleTests::TestRestartConsoleAndPools >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TConsoleConfigTests::TestModifyConfigItem >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyRecreateAndSync [GOOD] Test command err: 2024-11-21T08:56:25.845610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.845630Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.880396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:25.882357Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:56:25.882544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:56:25.883065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 
FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:56:25.883908Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2024-11-21T08:56:26.042357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.042379Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:26.056719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:26.058360Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T08:56:26.058694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T08:56:26.059374Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T08:56:26.070699Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2024-11-21T08:56:25.886173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.886193Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.919968Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T08:56:26.083923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.083946Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:26.098061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2024-11-21T08:56:25.830947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.830971Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.864886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T08:56:25.866282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:171:2168], Recipient [1:68:2107]: NActors::TEvents::TEvPoison 2024-11-21T08:56:25.866365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2024-11-21T08:56:25.867007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:25.867783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:25.867803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:25.868146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T08:56:25.868157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:25.868160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:25.868164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:25.868167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:25.868169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:25.868175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:25.868238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:25.869346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:25.869638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:25.869668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:25.869692Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:177:2170]: TSystem::Undelivered 2024-11-21T08:56:25.869697Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2024-11-21T08:56:25.869701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.869705Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:25.869739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:25.869818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.869988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T08:56:25.870089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:25.870461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:25.870569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:177:2170], Recipient [1:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:25.870576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:25.870649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:25.870681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:25.870688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:25.870693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:25.870697Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:25.870711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:192:2170], Recipient [1:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:25.870715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:25.870719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for 
TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2024-11-21T08:56:25.891247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:206:2187], Recipient [1:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2024-11-21T08:56:25.891261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:25.898376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:25.898453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.898473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:56:25.898522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T08:56:25.898556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:56:25.898576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:25.898580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:56:25.898589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:25.898599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:25.898605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:56:25.898760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678 ... 
4-11-21T08:56:26.024342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2024-11-21T08:56:26.024365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:56:26.024371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:26.024398Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:415:2333], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:56:26.024423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:56:26.024426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:56:26.228648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.228663Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:26.242671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T08:56:26.246314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T08:56:26.246331Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T08:56:26.267639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T08:56:26.268726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:254:2067] recipient: [2:239:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T08:56:26.271089Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T08:56:26.303477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] IGNORE Leader for 
TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:339:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:340:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T08:56:26.379499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:417:2067] recipient: [2:408:2329] 2024-11-21T08:56:26.382959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.382979Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:443:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2024-11-21T08:56:26.416355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:56:26.416370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:26.416424Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T08:56:26.416439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:56:26.427674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T08:56:26.427743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 
TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:503:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:506:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:508:2067] recipient: [2:507:2404] Leader for TabletID 72057594046678944 is [2:509:2405] sender: [2:510:2067] recipient: [2:507:2404] 2024-11-21T08:56:26.432319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.432341Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:509:2405] sender: [2:536:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2024-11-21T08:56:25.869103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.869121Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.903341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:25.905333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T08:56:26.064497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.064517Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:26.078650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T08:56:26.079359Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, received event# 65543, Sender [2:171:2168], Recipient [2:68:2107]: NActors::TEvents::TEvPoison 2024-11-21T08:56:26.079452Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T08:56:26.080141Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:26.081091Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:26.081115Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:26.081500Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:26.081510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:26.081513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:26.081517Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:26.081521Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:26.081523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:26.081529Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:26.081559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:26.082283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:26.082455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:26.082486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:26.082515Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [2:7238242728502259555:7369577], Recipient [2:177:2170]: TSystem::Undelivered 2024-11-21T08:56:26.082519Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2024-11-21T08:56:26.082522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.082525Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:26.082545Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:26.082604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082617Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T08:56:26.082622Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082677Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082692Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082748Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082756Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082803Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082814Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082828Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082866Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082888Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082892Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082896Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.082923Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:26.083072Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:26.083193Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:177:2170], Recipient [2:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:26.083198Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:26.083267Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:26.083273Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:26.083289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:26.083293Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:26.083297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:26.083299Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:26.083329Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:192:2170], Recipient [2:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:26.083332Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:26.083335Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T08:56:26.103833Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [2:206:2187], Recipient [2:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2024-11-21T08:56:26.103849Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:26.110528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:26.110608Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.110628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:56:26.110676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target pa ... 
ype: Coordinator, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:56:26.260433Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:56:26.260438Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:26.260450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260473Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260511Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260519Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260550Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260556Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260571Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260585Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260598Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260629Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260648Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260658Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.260683Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:26.260863Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:26.261043Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:508:2398], Recipient [2:508:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:26.261048Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T08:56:26.261107Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:26.261111Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:26.261150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:26.261156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:26.261163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:26.261166Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:26.261181Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:523:2398], Recipient [2:508:2398]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:26.261186Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T08:56:26.261189Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:26.271498Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:26.271533Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:375:2315] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:26.271589Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:535:2415], recipient# [2:534:2414], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } 
ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:56:26.271622Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:26.271630Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:384:2318] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:26.271642Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:537:2417], recipient# [2:536:2416], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T08:56:26.271666Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:26.271672Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: 
[2:393:2321] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:26.271684Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:539:2419], recipient# [2:538:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } |90.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:55:35.343409Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.343436Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.346634Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.349112Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:55:35.349302Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:55:35.349690Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:55:35.349994Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:55:35.350243Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.351456Z node 1 :PERSQUEUE INFO: new Cookie default|2d40e673-5b69c694-439740ef-dce11222_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.352224Z node 1 :PERSQUEUE INFO: new Cookie default|9cd28e49-d0fa25a-4c687faa-f42ef9ea_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.356162Z node 1 :PERSQUEUE INFO: new Cookie default|642ad4f1-368e3cb-19d48afa-8d4b104c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.357331Z node 1 :PERSQUEUE INFO: new Cookie default|f4348cad-828378fa-dacef909-36f867f_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.358369Z node 1 :PERSQUEUE INFO: new Cookie default|5c55f363-dac398f9-ea00d070-bccb2b3a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.359936Z node 1 :PERSQUEUE INFO: new Cookie default|e6d00f97-e07cb445-b23c004e-286dad93_5 
generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T08:55:35.591800Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.591824Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:179:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:182:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:183:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:35.598384Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:35.598399Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:184:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:261:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:55:37.064238Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:55:37.064479Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T08:55:37.064644Z node 2 :PERSQUEUE ... imr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [47:290:2283] sender: [47:392:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] 2024-11-21T08:56:26.938663Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:26.938681Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] Leader for TabletID 72057594037927938 is [48:151:2172] sender: [48:152:2057] recipient: [48:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:177:2057] recipient: [48:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.941857Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:26.942017Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2024-11-21T08:56:26.942114Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2197] 2024-11-21T08:56:26.942586Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:56:26.942915Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2198] 2024-11-21T08:56:26.943228Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.944445Z node 48 :PERSQUEUE INFO: new Cookie default|3ef42fc4-2ff40617-493c37b0-4ba74872_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.944976Z node 48 :PERSQUEUE INFO: new Cookie default|963f60b3-bc406f18-ad19d935-845754da_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.947784Z node 48 :PERSQUEUE INFO: new Cookie default|b8280b7a-12bd8107-70a97a16-996bc103_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.948594Z node 48 :PERSQUEUE INFO: new Cookie default|38b94b47-b104d484-ed2c3cb7-71656bd0_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.949399Z node 48 :PERSQUEUE INFO: new Cookie default|46aca0e3-fa9d7530-84289041-d82f2347_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:26.950233Z node 48 :PERSQUEUE INFO: new Cookie default|f037f3f3-d719098f-8110403e-b34bcdbb_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] 2024-11-21T08:56:27.175041Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:27.175062Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] Leader for TabletID 72057594037927938 is [49:151:2172] sender: [49:152:2057] recipient: [49:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:177:2057] recipient: [49:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.178618Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:27.178781Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2024-11-21T08:56:27.178897Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:184:2197] 2024-11-21T08:56:27.179331Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:184:2197] 2024-11-21T08:56:27.179580Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: 
StateInit] bootstrapping 1 [49:185:2198] 2024-11-21T08:56:27.179872Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.181326Z node 49 :PERSQUEUE INFO: new Cookie default|4752265f-e173841b-b3579adb-f87e3b46_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.182010Z node 49 :PERSQUEUE INFO: new Cookie default|2edbf1c2-165786da-fb2bf176-272326e1_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.190790Z node 49 :PERSQUEUE INFO: new Cookie default|fefb3ae4-9f282060-223ce9c2-a4bb10ab_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.192039Z node 49 :PERSQUEUE INFO: new Cookie default|ff93f971-1abbe0a4-5975ab72-e99c0d33_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.193047Z node 49 :PERSQUEUE INFO: new Cookie default|4649c798-95c9f387-ce35993f-bd399e60_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:27.194205Z node 49 :PERSQUEUE INFO: new Cookie default|193b8bc7-f9be73c1-264d3dae-917f9244_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2024-11-21T08:54:07.911044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:07.911077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:07.911085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a43/r3tmp/tmpcc9Sy3/pdisk_1.dat 2024-11-21T08:54:07.979197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63583, node 1 2024-11-21T08:54:08.069716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.069734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.069738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.069832Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.074065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.148881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.148916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.160322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16000 2024-11-21T08:54:08.554492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.280386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.280410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.312525Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.313130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.356586Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.362682Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.362702Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.367452Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.367563Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.367576Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.367580Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.367584Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.367587Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.367591Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.367594Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.367671Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.539170Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.539187Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1754:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.539928Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1758:2552] 2024-11-21T08:54:09.541271Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1802:2576] 2024-11-21T08:54:09.541341Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1802:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.541778Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.544650Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.544660Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.544667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.545851Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.545869Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.546764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.547962Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.547982Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.550029Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.561345Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.592804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.688844Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:09.842781Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.558713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.558739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.561348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.592802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.592839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.592864Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.592880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.592893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.592906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.592918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.592931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.592944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.592956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.592972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.592987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2291:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.597467Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.597494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.597518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.597531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.597543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.597556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... TEvStatisticsRequest send, client id = [2:8420:6325], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.698348Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:23.698516Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8421:6326], server id = [2:8427:6332], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:23.698531Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8421:6326], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.698545Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8422:6327], server id = [2:8426:6331], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:23.698551Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8422:6327], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.698709Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:23.698761Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8419:6324], server id = [2:8424:6329], tablet id = 72075186224037899 2024-11-21T08:56:23.698766Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.698864Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8423:6328], server id = [2:8428:6333], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:23.698875Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8423:6328], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.698906Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:23.699002Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8420:6325], server id = [2:8425:6330], tablet id = 72075186224037900 2024-11-21T08:56:23.699007Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699025Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8429:6334], server id = [2:8430:6335], tablet id = 72075186224037904, status = OK 
2024-11-21T08:56:23.699032Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8429:6334], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.699063Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:23.699146Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:23.699186Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8421:6326], server id = [2:8427:6332], tablet id = 72075186224037901 2024-11-21T08:56:23.699189Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699200Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8431:6336], server id = [2:8432:6337], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:23.699208Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8431:6336], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.699250Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:23.699326Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8422:6327], server id = [2:8426:6331], tablet id = 72075186224037902 2024-11-21T08:56:23.699332Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699362Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8433:6338], server id = [2:8434:6339], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:23.699369Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8433:6338], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.699384Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8423:6328], server id = [2:8428:6333], tablet id = 72075186224037903 2024-11-21T08:56:23.699387Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699399Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:23.699472Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8429:6334], server id = [2:8430:6335], tablet id = 72075186224037904 2024-11-21T08:56:23.699475Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699484Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8435:6340], server id = [2:8437:6342], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:23.699490Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8435:6340], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.699513Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8436:6341], server id = [2:8438:6343], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:23.699519Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8436:6341], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:23.699625Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:23.699662Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8431:6336], server id = [2:8432:6337], tablet id = 72075186224037905 2024-11-21T08:56:23.699665Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699673Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:23.699710Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 
2024-11-21T08:56:23.699716Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:23.699749Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:23.699844Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8433:6338], server id = [2:8434:6339], tablet id = 72075186224037906 2024-11-21T08:56:23.699848Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699871Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8435:6340], server id = [2:8437:6342], tablet id = 72075186224037907 2024-11-21T08:56:23.699874Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.699887Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8436:6341], server id = [2:8438:6343], tablet id = 72075186224037908 2024-11-21T08:56:23.699890Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:23.710740Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:23.710799Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T08:56:24.291237Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 2024-11-21T08:56:24.291264Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:56:25.992271Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:25.992340Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:27.195537Z node 2 :STATISTICS INFO: Node 3 is unavailable 2024-11-21T08:56:27.195564Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:27.195600Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T08:56:27.195604Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:56:27.195625Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:27.195638Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:27.195778Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:27.207049Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:27.207130Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2024-11-21T08:56:27.207303Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8565:6413], server id = [2:8566:6414], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:27.207339Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8565:6413], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:27.207496Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:27.207508Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:27.207532Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:27.207558Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:27.207642Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:27.207693Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8565:6413], server id = [2:8566:6414], tablet id = 72075186224037900 2024-11-21T08:56:27.207698Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.208289Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:27.211580Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8583:6431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:27.211624Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:27.211629Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8583:6431], StatRequests.size() = 1 2024-11-21T08:56:27.234920Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDNhNjk0ODctMWRiZWUxMjMtMTVjYjRhNjktYmI3MWUwMzY=, TxId: 2024-11-21T08:56:27.234942Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDNhNjk0ODctMWRiZWUxMjMtMTVjYjRhNjktYmI3MWUwMzY=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:27.235066Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8592:6437]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:27.235123Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:27.235204Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:27.235208Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:27.235821Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:27.235832Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:27.235840Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:27.237146Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2024-11-21T08:54:09.163764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.163803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.163812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002766/r3tmp/tmpKvdl3v/pdisk_1.dat 2024-11-21T08:54:09.240990Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20481, node 1 2024-11-21T08:54:09.333887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.333906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.333910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.333995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.340743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.415705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.415735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.426935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30034 2024-11-21T08:54:09.824000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.602167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.602197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.634773Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.635494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.689756Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.696153Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.696176Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.700954Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.701051Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.701064Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.701067Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.701071Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.701075Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
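For reference, the LoadStatistics step recorded above (QueryId[ 1 ], StatType[ 2 ], ColumnTag[ 1 ] for [OwnerId: 72075186224037889, LocalPathId: 4]) reads a previously saved sketch back out of the statistics table. An illustrative YQL read over the same columns the test writes — a sketch only, not necessarily the exact query the service issues — looks like this:

    SELECT data
    FROM `.metadata/_statistics`
    WHERE owner_id      = 72075186224037889   -- OwnerId from the log
      AND local_path_id = 4                   -- LocalPathId from the log
      AND stat_type     = 2                   -- StatType[ 2 ] from the log
      AND column_tag    = 1;                  -- ColumnTag[ 1 ] from the log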
2024-11-21T08:54:10.701078Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.701082Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.701151Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.875131Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.875157Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.876095Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:10.877607Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:10.877675Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:10.878192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:54:10.881904Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:10.881915Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:10.881922Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:54:10.883205Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.883223Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.884092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:10.885304Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:10.885327Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:10.887492Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:10.899004Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.920620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.030386Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.185486Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:54:11.910647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:54:12.507328Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:12.626337Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:54:12.626357Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:54:12.626367Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2903], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:54:12.626509Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2904] 2024-11-21T08:54:12.626554Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2904], schemeshard id = 72075186224037899 2024-11-21T08:54:13.387485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2617:3191], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.387529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:13.391785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T08:54:13.450018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:13.450092Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:13.450141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:13.450164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:13.450186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:13.450205Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:13.450224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:13.450243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:13.450262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:13.450281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:13.450298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:13.450317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2761:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:13.459035Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2776:3042];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:13.459071Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:2776 ... 024-11-21T08:56:25.247185Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9363:7022], schemeshard count = 1 2024-11-21T08:56:25.571851Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T08:56:25.571876Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 186.000000s, at schemeshard: 72075186224037899 2024-11-21T08:56:25.571937Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2024-11-21T08:56:25.583096Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:27.259253Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:27.259276Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:27.259286Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T08:56:27.259292Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:27.260182Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:27.272180Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:27.272334Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:27.272350Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:27.272611Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:27.283793Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:27.283879Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:27.284079Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9504:7106], server id = [2:9509:7111], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:27.284222Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9504:7106], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.284486Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9505:7107], server id = [2:9510:7112], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:27.284497Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9505:7107], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.284594Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9506:7108], server id = [2:9511:7113], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:27.284600Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9506:7108], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.284667Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:27.284692Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:27.284745Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9507:7109], server id = [2:9512:7114], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:27.284750Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9507:7109], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.284771Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9508:7110], server id = [2:9513:7115], tablet id = 72075186224037909, status = OK 2024-11-21T08:56:27.284774Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9508:7110], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.284873Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:27.284914Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9504:7106], server id = [2:9509:7111], tablet id = 72075186224037905 2024-11-21T08:56:27.284918Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.284947Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9505:7107], server id = [2:9510:7112], tablet id = 72075186224037906 2024-11-21T08:56:27.284949Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.284970Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:27.284982Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2024-11-21T08:56:27.285012Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9506:7108], server id = [2:9511:7113], tablet id = 72075186224037907 2024-11-21T08:56:27.285015Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285033Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9514:7116], server id = [2:9517:7119], tablet id = 72075186224037910, status = OK 2024-11-21T08:56:27.285038Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9514:7116], 
path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.285107Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9515:7117], server id = [2:9516:7118], tablet id = 72075186224037911, status = OK 2024-11-21T08:56:27.285112Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9515:7117], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.285167Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9518:7120], server id = [2:9519:7121], tablet id = 72075186224037912, status = OK 2024-11-21T08:56:27.285172Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9518:7120], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.285216Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9507:7109], server id = [2:9512:7114], tablet id = 72075186224037908 2024-11-21T08:56:27.285218Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285239Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9508:7110], server id = [2:9513:7115], tablet id = 72075186224037909 2024-11-21T08:56:27.285241Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285254Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2024-11-21T08:56:27.285263Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9520:7122], server id = [2:9522:7124], tablet id = 72075186224037913, status = OK 2024-11-21T08:56:27.285267Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9520:7122], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.285324Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2024-11-21T08:56:27.285339Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9521:7123], server id = [2:9523:7125], tablet id = 72075186224037914, status = OK 2024-11-21T08:56:27.285344Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9521:7123], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:27.285394Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2024-11-21T08:56:27.285409Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9514:7116], server id = [2:9517:7119], tablet id = 72075186224037910 2024-11-21T08:56:27.285411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285439Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2024-11-21T08:56:27.285447Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9515:7117], server id = [2:9516:7118], tablet id = 72075186224037911 2024-11-21T08:56:27.285448Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285491Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2024-11-21T08:56:27.285499Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:27.285522Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9518:7120], server id = [2:9519:7121], tablet id = 72075186224037912 2024-11-21T08:56:27.285525Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.285550Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:27.285586Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 
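The [TQueryBase] RunDataQuery entries in this log print the statistics save query on a single line. Reformatted for readability, and with the List element types (which the logger elides to plain "List") filled in as assumptions, the query is roughly:

    DECLARE $owner_id      AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type     AS Uint32;
    DECLARE $column_tags   AS List<Uint32>;   -- element type assumed; the log prints only "List"
    DECLARE $data          AS List<String>;   -- element type assumed; the log prints only "List"

    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Two rows are upserted per save, one per column tag, which matches the two-element indexing visible in the logged query text.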
2024-11-21T08:56:27.285650Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T08:56:27.286563Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9520:7122], server id = [2:9522:7124], tablet id = 72075186224037913 2024-11-21T08:56:27.286571Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.286597Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9521:7123], server id = [2:9523:7125], tablet id = 72075186224037914 2024-11-21T08:56:27.286598Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:27.286628Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:27.289902Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9540:7142]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:27.289961Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:27.289966Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9540:7142], StatRequests.size() = 1 2024-11-21T08:56:27.313385Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTY0N2MwMy1mZWFkNzUzMy04NDc0ZWUwNS1iZTE3Y2ZkZA==, TxId: 2024-11-21T08:56:27.313408Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTY0N2MwMy1mZWFkNzUzMy04NDc0ZWUwNS1iZTE3Y2ZkZA==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:27.313553Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9548:7148]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:27.313621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:27.313736Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:27.313740Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:27.314402Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:27.314413Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T08:56:27.314420Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:27.315668Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TPQCDTest::TestPrioritizeLocalDatacenter >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TPQCDTest::TestUnavailableWithoutClustersList >> TConsoleTests::TestCreateTenant >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions |90.2%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] Test command err: Trying to start YDB, gRPC: 7399, MsgBus: 13316 2024-11-21T08:56:21.822336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653794263438599:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:21.822387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ed/r3tmp/tmpGKt3Z2/pdisk_1.dat 2024-11-21T08:56:21.874296Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7399, node 1 2024-11-21T08:56:21.890620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:21.890636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:21.890638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:21.890672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13316 2024-11-21T08:56:21.922474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:21.922501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:13316 2024-11-21T08:56:21.923535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:21.951571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:21.955265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:22.016027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:22.035412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:22.046819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:22.094487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653798558407431:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.094511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.121356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.127331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.137022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.144079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.150894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.157929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:22.166760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653798558407923:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.166782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.166792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653798558407928:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:22.167382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:22.170955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653798558407930:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:23.324447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976716444:0, at schemeshard: 72057594046644480 2024-11-21T08:56:23.333350Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385009:2456], TxId: 281474976716445, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWNkYjE0NTYtNjMxZmU0NTgtOTg4ZGY3ZjEtYzY4NzVmMDY=. CustomerSuppliedId : . TraceId : 01jd6yzn0s0vfzp95b8far1jmf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } } 2024-11-21T08:56:23.333938Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385009:2456], TxId: 281474976716445, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWNkYjE0NTYtNjMxZmU0NTgtOTg4ZGY3ZjEtYzY4NzVmMDY=. CustomerSuppliedId : . TraceId : 01jd6yzn0s0vfzp95b8far1jmf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } }. 2024-11-21T08:56:23.335074Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNkYjE0NTYtNjMxZmU0NTgtOTg4ZGY3ZjEtYzY4NzVmMDY=, ActorId: [1:7439653798558408212:2456], ActorState: ExecuteState, TraceId: 01jd6yzn0s0vfzp95b8far1jmf, Create QueryResponse for error on request, msg: 2024-11-21T08:56:23.335169Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385016:2469], TxId: 281474976716446, task: 1. Ctx: { TraceId : 01jd6yzn0sdb7ryhp3fbe9w4h8. SessionId : ydb://session/3?node_id=1&id=OWQwODhjM2EtY2U1ZThkN2MtYjc0NmRlN2MtMmEyNmNkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } } 2024-11-21T08:56:23.335179Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385016:2469], TxId: 281474976716446, task: 1. Ctx: { TraceId : 01jd6yzn0sdb7ryhp3fbe9w4h8. SessionId : ydb://session/3?node_id=1&id=OWQwODhjM2EtY2U1ZThkN2MtYjc0NmRlN2MtMmEyNmNkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } }. 2024-11-21T08:56:23.335315Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWQwODhjM2EtY2U1ZThkN2MtYjc0NmRlN2MtMmEyNmNkYzA=, ActorId: [1:7439653798558408225:2469], ActorState: ExecuteState, TraceId: 01jd6yzn0sdb7ryhp3fbe9w4h8, Create QueryResponse for error on request, msg: 2024-11-21T08:56:23.335354Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385019:2474], TxId: 281474976716447, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjkyMGI5MzgtNmRkNzVkMDgtNWM3ZTVkNC01ZDEwNzU5Nw==. TraceId : 01jd6yzn0segsbg21n1916vrkq. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } } 2024-11-21T08:56:23.335361Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385019:2474], TxId: 281474976716447, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjkyMGI5MzgtNmRkNzVkMDgtNWM3ZTVkNC01ZDEwNzU5Nw==. TraceId : 01jd6yzn0segsbg21n1916vrkq. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037895 node# 1 state# Ready) } }. 2024-11-21T08:56:23.335462Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjkyMGI5MzgtNmRkNzVkMDgtNWM3ZTVkNC01ZDEwNzU5Nw==, ActorId: [1:7439653798558408230:2474], ActorState: ExecuteState, TraceId: 01jd6yzn0segsbg21n1916vrkq, Create QueryResponse for error on request, msg: 2024-11-21T08:56:23.335495Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439653802853385022:2468], TxId: 281474976716448, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YmM4NGQzN2YtMz ... pe: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:25.822073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:25.877217Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:25.889857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:25.905414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653812387147404:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:25.905453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:25.905459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653812387147409:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:25.906138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:25.909005Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439653812387147411:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 23169, MsgBus: 23461 2024-11-21T08:56:26.372199Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653817704369624:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:26.372231Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ed/r3tmp/tmpQzlRIQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23169, node 4 2024-11-21T08:56:26.387171Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:26.387292Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:26.387301Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:26.387303Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:26.387334Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23461 TClient is connected to server localhost:23461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:26.472639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:26.472670Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:26.473744Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:26.474868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:26.481410Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:26.492233Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
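The ALREADY_EXISTS results later in this test's output ("Classifier with rank 42 already exists, its name ClassifierRank42") come from creating a second resource pool classifier with a duplicate rank. A hedged YQL sketch of the kind of statement involved — the WITH-parameter names and the MEMBER_NAME value are assumptions for illustration, not taken from this log — would be:

    CREATE RESOURCE POOL CLASSIFIER ClassifierRank42 WITH (
        RESOURCE_POOL = 'test_pool',     -- pool referenced later in this log
        RANK = 42,                       -- duplicate rank is what triggers ALREADY_EXISTS
        MEMBER_NAME = 'user@builtin'     -- assumed value, for illustration only
    );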
2024-11-21T08:56:26.507670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:26.520314Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:26.623254Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653817704371156:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:26.623277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:26.628308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.633914Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.645266Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.651415Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.659037Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.666100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:26.742095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653817704371669:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:26.742124Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:26.742172Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653817704371674:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:26.742793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:26.744122Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653817704371676:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:27.374089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:56:27.431148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:27.472717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:56:27.525438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:27.585845Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:56:27.627844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:56:27.855088Z node 4 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=4&id=MTFmMDQyMzEtYzQ5YzIzNzYtZDVhOTM1ZmYtNmFmNmFlY2U=, TxId: 01jd6yzseb2bwmv3j2rww5fx3w 2024-11-21T08:56:28.065694Z node 4 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=4&id=MTUxOTVlZTMtZTY0YzY5MS03NWVhZDNjNy04NmU5N2M5Nw==, TxId: 01jd6yzsmy35ea6qaf2g5t7wvn 2024-11-21T08:56:28.079756Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653826294307777:2792], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:56:28.079776Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> TCacheTest::RacyCreateAndSync >> TPQCDTest::TestRelatedServicesAreRunning >> TConsoleTests::TestGetUnknownTenantStatus |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TPQCDTest::TestDiscoverClusters >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::PathBelongsToDomain |90.2%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest |90.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |90.2%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::PathBelongsToDomain [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |90.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2024-11-21T08:56:29.170243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:29.170272Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:29.220876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:29.224416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T08:56:29.361153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:29.361178Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:29.375456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T08:56:29.376181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T08:56:29.376741Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T08:56:29.377610Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:223:2202], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:56:29.377632Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:225:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2024-11-21T08:56:28.302328Z node 
1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653824844177791:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:28.302461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040a8/r3tmp/tmpq75iQB/pdisk_1.dat 2024-11-21T08:56:28.344746Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12782, node 1 2024-11-21T08:56:28.358922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0040a8/r3tmp/yandexDic6D0.tmp 2024-11-21T08:56:28.358940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0040a8/r3tmp/yandexDic6D0.tmp 2024-11-21T08:56:28.359003Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0040a8/r3tmp/yandexDic6D0.tmp 2024-11-21T08:56:28.359055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22218 PQClient connected to localhost:12782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:28.403366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:28.403401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:28.404479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:28.425369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:56:28.542231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653824844178470:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.542251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653824844178474:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.542256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.542845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653824844178513:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.542858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.542898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:28.544288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653824844178484:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:56:28.561892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:28.621102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:28.635343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:28.640739Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653824844178726:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:28.640851Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ4ZjJiZjUtZTk1OTlmYS1kMzU2YTAwMS01ZjY5ZWE0NQ==, ActorId: [1:7439653824844178467:2299], ActorState: ExecuteState, TraceId: 01jd6yzt3w6gfjqceyr7d4jveb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:28.641318Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:56:28.707140Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yzt8mbxqvfscbcj6441ex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjOGIzZDctN2Q5ZDVjNGUtNzU5ZTQ4My1mZDZjYTA1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:28.711230Z node 1 :HTTP WARN: [::1]:41340 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.811954Z node 1 :HTTP WARN: [::1]:41348 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.912691Z node 1 :HTTP WARN: [::1]:41354 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.013659Z node 1 :HTTP WARN: [::1]:41356 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.114573Z node 1 :HTTP WARN: [::1]:41362 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.215531Z node 1 :HTTP WARN: [::1]:41376 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.316325Z node 1 :HTTP WARN: [::1]:41382 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.417165Z node 1 :HTTP WARN: [::1]:41398 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.517981Z node 1 :HTTP WARN: [::1]:41404 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.618876Z node 1 :HTTP WARN: [::1]:39082 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.720016Z node 1 :HTTP WARN: [::1]:39088 anonymous GET /actors/pqcd/health >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TPQCDTest::TestUnavailableWithoutNetClassifier ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2024-11-21T08:56:28.746660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653822459343273:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:28.746957Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040a3/r3tmp/tmpfDH5B8/pdisk_1.dat 2024-11-21T08:56:28.786270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24847, node 1 2024-11-21T08:56:28.796970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0040a3/r3tmp/yandexslB2Pz.tmp 2024-11-21T08:56:28.796984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0040a3/r3tmp/yandexslB2Pz.tmp 2024-11-21T08:56:28.797031Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0040a3/r3tmp/yandexslB2Pz.tmp 2024-11-21T08:56:28.797071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12363 PQClient connected to localhost:24847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:28.846736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:28.846761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:28.847822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:28.870787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:28.872356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:56:28.895924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:28.977306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653822459343935:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.977335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.977400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653822459343948:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.978074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:28.978942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653822459343979:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.978960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.979569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653822459343950:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:56:28.995526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.047500Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653826754311388:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:29.047638Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ0NDljNGYtOWU2N2Q3MzktMmE5OTI5MjYtNTg2MTc2MGE=, ActorId: [1:7439653822459343918:2299], ActorState: ExecuteState, TraceId: 01jd6yzthf1ne8h3f31s772ty1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:29.048059Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:29.053513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.065395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:56:29.093006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yztmm22r293g6q62ysqnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlNGI4ZDgtZTg4MWIyZWYtYjc4YjRmN2YtNDQ3OTk1YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:29.097381Z node 1 :HTTP WARN: [::1]:40630 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.198181Z node 1 :HTTP WARN: [::1]:40646 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.298966Z node 1 :HTTP WARN: [::1]:40650 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.399803Z node 1 :HTTP WARN: [::1]:40660 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.500639Z node 1 :HTTP WARN: [::1]:40664 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.601627Z node 1 :HTTP WARN: [::1]:41574 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.702567Z node 1 :HTTP WARN: [::1]:41586 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.803382Z node 1 :HTTP WARN: [::1]:41596 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.904125Z node 1 :HTTP WARN: [::1]:41600 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.005041Z node 1 :HTTP WARN: [::1]:41616 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.105953Z node 1 :HTTP WARN: [::1]:41624 anonymous GET /actors/pqcd/health ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2024-11-21T08:54:09.491672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.491709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.491718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002739/r3tmp/tmpAEoNyf/pdisk_1.dat 2024-11-21T08:54:09.557989Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22858, node 1 2024-11-21T08:54:09.647750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.647768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.647772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.647850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.651848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.725482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.725504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.736515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15395 2024-11-21T08:54:10.135342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.865103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.865128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.897455Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.898135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.942177Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.951291Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.951317Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.957778Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.957888Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.957901Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.957905Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.957909Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.957913Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.957916Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.957921Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.958002Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:11.129770Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.129795Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.130669Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:11.131955Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:11.132028Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:11.132497Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:11.135571Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:11.135581Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:11.135590Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:11.136798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:11.136817Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:11.137819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:11.138916Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:11.138935Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:11.140849Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:11.152341Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:11.173946Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.281225Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.436425Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:12.150314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.150337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.152740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:12.321047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2432:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.321092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.321601Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2437:3073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:12.321648Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:12.321662Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2439:3075] 2024-11-21T08:54:12.321674Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2439:3075] 2024-11-21T08:54:12.321861Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2440:2944] 2024-11-21T08:54:12.321937Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2439:3075], server id = [2:2440:2944], tablet id = 72075186224037897, status = OK 2024-11-21T08:54:12.321986Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2440:2944], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:54:12.321996Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:54:12.322040Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:54:12.322050Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2437:3073], StatRequests.size() = 1 2024-11-21T08:54:12.324491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2444:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.324519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.324597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2449:3084], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.325986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:54:12.482492Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:54:12.482520Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:54:12.554885Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2439:3075], schemeshard count = 1 2024-11-21T08:54:12.814781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2451:3086], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:54:12.916786Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3175]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:12.916831Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:54:12.916838Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3175], StatRequests.size() = 1 2024-11-21T08:54:12.928056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yvn2z9ceq9wmpmvwpe926, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdkOWVjMTYtZjRiNmZkMDQtYzc3NDJlM2ItOTdhYTdjMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:54:12.954260Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2645:3196]], StatType[ 0 ], StatRequestsCount[ ... EvPropagateStatistics, node id = 1 2024-11-21T08:55:13.814819Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:16.536547Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:16.536689Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:20.543562Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:23.345677Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:23.345825Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:27.276792Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:30.065434Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:30.065606Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:33.675036Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:35.955218Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:35.955359Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:39.817206Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:42.448437Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:42.448567Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:46.029568Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:48.738122Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:48.738268Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:52.416135Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:55.068842Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:55.068985Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:58.916546Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:01.702393Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:01.702501Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, 
node id = 1 2024-11-21T08:56:05.493039Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:08.239197Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:08.239313Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:11.888578Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:14.249499Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:14.249641Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:18.160562Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:20.770230Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:20.770390Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:24.631426Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:26.150753Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:56:26.150782Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:26.150786Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:26.150789Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:56:27.265550Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:27.265690Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:27.348559Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2024-11-21T08:56:27.348584Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224037889 2024-11-21T08:56:27.348676Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2024-11-21T08:56:27.359721Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:28.444595Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:28.444623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:28.444629Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T08:56:28.444637Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T08:56:28.444641Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:28.444750Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:28.447287Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:28.448144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6566:4641], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.448165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6576:4646], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.448187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.450158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:28.460322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6580:4649], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:28.674169Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6697:4711]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:28.674226Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:28.674237Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6699:4713] 2024-11-21T08:56:28.674247Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6699:4713] 2024-11-21T08:56:28.674328Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:6700:4714] 2024-11-21T08:56:28.674352Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6699:4713], server id = [2:6700:4714], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:28.674367Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:6700:4714], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:56:28.674375Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:28.674389Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:28.674399Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6697:4711], StatRequests.size() = 1 2024-11-21T08:56:28.687234Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzU3NzY3YTktMmQyNDQ4YmMtZTBiZWViZjItM2I2YzRhMjI=, TxId: 2024-11-21T08:56:28.687262Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzU3NzY3YTktMmQyNDQ4YmMtZTBiZWViZjItM2I2YzRhMjI=, TxId: 2024-11-21T08:56:28.687352Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:28.698420Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:28.698439Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:28.780796Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:28.780823Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:28.832298Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6699:4713], schemeshard count = 1 2024-11-21T08:56:29.629389Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:29.629442Z node 2 :STATISTICS ERROR: [72075186224037897] IsColumnTable. traversal path [OwnerId: 72075186224037889, LocalPathId: 4] is not known to schemeshard 2024-11-21T08:56:29.629561Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:29.630165Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:29.632618Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODU5OTI4ZTUtMzAwMTY5YjMtZGExZjU4ZGMtY2NiOTNiODA=, TxId: 2024-11-21T08:56:29.632637Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODU5OTI4ZTUtMzAwMTY5YjMtZGExZjU4ZGMtY2NiOTNiODA=, TxId: 2024-11-21T08:56:29.632742Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:29.644166Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.644194Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2743:3221] 2024-11-21T08:56:29.645358Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6788:4767]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:29.646022Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:29.646031Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2024-11-21T08:56:29.646035Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2024-11-21T08:54:09.420606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.420642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.420650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00273b/r3tmp/tmpN5emc2/pdisk_1.dat 2024-11-21T08:54:09.488414Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61300, node 1 2024-11-21T08:54:09.577341Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.577356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.577359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.577418Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.581899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.656533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.656562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.667972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10651 2024-11-21T08:54:10.063721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.839393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.839422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.872137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.872712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.926433Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.933793Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.933823Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.939587Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.939713Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.939737Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.939742Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.939749Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.939753Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.939757Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.939761Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.939840Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:11.113934Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.113958Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.114720Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:11.116000Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:11.116067Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:11.116577Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:11.119680Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:11.119690Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:11.119697Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:11.120912Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:11.120930Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:11.121842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:11.122962Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:11.122981Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:11.124994Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:11.136529Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:11.158250Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.268000Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.423984Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:12.143259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.143319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.146097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:12.179034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:12.179079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:12.179105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:12.179121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:12.179133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:12.179145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:12.179157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:12.179170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:12.179183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:12.179195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:12.179208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:12.179223Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:12.183634Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:12.183663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:12.183688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:12.183701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:12.183715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:12.183727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:29.485968Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:29.540874Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:29.540929Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2024-11-21T08:56:29.541071Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8428:6333], server id = [2:8434:6339], tablet id = 72075186224037903 2024-11-21T08:56:29.541077Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.541141Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8433:6338], server id = [2:8435:6340], tablet id = 72075186224037904 2024-11-21T08:56:29.541145Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.541208Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8492:6377], server id = [2:8497:6382], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:29.541243Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8492:6377], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.541317Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8493:6378], server id = [2:8498:6383], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:29.541325Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8493:6378], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.541550Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8494:6379], server id = [2:8499:6384], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:29.541564Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8494:6379], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.541645Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:29.541736Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 
72075186224037900 2024-11-21T08:56:29.541764Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8495:6380], server id = [2:8500:6385], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:29.541773Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8495:6380], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.541818Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8496:6381], server id = [2:8501:6386], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:29.541825Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8496:6381], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.541875Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:29.542014Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8492:6377], server id = [2:8497:6382], tablet id = 72075186224037899 2024-11-21T08:56:29.542020Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542030Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8493:6378], server id = [2:8498:6383], tablet id = 72075186224037900 2024-11-21T08:56:29.542033Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542069Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8502:6387], server id = [2:8504:6389], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:29.542078Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8502:6387], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.542107Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:29.542207Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:29.542223Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8494:6379], server id = [2:8499:6384], tablet id = 72075186224037901 2024-11-21T08:56:29.542227Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542262Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8503:6388], server id = [2:8505:6390], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:29.542274Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8503:6388], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.542313Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8506:6391], server id = [2:8507:6392], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:29.542319Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8506:6391], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.542388Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:29.542433Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8495:6380], server id = [2:8500:6385], tablet id = 72075186224037902 2024-11-21T08:56:29.542437Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542483Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8496:6381], server id = [2:8501:6386], tablet id = 72075186224037903 2024-11-21T08:56:29.542486Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542507Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8508:6393], server id = [2:8510:6395], tablet id 
= 72075186224037907, status = OK 2024-11-21T08:56:29.542515Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8508:6393], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.542532Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:29.542597Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:29.542613Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8502:6387], server id = [2:8504:6389], tablet id = 72075186224037904 2024-11-21T08:56:29.542616Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542656Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8509:6394], server id = [2:8511:6396], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:29.542664Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8509:6394], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:29.542684Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:29.542744Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8503:6388], server id = [2:8505:6390], tablet id = 72075186224037905 2024-11-21T08:56:29.542748Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542766Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8506:6391], server id = [2:8507:6392], tablet id = 72075186224037906 2024-11-21T08:56:29.542769Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542786Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:29.542792Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:29.542810Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8508:6393], server id = [2:8510:6395], tablet id = 72075186224037907 2024-11-21T08:56:29.542814Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.542840Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:29.542872Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:29.542940Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:29.543000Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8509:6394], server id = [2:8511:6396], tablet id = 72075186224037908 2024-11-21T08:56:29.543003Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:29.543649Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:29.547965Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8528:6413]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:29.548031Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:29.548039Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8528:6413], StatRequests.size() = 1 2024-11-21T08:56:29.576692Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmUyODJlNTMtMjRlYjgyZWEtZDUxN2VhMTEtODgxZTRiMzc=, TxId: 2024-11-21T08:56:29.576722Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmUyODJlNTMtMjRlYjgyZWEtZDUxN2VhMTEtODgxZTRiMzc=, TxId: 2024-11-21T08:56:29.576891Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:29.598300Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8538:6419] 2024-11-21T08:56:29.598375Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8538:6419], schemeshard id = 72075186224037889 2024-11-21T08:56:29.598414Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8444:6349], server id = [2:8539:6420], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:29.598444Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8539:6420] 2024-11-21T08:56:29.598465Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8539:6420], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T08:56:29.620833Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.620859Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T08:56:29.783349Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8546:6425]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:29.783460Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:29.783467Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:29.784181Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:29.784198Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:29.784226Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:29.786238Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2024-11-21T08:56:29.142033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653830554506817:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:29.142311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409e/r3tmp/tmpS9FZFk/pdisk_1.dat 2024-11-21T08:56:29.184742Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6592, node 1 2024-11-21T08:56:29.197233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00409e/r3tmp/yandexe9vFUZ.tmp 2024-11-21T08:56:29.197245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00409e/r3tmp/yandexe9vFUZ.tmp 2024-11-21T08:56:29.197288Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00409e/r3tmp/yandexe9vFUZ.tmp 2024-11-21T08:56:29.197314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63488 PQClient connected to localhost:6592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:29.219301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:56:29.242307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:29.242334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:29.243361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:29.391862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653830554507479:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.391888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.391950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653830554507491:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.392637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:29.394514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653830554507493:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:56:29.424394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.483721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.488840Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653830554507689:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:29.489189Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk3NGEzODEtYzcxOTllNTUtYzUzOWZmODktZDQzNDVmNmU=, ActorId: [1:7439653830554507476:2298], ActorState: ExecuteState, TraceId: 01jd6yztyd7vnep1hkq1nb2q9h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:29.489657Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:29.546012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:56:29.572533Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd6yzv3k270jjf96hcw0shcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMxYzdiODItNWFiOWRkNmItYzViMjNmOGUtN2ZjMGIwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TPQCDTest::TestUnavailableWithoutBoth >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> KqpQueryPerf::KvRead-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2024-11-21T08:56:25.845957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:25.845976Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:25.880696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2024-11-21T08:56:28.286970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653824070721721:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:28.287141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040bb/r3tmp/tmpH59Llb/pdisk_1.dat 2024-11-21T08:56:28.328166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29312, node 1 2024-11-21T08:56:28.339503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0040bb/r3tmp/yandexk6iQ06.tmp 2024-11-21T08:56:28.339517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0040bb/r3tmp/yandexk6iQ06.tmp 2024-11-21T08:56:28.339587Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0040bb/r3tmp/yandexk6iQ06.tmp 2024-11-21T08:56:28.339638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:56:28.344857Z node 1 :HTTP WARN: [::1]:42388 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.387647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:28.387675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:28.388717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:28.445622Z node 1 :HTTP WARN: [::1]:42392 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.518253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653824070722301:2292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.518279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653824070722313:2295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.518286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:28.526589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:56:28.529650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653824070722316:2296], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:56:28.546373Z node 1 :HTTP WARN: [::1]:42398 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.619055Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653824070722393:2300], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:28.619219Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTZjMzAwOTctYzkyY2U4NDktZjQ1NzNjZTAtNDk4OTI0MTA=, ActorId: [1:7439653824070722285:2291], ActorState: ExecuteState, TraceId: 01jd6yzt355y23qhywmftr3zmb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:28.626138Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:28.647300Z node 1 :HTTP WARN: [::1]:42400 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.748181Z node 1 :HTTP WARN: [::1]:42412 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.849074Z node 1 :HTTP WARN: [::1]:42422 anonymous GET /actors/pqcd/health 2024-11-21T08:56:28.949905Z node 1 :HTTP WARN: [::1]:42426 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.050692Z node 1 :HTTP WARN: [::1]:42432 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.151412Z node 1 :HTTP WARN: [::1]:42440 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.252130Z node 1 :HTTP WARN: [::1]:42446 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.352953Z node 1 :HTTP WARN: [::1]:42456 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.453758Z node 1 :HTTP WARN: [::1]:42462 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.554518Z node 1 :HTTP WARN: [::1]:42478 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.630684Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653828365689764:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:29.630767Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODBjNGQ1NjctYThmNmYzYzItM2ZkYmNmOGItM2RhNDE1Y2U=, ActorId: [1:7439653828365689757:2305], ActorState: ExecuteState, TraceId: 01jd6yzv5v1ywyxhne30jxj4mn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:29.631026Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:29.655396Z node 1 :HTTP WARN: [::1]:44638 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.756246Z node 1 :HTTP WARN: [::1]:44648 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.857036Z node 1 :HTTP WARN: [::1]:44654 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.957815Z node 1 :HTTP WARN: [::1]:44660 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.058521Z node 1 :HTTP WARN: [::1]:44666 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.159290Z node 1 :HTTP WARN: [::1]:44672 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.260182Z node 1 :HTTP WARN: [::1]:44688 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.361074Z node 1 :HTTP WARN: [::1]:44702 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.461804Z node 1 :HTTP WARN: [::1]:44704 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.562439Z node 1 :HTTP WARN: [::1]:44712 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.633728Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653832660657104:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:30.633818Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWM0NmM0ZWMtODg1MDAxZDItNDQyZmVhYjYtNWUzOWNiMzc=, ActorId: [1:7439653832660657102:2314], ActorState: ExecuteState, TraceId: 01jd6yzw576e57c6gshpwxyryc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:30.634006Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:30.663187Z node 1 :HTTP WARN: [::1]:44716 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.763976Z node 1 :HTTP WARN: [::1]:44720 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.864837Z node 1 :HTTP WARN: [::1]:44724 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.965674Z node 1 :HTTP WARN: [::1]:44740 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.066521Z node 1 :HTTP WARN: [::1]:44754 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.167461Z node 1 :HTTP WARN: [::1]:44762 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.268414Z node 1 :HTTP WARN: [::1]:44772 anonymous GET /actors/pqcd/health >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2024-11-21T08:56:27.469238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:27.469264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.469269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:27.469274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:27.469289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:27.469293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:27.469304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.469438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:27.473280Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2024-11-21T08:56:27.473304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:27.475459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:27.475681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:27.475701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:27.476830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:27.476931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:27.476993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.477069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.477557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.477817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.477826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.477834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:27.477839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.477843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:27.477854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.509582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:27.509661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.509719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:27.509758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:27.509766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.510525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.510552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:27.510593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046578944 2024-11-21T08:56:27.510604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:27.510609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:27.510613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:27.510992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.511003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:27.511008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:27.511436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.511450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.511457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.511479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.512061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:27.512511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:27.512582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:27.512754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.512761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:27.512764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.678048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.678119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:27.678132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.678240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:27.678251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.678293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 
2024-11-21T08:56:27.678308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.679132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.679150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.679200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.679206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:27.679299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.679309Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:27.679340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:27.679345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.679351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:27.679359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.679365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:27.679369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:27.679382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:27.679389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:27.679393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:27.679828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.679845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.679851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:27.679856Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:27.679861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.679880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:27.679885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 
2024-11-21T0 ... 2024-11-21T08:56:31.414579Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOB_DEPOT_EVENTS has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414582Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOB_DEPOT_EVENTS has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414585Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BLOB_DEPOT_EVENTS has been changed from 0 to 10 2024-11-21T08:56:31.414589Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DS_LOAD_TEST has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414592Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DS_LOAD_TEST has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414595Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DS_LOAD_TEST has been changed from 0 to 10 2024-11-21T08:56:31.414599Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414602Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414604Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_PROVIDER has been changed from 0 to 10 2024-11-21T08:56:31.414607Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_INITIALIZER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414610Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_INITIALIZER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414612Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_INITIALIZER has been changed from 0 to 10 2024-11-21T08:56:31.414616Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414619Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414622Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2024-11-21T08:56:31.414626Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414630Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414634Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2024-11-21T08:56:31.414638Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414641Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414644Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2024-11-21T08:56:31.414648Z node 11 :CMS_CONFIGS 
NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414651Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414654Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2024-11-21T08:56:31.414658Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414661Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414664Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2024-11-21T08:56:31.414667Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414671Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414674Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2024-11-21T08:56:31.414677Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414680Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414683Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2024-11-21T08:56:31.414687Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414690Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414693Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2024-11-21T08:56:31.414696Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414699Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414702Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2024-11-21T08:56:31.414706Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414710Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414714Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2024-11-21T08:56:31.414718Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414721Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414724Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2024-11-21T08:56:31.414727Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414730Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414733Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2024-11-21T08:56:31.414737Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414740Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414743Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2024-11-21T08:56:31.414747Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414750Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414753Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2024-11-21T08:56:31.414758Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414761Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414764Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2024-11-21T08:56:31.414768Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414771Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414775Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2024-11-21T08:56:31.414778Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414781Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414784Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2024-11-21T08:56:31.414791Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414794Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the 
component DATA_INTEGRITY has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414799Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2024-11-21T08:56:31.414803Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414806Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414810Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2024-11-21T08:56:31.414827Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2024-11-21T08:56:31.414835Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2024-11-21T08:56:31.414838Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2024-11-21T08:56:31.414872Z node 11 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> TConsistentOpsWithReboots::CopyWithData [GOOD] >> KqpQueryPerf::IndexReplace-QueryService >> KqpQueryPerf::RangeLimitRead-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2024-11-21T08:54:07.863105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:07.863143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:07.863152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ab8/r3tmp/tmpOHJsqt/pdisk_1.dat 2024-11-21T08:54:07.936404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9408, node 1 2024-11-21T08:54:08.027574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.027592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.027595Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.027654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.031626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.106334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.106361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.117801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23313 2024-11-21T08:54:08.518236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.296904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.296936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.330147Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.330874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.378179Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.385336Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.385358Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.389830Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.389958Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.389973Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.389978Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.389983Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.389989Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.389993Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.389999Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.390086Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.563683Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.563708Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.564620Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:09.566244Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:09.566341Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.567006Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.571156Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.571172Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.571182Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.572946Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.572967Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.573896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.575030Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.575050Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.577064Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.588720Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.610419Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.726771Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:09.883554Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.610823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.610851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.613298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.646049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.646087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.646115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.646131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.646143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.646155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.646167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.646180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.646193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.646206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.646220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.646237Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.651104Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.651135Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.651160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.651173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.651188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.651201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... 85:6249], schemeshard count = 1 2024-11-21T08:56:29.935269Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:29.935292Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:29.935302Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:29.935307Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.936332Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:29.948341Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:29.948507Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:29.948528Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:29.948803Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:29.948811Z node 2 :STATISTICS WARN: [72075186224037897] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2024-11-21T08:56:29.948816Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:31.154333Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:31.154362Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:31.154489Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:31.166011Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:31.166079Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:31.166289Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8456:6344], server id = [2:8461:6349], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:31.166408Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8456:6344], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.166466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8457:6345], server id = [2:8462:6350], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:31.166472Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8457:6345], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.166491Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8458:6346], server id = [2:8463:6351], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:31.166495Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8458:6346], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.166813Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8459:6347], server id = [2:8464:6352], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:31.166821Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8459:6347], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.166900Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:31.167005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:31.167038Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:31.167076Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8460:6348], server id = [2:8465:6353], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:31.167081Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8460:6348], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167110Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:31.167188Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8456:6344], server id = [2:8461:6349], tablet id = 72075186224037899 2024-11-21T08:56:31.167192Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.167219Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8457:6345], server id = [2:8462:6350], tablet id = 72075186224037900 2024-11-21T08:56:31.167221Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.167251Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8458:6346], server id = [2:8463:6351], tablet id = 72075186224037901 2024-11-21T08:56:31.167253Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.167272Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:31.167294Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8459:6347], server id = [2:8464:6352], tablet id = 72075186224037902 2024-11-21T08:56:31.167296Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.167325Z node 2 
:STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8466:6354], server id = [2:8469:6357], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:31.167330Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8466:6354], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167350Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8467:6355], server id = [2:8471:6359], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:31.167354Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8467:6355], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167376Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8468:6356], server id = [2:8472:6360], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:31.167380Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8468:6356], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167493Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8470:6358], server id = [2:8473:6361], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:31.167499Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8470:6358], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167575Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8460:6348], server id = [2:8465:6353], tablet id = 72075186224037903 2024-11-21T08:56:31.167578Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.167592Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8474:6362], server id = [2:8475:6363], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:31.167596Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8474:6362], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:31.167716Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:31.167731Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:31.167762Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:31.167784Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:31.167796Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:31.167800Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:31.167825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:31.167853Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:31.167926Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:31.167982Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8467:6355], server id = [2:8471:6359], tablet id = 72075186224037905 2024-11-21T08:56:31.167984Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.168422Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8466:6354], server id = [2:8469:6357], tablet id = 72075186224037904 2024-11-21T08:56:31.168429Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.168541Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:31.168603Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8468:6356], server id = [2:8472:6360], tablet id = 72075186224037906 2024-11-21T08:56:31.168605Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.168667Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8470:6358], server id = [2:8473:6361], tablet id = 72075186224037907 2024-11-21T08:56:31.168671Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.168709Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8474:6362], server id = [2:8475:6363], tablet id = 72075186224037908 2024-11-21T08:56:31.168711Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:31.171745Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8492:6380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:31.171790Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:31.171795Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8492:6380], StatRequests.size() = 1 2024-11-21T08:56:31.204866Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTM3NjU4NjAtNjAyOGIxMjEtZjZhYjYxNGMtYTJjMzJjZjc=, TxId: 2024-11-21T08:56:31.204892Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTM3NjU4NjAtNjAyOGIxMjEtZjZhYjYxNGMtYTJjMzJjZjc=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:31.205034Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8500:6386]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:31.205093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:31.205194Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:31.205198Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:31.205807Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:31.205817Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:31.205824Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:31.207164Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestValidation >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ControlPlane_CDC [GOOD] Test command err: 2024-11-21T08:56:03.443141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653715294567404:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:03.443355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003884/r3tmp/tmp5l1LUo/pdisk_1.dat 2024-11-21T08:56:03.489440Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:56:03.501832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28985, node 1 2024-11-21T08:56:03.515407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003884/r3tmp/yandexMpuPeC.tmp 2024-11-21T08:56:03.515419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003884/r3tmp/yandexMpuPeC.tmp 2024-11-21T08:56:03.515480Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003884/r3tmp/yandexMpuPeC.tmp 2024-11-21T08:56:03.515525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:56:03.520010Z INFO: TTestServer started on Port 25641 GrpcPort 28985 TClient is connected to server localhost:25641 PQClient connected to localhost:28985 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:03.544899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:03.544932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:03.546069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:03.577973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:56:03.587928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:56:03.733193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653715294568159:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.733239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.733378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653715294568172:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.734277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:56:03.734613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653715294568204:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.734630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.736302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653715294568174:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:56:03.769355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:03.776193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:03.793562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:03.809802Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653715294568447:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:03.809904Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQzNDBiZmItOTgyODQ2YmQtODZlMTBkNzMtZjYzNzg5YTI=, ActorId: [1:7439653715294568142:2304], ActorState: ExecuteState, TraceId: 01jd6yz1wj1y826t4dwf30mfe8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:03.810502Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439653715294568518:2596] 2024-11-21T08:56:08.443520Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653715294567404:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.443583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:56:08.947852Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:56:08.951121Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:56:08.951555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439653736769405293:2759], Recipient [1:7439653715294567781:2175]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:08.951569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:08.951574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:56:08.951581Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439653736769405289:2756], Recipient [1:7439653715294567781:2175]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T08:56:08.951583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:08.956872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 5 MaxPartitionCount: 10 ScaleThresholdSeconds: 500 ScaleUpPartitionWriteSpeedThresholdPercent: 80 ScaleDownPartitionWriteSpeedThresholdPercent: 20 PartitionStrategyType: CAN_SPLIT } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:56:08.956974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/autoscalit-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:56:08.957052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: autoscalit-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:56:08.957065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:56:08.957074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:56:08.957079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:56:08.957080Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T08:56:08.957085Z node 1 :FLAT_TX_SC ... 4-11-21T08:56:31.877082Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.877083Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdWrite { Key: "tx_00000281474976715675" Value: "\010\233\247\200\200\200\200@\020\t\030\253\353\277\357\2642 \377\377\377\377\377\377\377\377\377\001@\262\353\277\357\2642H\001X\002b\206\001\022\024\010\377\377\377\377\007\030\200\243\0058\200\200@@\200\200@h\001\030\000\"\nstreamImpl(\002\272\001\034/Root/origin/feed/streamImpl\322\001\000\332\001\000\342\001\000\352\001\005/Root\372\001\010\010\000\030\0010\0018\000\220\002\001\232\002\014\010\003\020k\030\003 \007(\0050\001\242\002\020\010\000\030\0010\0018\205\200\204\200\200\200\204\200\001j\000r\022\tQ\302\346\355\272\365>g\021f\010\000\000\005\000\000\000\202\001\034\n\032\010\000\022\026\n\022\00072075186224037892\020\000" } CmdWrite { Key: "_txinfo" Value: "\020\262\353\277\357\2642\030\233\247\200\200\200\200@(\240\215\0060\262\353\277\357\26428\233\247\200\200\200\200@" } CmdWrite { Key: "_config" Value: "\022\024\010\377\377\377\377\007\030\200\243\0058\200\200@@\200\200@h\001\030\ 2024-11-21T08:56:31.877087Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T08:56:31.877089Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715675:0 2024-11-21T08:56:31.877103Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [5:7439653817666683473:2150], Recipient [5:7439653817666683473:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:31.877108Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:31.877112Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:56:31.877114Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715675, path id: [OwnerId: 72057594046644480, LocalPathId: 15] 2024-11-21T08:56:31.877134Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7439653839141521303:2446], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T08:56:31.877144Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:56:31.877145Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:7439653817666683615:2234], at schemeshard: 72057594046644480, txId: 281474976715675, path id: 15 2024-11-21T08:56:31.877153Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.877160Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715675:0 ProgressState 2024-11-21T08:56:31.877165Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T08:56:31.877166Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715675:0 progress is 1/1 2024-11-21T08:56:31.877168Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T08:56:31.877171Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715675, ready parts: 1/1, is published: false 2024-11-21T08:56:31.877173Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T08:56:31.877175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715675:0 2024-11-21T08:56:31.877176Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715675:0 2024-11-21T08:56:31.877196Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 5 2024-11-21T08:56:31.877202Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715675, publications: 1, subscribers: 1 2024-11-21T08:56:31.877204Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715675, [OwnerId: 72057594046644480, LocalPathId: 15], 3 2024-11-21T08:56:31.877285Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7439653817666683615:2234], Recipient [5:7439653817666683473:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 3 } 2024-11-21T08:56:31.877292Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T08:56:31.877300Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715675 2024-11-21T08:56:31.877307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715675 2024-11-21T08:56:31.877313Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715675 2024-11-21T08:56:31.877316Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715675, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 3 2024-11-21T08:56:31.877317Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2024-11-21T08:56:31.877330Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715675, subscribers: 1 2024-11-21T08:56:31.877337Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7439653839141521267:2458] 2024-11-21T08:56:31.877340Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T08:56:31.877352Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T08:56:31.877356Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T08:56:31.877373Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715675 
2024-11-21T08:56:31.877378Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T08:56:31.877379Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T08:56:31.877380Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T08:56:31.877381Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:56:31.877384Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, State EXECUTED 2024-11-21T08:56:31.877386Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T08:56:31.877386Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7439653839141521267:2458] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715675 at schemeshard: 72057594046644480 2024-11-21T08:56:31.877387Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, NewState WAIT_RS_ACKS 2024-11-21T08:56:31.877389Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T08:56:31.877390Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T08:56:31.877391Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T08:56:31.877393Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976715675 to the list for deletion 2024-11-21T08:56:31.877395Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, NewState DELETING 2024-11-21T08:56:31.877398Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976715675 2024-11-21T08:56:31.877403Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T08:56:31.877419Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715675" IncludeFrom: true To: "tx_00000281474976715675" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\262\353\277\357\2642\030\233\247\200\200\200\200@(\240\215\0060\262\353\277\357\26428\233\247\200\200\200\200@" } 2024-11-21T08:56:31.877449Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7439653839141521307:2463], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T08:56:31.877489Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7439653839141521308:2446], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T08:56:31.877539Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T08:56:31.877546Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T08:56:31.877547Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:56:31.877548Z node 5 
:PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, State DELETING 2024-11-21T08:56:31.877550Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] empty tx queue 2024-11-21T08:56:31.877551Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715675 2024-11-21T08:56:31.877563Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7439653839141521138:2446], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T08:56:31.877584Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7439653839141521311:2464], Recipient [5:7439653839141521138:2446]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T08:56:31.877626Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7439653839141521277:2905], Recipient [5:7439653817666683473:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:31.877631Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:31.877632Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T08:56:31.878502Z node 5 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T08:56:31.878530Z node 5 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/origin/feed" 2024-11-21T08:56:31.878569Z node 5 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/origin/feed >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> KqpQueryPerf::ComputeLength+QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 63940, MsgBus: 5379 2024-11-21T08:56:31.559984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653838198036203:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:31.560269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034a4/r3tmp/tmpkLvFxG/pdisk_1.dat 2024-11-21T08:56:31.600362Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63940, node 1 2024-11-21T08:56:31.608499Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:31.608515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:31.608517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:31.608554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5379 TClient is connected to server 
localhost:5379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:31.661194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:31.661223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:31.662261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:31.684278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.693428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.709950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.733971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.747293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.850248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653838198037752:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.850277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.884370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.891309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.902171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.909137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.916536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.930788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:31.939171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653838198038256:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.939211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.939232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653838198038261:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.939924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:31.943510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653838198038263:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:32.108983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.116932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.126605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16786, MsgBus: 21616 2024-11-21T08:56:31.794476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653835656641103:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:31.794566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00346b/r3tmp/tmpnfekJ7/pdisk_1.dat 2024-11-21T08:56:31.859534Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16786, node 1 2024-11-21T08:56:31.872787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:31.872801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:31.872803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:31.872866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21616 TClient is connected to server localhost:21616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:31.922230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:31.929012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:31.929043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:31.930089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:31.930095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:31.998663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.020543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.030755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.131809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653839951609800:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.131844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.154474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.161420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.168297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.223032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.231791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.238494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.247123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653839951610316:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.247157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.247158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653839951610321:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.247847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:32.251429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653839951610323:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CopyWithData [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:52.066880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:52.066899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:52.066903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:52.066908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:52.066914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:52.066917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:52.066926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:52.066989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:52.077006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:52.077021Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:52.078938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:52.079022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:52.079054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:52.081299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:55:52.081380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:52.081475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.081643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:52.082252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.082463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:52.082473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.082484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:52.082490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:52.082496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:52.082525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:52.083796Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:52.095691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:52.095741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.095780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:52.095822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:52.095827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.096288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.096310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:52.096354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.096364Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:52.096368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:52.096373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:52.096709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.096720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:52.096724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:52.097022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.097032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.097037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.097043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.097454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:52.097736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:52.097767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:52.097882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.097900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:52.097905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.097948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:52.097952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.097969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:52.097977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T08:55:52.098220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:52.098226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:52.098245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.098248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:52.098290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.098295Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:52.098302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:52.098304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.098308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:52.098311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.098314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:52.098317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:52.098323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:52.098327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:52.098330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
in" ACL: "" } Children { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976497Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976532Z node 161 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/src1" took 38us result status StatusSuccess 2024-11-21T08:56:31.976615Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src1" PathDescription { Self { Name: "src1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "src1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976678Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976699Z node 161 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/src2" took 22us result status StatusSuccess 2024-11-21T08:56:31.976744Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src2" PathDescription { Self { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "src2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976788Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976806Z node 161 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst1" took 18us result status StatusSuccess 2024-11-21T08:56:31.976847Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst1" PathDescription { Self { Name: 
"dst1" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976886Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:31.976902Z node 161 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst2" took 16us result status StatusSuccess 2024-11-21T08:56:31.976945Z node 161 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst2" PathDescription { Self { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpQueryPerf::KvRead+QueryService [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> KqpQueryPerf::IndexReplace-QueryService [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19248, MsgBus: 4020 2024-11-21T08:56:32.216581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653841491618636:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:32.216602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003468/r3tmp/tmpmq5OQ0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19248, node 1 2024-11-21T08:56:32.285802Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:32.286038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:32.286051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:32.286053Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:32.286085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:4020 2024-11-21T08:56:32.316902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:32.316944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:32.318602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:32.357632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.361278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.371207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.393157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.410350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.421518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.558063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653841491620174:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.558113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.584768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.591689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.602222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.609460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.664139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.672787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.688021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653841491620690:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.688049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653841491620695:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.688059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.688827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:32.692579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653841491620697:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::ComputeLength+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23516, MsgBus: 12331 2024-11-21T08:56:32.520741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653843678039137:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:32.520857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003460/r3tmp/tmpufnHby/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23516, node 1 2024-11-21T08:56:32.568831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:32.572703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:32.572713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:32.572715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:32.572744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12331 TClient is connected to server localhost:12331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:32.616680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.621947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:32.621976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:32.623033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:32.627404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.640325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:32.655099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.666080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.783102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843678040680:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.783134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.813496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.820196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.833734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.848337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.862120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.876642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.891157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843678041194:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.891185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.891301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843678041199:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.891886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:32.895463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653843678041201:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25614, MsgBus: 10302 2024-11-21T08:56:32.154182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653843700063907:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:32.154197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003463/r3tmp/tmp3bDTTy/pdisk_1.dat 2024-11-21T08:56:32.213694Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25614, node 1 2024-11-21T08:56:32.224436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:32.224452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:32.224454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:32.224488Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10302 2024-11-21T08:56:32.255693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:32.255724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:32.256836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:32.268626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.280318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:32.296971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.325711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.343593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.480576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843700065453:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.480608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.501953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.556791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.567178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.574238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.628735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.637040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.645785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843700065971:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.645807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653843700065976:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.645808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:32.646399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:32.650080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653843700065978:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:32.851622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.863686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:32.881188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TPQCDTest::TestDiscoverClusters [GOOD] >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2024-11-21T08:56:22.246970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.246989Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.286552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2024-11-21T08:56:22.290582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.290595Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2024-11-21T08:56:22.311746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T08:56:22.312839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] 
recipient: [1:236:2213] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] recipient: [1:236:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:250:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [1:251:2219] sender: [1:252:2067] recipient: [1:236:2213] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T08:56:22.317390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:285:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:251:2219] sender: [1:286:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T08:56:22.349848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:335:2067] recipient: [1:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:335:2067] recipient: [1:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:338:2286] sender: [1:339:2067] recipient: [1:331:2282] Leader for TabletID 72075186233409548 is [1:338:2286] sender: [1:340:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T08:56:22.405049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:413:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:413:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:415:2333] sender: [1:416:2067] recipient: [1:408:2329] Leader for TabletID 72075186233409549 is [1:415:2333] sender: [1:417:2067] recipient: [1:24:2071] 2024-11-21T08:56:22.410415Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.410436Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2024-11-21T08:56:22.412817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:56:22.412829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:22.412885Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T08:56:22.412898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:56:22.423991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T08:56:22.424058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T08:56:22.457958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 
2024-11-21T08:56:22.503232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:613:2067] recipient: [1:608:2496] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:613:2067] recipient: [1:608:2496] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:616:2500] sender: [1:617:2067] recipient: [1:608:2496] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 Leader for TabletID 72075186233409550 is [1:616:2500] sender: [1:635:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 110 2024-11-21T08:56:22.644199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.644236Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T08:56:22.658760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T08:56:22.662425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.662443Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T08:56:22.683640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T08:56:22.684634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] Leader for TabletID 
72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:254:2067] recipient: [2:239:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T08:56:22.687058Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T08:56:22.719185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:339:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:340:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T08:56:22.769832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:417:2067] recipient: [2:408:2329] 2024-11-21T08:56:22.772666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:22.772684Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 
72075186233409549 is [2:416:2333] sender: [2:443:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2024-11-21T08:56:22.805601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:56:22.805614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:22.805682Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T08:56:22.805694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:56:22.817002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T08:56:22.817102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T08:56:22.840331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:546:2067] recipient: [2:541:2435] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:546:2067] recipient: [2:541:2435] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:547:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:547:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:549:2439] sender: [2:550:2067] recipient: [2:541:2435] Leader for TabletID 72075186233409550 is [2:549:2439] sender: [2:551:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2024-11-21T08:56:23.743645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:56:23.743671Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:23.784749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:56:23.784778Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9258, MsgBus: 16123 2024-11-21T08:56:32.873069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653843657497029:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:32.873091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00342b/r3tmp/tmpfVpgrR/pdisk_1.dat 2024-11-21T08:56:32.925535Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 9258, node 1 2024-11-21T08:56:32.935436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:32.935465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:32.935467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:32.935500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16123 TClient is connected to server localhost:16123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:32.974480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:32.974514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:32.975666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:32.977757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:32.989562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:33.057381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:33.077069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:33.087699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:33.152277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653847952465872:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:33.152311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:33.200578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.210753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.221363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.280797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.296822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.308988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:33.318022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653847952466387:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:33.318039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653847952466392:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:33.318043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:33.318687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:33.322705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653847952466394:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] |90.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2024-11-21T08:56:29.182383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653826698592373:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:29.182603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004064/r3tmp/tmpIU1JGs/pdisk_1.dat 2024-11-21T08:56:29.224587Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19369, node 1 2024-11-21T08:56:29.230252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004064/r3tmp/yandexScsiHJ.tmp 2024-11-21T08:56:29.230262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004064/r3tmp/yandexScsiHJ.tmp 2024-11-21T08:56:29.230308Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004064/r3tmp/yandexScsiHJ.tmp 2024-11-21T08:56:29.230334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28036 PQClient connected to localhost:19369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:29.283835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:29.283857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:29.284897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:29.303661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:56:29.413383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653826698593056:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.413395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653826698593037:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.413416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.414121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:29.414129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653826698593095:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.414138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:29.415792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653826698593066:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:56:29.431615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.491071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.502680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:29.517790Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653826698593362:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:29.517876Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTMzNjcxZmQtZTU2ODY1YWItYTU5NmViZi05ODkxNDJkZQ==, ActorId: [1:7439653826698593034:2299], ActorState: ExecuteState, TraceId: 01jd6yztz500qg7f6ktkj0489s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:29.518432Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:56:29.575933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6yzv3q7c8aqz35yxkgrqwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTkyOTQ0NGQtZjMyYmJhMGYtYWZmZDJlOGQtNDQ3ZDk2NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:29.580144Z node 1 :HTTP WARN: [::1]:50168 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.681108Z node 1 :HTTP WARN: [::1]:50172 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.782115Z node 1 :HTTP WARN: [::1]:50176 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.883095Z node 1 :HTTP WARN: [::1]:50192 anonymous GET /actors/pqcd/health 2024-11-21T08:56:29.984049Z node 1 :HTTP WARN: [::1]:50208 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.084920Z node 1 :HTTP WARN: [::1]:50212 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.185827Z node 1 :HTTP WARN: [::1]:50228 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.286736Z node 1 :HTTP WARN: [::1]:50240 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.387580Z node 1 :HTTP WARN: [::1]:50242 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.488506Z node 1 :HTTP WARN: [::1]:50246 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.589436Z node 1 :HTTP WARN: [::1]:50248 anonymous GET /actors/pqcd/health 2024-11-21T08:56:30.618460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6yzw43czs8f082b3v10ee9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVjODU4MmEtZjg1NWRjODAtNzI3N2QxNjItYjM0YmYxNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:30.619741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6yzw43czs8f082b3v10ee9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVjODU4MmEtZjg1NWRjODAtNzI3N2QxNjItYjM0YmYxNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:31.661663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jd6yzx4s3qbbsn2pr5m588c1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE2NjM3MzgtZDM2ZTQ0N2EtNTE0ODg5ZmItMjA5OWQ3NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:31.662473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd6yzx4s3qbbsn2pr5m588c1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE2NjM3MzgtZDM2ZTQ0N2EtNTE0ODg5ZmItMjA5OWQ3NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:32.712042Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd6yzy5k6skpkv8sb5frj1ft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcwMzBmNTItY2Q1MjIxMWQtZGZjYWU3NzYtN2UwYmE4ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:32.713133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd6yzy5k6skpkv8sb5frj1ft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcwMzBmNTItY2Q1MjIxMWQtZGZjYWU3NzYtN2UwYmE4ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:33.758798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd6yzz68a4f98mx8ehwcjgk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzUyNTk4ZC1mNzQ4YzRhMS1jZWY3Zjk2OC05Y2ZjMjE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:33.760031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd6yzz68a4f98mx8ehwcjgk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzUyNTk4ZC1mNzQ4YzRhMS1jZWY3Zjk2OC05Y2ZjMjE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:33.764536Z node 1 :HTTP WARN: [::1]:50264 anonymous GET /actors/pqcd |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant |90.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] |90.3%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2024-11-21T08:56:30.553232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653832141822537:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:30.553248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004058/r3tmp/tmpog1855/pdisk_1.dat 2024-11-21T08:56:30.590109Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19266, node 1 2024-11-21T08:56:30.600304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:30.600314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:30.600316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:30.600340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2328 PQClient connected to localhost:19266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:30.619210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T08:56:30.653711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:30.653742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:30.654788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:30.804409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653832141823204:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:30.804439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:30.804525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653832141823217:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:30.805320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:30.806950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T08:56:30.807049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653832141823219:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:56:30.824276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:30.871410Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653832141823368:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:30.871527Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQzNmYyZGUtOWFmNGI1NmEtNjk2MDE3MmMtOWUwNWEwNzM=, ActorId: [1:7439653832141823187:2299], ActorState: ExecuteState, TraceId: 01jd6yzwajdgrv2prp6d35nv45, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:30.872067Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:30.884640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:30.947897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T08:56:30.976707Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd6yzwfg2qs1vm1v9z0bqpab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQzMWNiMTMtOTI3YjdjMmItZjllYTU2ODItYWUwYzU1NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T08:56:30.981591Z node 1 :HTTP WARN: [::1]:49020 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.082434Z node 1 :HTTP WARN: [::1]:49032 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.183312Z node 1 :HTTP WARN: [::1]:49046 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.284312Z node 1 :HTTP WARN: [::1]:49050 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.388811Z node 1 :HTTP WARN: [::1]:49052 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.490434Z node 1 :HTTP WARN: [::1]:49064 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.591252Z node 1 :HTTP WARN: [::1]:49070 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.692058Z node 1 :HTTP WARN: [::1]:49086 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.792903Z node 1 :HTTP WARN: [::1]:49092 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.895593Z node 1 :HTTP WARN: [::1]:49108 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.996683Z node 1 :HTTP WARN: [::1]:49116 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.097634Z node 1 :HTTP WARN: [::1]:49120 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.198602Z node 1 :HTTP WARN: [::1]:49122 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.302415Z node 1 :HTTP WARN: [::1]:49126 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.403438Z node 1 :HTTP WARN: [::1]:49140 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.504372Z node 1 :HTTP WARN: [::1]:49148 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.605216Z node 1 :HTTP WARN: [::1]:49150 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.706032Z node 1 :HTTP WARN: [::1]:49164 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.806742Z node 1 :HTTP WARN: [::1]:49170 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.907539Z node 1 :HTTP WARN: [::1]:49182 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.008567Z node 1 :HTTP WARN: [::1]:49190 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.112416Z node 1 :HTTP WARN: [::1]:49196 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.215767Z node 1 :HTTP WARN: [::1]:49212 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.316927Z node 1 :HTTP WARN: [::1]:49226 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.418429Z node 1 :HTTP WARN: [::1]:49238 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.519860Z node 1 :HTTP WARN: [::1]:49254 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.620921Z node 1 :HTTP WARN: [::1]:49270 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.722380Z node 1 :HTTP WARN: [::1]:49272 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.823327Z node 1 :HTTP WARN: [::1]:49280 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.927404Z node 1 :HTTP WARN: [::1]:49284 anonymous GET /actors/pqcd/health >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> TSolomonReboots::CreateAlterSolomonWithReboots [GOOD] >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test 
command err: 2024-11-21T08:56:31.369462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653837703327654:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:31.369584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00404d/r3tmp/tmpaduyOk/pdisk_1.dat 2024-11-21T08:56:31.437528Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19289, node 1 2024-11-21T08:56:31.447313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:31.447326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:31.447329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:31.447371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:56:31.452172Z node 1 :HTTP WARN: [::1]:45472 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.469910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:31.469934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:31.470932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:31.553343Z node 1 :HTTP WARN: [::1]:45486 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.654184Z node 1 :HTTP WARN: [::1]:45488 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.679211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653837703328071:2292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.679236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653837703328093:2295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.679243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:31.688553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:56:31.691213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653837703328100:2296], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:56:31.755233Z node 1 :HTTP WARN: [::1]:45494 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.813728Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653837703328177:2300], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:31.814289Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGY3OTA1YTEtY2M5YzkxMDQtMjlkZTMwYzItNjgyY2IxMTE=, ActorId: [1:7439653837703328069:2291], ActorState: ExecuteState, TraceId: 01jd6yzx5y0rkprtc9kwwq3r8a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:31.825684Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:31.861270Z node 1 :HTTP WARN: [::1]:45504 anonymous GET /actors/pqcd/health 2024-11-21T08:56:31.962267Z node 1 :HTTP WARN: [::1]:45510 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.063029Z node 1 :HTTP WARN: [::1]:45518 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.163947Z node 1 :HTTP WARN: [::1]:45524 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.264897Z node 1 :HTTP WARN: [::1]:45536 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.372420Z node 1 :HTTP WARN: [::1]:45552 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.473465Z node 1 :HTTP WARN: [::1]:45564 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.574270Z node 1 :HTTP WARN: [::1]:45568 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.675097Z node 1 :HTTP WARN: [::1]:45574 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.775926Z node 1 :HTTP WARN: [::1]:45576 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.827913Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653841998295547:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:32.828312Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzI4MDc5MmYtOTFjZjVkYjItNTMyMjI0N2UtYzU3OWE4ZDg=, ActorId: [1:7439653841998295540:2305], ActorState: ExecuteState, TraceId: 01jd6yzy9s5q6t5wgtjmvh3vwc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:32.828504Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:32.877351Z node 1 :HTTP WARN: [::1]:45580 anonymous GET /actors/pqcd/health 2024-11-21T08:56:32.978976Z node 1 :HTTP WARN: [::1]:45594 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.079856Z node 1 :HTTP WARN: [::1]:45596 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.180859Z node 1 :HTTP WARN: [::1]:45608 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.281956Z node 1 :HTTP WARN: [::1]:45624 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.385311Z node 1 :HTTP WARN: [::1]:45636 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.488416Z node 1 :HTTP WARN: [::1]:45648 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.593237Z node 1 :HTTP WARN: [::1]:45656 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.694249Z node 1 :HTTP WARN: [::1]:45668 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.799125Z node 1 :HTTP WARN: [::1]:45676 anonymous GET /actors/pqcd/health 2024-11-21T08:56:33.832978Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653846293262888:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:33.833505Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWFmNmNkYWUtOWVkM2Q4YmQtYjMwZjMyLTVkYTU2Njcw, ActorId: [1:7439653846293262886:2314], ActorState: ExecuteState, TraceId: 01jd6yzz9582tsr138c5xhsmky, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:33.833704Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:33.912839Z node 1 :HTTP WARN: [::1]:45680 anonymous GET /actors/pqcd/health 2024-11-21T08:56:34.024850Z node 1 :HTTP WARN: [::1]:45688 anonymous GET /actors/pqcd/health 2024-11-21T08:56:34.126542Z node 1 :HTTP WARN: [::1]:45690 anonymous GET /actors/pqcd/health 2024-11-21T08:56:34.227528Z node 1 :HTTP WARN: [::1]:45704 anonymous GET /actors/pqcd/health 2024-11-21T08:56:34.331045Z node 1 :HTTP WARN: [::1]:45708 anonymous GET /actors/pqcd/health 2024-11-21T08:56:34.432748Z node 1 :HTTP WARN: [::1]:45722 anonymous GET /actors/pqcd/health >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart |90.3%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::ProbeServerless [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2024-11-21T08:54:08.099347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:08.099379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:08.099388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002860/r3tmp/tmpYE9zWb/pdisk_1.dat 2024-11-21T08:54:08.174179Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23513, node 1 2024-11-21T08:54:08.262856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.262873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.262877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.262954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.267333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.342612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.342641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.353946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19208 2024-11-21T08:54:08.752987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.489495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.489514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.522028Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.522771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.574511Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.582265Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.582286Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.588275Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.588400Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.588419Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.588423Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.588429Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.588434Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.588438Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.588444Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.588525Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.760304Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.760329Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.761197Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:09.762665Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:09.762737Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.763232Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.766762Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.766774Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.766783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.767921Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.767939Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.768845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.769940Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.769960Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.771834Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.783391Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.804872Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.920251Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:10.084135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.791570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.791601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.794470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.961780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.961813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.962179Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:10.962218Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:10.962226Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T08:54:10.962237Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T08:54:10.962380Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T08:54:10.962455Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T08:54:10.962490Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:54:10.962499Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:54:10.962546Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:54:10.962552Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T08:54:10.964594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.964618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.964699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.965945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:54:11.094610Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:54:11.094633Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:54:11.187351Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T08:54:11.429984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:54:11.517589Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:11.517661Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:54:11.517668Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3177], StatRequests.size() = 1 2024-11-21T08:54:11.526663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yvkrgf7yg7wdqyk2qwm2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJjMGMyYjAtMjVmZDc0ZTYtZDMyYmQxNGEtZmE3ZGIxZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:54:11.551055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, op ... RN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:26.263416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6936:4875], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:26.519601Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7053:4937]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:26.519661Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:26.519675Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7055:4939] 2024-11-21T08:56:26.519688Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7055:4939] 2024-11-21T08:56:26.519806Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7056:4940] 2024-11-21T08:56:26.519838Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7055:4939], server id = [2:7056:4940], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:26.519862Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7056:4940], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:56:26.519871Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:26.519897Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:26.519911Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7053:4937], StatRequests.size() = 1 2024-11-21T08:56:26.537327Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2E3MWUwZjEtYjIwNDcyZWUtNjkwZDU5ZTUtZDJiNjdlMTk=, TxId: 2024-11-21T08:56:26.537352Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2E3MWUwZjEtYjIwNDcyZWUtNjkwZDU5ZTUtZDJiNjdlMTk=, TxId: 2024-11-21T08:56:26.537500Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:26.548904Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:26.548927Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:26.631969Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:26.631996Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:26.693938Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7055:4939], schemeshard count = 1 2024-11-21T08:56:27.804217Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:27.804248Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:56:27.804254Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.076630Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:29.129162Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:29.129221Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:56:29.129226Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.129335Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:29.129799Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:29.132492Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjQ0MWY3ZTgtNGI3NmRjZDEtYzQ5ZTEzYjUtYjIyZmUyYTI=, TxId: 2024-11-21T08:56:29.132508Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjQ0MWY3ZTgtNGI3NmRjZDEtYzQ5ZTEzYjUtYjIyZmUyYTI=, TxId: 2024-11-21T08:56:29.132596Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:29.143973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:29.143999Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:2982:3268] 2024-11-21T08:56:30.513955Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:30.513983Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T08:56:30.513986Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T08:56:31.806934Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:56:31.807004Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:31.807105Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:31.828504Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:31.828543Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T08:56:31.828554Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T08:56:31.828661Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:31.829310Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:31.833184Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzdmMGE4NzYtOTYwYWUwZmYtNjkxZjdiMzYtZDk2NmU4MjQ=, TxId: 2024-11-21T08:56:31.833207Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzdmMGE4NzYtOTYwYWUwZmYtNjkxZjdiMzYtZDk2NmU4MjQ=, TxId: 2024-11-21T08:56:31.833420Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:31.844879Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T08:56:31.844903Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:33.225623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:33.225657Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:33.225663Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:56:34.629332Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:34.629389Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T08:56:34.629394Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T08:56:34.629515Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:34.630144Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:34.634720Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWIzNTNkYWItZDUyMWRmODktY2VjODEzYTAtNWNlNzkyNTQ=, TxId: 2024-11-21T08:56:34.634748Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWIzNTNkYWItZDUyMWRmODktY2VjODEzYTAtNWNlNzkyNTQ=, TxId: 2024-11-21T08:56:34.634866Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:34.652832Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T08:56:34.652863Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2982:3268] 2024-11-21T08:56:34.653044Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7381:5121]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:34.653724Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:34.653736Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:34.655026Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:34.655045Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:34.655054Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:34.655633Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T08:56:34.655682Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2024-11-21T08:56:34.655795Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7411:5133]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:34.656371Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:34.656382Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:34.656438Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:34.656446Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:34.656452Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:34.656842Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T08:56:34.656898Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateAlterSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:52.936953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:52.936973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:52.936976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:52.936980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:52.936984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:52.936987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:52.936993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:52.937055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:52.944572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:52.944592Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:52.946484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:52.946569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:52.946599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:52.948365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:52.948428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:52.948498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.948657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:52.949170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.949368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:52.949374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.949382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:52.949387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:52.949391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:52.949419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] 
recipient: [1:207:2209] 2024-11-21T08:55:52.950434Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:52.962913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:52.962987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.963043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:52.963091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:52.963096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.963741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.963758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:52.963803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.963810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:52.963813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:52.963816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:52.964054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.964060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:52.964063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:52.964320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.964329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.964335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.964342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.964981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:52.965296Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:52.965333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:52.965505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:52.965526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:52.965531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.965580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:52.965585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:52.965606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:52.965616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:52.965903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:52.965909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:52.965940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:52.965943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:52.966007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:52.966012Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:52.966019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:52.966022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.966026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:52.966029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:52.966032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:52.966035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:52.966041Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:52.966046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:52.966048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... HEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [171:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2024-11-21T08:56:34.919577Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:56:34.919582Z node 171 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1006:0 ProgressState 2024-11-21T08:56:34.919590Z node 171 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:56:34.919594Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:56:34.919600Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T08:56:34.919603Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:56:34.919608Z node 171 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:56:34.919612Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:56:34.919652Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T08:56:34.919658Z node 171 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T08:56:34.919661Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:56:34.919664Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:56:34.919732Z node 171 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.919741Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.919745Z node 171 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:34.919749Z node 171 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:56:34.919753Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:56:34.919798Z node 171 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.919805Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.919809Z node 171 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:34.919812Z node 171 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:56:34.919816Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:34.919823Z node 171 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:56:34.920428Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:34.920440Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:34.920444Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:34.920448Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:34.920463Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.920476Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:56:34.920525Z node 171 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T08:56:34.920690Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:56:34.920741Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:56:34.920848Z node 171 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:56:34.920882Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:34.920905Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:56:34.920985Z node 171 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:56:34.921062Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:56:34.921079Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:34.921133Z node 171 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2024-11-21T08:56:34.921185Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:56:34.921199Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:34.921239Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:34.921243Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:56:34.921251Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2024-11-21T08:56:34.921677Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:56:34.921690Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:56:34.922105Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:34.922116Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:56:34.922137Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:56:34.922140Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:56:34.922158Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:56:34.922163Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:56:34.922359Z node 171 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:56:34.922399Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:56:34.922407Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:56:34.922451Z node 171 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:56:34.922464Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:56:34.922466Z node 171 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [171:551:2506] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T08:56:34.922505Z node 171 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:34.922513Z node 171 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:56:34.922517Z 
node 171 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:56:34.922522Z node 171 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T08:56:34.922569Z node 171 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:34.922590Z node 171 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 28us result status StatusPathDoesNotExist 2024-11-21T08:56:34.922614Z node 171 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2024-11-21T08:53:32.787891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:32.787945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:32.787957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003821/r3tmp/tmpR19BI0/pdisk_1.dat 2024-11-21T08:53:32.864411Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23072, node 1 2024-11-21T08:53:32.957250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:32.957274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:32.957279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:32.957353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:32.962350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:33.043235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:33.043276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:33.055252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15038 2024-11-21T08:53:33.460083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.346175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.346207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.379810Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:34.380638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.436140Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:34.445471Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:34.445499Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:34.452895Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:34.453018Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:34.453034Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:34.453037Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:34.453041Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:34.453046Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:34.453049Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:34.453054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:34.453148Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:34.630262Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.630292Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.631658Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:34.633930Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:34.634093Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:34.634950Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:53:34.638884Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:34.638900Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:34.638910Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:53:34.640431Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.640457Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.641890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:34.643532Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:34.643564Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:34.646781Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:34.659111Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.681496Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:34.803823Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:34.960319Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:53:35.695741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.295457Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:36.446928Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:53:36.446951Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:36.446963Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2492:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:36.447301Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2493:2906] 2024-11-21T08:53:36.447392Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2493:2906], schemeshard id = 72075186224037899 2024-11-21T08:53:37.109782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2615:3190], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:37.109826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:37.113007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T08:53:37.158620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:37.158682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:37.158719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:37.158739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:37.158758Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:37.158782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:37.158797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:37.158813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:37.158826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:37.158838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:37.158850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:37.158864Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2748:3028];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:37.168948Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;self_id=[2:2781:3046];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:37.168982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:2781 ... Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:32.647926Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T08:56:32.647955Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:56:33.355266Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:33.355306Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=M\0-4 2024-11-21T08:56:33.355313Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:56:34.912410Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:34.912519Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:34.923089Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:34.923162Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T08:56:34.923167Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:34.923437Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:34.935397Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:34.935573Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:34.935592Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:34.935776Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:34.947543Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:34.947640Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T08:56:34.947922Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9652:7226], server id = [2:9657:7231], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:34.947968Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9652:7226], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.948281Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:34.948441Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9652:7226], server id = [2:9657:7231], tablet id = 72075186224037905 2024-11-21T08:56:34.948447Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.948503Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9653:7227], server id = [2:9658:7232], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:34.948519Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9653:7227], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.948646Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9654:7228], server id = [2:9659:7233], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:34.948657Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9654:7228], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.948770Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9655:7229], server id = [2:9661:7235], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:34.948779Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9655:7229], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.948879Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9656:7230], server id = [2:9662:7236], tablet id = 72075186224037909, status = OK 2024-11-21T08:56:34.948888Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9656:7230], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.948915Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:34.948968Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9660:7234], server id = [2:9663:7237], tablet id = 72075186224037910, status = OK 2024-11-21T08:56:34.948975Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9660:7234], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.949132Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:34.949190Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:34.949262Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9653:7227], server id = [2:9658:7232], tablet id = 72075186224037906 2024-11-21T08:56:34.949266Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.949275Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2024-11-21T08:56:34.949322Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2024-11-21T08:56:34.949371Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:9654:7228], server id = [2:9659:7233], tablet id = 72075186224037907 2024-11-21T08:56:34.949374Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.949403Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9655:7229], server id = [2:9661:7235], tablet id = 72075186224037908 2024-11-21T08:56:34.949406Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.949424Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9664:7238], server id = [2:9665:7239], tablet id = 72075186224037911, status = OK 2024-11-21T08:56:34.949434Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9664:7238], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.949523Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9656:7230], server id = [2:9662:7236], tablet id = 72075186224037909 2024-11-21T08:56:34.949526Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.949547Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9666:7240], server id = [2:9668:7242], tablet id = 72075186224037912, status = OK 2024-11-21T08:56:34.949554Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9666:7240], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.949623Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9660:7234], server id = [2:9663:7237], tablet id = 72075186224037910 2024-11-21T08:56:34.949626Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.949654Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9667:7241], server id = [2:9670:7244], tablet id = 72075186224037913, status = OK 2024-11-21T08:56:34.949660Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9667:7241], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.949723Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9669:7243], server id = [2:9671:7245], tablet id = 72075186224037914, status = OK 2024-11-21T08:56:34.949730Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9669:7243], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:34.949797Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2024-11-21T08:56:34.949839Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2024-11-21T08:56:34.949873Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2024-11-21T08:56:34.949892Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2024-11-21T08:56:34.949898Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:34.949926Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:34.949961Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:34.950034Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2024-11-21T08:56:34.950113Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9664:7238], server id = [2:9665:7239], tablet id = 72075186224037911 2024-11-21T08:56:34.950117Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.951044Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9666:7240], server id = [2:9668:7242], tablet id = 72075186224037912 2024-11-21T08:56:34.951057Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.951180Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:34.951312Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9667:7241], server id = [2:9670:7244], tablet id = 72075186224037913 2024-11-21T08:56:34.951315Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.951406Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9669:7243], server id = [2:9671:7245], tablet id = 72075186224037914 2024-11-21T08:56:34.951409Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:34.965898Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDNmMDc0NTQtNDA1YTg4OWQtMjYzNWQwYzYtYjQyOTA1NWI=, TxId: 2024-11-21T08:56:34.965933Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDNmMDc0NTQtNDA1YTg4OWQtMjYzNWQwYzYtYjQyOTA1NWI=, TxId: 2024-11-21T08:56:34.966112Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:34.978871Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:34.978903Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=M\0-4 , ActorId=[1:3900:3426] 2024-11-21T08:56:34.979277Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9689:5710]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:34.979338Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:34.979343Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:34.979397Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:34.979407Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T08:56:34.979416Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2024-11-21T08:56:34.982405Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> KqpOlapScheme::DropThenAddColumnIndexation [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2024-11-21T08:54:08.951359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:08.951392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:08.951400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027ca/r3tmp/tmpgcbC82/pdisk_1.dat 2024-11-21T08:54:09.022932Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27033, node 1 2024-11-21T08:54:09.117255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.117277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.117281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.117374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.123519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.199488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.199525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.211102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24082 2024-11-21T08:54:09.606211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.342623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.342649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.375661Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.376390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.421688Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.428403Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.428424Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.433025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.433163Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.433176Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.433179Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.433183Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.433187Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.433190Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.433194Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.433266Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.606140Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.606164Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.606950Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T08:54:10.608244Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T08:54:10.608466Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T08:54:10.608875Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:10.612571Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:10.612584Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:10.612592Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:10.613647Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.613667Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.615306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:10.616434Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:10.616454Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:10.618506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:10.629816Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.651417Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:10.765105Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:10.920976Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:11.499713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2141:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.499744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.502396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:11.544034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.544074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.544101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.544116Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.544130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.544142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:11.544155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:11.544167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:11.544180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:11.544192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:11.544224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:11.544244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:11.549765Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.549799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.549830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.549843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.549855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.549869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... Execute 2024-11-21T08:56:35.767915Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:35.767943Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2024-11-21T08:56:35.822952Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8424:6339] 2024-11-21T08:56:35.822988Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8377:6313], server id = [2:8424:6339], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:35.823022Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8424:6339], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T08:56:35.823062Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8425:6340] 2024-11-21T08:56:35.823078Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8425:6340], schemeshard id = 72075186224037889 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2024-11-21T08:56:35.918418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:35.918464Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:35.918774Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:35.930551Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:35.930635Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:35.930888Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8432:6347], server id = [2:8437:6352], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:35.931061Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8432:6347], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.931143Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8433:6348], server id = [2:8438:6353], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:35.931154Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8433:6348], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.931563Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:35.931624Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:35.931659Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8434:6349], server id = [2:8440:6355], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:35.931675Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8434:6349], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.931806Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8435:6350], server id = [2:8439:6354], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:35.931816Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8435:6350], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.931831Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8432:6347], server id = [2:8437:6352], tablet id = 72075186224037899 2024-11-21T08:56:35.931836Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.931924Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8436:6351], server id = [2:8441:6356], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:35.931932Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8436:6351], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.931961Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8433:6348], server id = [2:8438:6353], tablet id = 72075186224037900 2024-11-21T08:56:35.931964Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.931981Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:35.932070Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8442:6357], server id = [2:8444:6359], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:35.932080Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8442:6357], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.932113Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:35.932177Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8443:6358], server id = [2:8445:6360], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:35.932185Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8443:6358], path = { OwnerId: 
72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.932231Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:35.932286Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8434:6349], server id = [2:8440:6355], tablet id = 72075186224037901 2024-11-21T08:56:35.932290Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.932426Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:35.932458Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8435:6350], server id = [2:8439:6354], tablet id = 72075186224037902 2024-11-21T08:56:35.932462Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.932488Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8436:6351], server id = [2:8441:6356], tablet id = 72075186224037903 2024-11-21T08:56:35.932490Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.932500Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:35.932542Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8446:6361], server id = [2:8448:6363], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:35.932553Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8446:6361], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.932571Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8447:6362], server id = [2:8450:6365], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:35.932577Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8447:6362], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.932656Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8442:6357], server id = [2:8444:6359], tablet id = 72075186224037904 2024-11-21T08:56:35.932659Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.932668Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8449:6364], server id = [2:8451:6366], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:35.932674Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8449:6364], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:35.932742Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8443:6358], server id = [2:8445:6360], tablet id = 72075186224037905 2024-11-21T08:56:35.932745Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.932810Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:35.932840Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:35.932861Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:35.932867Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:35.932936Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:35.932973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:35.933037Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:35.933076Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8446:6361], server id = [2:8448:6363], tablet id = 72075186224037906 2024-11-21T08:56:35.933079Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.933807Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8447:6362], server id = [2:8450:6365], tablet id = 72075186224037907 2024-11-21T08:56:35.933818Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.933869Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:35.933956Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8449:6364], server id = [2:8451:6366], tablet id = 72075186224037908 2024-11-21T08:56:35.933960Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:35.938121Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8468:6383]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:35.938163Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:35.938169Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8468:6383], StatRequests.size() = 1 2024-11-21T08:56:35.964146Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWMwMWFlM2MtYWJlYzMxMDEtM2QyZjYzMzEtMTgxNTY0N2Q=, TxId: 2024-11-21T08:56:35.964170Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWMwMWFlM2MtYWJlYzMxMDEtM2QyZjYzMzEtMTgxNTY0N2Q=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:35.964323Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8476:6389]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:35.964395Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:35.964550Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:35.964556Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:35.965094Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:35.965103Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:35.965110Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:35.966372Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2024-11-21T08:56:28.314613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:28.314637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:28.314641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:28.314645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:28.314654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:28.314657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:28.314664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:28.314732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:28.316956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:28.316971Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:28.318635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:28.318801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:28.318813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:28.319608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:28.319674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:28.319718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.319779Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:28.320263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:28.320468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:28.320474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:28.320482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:28.320486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:28.320489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:28.320500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.352547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:28.352630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.352690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:28.352730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:28.352737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.353586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.353608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:28.353645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.353655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:28.353657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:28.353661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:28.354022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.354030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:28.354033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
1:0 3 -> 128 2024-11-21T08:56:28.354404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.354434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.354439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:28.354453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:28.354900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:28.355265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:28.355344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:28.355518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.355525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:28.355529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.519768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.519829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:28.519837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:28.519912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:28.519919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:28.519951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:28.519960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:28.520517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:28.520532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:28.520598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:28.520603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 
72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:28.520688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.520695Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:28.520726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:28.520729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:28.520734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:28.520737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:28.520741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:28.520744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:28.520753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:28.520757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:28.520760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:28.521094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:28.521106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:28.521109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:28.521112Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:28.521115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:28.521127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:28.521129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T0 ... 
: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:56:36.487109Z node 23 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:56:36.487159Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:56:36.487381Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:56:36.487392Z node 23 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:56:36.487437Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:56:36.487557Z node 23 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/003112/r3tmp/tmpNc83Tm/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T08:56:36.487603Z node 23 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1 Path# /home/runner/.ya/build/build_root/jptk/003112/r3tmp/tmpNc83Tm/pdisk_1.dat 2024-11-21T08:56:36.498713Z node 23 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:56:36.498793Z node 23 :CONFIGS_DISPATCHER DEBUG: TConfigsDispatcher Bootstrap 2024-11-21T08:56:36.498868Z node 23 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:56:36.498879Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:56:36.498909Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:56:36.498949Z node 23 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:56:36.498974Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:377:2335], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.498984Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.499078Z node 23 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:56:36.499088Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:56:36.499093Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:56:36.499108Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[23:384:2339] 2024-11-21T08:56:36.499159Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:379:2333], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.499164Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.499736Z node 23 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:56:36.499748Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [23:377:2335] 2024-11-21T08:56:36.499885Z node 23 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[23:384:2339] 2024-11-21T08:56:36.499912Z node 23 :LOCAL DEBUG: 
TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:56:36.499920Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:56:36.499923Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:56:36.502264Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:410:2346], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.502282Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.514570Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.514592Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:56:36.526392Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } } RawConsoleConfig { } } 2024-11-21T08:56:36.526415Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T08:56:36.526450Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: AllowEditYamlInUiItem 2024-11-21T08:56:36.526462Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:410:2346]: Config { } ItemKinds: 75 Local: true 2024-11-21T08:56:36.526482Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T08:56:36.526489Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } } ItemKinds: 26 Local: true 2024-11-21T08:56:36.526502Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T08:56:36.526507Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:377:2335]: Config { } ItemKinds: 10 Local: true 2024-11-21T08:56:36.526509Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T08:56:36.526513Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:379:2333]: Config { } ItemKinds: 10 Local: true 2024-11-21T08:56:36.527241Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: 2024-11-21T08:56:36.527265Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:377:2335], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.527270Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.527284Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.527286Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.527293Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:410:2346], 
Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.527295Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.527305Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:379:2333], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.527307Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.538584Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } } 2024-11-21T08:56:36.538608Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T08:56:36.538637Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T08:56:36.538654Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false } } ItemKinds: 26 Local: true 2024-11-21T08:56:36.538691Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.538696Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.549925Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } } } } 2024-11-21T08:56:36.549949Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T08:56:36.549980Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T08:56:36.550000Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } } ItemKinds: 26 Local: true 2024-11-21T08:56:36.550043Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.550049Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T08:56:36.561262Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 
273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 3 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } Items { Kind: 26 Id: 3 Generation: 1 } } } } 2024-11-21T08:56:36.561289Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T08:56:36.561319Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T08:56:36.561335Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableVolatileTransactionArbiters: false } } ItemKinds: 26 Local: true 2024-11-21T08:56:36.561372Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T08:56:36.561379Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota |90.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] Test command err: 2024-11-21T08:56:27.120939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:27.120959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.120962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:27.120965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:27.120975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:27.120978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:27.120985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.121043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:27.123084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:27.123096Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:27.124374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:27.124514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:27.124527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:27.125123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:27.125170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:27.125208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.125651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:27.125822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.125826Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:27.125835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.155871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:27.155951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:27.156051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:27.156056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:27.156949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:27.156960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:27.156964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:27.157283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:27.157600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.157627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.157997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2024-11-21T08:56:27.158288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:27.158343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:27.158516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.158521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:27.158524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.322889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.322944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:27.322953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.323020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:27.323027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.323058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.323066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.323502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.323572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:27.323639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:27.323665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:27.323668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.323672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:27.323675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.323678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:56:27.323680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:27.323686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:27.323690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:27.323693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:27.323951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.323961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.323963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:27.323966Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:27.323969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.323979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:27.323984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T0 ... 
istered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:37.110668Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:37.110715Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 94489282736 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:37.110727Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:37.110789Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:37.110799Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:37.110830Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:37.110841Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:37.111372Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:37.111383Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:37.111420Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:37.111424Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [22:244:2234], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:37.111436Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:37.111442Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:37.111454Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:37.111458Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:37.111462Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:37.111466Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:37.111470Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:37.111473Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:37.111480Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:37.111485Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:37.111488Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:37.111644Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046578944, cookie: 1 2024-11-21T08:56:37.111652Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:37.111655Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:37.111659Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:37.111662Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:37.111674Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:37.111677Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [22:95:2130] 2024-11-21T08:56:37.112168Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T08:56:37.112256Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] Bootstrap 2024-11-21T08:56:37.113132Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] Become StateWork (SchemeCache [22:298:2279]) 2024-11-21T08:56:37.113301Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:56:37.113683Z node 22 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:56:37.114636Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:37.115291Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:37.115495Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:37.115563Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:56:37.115932Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:56:37.115938Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:56:37.115976Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:56:37.117835Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:56:37.117872Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:56:37.117900Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:56:37.117931Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:56:37.117945Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:56:37.117953Z node 22 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:56:37.139550Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:56:37.139611Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:56:37.150678Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:56:37.150761Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:56:37.150782Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:56:37.150796Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:56:37.150834Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:56:37.150843Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:56:37.150850Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:56:37.150859Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:56:37.161991Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:56:37.162050Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:56:37.162299Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:56:37.162310Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:56:37.162363Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:56:37.162529Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00311e/r3tmp/tmpR4vwt7/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T08:56:37.162598Z node 22 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1 Path# /home/runner/.ya/build/build_root/jptk/00311e/r3tmp/tmpR4vwt7/pdisk_1.dat 2024-11-21T08:56:37.173887Z node 22 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:56:37.173957Z node 22 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:56:37.173968Z node 22 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:56:37.173992Z node 22 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:56:37.174008Z node 22 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:56:37.174085Z node 22 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:56:37.174537Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:56:37.174542Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:56:37.174556Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[22:385:2341] 2024-11-21T08:56:37.174566Z node 22 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:56:37.174570Z node 22 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [22:380:2338] 2024-11-21T08:56:37.174684Z node 22 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[22:385:2341] 2024-11-21T08:56:37.174691Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:56:37.174696Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:56:37.174698Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2024-11-21T08:54:09.185622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.185659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.185667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00275d/r3tmp/tmpGrjxqe/pdisk_1.dat 2024-11-21T08:54:09.251604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29913, node 1 2024-11-21T08:54:09.341115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.341129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.341131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.341193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.346147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.421164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.421190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.432357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26391 2024-11-21T08:54:09.828866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.577465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.577491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.610518Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.611221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.656339Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.663154Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.663174Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.668278Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.668392Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.668405Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.668408Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.668412Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.668416Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.668419Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.668423Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.668502Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.842592Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.842615Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.843618Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:10.845421Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:10.845534Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:10.846166Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:10.850606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:10.850622Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:10.850632Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:10.852484Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.852508Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.853541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:10.854871Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:10.854899Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:10.857699Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:10.869831Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.891819Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.003761Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.167988Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:11.876372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.876404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.878688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:12.041567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2432:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.041601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.041963Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2437:3073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:12.041997Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:54:12.042005Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2439:3075] 2024-11-21T08:54:12.042014Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2439:3075] 2024-11-21T08:54:12.042139Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2440:2944] 2024-11-21T08:54:12.042192Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2439:3075], server id = [2:2440:2944], tablet id = 72075186224037897, status = OK 2024-11-21T08:54:12.042228Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2440:2944], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:54:12.042234Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:54:12.042269Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:54:12.042275Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2437:3073], StatRequests.size() = 1 2024-11-21T08:54:12.044119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2444:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.044137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.044198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2449:3084], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.045252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:54:12.195294Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:54:12.195314Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:54:12.267849Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2439:3075], schemeshard count = 1 2024-11-21T08:54:12.527249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2451:3086], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:54:12.637761Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3173]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:54:12.637809Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:54:12.637816Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3173], StatRequests.size() = 1 2024-11-21T08:54:12.647592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yvmt83aqwn4jc3nzh3nbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA5NzQ4MzItZmU1NmY4YTktOTRkMzZlODgtNzJhNDcwMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:54:12.667768Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2639:2996] 2024-11-21T08:54:12.668374Z node 2 :STATISTI ... 521Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:47.033767Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:47.033890Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:50.947840Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:55:53.828954Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:55:53.829079Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:55:57.602079Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:00.205106Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:00.205250Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:04.073500Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:06.849874Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:06.850003Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:10.606718Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:13.265705Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:13.265838Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:17.167161Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:19.871704Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:19.871898Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:23.802185Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:26.418256Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:26.418377Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:30.329080Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:31.974880Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:56:31.974925Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 
72057594046644480 2024-11-21T08:56:31.974947Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:56:31.974952Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:56:33.294165Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:33.294330Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:56:33.378175Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T08:56:33.378207Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 190.000000s, at schemeshard: 72075186224037889 2024-11-21T08:56:33.378350Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2024-11-21T08:56:33.389856Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:34.782731Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:34.782782Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:34.782789Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T08:56:34.782801Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:56:34.782806Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:34.782963Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:34.786356Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:34.787411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6481:4588], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:34.787432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6490:4593], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:34.787443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:34.789953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:34.802760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6495:4596], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:35.046721Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6614:4660]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:35.046800Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:35.046818Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6616:4662] 2024-11-21T08:56:35.046834Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6616:4662] 2024-11-21T08:56:35.046937Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:6617:4663] 2024-11-21T08:56:35.046973Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:6617:4663], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:56:35.046986Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:35.047027Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6616:4662], server id = [2:6617:4663], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:35.047055Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:35.047073Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6614:4660], StatRequests.size() = 1 2024-11-21T08:56:35.068031Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjJhYTllZjEtNmI1NjMwZjUtNGRhMWQ5YjQtY2QyOTA2MjM=, TxId: 2024-11-21T08:56:35.068067Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjJhYTllZjEtNmI1NjMwZjUtNGRhMWQ5YjQtY2QyOTA2MjM=, TxId: 2024-11-21T08:56:35.068253Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:35.080036Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:35.080064Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:35.152279Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:35.152314Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:35.214711Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6616:4662], schemeshard count = 1 2024-11-21T08:56:36.192894Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:36.192934Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:56:36.192939Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:37.382729Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:56:37.414111Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:37.414169Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T08:56:37.414176Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:37.414313Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:37.414873Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:37.418630Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmU4YWY2YjktNzNjYTQ4NGItMjM2M2MxZDYtODJiNTViYTk=, TxId: 2024-11-21T08:56:37.418656Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmU4YWY2YjktNzNjYTQ4NGItMjM2M2MxZDYtODJiNTViYTk=, TxId: 2024-11-21T08:56:37.418760Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:37.430436Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:37.430483Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2637:3187] 2024-11-21T08:56:37.430878Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6740:4734]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:37.431614Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:37.431628Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:37.432246Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:37.432263Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:37.432272Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:37.432823Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T08:56:37.432867Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> TPQTest::TestSetClientOffset [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant |90.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> 
DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2024-11-21T08:56:36.519422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:36.519447Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:36.519467Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:36.522566Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:36.522709Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:56:36.522776Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:36.523742Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:36.533330Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:36.533487Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:56:36.533647Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:56:36.533663Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:56:36.533671Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:56:36.533715Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:56:36.537587Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:56:36.537661Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:56:36.537708Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:56:36.537713Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:56:36.537717Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:56:36.537724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:36.537825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.537832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.537857Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:56:36.537878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:56:36.537933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:36.537941Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:56:36.537948Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:56:36.537953Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:56:36.537957Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:56:36.537962Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:56:36.537967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:36.544244Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.544266Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.544274Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:56:36.544614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:56:36.544623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:56:36.544640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:56:36.544666Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:56:36.544675Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:56:36.544684Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:56:36.544691Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:36.544695Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:56:36.544700Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:56:36.544704Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:36.544759Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:56:36.544761Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:56:36.544763Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:56:36.544766Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:36.544774Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:56:36.544777Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:56:36.544779Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:56:36.544781Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:36.544784Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:56:36.565881Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:56:36.565908Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:36.565915Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:36.565929Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: 
PREPARED 2024-11-21T08:56:36.565943Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:56:36.566060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.566067Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.566074Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:56:36.566092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:56:36.566097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:56:36.566133Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:36.566142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.566146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:56:36.566151Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:56:36.566775Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:56:36.566786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:36.566829Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.566834Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.566841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:36.566848Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:56:36.566852Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:56:36.566857Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:56:36.566861Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:56:36.566865Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.566869Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:56:36.566871Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:56:36.566874Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:56:36.566905Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:56:36.566907Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.566909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:56:36.566912Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:56:36.566914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:56:36.566920Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:36.566923Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:56:36.566925Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:56:36.566927Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:56:36.566935Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:56:36.566938Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:56:36.566940Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:56:36.566943Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.566945Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:56:36.566948Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... lt to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:38.784340Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.784352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.784355Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2024-11-21T08:56:38.784359Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:38.784363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.784374Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.784376Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2024-11-21T08:56:38.784381Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:38.784383Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.784396Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.784399Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2024-11-21T08:56:38.784403Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:38.784405Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.784416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.784419Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T08:56:38.784423Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to 
client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:38.784426Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.784429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.784431Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T08:56:38.784460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2024-11-21T08:56:38.784463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784465Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2024-11-21T08:56:38.784473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2024-11-21T08:56:38.784475Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784477Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2024-11-21T08:56:38.784486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2024-11-21T08:56:38.784488Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784490Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2024-11-21T08:56:38.784496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2024-11-21T08:56:38.784498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784501Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2024-11-21T08:56:38.784510Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T08:56:38.784512Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784514Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T08:56:38.784520Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T08:56:38.784522Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784524Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T08:56:38.784532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T08:56:38.784534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784536Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T08:56:38.784543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T08:56:38.784545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784547Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T08:56:38.784556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T08:56:38.784557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784559Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T08:56:38.784566Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T08:56:38.784568Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784570Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T08:56:38.784577Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T08:56:38.784579Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784581Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T08:56:38.784589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T08:56:38.784590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784594Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T08:56:38.784599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T08:56:38.784601Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784603Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T08:56:38.784613Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T08:56:38.784615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784617Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T08:56:38.784625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T08:56:38.784627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784629Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T08:56:38.784637Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:56:38.784639Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.784641Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T08:56:38.795776Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:38.795796Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T08:56:38.795812Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2024-11-21T08:56:38.795823Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:56:38.795828Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:38.795887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:56:38.795891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:38.795895Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2024-11-21T08:56:36.584429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:36.584460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:36.584485Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:36.587708Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:36.587875Z node 1 :TX_DATASHARD INFO: 
TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:56:36.587960Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:36.588932Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:36.598125Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:36.598286Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:56:36.598447Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:56:36.598461Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:56:36.598469Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:56:36.598515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:56:36.602286Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:56:36.602383Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:56:36.602435Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:56:36.602442Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:56:36.602447Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:56:36.602452Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:36.602586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.602594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.602627Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:56:36.602653Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:56:36.602725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:36.602733Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:56:36.602742Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:56:36.602747Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:56:36.602751Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:56:36.602757Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:56:36.602763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:36.612753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.612787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.612799Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:56:36.613281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME 
SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:56:36.613298Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:56:36.613329Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:56:36.613370Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:56:36.613383Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:56:36.613394Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:56:36.613403Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:36.613408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:56:36.613414Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:56:36.613418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:36.613518Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:56:36.613523Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:56:36.613527Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:56:36.613531Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:36.613544Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:56:36.613547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:56:36.613550Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:56:36.613554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:36.613559Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:56:36.634938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:56:36.634970Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:36.634978Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:36.634995Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:56:36.635014Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:56:36.635175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.635183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:36.635192Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:56:36.635216Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:56:36.635222Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:56:36.635280Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:36.635307Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.635311Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:56:36.635317Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:56:36.636095Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:56:36.636115Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:36.636181Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.636187Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:36.636196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:36.636224Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:56:36.636229Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:56:36.636239Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:56:36.636245Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:56:36.636251Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.636256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:56:36.636260Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:56:36.636264Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:56:36.636318Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:56:36.636322Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.636325Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:56:36.636329Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:56:36.636333Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:56:36.636345Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:36.636349Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:56:36.636352Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:56:36.636356Z node 1 :TX_DATASHARD TRACE: Trying to 
execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:56:36.636372Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:56:36.636376Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:56:36.636379Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:56:36.636385Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:36.636388Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:56:36.636393Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... t heartbeats: at tablet# 9437186 2024-11-21T08:56:39.350746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:39.350748Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2024-11-21T08:56:39.350752Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.350756Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T08:56:39.350758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:39.350769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:39.350771Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T08:56:39.350775Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.350779Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:56:39.350781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:39.350791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T08:56:39.350793Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T08:56:39.350797Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.350800Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:56:39.350802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:56:39.350852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T08:56:39.350856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350860Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 134 2024-11-21T08:56:39.350877Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T08:56:39.350881Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350884Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T08:56:39.350892Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T08:56:39.350894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350896Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T08:56:39.350904Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T08:56:39.350905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350907Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T08:56:39.350920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T08:56:39.350923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350926Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T08:56:39.350934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:56:39.350937Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350939Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T08:56:39.350949Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:56:39.350951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.350953Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T08:56:39.350964Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.350967Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.350973Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 
2024-11-21T08:56:39.350976Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:56:39.350979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.350992Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.350995Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.350999Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:39.351003Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.351015Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.351017Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.351022Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.351025Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:56:39.351027Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.351038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.351041Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.351044Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.351048Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:56:39.351050Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.351061Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.351064Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.351068Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:56:39.351070Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.351081Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:39.351083Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T08:56:39.351088Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T08:56:39.351091Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:56:39.351093Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:39.351118Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:56:39.351121Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.351125Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T08:56:39.351139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:56:39.351143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.351146Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T08:56:39.351159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:56:39.351161Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.351163Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T08:56:39.351169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:56:39.351171Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:56:39.351172Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestSetClientOffset [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T08:53:52.497454Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.497474Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
2024-11-21T08:53:52.500432Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.502201Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T08:53:52.502353Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T08:53:52.502784Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T08:53:52.503079Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T08:53:52.503369Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.504510Z node 1 :PERSQUEUE INFO: new Cookie owner1|76ae679f-7b3e512d-f19e9e5e-aa61c41e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T08:53:52.504582Z node 1 :PERSQUEUE INFO: new Cookie owner2|f0ed887a-7230488-cde99ccd-554aeb28_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.506280Z node 1 :PERSQUEUE INFO: new Cookie owner1|97d70e60-54692e90-f9e71a67-9a3cd848_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T08:53:52.741401Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.741433Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is 
[2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.744424Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.744662Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T08:53:52.744793Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T08:53:52.745465Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T08:53:52.745836Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T08:53:52.746309Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.747540Z node 2 :PERSQUEUE INFO: new Cookie owner1|98e70351-d1dc7530-47942cdf-ddf3a1f0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T08:53:52.747596Z node 2 :PERSQUEUE INFO: new Cookie owner2|10efa17-ab7f4b4f-47e0675-7e4dab58_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.748938Z node 2 :PERSQUEUE INFO: new Cookie owner1|e1ab4bb2-45265a62-f58139d2-d582767c_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2024-11-21T08:53:52.978694Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.978713Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for 
TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:177:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.982175Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:52.982374Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2024-11-21T08:53:52.982476Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2024-11-21T08:53:52.982915Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:53:52.983186Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2024-11-21T08:53:52.983447Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.984568Z node 3 :PERSQUEUE INFO: new Cookie owner1|e159619c-41fa7812-b9f249f2-d087e6bf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T08:53:52.984628Z node 3 :PERSQUEUE INFO: new Cookie owner2|9bfdfc0d-226eafed-4bdb0c80-35257e2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:53:52.986009Z node 3 :PERSQUEUE INFO: new Cookie owner1|5cb471c3-f08b6de7-bf33256d-b4af2761_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2024-11-21T08:53:53.220101Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:53:53.220120Z node 4 :PERSQUEUE INF ... RSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [157:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:37.566676Z node 157 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [157:105:2137]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:235:2057] recipient: [157:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:238:2057] recipient: [157:14:2061] Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:239:2057] recipient: [157:237:2239] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:240:2240] sender: [157:241:2057] recipient: [157:237:2239] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:37.574327Z node 157 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:37.574343Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T08:56:37.574424Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [157:291:2283] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:56:37.574863Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [157:292:2284] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:56:37.575814Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [157:291:2283] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T08:56:37.576258Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [157:292:2284] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR !Reboot 72057594037927937 (actor [157:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [157:105:2137]) tablet resolver refreshed! 
new actor is[157:240:2240] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:240:2240] sender: [157:324:2057] recipient: [157:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [158:101:2057] recipient: [158:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [158:101:2057] recipient: [158:99:2133] Leader for TabletID 72057594037927937 is [158:105:2137] sender: [158:106:2057] recipient: [158:99:2133] 2024-11-21T08:56:38.211920Z node 158 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:38.211945Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [158:147:2057] recipient: [158:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [158:147:2057] recipient: [158:145:2168] Leader for TabletID 72057594037927938 is [158:151:2172] sender: [158:152:2057] recipient: [158:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [158:105:2137] sender: [158:175:2057] recipient: [158:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:38.216018Z node 158 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:38.216201Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 158 actor [158:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 158 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 158 ReadRuleGenerations: 158 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 158 Important: false } Consumers { Name: "user1" Generation: 158 Important: false } 2024-11-21T08:56:38.216352Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [158:182:2195] 2024-11-21T08:56:38.216958Z node 158 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [158:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:56:38.217554Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [158:183:2196] 2024-11-21T08:56:38.218007Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [158:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:38.219855Z node 158 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [159:101:2057] recipient: [159:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [159:101:2057] recipient: [159:99:2133] Leader for TabletID 72057594037927937 is [159:105:2137] sender: [159:106:2057] recipient: [159:99:2133] 2024-11-21T08:56:38.450387Z node 159 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:38.450415Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [159:147:2057] recipient: [159:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [159:147:2057] recipient: [159:145:2168] Leader for TabletID 72057594037927938 is [159:151:2172] sender: [159:152:2057] recipient: [159:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [159:105:2137] sender: [159:177:2057] recipient: [159:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:38.455297Z node 159 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:56:38.455523Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 159 actor [159:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 159 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 159 ReadRuleGenerations: 159 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 159 Important: false } Consumers { Name: "user1" Generation: 159 Important: false } 2024-11-21T08:56:38.455688Z 
node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [159:184:2197] 2024-11-21T08:56:38.456407Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [159:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T08:56:38.457100Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [159:185:2198] 2024-11-21T08:56:38.457632Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [159:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T08:56:38.460042Z node 159 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> KqpQueryPerf::AggregateToScalar-QueryService >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> KqpWorkload::STOCK >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain |90.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19844, MsgBus: 15259 2024-11-21T08:56:40.852565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653876325201479:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:40.852734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003400/r3tmp/tmpyMnmEB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19844, node 1 2024-11-21T08:56:40.908967Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:40.910630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2024-11-21T08:56:40.910646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:40.910648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:40.910682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15259 TClient is connected to server localhost:15259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:40.953705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:40.953734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:40.954764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:40.956352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:40.977821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:41.038386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:41.051781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:41.107935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:41.128427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653880620170321:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.128458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.160087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.166618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.176714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.183774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.238363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.247117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.255718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653880620170825:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.255744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653880620170830:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.255748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.256296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:41.259806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653880620170832:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2024-11-21T08:55:35.563442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653595420740255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:35.563810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e65/r3tmp/tmpEVzJ7v/pdisk_1.dat 2024-11-21T08:55:35.609275Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14904 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T08:55:35.613261Z node 1 :TX_PROXY DEBUG: actor# [1:7439653595420740485:2100] Handle TEvNavigate describe path dc-1 2024-11-21T08:55:35.613283Z node 1 :TX_PROXY DEBUG: Actor# [1:7439653595420740750:2246] HANDLE EvNavigateScheme dc-1 2024-11-21T08:55:35.613314Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439653595420740524:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:55:35.613330Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439653595420740524:2116], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T08:55:35.613367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T08:55:35.613591Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740207:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653595420740755:2247] 2024-11-21T08:55:35.613598Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740210:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653595420740756:2247] 2024-11-21T08:55:35.613610Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653595420740210:2052] Subscribe: subscriber# [1:7439653595420740756:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.613610Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653595420740207:2049] Subscribe: subscriber# [1:7439653595420740755:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.613618Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740213:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439653595420740757:2247] 2024-11-21T08:55:35.613623Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439653595420740213:2055] Subscribe: subscriber# [1:7439653595420740757:2247], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T08:55:35.613624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740756:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: 
sender# [1:7439653595420740210:2052] 2024-11-21T08:55:35.613628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740755:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653595420740207:2049] 2024-11-21T08:55:35.613629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740210:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653595420740756:2247] 2024-11-21T08:55:35.613630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740757:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653595420740213:2055] 2024-11-21T08:55:35.613632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740207:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653595420740755:2247] 2024-11-21T08:55:35.613634Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740213:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439653595420740757:2247] 2024-11-21T08:55:35.613634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653595420740753:2247] 2024-11-21T08:55:35.613639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653595420740752:2247] 2024-11-21T08:55:35.613647Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439653595420740751:2247][/dc-1] Set up state: owner# [1:7439653595420740524:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.613674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439653595420740754:2247] 2024-11-21T08:55:35.613683Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439653595420740751:2247][/dc-1] Path was already updated: owner# [1:7439653595420740524:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T08:55:35.613688Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740755:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740752:2247], cookie# 1 2024-11-21T08:55:35.613690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740756:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740753:2247], cookie# 1 2024-11-21T08:55:35.613692Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740757:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740754:2247], cookie# 1 
2024-11-21T08:55:35.613695Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740207:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740755:2247], cookie# 1 2024-11-21T08:55:35.613698Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740210:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740756:2247], cookie# 1 2024-11-21T08:55:35.613700Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439653595420740213:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439653595420740757:2247], cookie# 1 2024-11-21T08:55:35.613703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740755:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740207:2049], cookie# 1 2024-11-21T08:55:35.613705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740756:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740210:2052], cookie# 1 2024-11-21T08:55:35.613714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439653595420740757:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740213:2055], cookie# 1 2024-11-21T08:55:35.613718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740752:2247], cookie# 1 2024-11-21T08:55:35.613721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T08:55:35.613724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740753:2247], cookie# 1 2024-11-21T08:55:35.613726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T08:55:35.613729Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439653595420740754:2247], cookie# 1 2024-11-21T08:55:35.613730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439653595420740751:2247][/dc-1] Unexpected sync response: sender# [1:7439653595420740754:2247], cookie# 1 2024-11-21T08:55:35.619901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439653595420740524:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T08:55:35.619944Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439653595420740524:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... self# [2:7439653613434355106:2147], cacheItem# { Subscriber: { Subscriber: [2:7439653613434355607:2506] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:39.930959Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653871132394661:3159], recipient# [2:7439653871132394660:2452], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.477169Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439653617302083740:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.477224Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653617302083740:2100], cacheItem# { Subscriber: { Subscriber: [3:7439653621597051506:2344] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:40.477245Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653875000122253:2480], recipient# [3:7439653875000122252:2441], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.677869Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439653613434355106:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.677920Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439653613434355106:2147], cacheItem# { Subscriber: { Subscriber: [2:7439653617729323059:2600] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:40.677942Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653875427361965:3163], recipient# [2:7439653875427361964:2453], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.702008Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439653617302083740:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.702063Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653617302083740:2100], cacheItem# { Subscriber: { Subscriber: [3:7439653617302083897:2193] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 
CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:40.702089Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653875000122255:2481], recipient# [3:7439653875000122254:2442], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.931357Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439653613434355106:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:40.931404Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439653613434355106:2147], cacheItem# { Subscriber: { Subscriber: [2:7439653613434355607:2506] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:40.931431Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653875427361970:3164], recipient# [2:7439653875427361969:2454], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:41.477618Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439653617302083740:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:41.477670Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439653617302083740:2100], cacheItem# { Subscriber: { Subscriber: [3:7439653621597051506:2344] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:41.477696Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439653879295089553:2482], recipient# [3:7439653879295089552:2443], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:41.678354Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439653613434355106:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T08:56:41.678403Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439653613434355106:2147], cacheItem# { Subscriber: { Subscriber: [2:7439653617729323059:2600] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T08:56:41.678432Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439653879722329274:3168], recipient# [2:7439653879722329273:2455], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> KqpQueryPerf::Replace+QueryService >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> VDiskTest::HugeBlobWrite [GOOD] >> KqpQueryPerf::RangeRead+QueryService >> TraverseColumnShard::TraverseColumnTable [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2024-11-21T08:54:07.926077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:07.926111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:07.926120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a48/r3tmp/tmpBEY8sz/pdisk_1.dat 2024-11-21T08:54:08.000290Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4418, node 1 2024-11-21T08:54:08.091972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.091988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.091992Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.092049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.095877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.170722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.170746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.182047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22016 2024-11-21T08:54:08.578156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.348829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.348856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.381290Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.382125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.427784Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.436370Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.436390Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.441176Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.441218Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.441236Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.441239Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.441243Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.441248Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.441251Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.441257Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.441328Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.614117Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.614148Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2551], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.615426Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1766:2558] 2024-11-21T08:54:09.617346Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1814:2581] 2024-11-21T08:54:09.617380Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1814:2581], schemeshard id = 72075186224037889 2024-11-21T08:54:09.618273Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.620774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.620784Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.620790Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.620896Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.620907Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.622590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.623687Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.623706Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.625769Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.637025Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.668867Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.754993Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:09.931588Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.479724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2143:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.479760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.482492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.517305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.517351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.517382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.517396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.517410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.517435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.517452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.517465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.517481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.517493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.517506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.517518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.524748Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.524790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.524831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.524855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.524875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.524898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... [72075186224037897] Subscribed for config changes 2024-11-21T08:56:42.411127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:56:42.411190Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:42.411199Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:42.411282Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:42.411289Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:42.411562Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:42.454782Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:42.454826Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:42.454991Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8431:6339], server id = [2:8436:6344], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:42.455119Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8431:6339], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455386Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8432:6340], server id = [2:8437:6345], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:42.455395Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8432:6340], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455413Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:42.455511Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8433:6341], server id = [2:8438:6346], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:42.455517Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8433:6341], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455525Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8431:6339], server id = [2:8436:6344], tablet id = 72075186224037899 2024-11-21T08:56:42.455528Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.455549Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8434:6342], server id = [2:8439:6347], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:42.455553Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8434:6342], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455610Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8435:6343], server id = [2:8440:6348], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:42.455615Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8435:6343], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:42.455731Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:42.455755Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:42.455792Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8441:6349], server id = [2:8442:6350], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:42.455798Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8441:6349], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455815Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:42.455887Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8432:6340], server id = [2:8437:6345], tablet id = 72075186224037900 2024-11-21T08:56:42.455890Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.455899Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8433:6341], server id = [2:8438:6346], tablet id = 72075186224037901 2024-11-21T08:56:42.455901Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.455934Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8434:6342], server id = [2:8439:6347], tablet id = 72075186224037902 2024-11-21T08:56:42.455936Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.455954Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8443:6351], server id = [2:8446:6354], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:42.455961Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8443:6351], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.455976Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:42.456026Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8435:6343], server id = [2:8440:6348], tablet id = 72075186224037903 2024-11-21T08:56:42.456029Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.456044Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8444:6352], server id = [2:8449:6357], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:42.456048Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8444:6352], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.456063Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8445:6353], server id = [2:8448:6356], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:42.456066Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8445:6353], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.456121Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8447:6355], server id = [2:8450:6358], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:42.456125Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8447:6355], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.456133Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:42.456239Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8441:6349], server id = [2:8442:6350], tablet id = 72075186224037904 2024-11-21T08:56:42.456242Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.456250Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:42.456281Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:42.456303Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:42.456306Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:42.456346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:42.456371Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:42.456412Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:42.456440Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8443:6351], server id = [2:8446:6354], tablet id = 72075186224037905 2024-11-21T08:56:42.456442Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.456806Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8444:6352], server id = [2:8449:6357], tablet id = 72075186224037906 2024-11-21T08:56:42.456811Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.456902Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:42.456964Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8445:6353], server id = [2:8448:6356], tablet id = 72075186224037907 2024-11-21T08:56:42.456968Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.457080Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8447:6355], server id = [2:8450:6358], tablet id = 72075186224037908 2024-11-21T08:56:42.457083Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.460049Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8467:6375]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:42.460107Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:42.460112Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8467:6375], StatRequests.size() = 1 2024-11-21T08:56:42.482306Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzM0ZmQ1OWMtMjVmYmVkMTMtN2EwOTgzOWMtOWYwNWM5ZDk=, TxId: 2024-11-21T08:56:42.482332Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzM0ZmQ1OWMtMjVmYmVkMTMtN2EwOTgzOWMtOWYwNWM5ZDk=, TxId: 2024-11-21T08:56:42.482481Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:42.493287Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8477:6381] 2024-11-21T08:56:42.493321Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8383:6311], server id = [2:8477:6381], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:42.493352Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8477:6381], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T08:56:42.493370Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8478:6382] 2024-11-21T08:56:42.493406Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8478:6382], schemeshard id = 72075186224037889 2024-11-21T08:56:42.504811Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:42.504837Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T08:56:42.609742Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8483:6387]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:42.609842Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:42.609846Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:42.610390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:42.610399Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:42.610407Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:42.611822Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TTxAllocatorClientTest::Boot >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::ZeroRange |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2024-11-21T08:54:08.048746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:08.048794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:08.048803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00286a/r3tmp/tmpEnpdbw/pdisk_1.dat 2024-11-21T08:54:08.119647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20244, node 1 2024-11-21T08:54:08.211961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.211985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.211989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.212077Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.218861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.293831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.293863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.305493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22892 2024-11-21T08:54:08.700876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.436329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.436353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.468687Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.469384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.514595Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.520483Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.520500Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.524794Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.524885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.524902Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.524907Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.524912Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.524916Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.524920Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.524924Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.524993Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.696929Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.696951Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.697727Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:09.698999Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:09.699066Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.699554Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.702488Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.702498Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.702506Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.703644Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.703662Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.704559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.705720Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.705739Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.707555Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.718839Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.740066Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.855918Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:10.021344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.731073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.731107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.733870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.768370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.768416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.768442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.768459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.768472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.768488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.768501Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.768514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.768529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.768542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.768555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.768567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.772962Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.772992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.773016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.773029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.773042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.773056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... ode 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:40.259197Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:40.341707Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:40.341738Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:40.413739Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8273:6227], schemeshard count = 1 2024-11-21T08:56:42.736429Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:42.736482Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:42.736495Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:42.736502Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:42.738133Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:42.750488Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:42.750691Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:42.750717Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:42.751030Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:42.762653Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:42.762744Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:42.763001Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8402:6301], server id = [2:8407:6306], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:42.763171Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8402:6301], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.763245Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8403:6302], server id = [2:8408:6307], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:42.763253Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8403:6302], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.763654Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8404:6303], server id = [2:8409:6308], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:42.763670Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8404:6303], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.763709Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:42.763856Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:42.763885Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8405:6304], server id = [2:8410:6309], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:42.763895Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8405:6304], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.763951Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8406:6305], server id = [2:8411:6310], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:42.763958Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8406:6305], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.763970Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:42.764101Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8402:6301], server id = [2:8407:6306], tablet id = 72075186224037899 2024-11-21T08:56:42.764108Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764135Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8403:6302], server id = [2:8408:6307], tablet id = 72075186224037900 2024-11-21T08:56:42.764138Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764168Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8412:6311], server id = [2:8414:6313], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:42.764176Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8412:6311], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.764228Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:42.764264Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:42.764351Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8404:6303], server id = [2:8409:6308], tablet id = 72075186224037901 2024-11-21T08:56:42.764355Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764392Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8413:6312], server id = [2:8416:6315], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:42.764400Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8413:6312], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.764409Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8415:6314], server id = [2:8417:6316], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:42.764415Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8415:6314], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.764562Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8405:6304], server id = [2:8410:6309], tablet id = 72075186224037902 2024-11-21T08:56:42.764567Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764585Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:42.764620Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8406:6305], server id = [2:8411:6310], tablet id = 72075186224037903 2024-11-21T08:56:42.764624Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764631Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8418:6317], server id = [2:8420:6319], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:42.764639Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8418:6317], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.764667Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:42.764741Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:42.764770Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8419:6318], server id = [2:8421:6320], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:42.764777Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8419:6318], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:42.764798Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8412:6311], server id = [2:8414:6313], tablet id = 72075186224037904 2024-11-21T08:56:42.764802Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764818Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:42.764885Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8415:6314], server id = [2:8417:6316], tablet id = 72075186224037906 2024-11-21T08:56:42.764890Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764917Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8413:6312], server id = [2:8416:6315], tablet id = 72075186224037905 2024-11-21T08:56:42.764920Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.764937Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:42.764942Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:42.764965Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8418:6317], server id = [2:8420:6319], tablet id = 72075186224037907 2024-11-21T08:56:42.764968Z node 2 :STATISTICS DEBUG: Skip 
EvClientDestroyed 2024-11-21T08:56:42.764997Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:42.765033Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:42.765110Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:42.765150Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8419:6318], server id = [2:8421:6320], tablet id = 72075186224037908 2024-11-21T08:56:42.765153Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:42.765762Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:42.770113Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8438:6337]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:42.770171Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:42.770179Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8438:6337], StatRequests.size() = 1 2024-11-21T08:56:42.798350Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTM4M2IwYzMtNTYwM2VjMjktZDVhNGRkMGUtYjgxMmMzZGQ=, TxId: 2024-11-21T08:56:42.798398Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTM4M2IwYzMtNTYwM2VjMjktZDVhNGRkMGUtYjgxMmMzZGQ=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:42.798587Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8446:6343]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:42.798659Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:42.798784Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:42.798790Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:42.799534Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:42.799551Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:42.799562Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:42.801407Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpQueryPerf::Replace+QueryService [GOOD] >> KqpQueryPerf::RangeRead+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T08:56:37.199491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:56:37.200119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:56:37.200152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042d9/r3tmp/tmpBv783K/pdisk_1.dat 2024-11-21T08:56:37.473649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:56:37.497331Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:37.549508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:37.555620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:37.572592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:37.725796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:37.980121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:38.401970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:38.401999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2704], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:38.402009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:38.412992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:38.588880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:56:40.633578Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6z03qm50v73jjbnd1xa0my, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUzZWY4OGEtNzVmZDRjNmMtN2YzNzQzMTUtOWQyMTY2NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:40.717648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6z06077wv47y2q5tpzxs0h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA3MWI4YWItMmUxYjczMDQtNjExYmExOWMtMzE5NGE5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:41.086089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6z062s01ab7yza97ykqc5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIxMjBmNWUtNWYwNWVhOTUtN2Y5ZmY5ZjItZDM0ZDFlNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2024-11-21T08:56:41.354564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6z06m17szecgf12r11fm1x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIxMjBmNWUtNWYwNWVhOTUtN2Y5ZmY5ZjItZDM0ZDFlNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2024-11-21T08:56:41.373060Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6z06mpcda2ntqx6d96pahg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliNjk4NDYtZTkwOTBiNjMtYzNlNjFkNTUtNGJiOGM1MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2024-11-21T08:56:41.373534Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1128:2804] TxId: 281474976715665. Ctx: { TraceId: 01jd6z06mpcda2ntqx6d96pahg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliNjk4NDYtZTkwOTBiNjMtYzNlNjFkNTUtNGJiOGM1MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2024-11-21T08:56:41.374901Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjliNjk4NDYtZTkwOTBiNjMtYzNlNjFkNTUtNGJiOGM1MDU=, ActorId: [1:1028:2804], ActorState: ExecuteState, TraceId: 01jd6z06mpcda2ntqx6d96pahg, Create QueryResponse for error on request, msg: 2024-11-21T08:56:41.375470Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDIxMjBmNWUtNWYwNWVhOTUtN2Y5ZmY5ZjItZDM0ZDFlNDM=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jd6z06m17szecgf12r11fm1x, Create QueryResponse for error on request, msg: 2024-11-21T08:56:41.375596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6z06mpcda2ntqx6d96pahg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjliNjk4NDYtZTkwOTBiNjMtYzNlNjFkNTUtNGJiOGM1MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:41.386067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd6z06m17szecgf12r11fm1x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIxMjBmNWUtNWYwNWVhOTUtN2Y5ZmY5ZjItZDM0ZDFlNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:56:41.578833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd6z06sj5v02tkwerqd2wp3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI3Y2NhMDMtYTQyMDRjMzUtNjBiNGRlMDctMjUxZGI3YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T08:56:42.053030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:56:42.053066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:56:42.053088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042d9/r3tmp/tmpk1rha4/pdisk_1.dat 2024-11-21T08:56:42.127103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:56:42.139876Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:42.181558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:42.181586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:42.192037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:42.294914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:42.497547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T08:56:42.749797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:42.749814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:42.749820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:42.750502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:42.926480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:56:42.965605Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6z07zxbq5krtxnss62jhdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTIxNDQ1YmUtOWVhODIxYzQtMWVkODgxMDctNDBhZDE4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 8589937235 } TxBody: " \0018\000`\200\200\200\005j\240\006\010\001\022\217\006\010\001\022\024\n\022\ty\003\000\000\000\000\000\000\021S\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\00 ... 0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001%\n\007TraceId\022\03201jd6z07zxbq5krtxnss62jhdg\222\001\026\n\022CurrentExecutionId\022\000\222\001V\n\tSessionId\022Iydb://session/3?node_id=2&id=MTIxNDQ1YmUtOWVhODIxYzQtMWVkODgxMDctNDBhZDE4\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\227\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 61 } } 2024-11-21T08:56:42.990083Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6z0871ba64e630n8x09ts7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjczYTdmNmQtOGVmNTM2YzItMTZhOTdkY2ItZjg3ODZjYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 911 RawX2: 8589937308 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\217\003\000\000\000\000\000\000\021\234\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=NjczYTdmNmQtOGVmNTM2YzItMTZhOTdkY2ItZjg3ODZjYjg=\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jd6z0871ba64e630n8x09ts7\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 72 } } ===== Begin SELECT 2024-11-21T08:56:43.039776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6z087t6qjtfxnmvctf4v66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk4Y2VkNTAtYzg5ODkxYTctM2IwMWRjOGYtNWNiMjkzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T08:56:43.055915Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd6z08923rkere5ds93c59ab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk4Y2VkNTAtYzg5ODkxYTctM2IwMWRjOGYtNWNiMjkzMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\021\n\006PoolId\022\007default\222\001\014\n\010Database\022\000\222\001%\n\007TraceId\022\03201jd6z08923rkere5ds93c59ab\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=Zjk4Y2VkNTAtYzg5ODkxYTctM2IwMWRjOGYtNWNiMjkzMTc=\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=Zjk4Y2VkNTAtYzg5ODkxYTctM2IwMWRjOGYtNWNiMjkzMTc=\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001%\n\007TraceId\022\03201jd6z08923rkere5ds93c59ab\222\001\026\n\022CurrentExecutionId\022\000\222\001\014\n\010Database\022\000\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2035 MaxStep: 32035 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 154 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2035 MaxStep: 32035 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 51 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 167 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 183 } } ===== Waiting for commit response ===== Last SELECT 2024-11-21T08:56:43.351225Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd6z08jb3whjj4gn5wak1v58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzY0ZmQyODctNDMxZDY1MmUtNjRmMmNkNTktZjhjODQ2N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2024-11-21T08:56:03.988076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:03.988098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:03.988102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:03.988106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:03.988110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:03.988113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:03.988119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:03.988178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:03.995479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:03.995496Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:03.997611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:03.997662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:03.997682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:03.999007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:03.999132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:03.999200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:03.999247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:03.999624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:03.999831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:03.999838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T08:56:03.999863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:03.999867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:03.999871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:03.999882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.000873Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:342:2060] recipient: [1:17:2064] 2024-11-21T08:56:04.012790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:04.012872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.012924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:04.012960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:04.012965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.013729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:04.013748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:04.013796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.013804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:04.013807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:04.013811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:04.014103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.014109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:04.014111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:04.014341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.014346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.014350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.014355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.014731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:04.015026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:04.015063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:04.015196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:04.015212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:04.015218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.015254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:04.015258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.015294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:04.015303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:04.015594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:04.015598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:04.015625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:04.015628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:309:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:04.015692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.015696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:04.015705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:04.015708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.015712Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:04.015716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.015719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:04.015721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:04.015727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:04.015731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:04.015734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:04.015912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:04.015920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:04.015923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:04.015926Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:04.015929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:04.015940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ode 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T08:56:43.060403Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:43.060405Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2024-11-21T08:56:43.060418Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:940:2718] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T08:56:43.060429Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:940:2718]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T08:56:43.060433Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T08:56:43.060437Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2024-11-21T08:56:43.060442Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2024-11-21T08:56:43.060486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:43.060490Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:43.060493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T08:56:43.060497Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2024-11-21T08:56:43.060504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:43.060506Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2024-11-21T08:56:43.060509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2024-11-21T08:56:43.060513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T08:56:43.060537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:56:43.060539Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:43.060541Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2024-11-21T08:56:43.060545Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:951:2727] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T08:56:43.060558Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:951:2727]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T08:56:43.060560Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T08:56:43.060564Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2024-11-21T08:56:43.060568Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2024-11-21T08:56:43.060589Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T08:56:43.060593Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T08:56:43.060597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T08:56:43.060600Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T08:56:43.060605Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:43.060608Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T08:56:43.060611Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:56:43.060614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T08:56:43.060622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:565:2396] message: TxId: 104 2024-11-21T08:56:43.060627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T08:56:43.060633Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T08:56:43.060637Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T08:56:43.060657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2024-11-21T08:56:43.060662Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T08:56:43.060664Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T08:56:43.060669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2024-11-21T08:56:43.060672Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T08:56:43.060675Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T08:56:43.060681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2024-11-21T08:56:43.061036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:43.061054Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:43.061064Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:565:2396] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T08:56:43.061090Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T08:56:43.061095Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1001:2765] 2024-11-21T08:56:43.061129Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1003:2767], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:43.061135Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:43.061138Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 
2024-11-21T08:56:43.061290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:541:2100], Recipient [7:229:2151] 2024-11-21T08:56:43.061293Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:43.061759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:43.061852Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, at schemeshard: 72057594046678944 2024-11-21T08:56:43.061857Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, at schemeshard: 72057594046678944 2024-11-21T08:56:43.061913Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:43.062252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:43.062279Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2024-11-21T08:56:43.062283Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T08:56:43.062344Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T08:56:43.062350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T08:56:43.062407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1065:2829], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:43.062412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:43.062416Z node 7 :FLAT_TX_SCHEMESHARD 
TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T08:56:43.062435Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:565:2396], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2024-11-21T08:56:43.062439Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T08:56:43.062450Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T08:56:43.062470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:56:43.062473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1063:2827] 2024-11-21T08:56:43.062489Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1065:2829], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:43.062492Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:43.062495Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite [GOOD] Test command err: Put id# [95:1:1:0:0:589824:1] totalSize# 0 blobValueIndex# 39 Trim Put id# [53:1:1:0:0:1572864:1] totalSize# 589824 blobValueIndex# 54 Put id# [51:1:1:0:0:589824:1] totalSize# 2162688 blobValueIndex# 34 Put id# [83:1:1:0:0:40960:1] totalSize# 2752512 blobValueIndex# 21 Change MinHugeBlobSize# 12288 Put id# [8:1:1:0:0:1572864:1] totalSize# 2793472 blobValueIndex# 57 Put id# [87:1:1:0:0:1048576:1] totalSize# 4366336 blobValueIndex# 45 Put id# [16:1:1:0:0:1024:1] totalSize# 5414912 blobValueIndex# 12 Put id# [76:1:1:0:0:1048576:1] totalSize# 5415936 blobValueIndex# 49 Put id# [55:1:1:0:0:1572864:1] totalSize# 6464512 blobValueIndex# 59 Put id# [49:1:1:0:0:10:1] totalSize# 8037376 blobValueIndex# 5 Put id# [18:1:1:0:0:40960:1] totalSize# 8037386 blobValueIndex# 23 Put id# [70:1:1:0:0:10:1] totalSize# 8078346 blobValueIndex# 0 Put id# [7:1:1:0:0:1048576:1] totalSize# 8078356 blobValueIndex# 42 Change MinHugeBlobSize# 61440 Restart Put id# [83:1:2:0:0:10:1] totalSize# 9126932 blobValueIndex# 0 Change MinHugeBlobSize# 8192 Trim Put id# [78:1:1:0:0:10:1] totalSize# 9126942 blobValueIndex# 4 Put id# [99:1:1:0:0:1024:1] totalSize# 9126952 blobValueIndex# 17 Trim Put id# [23:1:1:0:0:1572864:1] totalSize# 9127976 blobValueIndex# 56 Change MinHugeBlobSize# 65536 Put id# [19:1:1:0:0:1048576:1] totalSize# 10700840 blobValueIndex# 46 Trim Put id# [18:1:2:0:0:10:1] totalSize# 11749416 blobValueIndex# 1 Trim Put id# [7:1:2:0:0:1048576:1] totalSize# 11749426 blobValueIndex# 42 Put id# [22:1:1:0:0:589824:1] totalSize# 12798002 blobValueIndex# 33 Change MinHugeBlobSize# 61440 Put id# [51:1:2:0:0:40960:1] totalSize# 13387826 blobValueIndex# 28 Change MinHugeBlobSize# 524288 Put id# [25:1:1:0:0:10:1] totalSize# 13428786 blobValueIndex# 3 Put id# [32:1:1:0:0:1048576:1] totalSize# 13428796 blobValueIndex# 46 Trim Put id# [33:1:1:0:0:1048576:1] totalSize# 14477372 blobValueIndex# 45 Trim Put id# [18:1:3:0:0:40960:1] totalSize# 15525948 blobValueIndex# 23 Put id# [79:1:1:0:0:1024:1] totalSize# 15566908 blobValueIndex# 18 Put id# [78:1:2:0:0:1024:1] totalSize# 15567932 blobValueIndex# 16 Put id# 
[40:1:1:0:0:1024:1] totalSize# 15568956 blobValueIndex# 16 Put id# [14:1:1:0:0:589824:1] totalSize# 15569980 blobValueIndex# 32 Change MinHugeBlobSize# 61440 Put id# [55:1:2:0:0:1024:1] totalSize# 16159804 blobValueIndex# 15 Put id# [44:1:1:0:0:1024:1] totalSize# 16160828 blobValueIndex# 19 Put id# [48:1:1:0:0:589824:1] totalSize# 16161852 blobValueIndex# 37 Change MinHugeBlobSize# 524288 Put id# [33:1:2:0:0:589824:1] totalSize# 16751676 blobValueIndex# 32 Put id# [98:1:1:0:0:10:1] totalSize# 17341500 blobValueIndex# 3 Put id# [81:1:1:0:0:589824:1] totalSize# 17341510 blobValueIndex# 36 Restart Put id# [14:1:2:0:0:589824:1] totalSize# 17931334 blobValueIndex# 32 Put id# [17:1:1:0:0:589824:1] totalSize# 18521158 blobValueIndex# 35 Put id# [13:1:1:0:0:10:1] totalSize# 19110982 blobValueIndex# 1 Put id# [51:1:3:0:0:40960:1] totalSize# 19110992 blobValueIndex# 25 Put id# [28:1:1:0:0:10:1] totalSize# 19151952 blobValueIndex# 1 Put id# [74:1:1:0:0:1048576:1] totalSize# 19151962 blobValueIndex# 40 Trim Put id# [72:1:1:0:0:1048576:1] totalSize# 20200538 blobValueIndex# 46 Put id# [9:1:1:0:0:1048576:1] totalSize# 21249114 blobValueIndex# 45 Trim Put id# [73:1:1:0:0:1048576:1] totalSize# 22297690 blobValueIndex# 47 Put id# [65:1:1:0:0:1048576:1] totalSize# 23346266 blobValueIndex# 49 Change MinHugeBlobSize# 8192 Put id# [67:1:1:0:0:589824:1] totalSize# 24394842 blobValueIndex# 30 Trim Put id# [27:1:1:0:0:10:1] totalSize# 24984666 blobValueIndex# 5 Change MinHugeBlobSize# 61440 Put id# [25:1:2:0:0:589824:1] totalSize# 24984676 blobValueIndex# 30 Put id# [47:1:1:0:0:40960:1] totalSize# 25574500 blobValueIndex# 29 Put id# [64:1:1:0:0:1048576:1] totalSize# 25615460 blobValueIndex# 40 Put id# [54:1:1:0:0:40960:1] totalSize# 26664036 blobValueIndex# 22 Restart Put id# [57:1:1:0:0:589824:1] totalSize# 26704996 blobValueIndex# 34 Trim Put id# [83:1:3:0:0:1048576:1] totalSize# 27294820 blobValueIndex# 49 Put id# [85:1:1:0:0:589824:1] totalSize# 28343396 blobValueIndex# 36 Put id# [36:1:1:0:0:589824:1] totalSize# 28933220 blobValueIndex# 37 Change MinHugeBlobSize# 8192 Put id# [52:1:1:0:0:1024:1] totalSize# 29523044 blobValueIndex# 15 Change MinHugeBlobSize# 524288 Put id# [70:1:2:0:0:1024:1] totalSize# 29524068 blobValueIndex# 10 Put id# [89:1:1:0:0:1024:1] totalSize# 29525092 blobValueIndex# 11 Put id# [85:1:2:0:0:1048576:1] totalSize# 29526116 blobValueIndex# 41 Trim Put id# [78:1:3:0:0:40960:1] totalSize# 30574692 blobValueIndex# 20 Put id# [20:1:1:0:0:1024:1] totalSize# 30615652 blobValueIndex# 10 Put id# [9:1:2:0:0:40960:1] totalSize# 30616676 blobValueIndex# 21 Put id# [75:1:1:0:0:1024:1] totalSize# 30657636 blobValueIndex# 18 Put id# [44:1:2:0:0:1572864:1] totalSize# 30658660 blobValueIndex# 54 Put id# [81:1:2:0:0:10:1] totalSize# 32231524 blobValueIndex# 0 Put id# [87:1:2:0:0:589824:1] totalSize# 32231534 blobValueIndex# 34 Put id# [12:1:1:0:0:10:1] totalSize# 32821358 blobValueIndex# 9 Put id# [53:1:2:0:0:40960:1] totalSize# 32821368 blobValueIndex# 25 Put id# [15:1:1:0:0:1024:1] totalSize# 32862328 blobValueIndex# 18 Put id# [61:1:1:0:0:40960:1] totalSize# 32863352 blobValueIndex# 29 Put id# [96:1:1:0:0:40960:1] totalSize# 32904312 blobValueIndex# 26 Put id# [68:1:1:0:0:589824:1] totalSize# 32945272 blobValueIndex# 37 Put id# [41:1:1:0:0:10:1] totalSize# 33535096 blobValueIndex# 2 Put id# [88:1:1:0:0:1048576:1] totalSize# 33535106 blobValueIndex# 43 Put id# [67:1:2:0:0:589824:1] totalSize# 34583682 blobValueIndex# 31 Put id# [86:1:1:0:0:589824:1] totalSize# 35173506 blobValueIndex# 34 Trim Put id# 
[26:1:1:0:0:40960:1] totalSize# 35763330 blobValueIndex# 20 Put id# [54:1:2:0:0:40960:1] totalSize# 35804290 blobValueIndex# 27 Change MinHugeBlobSize# 12288 Put id# [2:1:1:0:0:40960:1] totalSize# 35845250 blobValueIndex# 26 Restart Put id# [73:1:2:0:0:1048576:1] totalSize# 35886210 blobValueIndex# 42 Put id# [1:1:1:0:0:1572864:1] totalSize# 36934786 blobValueIndex# 55 Put id# [83:1:4:0:0:1024:1] totalSize# 38507650 blobValueIndex# 12 Trim Put id# [51:1:4:0:0:1048576:1] totalSize# 38508674 blobValueIndex# 47 Put id# [89:1:2:0:0:1572864:1] totalSize# 39557250 blobValueIndex# 51 Trim Put id# [22:1:2:0:0:1024:1] totalSize# 41130114 blobValueIndex# 10 Change MinHugeBlobSize# 524288 Put id# [92:1:1:0:0:40960:1] totalSize# 41131138 blobValueIndex# 29 Put id# [90:1:1:0:0:10:1] totalSize# 41172098 blobValueIndex# 7 Change MinHugeBlobSize# 8192 Put id# [81:1:3:0:0:40960:1] totalSize# 41172108 blobValueIndex# 26 Put id# [78:1:4:0:0:40960:1] totalSize# 41213068 blobValueIndex# 22 Put id# [88:1:2:0:0:10:1] totalSize# 41254028 blobValueIndex# 0 Put id# [46:1:1:0:0:1572864:1] totalSize# 41254038 blobValueIndex# 53 Put id# [70:1:3:0:0:1572864:1] totalSize# 42826902 blobValueIndex# 51 Put id# [77:1:1:0:0:1572864:1] totalSize# 44399766 blobValueIndex# 50 Put id# [92:1:2:0:0:40960:1] totalSize# 45972630 blobValueIndex# 27 Put id# [86:1:2:0:0:1048576:1] totalSize# 46013590 blobValueIndex# 48 Put id# [64:1:2:0:0:40960:1] totalSize# 47062166 blobValueIndex# 24 Change MinHugeBlobSize# 65536 Put id# [66:1:1:0:0:40960:1] totalSize# 47103126 blobValueIndex# 21 Put id# [87:1:3:0:0:40960:1] totalSize# 47144086 blobValueIndex# 21 Put id# [35:1:1:0:0:1572864:1] totalSize# 47185046 blobValueIndex# 53 Put id# [63:1:1:0:0:10:1] totalSize# 48757910 blobValueIndex# 8 Change MinHugeBlobSize# 12288 Put id# [16:1:2:0:0:589824:1] totalSize# 48757920 blobValueIndex# 34 Trim Put id# [12:1:2:0:0:1024:1] totalSize# 49347744 blobValueIndex# 19 Put id# [82:1:1:0:0:1024:1] totalSize# 49348768 blobValueIndex# 11 Put id# [3:1:1:0:0:1024:1] totalSize# 49349792 blobValueIndex# 15 Put id# [43:1:1:0:0:40960:1] totalSize# 49350816 blobValueIndex# 28 Trim Put id# [88:1:3:0:0:1048576:1] totalSize# 49391776 blobValueIndex# 48 Trim Put id# [37:1:1:0:0:10:1] totalSize# 50440352 blobValueIndex# 6 Trim Put id# [27:1:2:0:0:40960:1] totalSize# 50440362 blobValueIndex# 22 Trim Put id# [28:1:2:0:0:1024:1] totalSize# 50481322 blobValueIndex# 14 Put id# [31:1:1:0:0:10:1] totalSize# 50482346 blobValueIndex# 4 Put id# [95:1:2:0:0:1048576:1] totalSize# 50482356 blobValueIndex# 40 Put id# [69:1:1:0:0:1024:1] totalSize# 51530932 blobValueIndex# 17 Put id# [69:1:2:0:0:10:1] totalSize# 51531956 blobValueIndex# 3 Put id# [88:1:4:0:0:40960:1] totalSize# 51531966 blobValueIndex# 27 Trim Put id# [36:1:2:0:0:1572864:1] totalSize# 51572926 blobValueIndex# 55 Put id# [82:1:2:0:0:1024:1] totalSize# 53145790 blobValueIndex# 11 Put id# [22:1:3:0:0:589824:1] totalSize# 53146814 blobValueIndex# 31 Put id# [46:1:2:0:0:1048576:1] totalSize# 53736638 blobValueIndex# 47 Put id# [68:1:2:0:0:1048576:1] totalSize# 54785214 blobValueIndex# 40 Put id# [89:1:3:0:0:1024:1] totalSize# 55833790 blobValueIndex# 15 Change MinHugeBlobSize# 61440 Put id# [59:1:1:0:0:40960:1] totalSize# 55834814 blobValueIndex# 21 Trim Put id# [31:1:2:0:0:1048576:1] totalSize# 55875774 blobValueIndex# 45 Put id# [31:1:3:0:0:40960:1] totalSize# 56924350 blobValueIndex# 28 Put id# [52:1:2:0:0:1024:1] totalSize# 56965310 blobValueIndex# 19 Put id# [82:1:3:0:0:40960:1] totalSize# 56966334 blobValueIndex# 26 
Put id# [97:1:1:0:0:40960:1] totalSize# 57007294 blobValueIndex# 24 Put id# [18:1:4:0:0:1024:1] totalSize# 57048254 blobValueIndex# 10 Put id# [86:1:3:0:0:589824:1] totalSize# 57049278 blobValueIndex# 33 Put id# [54:1:3:0:0:1024:1] totalSize# 57639102 blobValueIndex# 11 Change MinHugeBlobSize# 65536 Put id# [48:1:2:0:0:1572864:1] totalSize# 57640126 blobValueIndex# 54 Put id# [33:1:3:0:0:589824:1] totalSize# 59212990 blobValueIndex# 34 Change MinHugeBlobSize# 61440 Trim Put id# [76:1:2:0:0:40960:1] totalSize# 59802814 blobValueIndex# 25 Put id# [23:1:2:0:0:1048576:1] totalSize# 59843774 blobValueIndex# 47 Put id# [48:1:3:0:0:10:1] totalSize# 60892350 blobValueIndex# 8 Trim Put id# [6:1:1:0:0:1024:1] totalSize# 60892360 blobValueIndex# 10 Trim Restart Put id# [83:1:5:0:0:1572864:1] totalSize# 60893384 blobValueIndex# 55 Put id# [90:1:2:0:0:1024:1] totalSize# 62466248 blobValueIndex# 15 Put id# [66:1:2:0:0:10:1] totalSize# 62467272 blobValueIndex# 7 Change MinHugeBlobSize# 8192 Put id# [87:1:4:0:0:1572864:1] totalSize# 62467282 blobValueIndex# 59 Put id# [52:1:3:0:0:589824:1] totalSize# 64040146 blobValueIndex# 32 Restart Put id# [94:1:1:0:0:1572864:1] totalSize# 64629970 blobValueIndex# 56 Put id# [62:1:1:0:0:1048576:1] totalSize# 66202834 blobValueIndex# 44 Put id# [58:1:1:0:0:589824:1] totalSize# 67251410 blobValueIndex# 31 Put id# [89:1:4:0:0:40960:1] totalSize# 67841234 blobValueIndex# 29 Put id# [16:1:3:0:0:1048576:1] totalSize# 67882194 blobValueIndex# 43 Put id# [71:1:1:0:0:10:1] totalSize# 68930770 blobValueIndex# 0 Put id# [21:1:1:0:0:1024:1] totalSize# 68930780 blobValueIndex# 10 Trim Put id# [44:1:3:0:0:1572864:1] totalSize# 68931804 blobValueIndex# 58 Trim Put id# [6:1:2:0:0:589824:1] totalSize# 70504668 blobValueIndex# 35 Put id# [10:1:1:0:0:1024:1] totalSize# 71094492 blobValueIndex# 13 Trim Put id# [51:1:5:0:0:1024:1] totalSize# 71095516 blobValueIndex# 10 Put id# [38:1:1:0:0:40960:1] totalSize# 71096540 blobValueIndex# 29 Put id# [80:1:1:0:0:1572864:1] totalSize# 71137500 blobValueIndex# 57 Put id# [28:1:3:0:0:1572864:1] totalSize# 72710364 blobValueIndex# 50 Put id# [65:1:2:0:0:1048576:1] totalSize# 74283228 blobValueIndex# 41 Restart Put id# [84:1:1:0:0:10:1] totalSize# 75331804 blobValueIndex# 2 Put id# [82:1:4:0:0:40960:1] totalSize# 75331814 blobValueIndex# 25 Put id# [91:1:1:0:0:1572864:1] totalSize# 75372774 blobValueIndex# 54 Put id# [5:1:1:0:0:589824:1] totalSize# 76945638 blobValueIndex# 39 Restart Put id# [73:1:3:0:0:40960:1] totalSize# 77535462 blobValueIndex# 26 Put id# [89:1:5:0:0:1024:1] totalSize# 77576422 blobValueIndex# 19 Put id# [38:1:2:0:0:1572864:1] totalSize# 77577446 blobValueIndex# 59 Put id# [11:1:1:0:0:1048576:1] totalSize# 79150310 blobValueIndex# 45 Change MinHugeBlobSize# 12288 Put id# [5:1:2:0: ... 
0:0:1572864:1] totalSize# 610837406 blobValueIndex# 52 Put id# [42:1:125:0:0:589824:1] totalSize# 612410270 blobValueIndex# 32 Put id# [91:1:111:0:0:589824:1] totalSize# 613000094 blobValueIndex# 38 Put id# [83:1:109:0:0:1572864:1] totalSize# 613589918 blobValueIndex# 50 Put id# [80:1:86:0:0:10:1] totalSize# 615162782 blobValueIndex# 9 Trim Put id# [5:1:98:0:0:40960:1] totalSize# 615162792 blobValueIndex# 23 Restart Put id# [45:1:93:0:0:40960:1] totalSize# 615203752 blobValueIndex# 20 Put id# [17:1:100:0:0:1048576:1] totalSize# 615244712 blobValueIndex# 45 Put id# [82:1:113:0:0:1572864:1] totalSize# 616293288 blobValueIndex# 56 Trim Put id# [3:1:88:0:0:589824:1] totalSize# 617866152 blobValueIndex# 32 Restart Put id# [75:1:130:0:0:40960:1] totalSize# 618455976 blobValueIndex# 29 Put id# [31:1:121:0:0:10:1] totalSize# 618496936 blobValueIndex# 6 Restart Put id# [28:1:102:0:0:589824:1] totalSize# 618496946 blobValueIndex# 36 Put id# [95:1:119:0:0:1048576:1] totalSize# 619086770 blobValueIndex# 46 Trim Put id# [36:1:99:0:0:1048576:1] totalSize# 620135346 blobValueIndex# 42 Put id# [49:1:114:0:0:1572864:1] totalSize# 621183922 blobValueIndex# 57 Change MinHugeBlobSize# 8192 Put id# [49:1:115:0:0:1048576:1] totalSize# 622756786 blobValueIndex# 41 Change MinHugeBlobSize# 12288 Put id# [88:1:109:0:0:1024:1] totalSize# 623805362 blobValueIndex# 12 Put id# [67:1:94:0:0:1048576:1] totalSize# 623806386 blobValueIndex# 41 Put id# [51:1:101:0:0:10:1] totalSize# 624854962 blobValueIndex# 0 Trim Put id# [3:1:89:0:0:1572864:1] totalSize# 624854972 blobValueIndex# 50 Put id# [66:1:112:0:0:589824:1] totalSize# 626427836 blobValueIndex# 35 Change MinHugeBlobSize# 65536 Put id# [35:1:96:0:0:10:1] totalSize# 627017660 blobValueIndex# 4 Trim Put id# [90:1:127:0:0:10:1] totalSize# 627017670 blobValueIndex# 3 Put id# [18:1:101:0:0:1572864:1] totalSize# 627017680 blobValueIndex# 51 Put id# [25:1:126:0:0:1572864:1] totalSize# 628590544 blobValueIndex# 51 Put id# [96:1:101:0:0:40960:1] totalSize# 630163408 blobValueIndex# 26 Put id# [65:1:111:0:0:589824:1] totalSize# 630204368 blobValueIndex# 36 Put id# [82:1:114:0:0:589824:1] totalSize# 630794192 blobValueIndex# 34 Put id# [99:1:112:0:0:40960:1] totalSize# 631384016 blobValueIndex# 25 Put id# [15:1:107:0:0:1572864:1] totalSize# 631424976 blobValueIndex# 58 Trim Put id# [43:1:118:0:0:1572864:1] totalSize# 632997840 blobValueIndex# 53 Put id# [32:1:108:0:0:10:1] totalSize# 634570704 blobValueIndex# 0 Put id# [89:1:114:0:0:589824:1] totalSize# 634570714 blobValueIndex# 31 Trim Put id# [37:1:92:0:0:10:1] totalSize# 635160538 blobValueIndex# 7 Put id# [87:1:117:0:0:10:1] totalSize# 635160548 blobValueIndex# 9 Put id# [55:1:107:0:0:1048576:1] totalSize# 635160558 blobValueIndex# 46 Put id# [21:1:115:0:0:589824:1] totalSize# 636209134 blobValueIndex# 37 Put id# [89:1:115:0:0:10:1] totalSize# 636798958 blobValueIndex# 2 Put id# [37:1:93:0:0:589824:1] totalSize# 636798968 blobValueIndex# 34 Put id# [83:1:110:0:0:10:1] totalSize# 637388792 blobValueIndex# 6 Trim Restart Put id# [38:1:110:0:0:10:1] totalSize# 637388802 blobValueIndex# 9 Trim Restart Put id# [8:1:108:0:0:1048576:1] totalSize# 637388812 blobValueIndex# 43 Put id# [7:1:120:0:0:1048576:1] totalSize# 638437388 blobValueIndex# 41 Put id# [54:1:93:0:0:1572864:1] totalSize# 639485964 blobValueIndex# 50 Put id# [68:1:116:0:0:1572864:1] totalSize# 641058828 blobValueIndex# 51 Restart Put id# [10:1:104:0:0:589824:1] totalSize# 642631692 blobValueIndex# 37 Put id# [85:1:110:0:0:589824:1] totalSize# 643221516 
blobValueIndex# 32 Put id# [39:1:96:0:0:1572864:1] totalSize# 643811340 blobValueIndex# 52 Trim Put id# [85:1:111:0:0:1024:1] totalSize# 645384204 blobValueIndex# 18 Trim Put id# [56:1:97:0:0:40960:1] totalSize# 645385228 blobValueIndex# 29 Put id# [82:1:115:0:0:10:1] totalSize# 645426188 blobValueIndex# 5 Put id# [84:1:107:0:0:589824:1] totalSize# 645426198 blobValueIndex# 30 Put id# [77:1:111:0:0:1024:1] totalSize# 646016022 blobValueIndex# 13 Put id# [24:1:91:0:0:1572864:1] totalSize# 646017046 blobValueIndex# 50 Put id# [11:1:98:0:0:589824:1] totalSize# 647589910 blobValueIndex# 34 Put id# [26:1:100:0:0:40960:1] totalSize# 648179734 blobValueIndex# 23 Put id# [63:1:111:0:0:1048576:1] totalSize# 648220694 blobValueIndex# 49 Trim Put id# [26:1:101:0:0:1048576:1] totalSize# 649269270 blobValueIndex# 48 Put id# [13:1:124:0:0:1024:1] totalSize# 650317846 blobValueIndex# 18 Put id# [97:1:103:0:0:10:1] totalSize# 650318870 blobValueIndex# 2 Put id# [82:1:116:0:0:40960:1] totalSize# 650318880 blobValueIndex# 22 Put id# [26:1:102:0:0:1024:1] totalSize# 650359840 blobValueIndex# 18 Put id# [92:1:119:0:0:1024:1] totalSize# 650360864 blobValueIndex# 14 Put id# [38:1:111:0:0:1048576:1] totalSize# 650361888 blobValueIndex# 42 Put id# [73:1:102:0:0:1048576:1] totalSize# 651410464 blobValueIndex# 48 Put id# [31:1:122:0:0:40960:1] totalSize# 652459040 blobValueIndex# 28 Trim Put id# [83:1:111:0:0:1024:1] totalSize# 652500000 blobValueIndex# 15 Put id# [74:1:115:0:0:1024:1] totalSize# 652501024 blobValueIndex# 11 Put id# [45:1:94:0:0:40960:1] totalSize# 652502048 blobValueIndex# 28 Trim Put id# [45:1:95:0:0:589824:1] totalSize# 652543008 blobValueIndex# 33 Put id# [11:1:99:0:0:40960:1] totalSize# 653132832 blobValueIndex# 27 Put id# [52:1:113:0:0:40960:1] totalSize# 653173792 blobValueIndex# 21 Change MinHugeBlobSize# 524288 Trim Put id# [82:1:117:0:0:10:1] totalSize# 653214752 blobValueIndex# 5 Put id# [9:1:98:0:0:1048576:1] totalSize# 653214762 blobValueIndex# 49 Trim Put id# [50:1:112:0:0:10:1] totalSize# 654263338 blobValueIndex# 8 Put id# [93:1:116:0:0:1572864:1] totalSize# 654263348 blobValueIndex# 50 Put id# [5:1:99:0:0:10:1] totalSize# 655836212 blobValueIndex# 7 Put id# [80:1:87:0:0:40960:1] totalSize# 655836222 blobValueIndex# 22 Put id# [5:1:100:0:0:40960:1] totalSize# 655877182 blobValueIndex# 22 Put id# [41:1:110:0:0:1048576:1] totalSize# 655918142 blobValueIndex# 40 Restart Put id# [47:1:86:0:0:589824:1] totalSize# 656966718 blobValueIndex# 35 Put id# [87:1:118:0:0:1024:1] totalSize# 657556542 blobValueIndex# 15 Change MinHugeBlobSize# 8192 Put id# [77:1:112:0:0:1572864:1] totalSize# 657557566 blobValueIndex# 50 Put id# [5:1:101:0:0:10:1] totalSize# 659130430 blobValueIndex# 0 Put id# [28:1:103:0:0:40960:1] totalSize# 659130440 blobValueIndex# 24 Change MinHugeBlobSize# 12288 Restart Put id# [32:1:109:0:0:1024:1] totalSize# 659171400 blobValueIndex# 10 Put id# [24:1:92:0:0:1048576:1] totalSize# 659172424 blobValueIndex# 43 Put id# [91:1:112:0:0:1024:1] totalSize# 660221000 blobValueIndex# 10 Change MinHugeBlobSize# 524288 Put id# [77:1:113:0:0:589824:1] totalSize# 660222024 blobValueIndex# 39 Change MinHugeBlobSize# 8192 Put id# [2:1:123:0:0:1024:1] totalSize# 660811848 blobValueIndex# 10 Put id# [48:1:119:0:0:1024:1] totalSize# 660812872 blobValueIndex# 10 Put id# [96:1:102:0:0:40960:1] totalSize# 660813896 blobValueIndex# 27 Put id# [81:1:103:0:0:1572864:1] totalSize# 660854856 blobValueIndex# 59 Put id# [89:1:116:0:0:1048576:1] totalSize# 662427720 blobValueIndex# 45 Trim Put id# 
[84:1:108:0:0:40960:1] totalSize# 663476296 blobValueIndex# 27 Put id# [40:1:97:0:0:40960:1] totalSize# 663517256 blobValueIndex# 20 Put id# [11:1:100:0:0:10:1] totalSize# 663558216 blobValueIndex# 7 Put id# [43:1:119:0:0:1048576:1] totalSize# 663558226 blobValueIndex# 41 Change MinHugeBlobSize# 524288 Put id# [99:1:113:0:0:589824:1] totalSize# 664606802 blobValueIndex# 34 Change MinHugeBlobSize# 65536 Put id# [53:1:101:0:0:1024:1] totalSize# 665196626 blobValueIndex# 10 Put id# [5:1:102:0:0:40960:1] totalSize# 665197650 blobValueIndex# 26 Put id# [66:1:113:0:0:1048576:1] totalSize# 665238610 blobValueIndex# 41 Put id# [83:1:112:0:0:1572864:1] totalSize# 666287186 blobValueIndex# 55 Put id# [66:1:114:0:0:40960:1] totalSize# 667860050 blobValueIndex# 21 Change MinHugeBlobSize# 12288 Put id# [84:1:109:0:0:40960:1] totalSize# 667901010 blobValueIndex# 20 Restart Put id# [90:1:128:0:0:10:1] totalSize# 667941970 blobValueIndex# 8 Put id# [46:1:122:0:0:1024:1] totalSize# 667941980 blobValueIndex# 13 Trim Put id# [32:1:110:0:0:1572864:1] totalSize# 667943004 blobValueIndex# 55 Put id# [48:1:120:0:0:589824:1] totalSize# 669515868 blobValueIndex# 38 Restart Put id# [67:1:95:0:0:1572864:1] totalSize# 670105692 blobValueIndex# 57 Change MinHugeBlobSize# 61440 Put id# [84:1:110:0:0:1048576:1] totalSize# 671678556 blobValueIndex# 43 Put id# [69:1:113:0:0:40960:1] totalSize# 672727132 blobValueIndex# 20 Put id# [97:1:104:0:0:40960:1] totalSize# 672768092 blobValueIndex# 23 Put id# [87:1:119:0:0:1048576:1] totalSize# 672809052 blobValueIndex# 44 Put id# [87:1:120:0:0:1048576:1] totalSize# 673857628 blobValueIndex# 42 Put id# [32:1:111:0:0:1024:1] totalSize# 674906204 blobValueIndex# 19 Restart Put id# [46:1:123:0:0:1572864:1] totalSize# 674907228 blobValueIndex# 51 Put id# [36:1:100:0:0:10:1] totalSize# 676480092 blobValueIndex# 1 Put id# [80:1:88:0:0:1048576:1] totalSize# 676480102 blobValueIndex# 40 Put id# [37:1:94:0:0:1048576:1] totalSize# 677528678 blobValueIndex# 42 Put id# [48:1:121:0:0:1572864:1] totalSize# 678577254 blobValueIndex# 51 Put id# [74:1:116:0:0:1024:1] totalSize# 680150118 blobValueIndex# 12 Put id# [66:1:115:0:0:1048576:1] totalSize# 680151142 blobValueIndex# 42 Put id# [42:1:126:0:0:10:1] totalSize# 681199718 blobValueIndex# 2 Put id# [82:1:118:0:0:1572864:1] totalSize# 681199728 blobValueIndex# 56 Put id# [35:1:97:0:0:1572864:1] totalSize# 682772592 blobValueIndex# 56 Put id# [69:1:114:0:0:40960:1] totalSize# 684345456 blobValueIndex# 26 Put id# [59:1:107:0:0:1572864:1] totalSize# 684386416 blobValueIndex# 59 Put id# [36:1:101:0:0:10:1] totalSize# 685959280 blobValueIndex# 3 Put id# [47:1:87:0:0:10:1] totalSize# 685959290 blobValueIndex# 7 Put id# [14:1:110:0:0:40960:1] totalSize# 685959300 blobValueIndex# 21 Put id# [62:1:108:0:0:589824:1] totalSize# 686000260 blobValueIndex# 36 Put id# [45:1:96:0:0:1572864:1] totalSize# 686590084 blobValueIndex# 53 Put id# [93:1:117:0:0:1572864:1] totalSize# 688162948 blobValueIndex# 50 Trim Put id# [56:1:98:0:0:589824:1] totalSize# 689735812 blobValueIndex# 38 Trim Put id# [76:1:86:0:0:1048576:1] totalSize# 690325636 blobValueIndex# 41 Put id# [37:1:95:0:0:10:1] totalSize# 691374212 blobValueIndex# 5 Put id# [45:1:97:0:0:1024:1] totalSize# 691374222 blobValueIndex# 16 Put id# [50:1:113:0:0:10:1] totalSize# 691375246 blobValueIndex# 7 Trim Put id# [58:1:114:0:0:1024:1] totalSize# 691375256 blobValueIndex# 11 Put id# [96:1:103:0:0:1572864:1] totalSize# 691376280 blobValueIndex# 59 Put id# [8:1:109:0:0:589824:1] totalSize# 692949144 
blobValueIndex# 38 Trim Put id# [17:1:101:0:0:10:1] totalSize# 693538968 blobValueIndex# 5 Put id# [97:1:105:0:0:1572864:1] totalSize# 693538978 blobValueIndex# 50 Put id# [80:1:89:0:0:10:1] totalSize# 695111842 blobValueIndex# 4 Restart Put id# [93:1:118:0:0:1048576:1] totalSize# 695111852 blobValueIndex# 45 Change MinHugeBlobSize# 524288 Put id# [20:1:100:0:0:1048576:1] totalSize# 696160428 blobValueIndex# 43 Put id# [43:1:120:0:0:589824:1] totalSize# 697209004 blobValueIndex# 39 Put id# [4:1:126:0:0:589824:1] totalSize# 697798828 blobValueIndex# 30 Change MinHugeBlobSize# 65536 Put id# [46:1:124:0:0:1048576:1] totalSize# 698388652 blobValueIndex# 40 Put id# [25:1:127:0:0:40960:1] totalSize# 699437228 blobValueIndex# 25 Put id# [97:1:106:0:0:40960:1] totalSize# 699478188 blobValueIndex# 20 Put id# [99:1:114:0:0:40960:1] totalSize# 699519148 blobValueIndex# 23 Change MinHugeBlobSize# 8192 Trim Put id# [20:1:101:0:0:40960:1] totalSize# 699560108 blobValueIndex# 28 Change MinHugeBlobSize# 524288 Trim Put id# [54:1:94:0:0:1024:1] totalSize# 699601068 blobValueIndex# 10 Change MinHugeBlobSize# 65536 Put id# [86:1:108:0:0:1024:1] totalSize# 699602092 blobValueIndex# 18 Change MinHugeBlobSize# 8192 Put id# [80:1:90:0:0:1048576:1] totalSize# 699603116 blobValueIndex# 40 Put id# [92:1:120:0:0:589824:1] totalSize# 700651692 blobValueIndex# 34 Trim Restart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18126, MsgBus: 29751 2024-11-21T08:56:42.954679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653884309780363:2204];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:42.954695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033f3/r3tmp/tmp70L6VP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18126, node 1 2024-11-21T08:56:43.014043Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:43.020346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:43.020364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:43.020366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:43.020407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29751 2024-11-21T08:56:43.054399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:43.054427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29751 2024-11-21T08:56:43.055521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:43.069189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.077603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.138173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.153316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.166248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.246494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888604749040:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.246522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.271161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.276992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.284288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.338607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.393202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.402935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.411219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888604749559:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.411238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.411239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888604749564:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.411735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:43.415859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653888604749566:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TTxAllocatorClientTest::Boot [GOOD] >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15489, MsgBus: 24234 2024-11-21T08:56:43.242956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653888315751394:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:43.243078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033f1/r3tmp/tmpx8NKht/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15489, node 1 2024-11-21T08:56:43.294438Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:43.297555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:43.297566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:43.297568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:43.297590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24234 TClient is connected to server localhost:24234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:43.343863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:43.343891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:43.345050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:43.373444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.381891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:43.442471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.458863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.467564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:43.499358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888315752936:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.499394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.529121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.535340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.543270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.550137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.557098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.564358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:43.573517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888315753429:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.573542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.573548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653888315753434:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:43.574107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:43.577113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653888315753436:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2024-11-21T08:56:43.911994Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:56:43.918470Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:56:43.926551Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:56:43.974484Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:43.999117Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:56:44.031466Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031486Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031492Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031500Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:56:44.031521Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031532Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:56:44.031543Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> KqpQueryPerf::IndexInsert-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2024-11-21T08:56:43.911910Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:56:43.918445Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:56:43.926550Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:56:43.974484Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:43.999116Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:56:44.031175Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031202Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031210Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031220Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:56:44.031251Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031262Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:56:44.031275Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:56:44.042051Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T08:56:44.042236Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042246Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042263Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T08:56:44.042268Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 2024-11-21T08:56:44.042302Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042338Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042354Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042369Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042383Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T08:56:44.042444Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042449Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042457Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2024-11-21T08:56:44.042459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 5000 to# 10000 2024-11-21T08:56:44.042471Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042483Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042515Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042570Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T08:56:44.042584Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T08:56:44.042613Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042621Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042629Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2024-11-21T08:56:44.042633Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 10000 to# 15000 2024-11-21T08:56:44.042650Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> KqpQueryPerf::IndexDeleteOn-QueryService >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> KqpQueryPerf::Upsert+QueryService >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::MultiDeleteFromTable-QueryService >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService >> KqpWorkload::KV >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> KqpQueryPerf::IndexReplace+QueryService >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::IndexInsert-QueryService [GOOD] >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] >> KqpQueryPerf::Upsert+QueryService [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10303, MsgBus: 16816 2024-11-21T08:56:44.456977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653892968198205:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:44.457148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033ed/r3tmp/tmpSxt2F4/pdisk_1.dat 2024-11-21T08:56:44.504576Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10303, node 1 2024-11-21T08:56:44.518431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:44.518444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:44.518445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:44.518491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16816 TClient is connected to server localhost:16816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:44.557812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:44.557836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:44.558966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:44.586445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.597745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.659694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.674930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.684573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.813145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653892968199748:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:44.813203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:44.817834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.872979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.933312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.944408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.005709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.013770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.021758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897263167578:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.021792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.021796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897263167583:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.022521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.026024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653897263167585:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:45.181439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.187909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.194652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4667, MsgBus: 22306 2024-11-21T08:56:44.901522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653893970586057:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:44.901669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033e2/r3tmp/tmpS2BIRm/pdisk_1.dat 2024-11-21T08:56:44.955549Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4667, node 1 2024-11-21T08:56:44.968436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:44.968447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:44.968449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:44.968478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22306 TClient is connected to server localhost:22306 2024-11-21T08:56:45.004808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.004836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:56:45.005875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:45.032725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.046500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.059392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.072266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.083964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.156580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653898265554905:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.156599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.175308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.180120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.188016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.194913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.201807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.209424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.217736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653898265555397:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.217766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653898265555402:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.217769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.218402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.221977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653898265555404:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25529, MsgBus: 9978 2024-11-21T08:56:44.855645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653891795428464:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:44.855869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033e7/r3tmp/tmpc6qoZV/pdisk_1.dat 2024-11-21T08:56:44.914087Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25529, node 1 2024-11-21T08:56:44.932229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:44.932240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:44.932242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:44.932274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9978 2024-11-21T08:56:44.957851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:44.957895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:44.958973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:44.988657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.992500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.052692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:45.069245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.076698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.124783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896090397315:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.124810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.148689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.154189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.159597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.167483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.173929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.181095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.189604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896090397807:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.189629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.189629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896090397812:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.190132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.194023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653896090397814:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 32147, MsgBus: 29131 2024-11-21T08:56:44.753190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653891368991577:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:44.753326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033e4/r3tmp/tmpuYbZAJ/pdisk_1.dat 2024-11-21T08:56:44.797870Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32147, node 1 2024-11-21T08:56:44.806272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:44.806297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:44.806299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:44.806331Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29131 TClient is connected to server localhost:29131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:44.854613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:44.854639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:44.855719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:44.875435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.882491Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:44.897462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.917326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:56:44.945374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.958521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.018039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653895663960421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.018070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.049145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.054712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.061767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.068834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.075451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.130225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.140909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653895663960927:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.140930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653895663960932:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.140932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.141444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.145096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653895663960934:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14120, MsgBus: 25843 2024-11-21T08:56:44.666495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653893887645848:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:44.666514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033ea/r3tmp/tmpxDTXoj/pdisk_1.dat 2024-11-21T08:56:44.706685Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14120, node 1 2024-11-21T08:56:44.720061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:44.720078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:44.720080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:44.720115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25843 TClient is connected to server localhost:25843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:44.767326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:44.767357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:44.768476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:44.790258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.799659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.859730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:44.873811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.884861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:44.933369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653893887647392:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:44.933402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:44.963209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.969693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:44.978748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.033838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.041046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.047890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.056371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653898182615194:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.056396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.056405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653898182615199:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.056854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.061316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653898182615201:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:45.271643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.278388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.286510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::Replace-QueryService >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IndexReplace+QueryService [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18039, MsgBus: 29438 2024-11-21T08:56:45.282595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653897853094616:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.282613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033d1/r3tmp/tmp9emYyq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18039, node 1 2024-11-21T08:56:45.338976Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:45.339444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:45.339452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:45.339453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:45.339483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29438 TClient is connected to server localhost:29438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:45.383735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.383764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.384816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:45.412942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.423562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.484687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.502912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.511761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.539447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897853096168:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.539470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.570056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.624647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.636310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.643052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.649924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.657259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.665681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897853096663:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.665696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897853096668:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.665716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.666285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.670052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653897853096670:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 30086, MsgBus: 62476 2024-11-21T08:56:45.499614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653896798562554:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.499630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033c3/r3tmp/tmpPaeVYo/pdisk_1.dat 2024-11-21T08:56:45.541272Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30086, node 1 2024-11-21T08:56:45.554010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:45.554035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:45.554037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:45.554068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62476 TClient is connected to server localhost:62476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:45.600329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.600351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.601421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:45.625821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.637065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:56:45.650284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.707116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.714224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.755206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896798564106:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.755233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.776789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.781193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.835561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.889636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.894672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.901766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.910065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896798564625:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.910090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653896798564630:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.910090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.910574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.915302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653896798564632:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9015, MsgBus: 18941 2024-11-21T08:56:45.154797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653897008153467:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.155062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033d5/r3tmp/tmpshRfsk/pdisk_1.dat 2024-11-21T08:56:45.197469Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9015, node 1 2024-11-21T08:56:45.209901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:45.209915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:45.209916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:45.209953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18941 TClient is connected to server localhost:18941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:45.255681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.255712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.256824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:45.279439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.288378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.349224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:45.364649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.373937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.458640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897008155011:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.458665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.481454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.487363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.496413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.551250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.558945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.566184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.622833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897008155531:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.622856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897008155536:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.622863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.623474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.628323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653897008155538:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:45.776001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.782058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.790166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 63994, MsgBus: 22472 2024-11-21T08:56:45.418077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653899317850010:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.418342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033c5/r3tmp/tmpJBT7bi/pdisk_1.dat 2024-11-21T08:56:45.464168Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63994, node 1 2024-11-21T08:56:45.473164Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:45.473179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:45.473181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:45.473214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22472 TClient is connected to server localhost:22472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:45.519732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.519761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.520867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:45.542231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.545731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.605501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.618522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.630596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.674614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653899317851553:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.674639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.699063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.704329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.712877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.719812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.726788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.733959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.741607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653899317852044:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.741626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653899317852049:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.741632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.742083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.747171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653899317852051:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:45.942112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.947828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.958165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency |90.4%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::Replace-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> RetryPolicy::RetryWithBatching [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17448, MsgBus: 5661 2024-11-21T08:56:46.111793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653901775457844:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.111949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033bc/r3tmp/tmpG92V8z/pdisk_1.dat 2024-11-21T08:56:46.162226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17448, node 1 2024-11-21T08:56:46.167970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:46.167980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:46.167982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:46.168006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5661 TClient is connected to server localhost:5661 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:46.210647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.212876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:46.212900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:46.214066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:46.219799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.279499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.293108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.301306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.359838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653901775459395:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:46.359860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:46.389446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.396075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.405856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.413088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.419480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.426764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.435573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653901775459886:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:46.435596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:46.435597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653901775459891:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:46.436113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:46.439952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653901775459893:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> KqpQueryPerf::UpdateOn+QueryService >> KqpQueryPerf::MultiDeleteFromTable+QueryService >> KqpQueryPerf::DeleteOn+QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Delete-QueryService >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> KqpQueryPerf::RangeRead-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2024-11-21T08:54:08.659884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:08.659921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:08.659929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00284e/r3tmp/tmpxVZVjy/pdisk_1.dat 2024-11-21T08:54:08.730215Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4422, node 1 2024-11-21T08:54:08.826450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.826473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.826477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.826579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.833017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.909270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.909304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.920890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20185 2024-11-21T08:54:09.322065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.075101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.075127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.107242Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.107889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.153351Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.160933Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.160951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.164604Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.164632Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.164645Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.164648Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.164652Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.164656Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.164659Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.164663Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.164724Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:10.336112Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.336138Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2551], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:10.337084Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1766:2558] 2024-11-21T08:54:10.338570Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1814:2581] 2024-11-21T08:54:10.338605Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1814:2581], schemeshard id = 72075186224037889 2024-11-21T08:54:10.339620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:10.342348Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:10.342358Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:10.342365Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:10.342489Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.342502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.344384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:10.345496Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:10.345516Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:10.348023Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:10.359449Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.391289Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:10.478471Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:10.654618Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:11.196024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2144:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.196053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:11.198610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:11.230050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.230091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.230113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.230126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.230140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.230153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:11.230166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:11.230179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:11.230192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:11.230203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:11.230216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:11.230228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2293:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:11.235234Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:11.235260Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:11.235285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:11.235298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:11.235310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:11.235323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2297:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... ode 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:43.818511Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8211:6204], schemeshard count = 1 2024-11-21T08:56:45.594755Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:45.594778Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:45.594788Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:45.594794Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:45.596172Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:45.608006Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:45.608149Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:45.608166Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:45.608458Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 2 2024-11-21T08:56:45.608466Z node 2 :STATISTICS WARN: [72075186224037897] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2024-11-21T08:56:45.608472Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:46.576945Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:46.588503Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:46.588590Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:46.588823Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8379:6298], server id = [2:8384:6303], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:46.588968Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8379:6298], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589014Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8380:6299], server id = [2:8385:6304], tablet id = 72075186224037900, status = OK 2024-11-21T08:56:46.589020Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8380:6299], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589275Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:46.589367Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8381:6300], server id = [2:8387:6306], tablet id = 72075186224037901, status = OK 2024-11-21T08:56:46.589375Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8381:6300], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589393Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8382:6301], server id = [2:8386:6305], tablet id = 72075186224037902, status = OK 2024-11-21T08:56:46.589397Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8382:6301], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589421Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T08:56:46.589578Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8379:6298], server id = [2:8384:6303], tablet id = 72075186224037899 2024-11-21T08:56:46.589583Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.589604Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8383:6302], server id = [2:8388:6307], tablet id = 72075186224037903, status = OK 2024-11-21T08:56:46.589610Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8383:6302], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589627Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T08:56:46.589716Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T08:56:46.589744Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8380:6299], server id = [2:8385:6304], tablet id = 72075186224037900 2024-11-21T08:56:46.589747Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.589769Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8389:6308], server id = [2:8391:6310], tablet id = 72075186224037904, status = OK 2024-11-21T08:56:46.589775Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8389:6308], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589795Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T08:56:46.589857Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8382:6301], server id = [2:8386:6305], tablet id = 72075186224037902 2024-11-21T08:56:46.589859Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.589884Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8390:6309], server id = [2:8392:6311], tablet id = 72075186224037905, status = OK 2024-11-21T08:56:46.589889Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8390:6309], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.589921Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8381:6300], server id = [2:8387:6306], tablet id = 72075186224037901 2024-11-21T08:56:46.589926Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.589978Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T08:56:46.589995Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8383:6302], server id = [2:8388:6307], tablet id = 72075186224037903 2024-11-21T08:56:46.589997Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.590017Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8393:6312], server id = [2:8396:6315], tablet id = 72075186224037906, status = OK 2024-11-21T08:56:46.590022Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8393:6312], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.590034Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8394:6313], server id = [2:8395:6314], tablet id = 72075186224037907, status = OK 2024-11-21T08:56:46.590038Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8394:6313], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.590080Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T08:56:46.590104Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8397:6316], server id = [2:8398:6317], tablet id = 72075186224037908, status = OK 2024-11-21T08:56:46.590108Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8397:6316], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.590165Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8389:6308], server id = [2:8391:6310], tablet id = 72075186224037904 2024-11-21T08:56:46.590168Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.590174Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T08:56:46.590230Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8390:6309], server id = [2:8392:6311], tablet id = 72075186224037905 2024-11-21T08:56:46.590233Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.590251Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T08:56:46.590263Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T08:56:46.590267Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:46.590291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:46.590318Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:46.590391Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:56:46.590419Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8393:6312], server id = [2:8396:6315], tablet id = 72075186224037906 2024-11-21T08:56:46.590422Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.590895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8394:6313], server id = [2:8395:6314], tablet id = 72075186224037907 2024-11-21T08:56:46.590903Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.590975Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:46.591037Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8397:6316], server id = [2:8398:6317], tablet id = 72075186224037908 2024-11-21T08:56:46.591041Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.594284Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8415:6334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:46.594329Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:46.594334Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8415:6334], StatRequests.size() = 1 2024-11-21T08:56:46.617304Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTIzOTU5NDgtNDNjZjY5YzctOGZiNjcwODUtY2RhOGE1N2U=, TxId: 2024-11-21T08:56:46.617333Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTIzOTU5NDgtNDNjZjY5YzctOGZiNjcwODUtY2RhOGE1N2U=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T08:56:46.617508Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8423:6340]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:46.617590Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:46.618026Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:46.618036Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:46.618642Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T08:56:46.618652Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T08:56:46.618660Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:46.620138Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23352, MsgBus: 13746 2024-11-21T08:56:46.823040Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653901629048434:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.823181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033b5/r3tmp/tmpSYk4kI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23352, node 1 2024-11-21T08:56:46.878399Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:46.878524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:46.878534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:46.878536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:46.878566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13746 TClient is connected to server localhost:13746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:46.918023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.924091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:46.924118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:46.925230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:46.929655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:46.990004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.008378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.016832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.058979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905924017273:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.059001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.093534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.098928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.105741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.112919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.119772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.126668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.135174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905924017764:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.135200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905924017769:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.135200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.135683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.140152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653905924017771:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26376, MsgBus: 61294 2024-11-21T08:56:46.922515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653900494559933:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.922531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033ad/r3tmp/tmpkjnSJF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26376, node 1 2024-11-21T08:56:46.970669Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:46.973082Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:46.973093Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:46.973095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:46.973129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61294 TClient is connected to server localhost:61294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.023069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.023092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.024248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.046542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.049982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.062293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.079966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.089708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.155736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904789528775:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.155766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.176990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.232093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.239143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.245470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.252990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.260148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.267819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904789529281:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.267839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904789529286:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.267841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.268356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.273180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653904789529288:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::UpdateOn+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19999, MsgBus: 63720 2024-11-21T08:56:46.870543Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653903205100642:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.870599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00339d/r3tmp/tmpMCsBsg/pdisk_1.dat 2024-11-21T08:56:46.918452Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19999, node 1 2024-11-21T08:56:46.928037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:46.928056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:46.928058Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:46.928095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63720 TClient is connected to server localhost:63720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:46.970257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:46.971249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:46.971268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:56:46.972289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:46.982369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.042966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.060579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.071849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.140078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907500069482:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.140100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.162789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.217802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.272331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.281129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.336312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.343857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.352826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907500070001:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.352851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.352861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907500070006:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.353358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.357101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653907500070008:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10156, MsgBus: 27404 2024-11-21T08:56:46.965489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653900510399820:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.965509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00337e/r3tmp/tmphFSYYs/pdisk_1.dat 2024-11-21T08:56:47.011416Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10156, node 1 2024-11-21T08:56:47.022272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.022288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.022290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.022321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27404 TClient is connected to server localhost:27404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:47.066827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.066847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.067905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.068873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.079851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.139453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.152998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.162886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.267601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904805368662:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.267643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.271996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.326752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.336908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.343401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.351202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.357977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.365860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904805369179:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.365888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.365890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653904805369184:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.366385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.371070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653904805369186:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3902, MsgBus: 6349 2024-11-21T08:56:46.952682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653903134283003:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.952703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003387/r3tmp/tmpaR2VOy/pdisk_1.dat 2024-11-21T08:56:46.998836Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3902, node 1 2024-11-21T08:56:47.010138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.010154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.010156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.010192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6349 TClient is connected to server localhost:6349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:47.053768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.053800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.054897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.055122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.067732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.127734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.144125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.156139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.203191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907429251848:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.203216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.236631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.241943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.252872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.307677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.316227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.322896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.331271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907429252354:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.331297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.331299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907429252359:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.331816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.336037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653907429252361:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::DeleteOn+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T08:52:47.642328Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.642336Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.642340Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T08:52:47.642444Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T08:52:47.642458Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.642461Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.643106Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005997s 2024-11-21T08:52:47.643226Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T08:52:47.643238Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.643241Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.643256Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007317s 2024-11-21T08:52:47.643389Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T08:52:47.643404Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.643406Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T08:52:47.643426Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007687s 2024-11-21T08:52:47.686197Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732179167686190 2024-11-21T08:52:47.831479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439652873167327763:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:47.831498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:52:47.834488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439652873429839863:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:52:47.834698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430d/r3tmp/tmpY2bIJb/pdisk_1.dat 2024-11-21T08:52:47.869157Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:47.872074Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:52:47.895120Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3117, node 1 2024-11-21T08:52:47.908019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: /home/runner/.ya/build/build_root/jptk/00430d/r3tmp/yandex0aKjAD.tmp 2024-11-21T08:52:47.908033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00430d/r3tmp/yandex0aKjAD.tmp 2024-11-21T08:52:47.908095Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00430d/r3tmp/yandex0aKjAD.tmp 2024-11-21T08:52:47.908142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:52:47.913771Z INFO: TTestServer started on Port 6175 GrpcPort 3117 TClient is connected to server localhost:6175 PQClient connected to localhost:3117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:52:47.931945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:47.931971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:47.934020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:52:47.963881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:52:47.963963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:52:47.963983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:52:47.965575Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:52:47.965890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:52:48.140006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652877462295946:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.140025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652877462295973:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.140032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.140854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:52:48.143152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439652877462296004:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.143221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:52:48.144822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439652877462295975:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:52:48.167562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.172597Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439652877724807469:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:48.172701Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OThiYWFmYzEtN2QzMjFmOWMtOWZmM2I5Mi1mMzYzM2JlNw==, ActorId: [2:7439652877724807429:2277], ActorState: ExecuteState, TraceId: 01jd6ys2x7efhfp7fw574p1qkq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:48.173914Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:48.233827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:52:48.242211Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439652877462296206:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:52:48.242667Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzE0NWYyZjQtNjc5YTNmYjAtYmM1ZDMwODUtY2IxYmUwZDA=, ActorId: [1:7439652877462295943:2299], ActorState: ExecuteState, TraceId: 01jd6ys2wb4rtsd0snfvpaxnps, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:52:48.242950Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:52:48.302963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:3117", true, true, 1000); 2024-11-21T08:52:48.340472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd6ys31z8b5apm7hahr3fye2, Database: , Data ... T08:56:46.639330Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T08:56:46.639338Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T08:56:46.639343Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T08:56:46.639397Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732179406638 2024-11-21T08:56:46.639433Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:56:46.640666Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T08:56:46.640681Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:46.640695Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:56:46.640700Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640702Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T08:56:46.640703Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640706Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T08:56:46.640707Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640710Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T08:56:46.640711Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640714Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T08:56:46.640716Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640718Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T08:56:46.640719Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640722Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T08:56:46.640724Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640727Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T08:56:46.640729Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T08:56:46.640732Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T08:56:46.640734Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:56:46.640737Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T08:56:46.640748Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:56:46.640758Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:56:46.640761Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:56:46.640768Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:56:46.640795Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T08:56:46.640805Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:56:46.640841Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T08:56:46.640850Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:56:46.640863Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732179406638 queuesize 0 startOffset 0 2024-11-21T08:56:46.641047Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 1 } 2024-11-21T08:56:46.641059Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 1 2024-11-21T08:56:46.641063Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 2 2024-11-21T08:56:46.641066Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 3 2024-11-21T08:56:46.641068Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 4 2024-11-21T08:56:46.641070Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 5 2024-11-21T08:56:46.641072Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 6 2024-11-21T08:56:46.641080Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 7 2024-11-21T08:56:46.641082Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 8 2024-11-21T08:56:46.641084Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 9 2024-11-21T08:56:46.641086Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: acknoledged message 10 2024-11-21T08:56:46.641185Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: close. 
Timeout = 0 ms 2024-11-21T08:56:46.641196Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session will now close 2024-11-21T08:56:46.641202Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: aborting 2024-11-21T08:56:46.641465Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: gracefully shut down, all writes complete 2024-11-21T08:56:46.641495Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0] Write session: destroy 2024-11-21T08:56:46.641640Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0 grpc read done: success: 0 data: 2024-11-21T08:56:46.641652Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0 grpc read failed 2024-11-21T08:56:46.641658Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0 grpc closed 2024-11-21T08:56:46.641663Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2310c1da-e5380018-cf02d03c-664c5fee_0 is DEAD 2024-11-21T08:56:46.641962Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:56:46.642006Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:46.642023Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439653902516439175:2643] destroyed 2024-11-21T08:56:46.642039Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10353, MsgBus: 26966 2024-11-21T08:56:47.470238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653905701623563:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:47.470263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00336a/r3tmp/tmpjYEfZK/pdisk_1.dat 2024-11-21T08:56:47.518936Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10353, node 1 2024-11-21T08:56:47.528375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.528402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.528404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.528434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26966 TClient is connected to server localhost:26966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.570661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.570680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.570754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.571821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.581882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.643549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.657441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.666886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.728562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905701625109:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.728586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.754418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.758951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.812845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.819584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.826870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.833794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.842242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905701625616:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905701625621:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.847407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653905701625623:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::Delete-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9233, MsgBus: 10308 2024-11-21T08:56:47.449095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653907774553365:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:47.449225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00337c/r3tmp/tmpVpxPqK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9233, node 1 2024-11-21T08:56:47.496815Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:47.501196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.501213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.501215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.501257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10308 TClient is connected to server localhost:10308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.550120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.550146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.551184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.573028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.583359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.596737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.612739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.624660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.685143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907774554918:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.685173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.704284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.758822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.763600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.770976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.825186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.834463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.842377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907774555437:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653907774555442:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.842870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.847087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653907774555444:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant >> KqpQueryPerf::Delete+QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7969, MsgBus: 63002 2024-11-21T08:56:47.462355Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653906885332812:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:47.462373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003377/r3tmp/tmpXyCtxj/pdisk_1.dat 2024-11-21T08:56:47.506339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7969, node 1 2024-11-21T08:56:47.518258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.518270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.518272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.518306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63002 TClient is connected to server localhost:63002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.559045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.561496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.563552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.563571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.564744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.620493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.634764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.645654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.712131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653906885334358:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.712163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.734369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.739583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.793681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.806097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.860919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.869036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.877310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653906885334876:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.877333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.877358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653906885334881:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.878142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:47.882469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653906885334883:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 11315, MsgBus: 13869 2024-11-21T08:56:47.547914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653905171273255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:47.548053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003368/r3tmp/tmpDwyGXi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11315, node 1 2024-11-21T08:56:47.601610Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:47.601836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:47.601845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:47.601847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:47.601885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13869 TClient is connected to server localhost:13869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:47.637116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.648784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:47.648816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:47.649749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.650046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:47.710275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.728121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.737277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:47.779961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905171274804:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.779985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.807794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.813521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.868109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.922135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.976634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.987936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:47.995845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905171275324:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.995863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.995863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653905171275329:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:47.996381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:48.000898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653905171275331:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> KqpQueryPerf::Insert+QueryService >> KqpWorkload::STOCK [GOOD] >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> KqpQueryPerf::IndexUpsert-QueryService >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::UpdateOn-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 5487, MsgBus: 4373 2024-11-21T08:56:41.170429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653881003077427:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:41.170557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033f9/r3tmp/tmp1NmlhS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5487, node 1 2024-11-21T08:56:41.224024Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:41.224180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:41.224188Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:41.224193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:41.224235Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4373 TClient is connected to server localhost:4373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:41.271317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:41.271341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:41.272414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:41.293288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:41.391603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653881003078037:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.391626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.416065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.473643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.525606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:41.562867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653881003081809:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.562909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.562926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653881003081814:2602], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:41.563613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:56:41.565075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653881003081816:2603], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } took: 0.132762s took: 0.134204s took: 0.134609s took: 0.135325s took: 0.135411s took: 0.135461s took: 0.135900s took: 0.135866s took: 0.136549s took: 0.136963s 2024-11-21T08:56:46.170519Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653881003077427:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:46.170552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:56:46.780427Z node 1 :TX_DATASHARD ERROR: Complete [1732179406826 : 281474976716422] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:46.780843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTQ4YzlkZDEtOGQ5ZGYyZTYtYjJiYzIwYTEtODZjODJjNzM=, ActorId: [1:7439653902477928052:4877], ActorState: ExecuteState, TraceId: 01jd6z0bk799twawkm3t5fwdpd, Create QueryResponse for error on request, msg: 2024-11-21T08:56:46.781200Z node 1 :TX_DATASHARD ERROR: Complete [1732179406826 : 281474976716422] from 72075186224037898 at tablet 72075186224037898, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | took: 1.178730s took: 1.179224s took: 1.179898s took: 1.180042s took: 1.180446s took: 1.180565s took: 1.181200s took: 1.182273s took: 1.182398s took: 1.182769s 2024-11-21T08:56:47.630494Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmUyNDY5MjMtZTJjODUxMzktMjExNjYzYTAtYmYyMzMzMDY=, ActorId: [1:7439653906772900937:6173], ActorState: ExecuteState, TraceId: 01jd6z0cc5a6yv7rnbq04y1sds, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.630735Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODA0ZmUxYTUtODJjZmEwZC05NDhjMTZkLTcyMmZkM2Ew, ActorId: [1:7439653906772900943:6179], ActorState: ExecuteState, TraceId: 01jd6z0cc56etdq02zpbtzfbbz, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.630905Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODE3MzgzNDktNTJkNTQyNC0zNTkxMDY4Yi1kZTVjY2QzMA==, ActorId: [1:7439653906772900936:6172], ActorState: ExecuteState, TraceId: 01jd6z0cc5fwayw2vww7p869dy, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.631008Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTRkNmVkZTYtYzNiMjFiMTMtZmI0YmNkYjgtZjNlNWM0NzY=, ActorId: [1:7439653906772900939:6175], ActorState: ExecuteState, TraceId: 01jd6z0cc302ngx7jexw89akjb, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.631292Z node 1 :TX_DATASHARD ERROR: Complete [1732179407674 : 281474976716660] from 72075186224037910 at tablet 72075186224037910, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631610Z node 1 :TX_DATASHARD ERROR: Complete [1732179407672 : 281474976716661] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631635Z node 1 :TX_DATASHARD ERROR: Complete [1732179407673 : 281474976716659] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631645Z node 1 :TX_DATASHARD ERROR: Complete [1732179407674 : 281474976716660] from 72075186224037929 at 
tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631649Z node 1 :TX_DATASHARD ERROR: Complete [1732179407675 : 281474976716662] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631654Z node 1 :TX_DATASHARD ERROR: Complete [1732179407676 : 281474976716658] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631802Z node 1 :TX_DATASHARD ERROR: Complete [1732179407675 : 281474976716662] from 72075186224037893 at tablet 72075186224037893, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631850Z node 1 :TX_DATASHARD ERROR: Complete [1732179407673 : 281474976716659] from 72075186224037923 at tablet 72075186224037923, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.631983Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM5ZjU0YmQtYjQ3MGMwNTYtZDk4NzJkNTItNWU5YzkzYjE=, ActorId: [1:7439653906772900941:6177], ActorState: ExecuteState, TraceId: 01jd6z0cc1ent3hpsyycfz404w, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.632357Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjkzMWUwOWItNTRjMDI3NmYtOTY1YTQ1ZGEtNmJjYzNkNjM=, ActorId: [1:7439653906772900933:6169], ActorState: ExecuteState, TraceId: 01jd6z0cc5226d64sxw56gj4yd, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T08:56:47.633015Z node 1 :TX_DATASHARD ERROR: Complete [1732179407672 : 281474976716661] from 72075186224037896 at tablet 72075186224037896, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.633037Z node 1 :TX_DATASHARD ERROR: Complete [1732179407676 : 281474976716658] from 72075186224037896 at tablet 72075186224037896, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.633139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjU2NDk2M2QtZWUwMzViYmUtNjc3N2ZmMjYtMjhiYmEwZGU=, ActorId: [1:7439653906772900940:6176], ActorState: ExecuteState, TraceId: 01jd6z0cc56czcp89hqsf93w6q, Create QueryResponse for error on request, msg: 2024-11-21T08:56:47.633656Z node 1 :TX_DATASHARD ERROR: Complete [1732179407680 : 281474976716663] from 72075186224037929 at tablet 72075186224037929, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T08:56:47.634364Z node 1 :TX_DATASHARD ERROR: Complete [1732179407680 : 281474976716663] from 72075186224037910 at tablet 72075186224037910, error: EXECUTION_CANCELLED (Distributed t ... 
8:56:48.702033Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2024-11-21T08:56:48.702061Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2024-11-21T08:56:48.702066Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2024-11-21T08:56:48.702132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2024-11-21T08:56:48.702137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2024-11-21T08:56:48.702138Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T08:56:48.702571Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2024-11-21T08:56:48.702580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T08:56:48.702808Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2024-11-21T08:56:48.702814Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2024-11-21T08:56:48.702978Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2024-11-21T08:56:48.702984Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T08:56:48.703240Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2024-11-21T08:56:48.703607Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2024-11-21T08:56:48.703816Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2024-11-21T08:56:48.704129Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2024-11-21T08:56:48.704515Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2024-11-21T08:56:48.704801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T08:56:48.704808Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2024-11-21T08:56:48.704979Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2024-11-21T08:56:48.704999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2024-11-21T08:56:48.705003Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2024-11-21T08:56:48.705005Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 
2024-11-21T08:56:48.705129Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2024-11-21T08:56:48.705376Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2024-11-21T08:56:48.705499Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:56:48.705805Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2024-11-21T08:56:48.706043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T08:56:48.706152Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2024-11-21T08:56:48.706291Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2024-11-21T08:56:48.706373Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2024-11-21T08:56:48.706401Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2024-11-21T08:56:48.706432Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T08:56:48.706529Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2024-11-21T08:56:48.706697Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2024-11-21T08:56:48.706945Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2024-11-21T08:56:48.707085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T08:56:48.707094Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2024-11-21T08:56:48.732689Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2024-11-21T08:56:48.732706Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2024-11-21T08:56:48.732708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2024-11-21T08:56:48.732710Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2024-11-21T08:56:48.732712Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2024-11-21T08:56:48.732714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2024-11-21T08:56:48.732716Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2024-11-21T08:56:48.732717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 
2024-11-21T08:56:48.732719Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2024-11-21T08:56:48.732721Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2024-11-21T08:56:48.732722Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2024-11-21T08:56:48.732724Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2024-11-21T08:56:48.732726Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2024-11-21T08:56:48.732727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2024-11-21T08:56:48.732729Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2024-11-21T08:56:48.732731Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2024-11-21T08:56:48.732733Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2024-11-21T08:56:48.732735Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2024-11-21T08:56:48.732737Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2024-11-21T08:56:48.732739Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2024-11-21T08:56:48.732741Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2024-11-21T08:56:48.732743Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2024-11-21T08:56:48.732745Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2024-11-21T08:56:48.732747Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2024-11-21T08:56:48.732749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2024-11-21T08:56:48.732751Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2024-11-21T08:56:48.733028Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2024-11-21T08:56:48.733042Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2024-11-21T08:56:48.733043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2024-11-21T08:56:48.733044Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2024-11-21T08:56:48.733046Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 
2024-11-21T08:56:48.733047Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2024-11-21T08:56:48.733049Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2024-11-21T08:56:48.733050Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2024-11-21T08:56:48.733051Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2024-11-21T08:56:48.733052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2024-11-21T08:56:48.733053Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2024-11-21T08:56:48.733083Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2024-11-21T08:56:48.733088Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2024-11-21T08:56:48.733089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 5803, MsgBus: 26580 2024-11-21T08:56:48.037960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653910108370864:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:48.038068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00335c/r3tmp/tmpxCXCjw/pdisk_1.dat 2024-11-21T08:56:48.084559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5803, node 1 2024-11-21T08:56:48.094749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:48.094763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:48.094765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:48.094798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26580 TClient is connected to server localhost:26580 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:48.139022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:48.139050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:48.140094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:48.142282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.151735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.212058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.225432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.233872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.309815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653910108372428:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:48.309847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:48.332182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.386517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.393618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.400765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.407539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.414997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:48.423982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653910108372936:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:48.424018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:48.424081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653910108372941:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:48.424841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:48.428028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653910108372943:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::IndexUpdateOn+QueryService >> KqpQueryPerf::Update+QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:56:44.920340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:44.920382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:44.920387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:44.920394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:44.928251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:44.928286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:44.928322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:44.928428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:44.977857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:44.977881Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:44.982327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:44.995330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:44.995401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:44.997489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:44.997556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:45.018269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:45.032959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:45.038303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:45.063289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:45.063313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:45.063358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2024-11-21T08:56:45.063367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:45.063371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:45.063388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.064940Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:56:45.103738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:45.110763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.110848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:45.110890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:45.110897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.111901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:45.111936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:45.111989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.111998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:45.112002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:45.112006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:45.112464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.112488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:45.112492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:45.112800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.112806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.112809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:45.112814Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:45.113306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:45.113746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:45.133550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:45.133835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:45.133870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:45.133887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:45.133960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:45.133967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:45.133993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:45.134005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:45.134652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:45.134659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:45.134692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:45.134695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:45.134747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:45.134752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:45.134761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:45.134764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:45.134768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:45.134772Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:45.134775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:45.134777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:45.134785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:45.134789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:45.134791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:45.135066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:45.135076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:45.135079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:45.135082Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:45.135085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:45.135093Z node 1 ... 01:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T08:56:48.889063Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T08:56:48.889087Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:56:48.889092Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, ProgressState 2024-11-21T08:56:48.889095Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T08:56:48.889107Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:48.889155Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 10 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.889160Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 10 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.889163Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:48.889165Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], 
version: 10 2024-11-21T08:56:48.889167Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:48.889240Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.889246Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.889248Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:48.889250Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T08:56:48.889252Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:56:48.889258Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T08:56:48.889674Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T08:56:48.889690Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T08:56:48.889866Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:48.889880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:48.889884Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T08:56:48.889894Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:56:48.889903Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T08:56:48.889919Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:48.889924Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:56:48.889971Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890005Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T08:56:48.890251Z node 16 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:48.890256Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:56:48.890269Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T08:56:48.890280Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:48.890283Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T08:56:48.890286Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T08:56:48.890313Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:56:48.890317Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T08:56:48.890324Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:56:48.890326Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:56:48.890329Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T08:56:48.890335Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:56:48.890337Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:56:48.890339Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:56:48.890346Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T08:56:48.890349Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T08:56:48.890351Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T08:56:48.890353Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T08:56:48.890388Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890393Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890396Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:48.890398Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T08:56:48.890400Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:56:48.890423Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:48.890426Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:56:48.890430Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:48.890445Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890449Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890451Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:56:48.890453Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T08:56:48.890455Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:48.890459Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T08:56:48.890932Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:56:48.890948Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:56:48.890956Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:56:48.890981Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:56:48.890984Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:56:48.891020Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:56:48.891030Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:56:48.891032Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:437:2429] TestWaitNotification: OK eventTxId 1006 >> KqpQueryPerf::Delete+QueryService [GOOD] >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> TTxAllocatorClientTest::ZeroRange [GOOD] >> KqpQueryPerf::Insert-QueryService >> KqpQueryPerf::IndexUpsert+QueryService >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_index/unittest |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> KqpQueryPerf::Insert+QueryService [GOOD] >> KqpQueryPerf::UpdateOn-QueryService [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25214, MsgBus: 15890 2024-11-21T08:56:48.733546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653911667698848:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:48.733576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003344/r3tmp/tmpwyIDpI/pdisk_1.dat 2024-11-21T08:56:48.774664Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25214, node 1 2024-11-21T08:56:48.784309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:48.784326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:48.784327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:48.784363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15890 TClient is connected to server localhost:15890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:48.833824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:48.833858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:48.834976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:48.858391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:48.870958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.932566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.948085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:48.958790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.025821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915962667684:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.025854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.056866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.063133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.073429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.127885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.135439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.142823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.151413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915962668200:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.151430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915962668205:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.151435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.151897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.155798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653915962668207:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2024-11-21T08:56:43.911988Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T08:56:43.918470Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T08:56:43.926551Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T08:56:43.974495Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:43.999116Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T08:56:44.031197Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031232Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031239Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031248Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T08:56:44.031272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.031284Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T08:56:44.031295Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T08:56:44.042051Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T08:56:44.042221Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042228Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T08:56:44.042245Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T08:56:44.042249Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20980, MsgBus: 14629 2024-11-21T08:56:48.935460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653911677303052:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:48.935474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00333e/r3tmp/tmpKHnqt9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20980, node 1 2024-11-21T08:56:48.989571Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:48.991841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:48.991849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:48.991851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:48.991882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14629 TClient is connected to server localhost:14629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:49.036292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.036314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.037398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.067405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.075521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.089179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.106681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.115523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.194501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915972271892:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.194536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.216854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.222800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.234338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.289537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.296917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.304394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.313065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915972272410:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.313095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.313188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915972272415:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.313798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.317120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653915972272417:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> KqpQueryPerf::Update+QueryService [GOOD] |90.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::IndexInsert+QueryService >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::Upsert-QueryService >> KqpQueryPerf::IndexUpdateOn+QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService [GOOD] >> KqpQueryPerf::Update-QueryService >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] >> KqpQueryPerf::DeleteOn-QueryService >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8052, MsgBus: 25252 2024-11-21T08:56:49.224295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653915762286395:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.224312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032fe/r3tmp/tmpimL6IA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8052, node 1 2024-11-21T08:56:49.281683Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:49.284256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.284268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.284270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.284298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25252 TClient is connected to server localhost:25252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:49.324938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.324961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.326058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.351445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.360007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.423077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.439577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.451810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.472226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915762287944:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.472256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.495351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.501255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.507098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.514064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.521356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.528322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.535567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915762288435:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.535590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.535601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653915762288440:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.536161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.541287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653915762288442:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26909, MsgBus: 14894 2024-11-21T08:56:49.079893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653912926858616:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.080180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00333d/r3tmp/tmp5v9nl3/pdisk_1.dat 2024-11-21T08:56:49.120765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26909, node 1 2024-11-21T08:56:49.131625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.131638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.131639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.131667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14894 TClient is connected to server localhost:14894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.181221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.181246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.182283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.204113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.214911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.226495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.239597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.249255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.312526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912926860163:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.312561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.335788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.390752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.445163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.450693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.458028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.465745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.473150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912926860681:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.473178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.473181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912926860686:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.473678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.478352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653912926860688:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |90.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2090, MsgBus: 17570 2024-11-21T08:56:49.108452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653916044312988:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.108585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003325/r3tmp/tmpt7mrtp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2090, node 1 2024-11-21T08:56:49.164965Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:49.167839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.167856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.167857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.167897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17570 TClient is connected to server localhost:17570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:56:49.209782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.209814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.210879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.212198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.223631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.284008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.299238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.312486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.371224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916044314539:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.371249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.404405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.409916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.415804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.422434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.477494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.486203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.494555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916044315046:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.494577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916044315051:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.494589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.495127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.499352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653916044315053:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:56:49.650913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.656668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.668177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17025, MsgBus: 19523 2024-11-21T08:56:49.440603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653912733262467:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.440633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e6/r3tmp/tmpjM6NpO/pdisk_1.dat 2024-11-21T08:56:49.489343Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17025, node 1 2024-11-21T08:56:49.495923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.495941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.495943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.495977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19523 TClient is connected to server localhost:19523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.538317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.541059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.541077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.541089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:56:49.542208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.599402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.612659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.621503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.679043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912733264001:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.679072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.697784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.752570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.758880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.765770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.773141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.780673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.788789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912733264507:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.788815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.788825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653912733264512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.789470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.793605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653912733264514:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18585, MsgBus: 15036 2024-11-21T08:56:49.193919Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653913711930629:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.193937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032fd/r3tmp/tmp52aTF2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18585, node 1 2024-11-21T08:56:49.249054Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:49.251955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.251967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.251968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.251998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15036 TClient is connected to server localhost:15036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.294591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.294625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.295702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.325283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.335999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.395249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.408182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.416338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.461393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653913711932170:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.461417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.484283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.538576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.549169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.603411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.657558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.667860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.675746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653913711932689:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.675764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.675772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653913711932694:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.676334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.681255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653913711932696:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 30103, MsgBus: 16383 2024-11-21T08:56:49.777300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653916133603437:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.777529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032c7/r3tmp/tmpM48K9R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30103, node 1 2024-11-21T08:56:49.833412Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:49.833560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.833568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.833570Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.833603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16383 TClient is connected to server localhost:16383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.878379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.878402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.879536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.901818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.908376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.968430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.985707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.040226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.059240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920428572276:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.059269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.078548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.133357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.143938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.150540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.157751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.165009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.173183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920428572778:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.173205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.173208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920428572783:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.173706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.178031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653920428572785:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 62761, MsgBus: 62469 2024-11-21T08:56:49.752764Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653914091550726:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.752782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032c4/r3tmp/tmpoNu6Hf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62761, node 1 2024-11-21T08:56:49.807309Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:49.809116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.809126Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.809128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.809155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62469 TClient is connected to server localhost:62469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.854185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.854210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.855262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.884227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.885718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:49.893009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.952268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.965444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.976532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.999922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653914091552281:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.999946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.024519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.030325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.084682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.094580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.149122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.157825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.166679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653918386520095:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.166705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653918386520100:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.166705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.167309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.171442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653918386520102:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:50.319944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.326862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.340247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14866, MsgBus: 26052 2024-11-21T08:56:49.405320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653914082489607:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.405335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f7/r3tmp/tmpe5z7os/pdisk_1.dat 2024-11-21T08:56:49.446930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14866, node 1 2024-11-21T08:56:49.459508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.459518Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.459519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.459541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26052 TClient is connected to server localhost:26052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:49.506255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.506274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.507405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.530737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.542923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.602121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.617073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.626322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.676823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653914082491153:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.676852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.697643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.703197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.710004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.763918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.818930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.829168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.844905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653914082491671:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.844939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.844954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653914082491676:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.845626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.849419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653914082491678:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:49.979759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.985841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.997117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24263, MsgBus: 27320 2024-11-21T08:56:49.486185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653916052264385:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.486289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032ef/r3tmp/tmphk3WXF/pdisk_1.dat 2024-11-21T08:56:49.524745Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24263, node 1 2024-11-21T08:56:49.535255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.535267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.535268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.535299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27320 TClient is connected to server localhost:27320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:49.587237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.587262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.588303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.608633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.617081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.677731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.690949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.703665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.725649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916052265931:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.725673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.747973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.753855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.758850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.765783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.820201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.829120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:49.837676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916052266424:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.837701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.837708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653916052266429:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.838245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:49.842668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653916052266431:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1895, MsgBus: 24666 2024-11-21T08:56:49.745949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653914564256585:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:49.746127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032da/r3tmp/tmp8FbKyE/pdisk_1.dat 2024-11-21T08:56:49.786227Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1895, node 1 2024-11-21T08:56:49.798582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:49.798595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:49.798596Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:49.798628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24666 TClient is connected to server localhost:24666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:49.847258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:49.847283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:49.848383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:49.870215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.876094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.889597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:49.908741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.920809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:49.983876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653914564258151:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:49.983897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.008376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.014299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.024961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.032571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.038519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.045905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.054429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653918859225941:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.054446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.054466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653918859225946:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.054998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.059570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653918859225948:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::Upsert-QueryService [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> KqpQueryPerf::IndexInsert+QueryService [GOOD] >> KqpQueryPerf::DeleteOn-QueryService [GOOD] >> KqpPg::InsertFromSelect_Simple >> KqpPg::InsertNoTargetColumns_Simple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31860, MsgBus: 1457 2024-11-21T08:56:50.294708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653917422502653:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.294878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00328f/r3tmp/tmpuipxSF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31860, node 1 2024-11-21T08:56:50.350755Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:50.351055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:50.351068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:50.351070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:50.351105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1457 TClient is connected to server localhost:1457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:50.395491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:50.395523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:50.396559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:50.426092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:50.427523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:56:50.430899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.490579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.506355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.515526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.536712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917422504206:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.536734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.558699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.613358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.668153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.722089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.731763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.738933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.747580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917422504726:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.747607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917422504731:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.747617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.748231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.751917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653917422504733:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpPg::EmptyQuery >> KqpPg::NoTableQuery >> KqpPg::ValuesInsert >> KqpPg::CreateTableBulkUpsertAndRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 22941, MsgBus: 16868 2024-11-21T08:56:50.445031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653917727551787:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.445048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00321b/r3tmp/tmpYblYbv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22941, node 1 2024-11-21T08:56:50.499455Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:50.501764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:50.501775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:50.501777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:50.501803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16868 TClient is connected to server localhost:16868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:50.545713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:50.545738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:50.546832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:50.576894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.586215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:50.646159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.659334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.669254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.688619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917727553326:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.688640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.709057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.714242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.724528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.731586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.739008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.745737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.801505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917727553820:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.801526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.801526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653917727553825:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.801970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.807835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653917727553827:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9717, MsgBus: 27926 2024-11-21T08:56:50.285052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653920831924044:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.285160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032a6/r3tmp/tmpkCQPOf/pdisk_1.dat 2024-11-21T08:56:50.322225Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9717, node 1 2024-11-21T08:56:50.332074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:50.332084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:50.332085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:50.332107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27926 TClient is connected to server localhost:27926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:50.386016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:50.386045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:50.387058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:50.407465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.416926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.477701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:50.490210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.545230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.562500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920831925593:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.562524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.582248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.587074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.641098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.695544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.703664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.710974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.719489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920831926100:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.719507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653920831926105:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.719510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.720041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:50.724067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653920831926107:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:56:50.884781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.890789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.899955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::Update-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13210, MsgBus: 61374 2024-11-21T08:56:50.730176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653919433400423:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.730191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003211/r3tmp/tmpr4mCWF/pdisk_1.dat 2024-11-21T08:56:50.769255Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13210, node 1 2024-11-21T08:56:50.780413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:50.780425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:50.780427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:50.780453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61374 TClient is connected to server localhost:61374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:50.830897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:50.830929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:50.831924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:50.854279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.863731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.923549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.937617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.949842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.968601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653919433401964:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.968619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:50.989393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:50.994305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.004693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.011957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.019467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.026111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.037151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653923728369751:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.037174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.037182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653923728369756:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.037917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:51.046412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653923728369758:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |90.4%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardTxOrder::ZigZag_oo >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8278, MsgBus: 26906 2024-11-21T08:56:50.851271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653920887400982:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.851489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00320d/r3tmp/tmpUvINvY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8278, node 1 2024-11-21T08:56:50.901889Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:50.904723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:50.904739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:50.904741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:50.904778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26906 TClient is connected to server localhost:26906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:56:50.952226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:50.952260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:50.953213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:50.979703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:50.987688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:51.050326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:51.067275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:51.076250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:51.098047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653925182369817:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.098069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.120856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.126941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.138737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.193568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.200908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.208026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:56:51.216118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653925182370312:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.216141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653925182370317:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.216144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:51.216672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:56:51.221086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653925182370319:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:50.617425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:50.617443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.617447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:50.617452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:50.617470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:50.617474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:50.617483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.617547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:55:50.625424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:50.625438Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.627019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:50.627084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:50.627112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:50.629543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:55:50.629648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:50.629760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.629983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:50.630894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.631176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.631189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.631205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:50.631212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.631220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:50.631262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:50.632798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.651286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:50.651376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.651447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:50.651518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:50.651528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.652455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.652488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:50.652566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.652582Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:50.652587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:50.652592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:50.653083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.653099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:50.653104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:50.653449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.653476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.653482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.653489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.654068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:50.654468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:50.654527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:50.654722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.654749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:50.654757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.654830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:50.654840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.654872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:50.654886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T08:55:50.655342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.655354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.655399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.655408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:50.655502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.655512Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:50.655526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:50.655531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.655538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:50.655544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.655550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:50.655555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:50.655568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:50.655575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:50.655581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
9.414782Z node 234 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:56:49.415229Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:49.415700Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:49.415907Z node 234 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T08:56:49.416357Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:56:49.416405Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2024-11-21T08:56:49.416473Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:49.416487Z node 234 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:56:49.416568Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:49.416574Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2024-11-21T08:56:49.416586Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T08:56:49.416590Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:56:49.416594Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 Forgetting tablet 72075186233409549 2024-11-21T08:56:49.416653Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:56:49.416667Z node 234 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:56:49.416963Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:56:49.416997Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:56:49.417284Z node 234 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T08:56:49.417567Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:49.417595Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:56:49.417721Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:56:49.417737Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T08:56:49.418199Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:56:49.418213Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:56:49.418258Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2024-11-21T08:56:49.418311Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:49.418318Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2024-11-21T08:56:49.418333Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2024-11-21T08:56:49.418339Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2024-11-21T08:56:49.418344Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:56:49.418348Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T08:56:49.418353Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:56:49.418356Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:56:49.418361Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:56:49.418365Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:56:49.418370Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:49.418819Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:56:49.418833Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:56:49.418869Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:56:49.418874Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2024-11-21T08:56:49.419176Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:56:49.419184Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:56:49.419364Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:56:49.419403Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:56:49.419407Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:56:49.419459Z node 234 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:56:49.419477Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:56:49.419485Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [234:788:2729] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:56:49.419548Z node 234 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:49.419589Z node 234 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 50us result status StatusSuccess 2024-11-21T08:56:49.419662Z node 234 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:49.419704Z node 234 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:49.419720Z node 234 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1" took 16us result status StatusPathDoesNotExist 2024-11-21T08:56:49.419737Z node 234 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirB/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:56:49.419779Z node 234 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:49.419790Z node 234 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:56:49.419797Z node 234 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2024-11-21T08:54:07.838197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:07.838235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:07.838243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abe/r3tmp/tmpAllurU/pdisk_1.dat 2024-11-21T08:54:07.908115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61351, node 1 2024-11-21T08:54:07.998916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:07.998935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:07.998938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:07.999000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.003493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.078630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.078661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.090061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6543 2024-11-21T08:54:08.484118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.216072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.216091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.248424Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.249036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.300088Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.309015Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.309042Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.315448Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.315595Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.315616Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.315622Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.315628Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.315634Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.315639Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.315645Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.315755Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.489158Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.489179Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.490034Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:09.491412Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:09.491490Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.491988Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.495492Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.495504Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.495512Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.496920Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.496940Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.497926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.499017Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.499037Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.500802Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.512158Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.533410Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.640538Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:09.795343Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.509740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.509768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.512235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.533465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.533507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.533536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.533553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.533566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.533578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.533592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.533605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.533617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.533631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.533644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.533655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.537414Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:10.537449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:10.537461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:10.537466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:10.537478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:10.537482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:10.537489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... olumn count# 0 2024-11-21T08:56:46.165506Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2024-11-21T08:56:46.165512Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2024-11-21T08:56:46.165517Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2024-11-21T08:56:46.165524Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:56:46.165545Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:46.165713Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:56:46.165882Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:46.165908Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:46.166002Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:56:46.166050Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:46.166055Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:46.166177Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:46.219998Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:46.220049Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:56:46.220233Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7598:5689], server id = [2:7599:5690], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:46.220276Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7598:5689], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:46.220513Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:46.220527Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:46.220585Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:46.220619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:46.220696Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:46.220746Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7598:5689], server id = [2:7599:5690], tablet id = 72075186224037899 2024-11-21T08:56:46.220751Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:46.221325Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:46.224446Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7616:5707]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:46.224480Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:46.224485Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7616:5707], StatRequests.size() = 1 2024-11-21T08:56:46.257823Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWRjNDEyNmQtYWZhZmYwM2ItZTZhZjYxZDctMTkyYjYyNzE=, TxId: 2024-11-21T08:56:46.257847Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWRjNDEyNmQtYWZhZmYwM2ItZTZhZjYxZDctMTkyYjYyNzE=, TxId: 2024-11-21T08:56:46.257992Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:46.268821Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7628:5713] 2024-11-21T08:56:46.268887Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7628:5713], schemeshard id = 72075186224037889 2024-11-21T08:56:46.268903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7553:5662], server id = [2:7629:5714], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:46.268923Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7629:5714] 2024-11-21T08:56:46.268936Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7629:5714], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
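The [TQueryBase] RunDataQuery entries above show how a finished traversal is persisted: one row per (owner_id, local_path_id, stat_type, column_tag) is upserted into `.metadata/_statistics`, with the serialized sketch carried in the data column (the element types of the List parameters appear to have been stripped when this log was captured). What follows is only a rough, runnable model of that keying, with Python's sqlite3 standing in for YQL; the table name stats and the stat_type value are illustrative assumptions, not YDB's schema or API.

# Model of the `.metadata/_statistics` upsert pattern seen in the trace above;
# SQLite stands in for YQL here, so syntax details intentionally differ from YDB.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    """CREATE TABLE stats (
           owner_id      INTEGER,
           local_path_id INTEGER,
           stat_type     INTEGER,
           column_tag    INTEGER,
           data          BLOB,
           PRIMARY KEY (owner_id, local_path_id, stat_type, column_tag)
       )"""
)

def persist(owner_id, local_path_id, stat_type, column_tags, payloads):
    # One row per column tag; re-running a traversal overwrites the previous
    # sketch for the same key instead of accumulating duplicate rows.
    rows = [
        (owner_id, local_path_id, stat_type, tag, blob)
        for tag, blob in zip(column_tags, payloads)
    ]
    conn.executemany(
        """INSERT INTO stats (owner_id, local_path_id, stat_type, column_tag, data)
           VALUES (?, ?, ?, ?, ?)
           ON CONFLICT (owner_id, local_path_id, stat_type, column_tag)
           DO UPDATE SET data = excluded.data""",
        rows,
    )

persist(72075186224037889, 4, 1, [1, 2], [b"sketch-a", b"sketch-b"])
persist(72075186224037889, 4, 1, [1, 2], [b"sketch-a2", b"sketch-b2"])  # re-run overwrites
assert conn.execute("SELECT COUNT(*) FROM stats").fetchone()[0] == 2

The owner and path ids above are taken from the trace ({ OwnerId: 72075186224037889 LocalId: 4 }); everything else in the example is invented for illustration.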
2024-11-21T08:56:46.280092Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:46.280110Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:46.416831Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7635:5717] 2024-11-21T08:56:46.417050Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2671:3168] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T08:56:46.417058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2671:3168] 2024-11-21T08:56:46.417073Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T08:56:46.995894Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T08:56:46.995926Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:56:47.709730Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:47.709768Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:47.709997Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:47.721566Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:47.721720Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:47.721732Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T08:56:47.733258Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:49.132875Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:49.132907Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:49.132927Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T08:56:49.132993Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T08:56:49.133139Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T08:56:49.133169Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T08:56:49.144677Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T08:56:50.491980Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:50.492010Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:50.492014Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:56:51.815743Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:51.815807Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T08:56:51.815812Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:51.815991Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:51.827223Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:51.827336Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:51.827348Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:51.827438Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:51.849200Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:51.849263Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T08:56:51.849399Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7802:5814], server id = [2:7803:5815], tablet id = 72075186224037899, status = OK 2024-11-21T08:56:51.849426Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7802:5814], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:56:51.849553Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:56:51.849560Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:56:51.849594Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:51.849616Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:51.849660Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:51.849707Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7802:5814], server id = [2:7803:5815], tablet id = 72075186224037899 2024-11-21T08:56:51.849710Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:51.850178Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:51.873893Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODUzMGZhN2QtYTVjYmYyZjQtZTY3YWU4YWEtYjVkNGRiZWQ=, TxId: 2024-11-21T08:56:51.873919Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODUzMGZhN2QtYTVjYmYyZjQtZTY3YWU4YWEtYjVkNGRiZWQ=, TxId: 2024-11-21T08:56:51.874064Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:51.885537Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:51.885560Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2671:3168] >> KqpPg::ReadPgArray |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableInsert |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::EmptyQuery [GOOD] >> KqpPg::DuplicatedColumns >> KqpPg::NoTableQuery [GOOD] >> KqpPg::PgCreateTable |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::InsertNoTargetColumns_Simple [GOOD] >> KqpPg::InsertNoTargetColumns_Serial |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::DuplicatedColumns [GOOD] >> KqpPg::DropTablePg |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> KqpPg::InsertNoTargetColumns_Serial [GOOD] >> KqpPg::InsertValuesFromTableWithDefault |90.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2024-11-21T08:53:32.875543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:506:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:32.875595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:53:32.875612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00382e/r3tmp/tmpDBFw7W/pdisk_1.dat 2024-11-21T08:53:32.984621Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3279, node 1 2024-11-21T08:53:33.080387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:33.080413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:33.080418Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:33.080465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:33.086100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:33.162362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:33.162404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:33.174679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27816 2024-11-21T08:53:33.614725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:34.480513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.480556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.523899Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T08:53:34.524686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.582376Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:34.591698Z node 4 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:34.591725Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:34.600313Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:34.600569Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:34.600598Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:34.600614Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:34.600623Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:34.600629Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:34.600635Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:34.600644Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:34.600779Z node 4 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:34.791931Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.791964Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:1879:2552], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.792795Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1886:2558] 2024-11-21T08:53:34.794818Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1911:2571] 2024-11-21T08:53:34.794939Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:1911:2571], schemeshard id = 72075186224037889 2024-11-21T08:53:34.796197Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T08:53:34.800550Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:34.800570Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:34.800582Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T08:53:34.802166Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.802198Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.804342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:34.806177Z node 4 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:34.806216Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:34.809734Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:34.822164Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.856244Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:35.011623Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:35.179314Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T08:53:35.934045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:53:36.777229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.777271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.777407Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:36.777439Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:36.800355Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:53:36.801753Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.820998Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:36.846367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T08:53:36.846722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:36.947926Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T08:53:36.947953Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:36.947964Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2936:2905], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T08:53:36.948514Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:2938:2906] 2024-11-21T08:53:36.948555Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:2938:2906], schemeshard id = 72075186224037899 2024-11-21T08:53:37.882767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:53:38.591297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:38.591328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:38.591431Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:38.591442Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:38.603127Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:38.603795Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:38.604001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:38.604540Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:38.616892Z node 4 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T08:53:38.870910Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T08:53:38.870936Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T08:53:38.870950Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3766:3112], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T08:53:38.871159Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:3767:3113] 2024-11-21T08:53:38.871225Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:3767:3113], schemeshard id = 72075186224037905 2024-11-21T08:53:39.980450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3902:3381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:39.980513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:39.984604Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T08:53:40.022002Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[4:3987:3212];tablet_id=72075186224037911;process=TTxInitSchema::Execute;f ... , TxId: 2024-11-21T08:56:48.264428Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:48.276147Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:48.276168Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:48.350047Z node 4 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:48.350077Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:48.402363Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11879:7494], schemeshard count = 1 2024-11-21T08:56:48.753878Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T08:56:48.753910Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 196.000000s, at schemeshard: 72075186224037899 2024-11-21T08:56:48.753973Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2024-11-21T08:56:48.766050Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:50.734480Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:50.734509Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:50.734520Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T08:56:50.734524Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:50.735460Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:50.747600Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:50.747759Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:50.747773Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:50.748069Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T08:56:50.759524Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:50.759595Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2024-11-21T08:56:50.759800Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:12048:7591], server id = [4:12049:7592], tablet id = 72075186224037911, status = OK 2024-11-21T08:56:50.759952Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:12048:7591], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T08:56:50.760181Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2024-11-21T08:56:50.760190Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2024-11-21T08:56:50.760278Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:12048:7591], server id = [4:12049:7592], tablet id = 72075186224037911 2024-11-21T08:56:50.760282Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:50.760300Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:50.760332Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:50.760392Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T08:56:50.760956Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:50.764406Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:12066:7609]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:50.764499Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:50.764505Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:12066:7609], StatRequests.size() = 1 2024-11-21T08:56:50.797697Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NDI1Y2IwZTEtZGRjNzhmZTUtMzUwMjFjYjQtZjY5M2IwMzA=, TxId: 2024-11-21T08:56:50.797720Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NDI1Y2IwZTEtZGRjNzhmZTUtMzUwMjFjYjQtZjY5M2IwMzA=, TxId: 2024-11-21T08:56:50.797980Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:50.809503Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T08:56:50.809521Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
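The TYPE_COUNT_MIN_SKETCH analyze request earlier in this log and the ColumnStatistics::CountMinSketchServerlessStatistics trace above both revolve around count-min sketches: fixed-size counter arrays that each shard can fill locally and that an aggregator can combine before writing one blob per column. The snippet below is a generic Python illustration of that structure, not YDB's C++ implementation; the width, depth and salted-hash bucket scheme are arbitrary choices made for brevity.

# Generic count-min sketch, for illustration only (not YDB's implementation).
import hashlib

class CountMinSketch:
    def __init__(self, width=2048, depth=4):
        self.width, self.depth = width, depth
        self.table = [[0] * width for _ in range(depth)]

    def _bucket(self, row, item):
        # One bucket per row, derived from a row-salted hash of the item.
        digest = hashlib.sha256(f"{row}:{item}".encode()).digest()
        return int.from_bytes(digest[:8], "big") % self.width

    def add(self, item, count=1):
        for row in range(self.depth):
            self.table[row][self._bucket(row, item)] += count

    def estimate(self, item):
        # Collisions only inflate counts, so the minimum across rows is an
        # upper bound on the true frequency that never underestimates.
        return min(self.table[row][self._bucket(row, item)] for row in range(self.depth))

    def merge(self, other):
        # Same-sized sketches merge by element-wise addition, which is what
        # makes per-shard sketches cheap to aggregate on a single tablet.
        for row in range(self.depth):
            for col in range(self.width):
                self.table[row][col] += other.table[row][col]

# Two "shards" sketch their local column values; the aggregator merges them.
shard_a, shard_b = CountMinSketch(), CountMinSketch()
for value in ("x", "x", "y"):
    shard_a.add(value)
shard_b.add("x")
shard_a.merge(shard_b)
assert shard_a.estimate("x") >= 3

Because merging is plain addition, the order in which per-tablet responses arrive does not affect the combined sketch, which fits the request/response rounds visible in the trace.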
2024-11-21T08:56:50.841171Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2024-11-21T08:56:50.841199Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 189.000000s, at schemeshard: 72075186224037905 2024-11-21T08:56:50.841253Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2024-11-21T08:56:50.864199Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:51.486877Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T08:56:51.486906Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:56:53.582033Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:53.582120Z node 4 :STATISTICS DEBUG: EvPropagateStatistics, node id = 4 2024-11-21T08:56:53.613594Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:53.613624Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:56:53.613630Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2024-11-21T08:56:53.613634Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T08:56:53.614562Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:53.627029Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:53.627211Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:53.627224Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:53.627373Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:53.639067Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:56:53.639144Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2024-11-21T08:56:53.639315Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:12215:7686], server id = [4:12216:7687], tablet id = 72075186224037912, status = OK 2024-11-21T08:56:53.639341Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:12215:7686], path = { OwnerId: 72075186224037905 LocalId: 2 } 2024-11-21T08:56:53.639489Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2024-11-21T08:56:53.639496Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2024-11-21T08:56:53.639523Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:56:53.639551Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:56:53.639632Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2024-11-21T08:56:53.639682Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:12215:7686], server id = [4:12216:7687], tablet id = 72075186224037912 2024-11-21T08:56:53.639687Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:56:53.640300Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:56:53.654340Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NjFiOTI1ZjEtYmI1NmFkOGItYTcyNmYxZWMtY2FmMjhkNTE=, TxId: 2024-11-21T08:56:53.654368Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NjFiOTI1ZjEtYmI1NmFkOGItYTcyNmYxZWMtY2FmMjhkNTE=, TxId: 2024-11-21T08:56:53.654676Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:53.655016Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:12231:5999]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:53.655100Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:53.655107Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:53.655761Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:53.655777Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T08:56:53.655787Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:53.659195Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T08:56:53.659488Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:12231:5999]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:53.659554Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:53.659557Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T08:56:53.659614Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:56:53.659620Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T08:56:53.659625Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T08:56:53.660273Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain |90.5%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/services/ext_index/ut/unittest |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropIndex >> KqpPg::InsertValuesFromTableWithDefault [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool >> KqpWorkload::KV [GOOD] >> KqpPg::DropIndex [GOOD] >> KqpPg::DropSequence >> KqpPg::InsertValuesFromTableWithDefaultBool [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TRtmrTestReboots::CreateRtmrVolumeWithReboots >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> KqpPg::DropSequence [GOOD] >> KqpPg::DropTableIfExists |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpPg::InsertFromSelect_Simple [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> KqpPg::DropTableIfExists [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> KqpPg::InsertFromSelect_NoReorder >> KqpPg::InsertNoTargetColumns_SerialNotNull >> KqpOlapScheme::DropTableAfterInsert >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> KqpPg::DropTableIfExists_GenericQuery >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] >> KqpPg::InsertFromSelect_NoReorder [GOOD] >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::TableInsert [GOOD] >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpPg::ValuesInsert [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> BasicUsage::RecreateObserver 
[GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:55:50.817754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:55:50.817777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.817782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:55:50.817786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:55:50.817792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:55:50.817796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:55:50.817804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:55:50.817872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T08:55:50.825323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:55:50.825342Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.826975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:55:50.827069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:55:50.827104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:55:50.829516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:55:50.829599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:55:50.829687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.829835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:50.830408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.830643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.830654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.830666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:55:50.830672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.830677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:55:50.830711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:55:50.831901Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:55:50.842420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:55:50.842477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.842529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:55:50.842571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:55:50.842575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:55:50.843215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:55:50.843226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:55:50.843229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:55:50.843509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:55:50.843808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.843827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.843832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.844230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:55:50.844539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:55:50.844573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:55:50.844705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:55:50.844723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:55:50.844728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.844781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:55:50.844788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:55:50.844807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:55:50.844815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:55:50.845085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:55:50.845091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:55:50.845122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:55:50.845125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:55:50.845183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:55:50.845187Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:55:50.845194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:55:50.845196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.845200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:55:50.845203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:55:50.845205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:55:50.845208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:55:50.845214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:55:50.845218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:55:50.845220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
calPathId: 1] was 2 2024-11-21T08:56:54.562310Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:54.562473Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.562675Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T08:56:54.562954Z node 249 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:54.562961Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:54.562990Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:56:54.563011Z node 249 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:54.563015Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [249:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T08:56:54.563020Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [249:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T08:56:54.563088Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:56:54.563095Z node 249 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1005:0 ProgressState 2024-11-21T08:56:54.563104Z node 249 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:56:54.563107Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:56:54.563112Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T08:56:54.563115Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:56:54.563119Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:56:54.563122Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:56:54.563133Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:56:54.563138Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T08:56:54.563141Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:56:54.563144Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:56:54.563258Z node 249 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.563268Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.563272Z node 249 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:56:54.563276Z node 249 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:56:54.563280Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:56:54.563411Z node 249 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.563421Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.563425Z node 249 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:56:54.563429Z node 249 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:56:54.563433Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:56:54.563442Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T08:56:54.563479Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:56:54.563484Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:56:54.563492Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:54.564068Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.564083Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:56:54.565160Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2024-11-21T08:56:54.565216Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:56:54.565221Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T08:56:54.565233Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:56:54.565235Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 
2024-11-21T08:56:54.565324Z node 249 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:56:54.565342Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:56:54.565345Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [249:820:2761] 2024-11-21T08:56:54.565350Z node 249 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:56:54.565362Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:56:54.565364Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [249:820:2761] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T08:56:54.565424Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:56:54.565433Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:56:54.565441Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:56:54.565447Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:56:54.565454Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:56:54.565462Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T08:56:54.565469Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T08:56:54.565486Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T08:56:54.565492Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T08:56:54.565499Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T08:56:54.565585Z node 249 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:56:54.565610Z node 249 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 35us result status StatusSuccess 2024-11-21T08:56:54.565669Z node 249 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> DataShardTxOrder::ZigZag_oo [GOOD] >> TFileStoreWithReboots::CheckFileStoreSSDLimits >> KqpPg::InsertFromSelect_Serial >> KqpPg::TableArrayInsert >> KqpPg::CreateTableSerialColumns >> PgCatalog::PgType >> KqpPg::InsertFromSelect_Serial [GOOD] >> KqpPg::CreateTableSerialColumns [GOOD] >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> KqpPg::EquiJoin >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> TConsoleTests::TestAlterServerlessTenant >> KqpPg::CreateUniqPgColumn >> TConsoleTests::TestCreateSubSubDomain >> KqpPg::CreateUniqPgColumn [GOOD] >> KqpPg::CreateUniqComplexPgColumn >> KqpPg::CreateUniqComplexPgColumn [GOOD] >> KqpPg::CreateTempTable >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> KqpPg::PgCreateTable [GOOD] >> KqpPg::TableArrayInsert [GOOD] >> KqpPg::Returning >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> KqpPg::EquiJoin [GOOD] >> PgCatalog::CheckSetConfig [GOOD] >> KqpPg::PgUpdate >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder >> KqpPg::InsertNoTargetColumns_ColumnOrder [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize >> KqpPg::InsertNoTargetColumns_NotOneSize [GOOD] >> KqpPg::InsertNoTargetColumns_Alter >> KqpPg::InsertNoTargetColumns_Alter [GOOD] >> KqpPg::ExplainColumnsReorder >> PgCatalog::PgDatabase >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] >> KqpOlapScheme::DropTableAfterInsert [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> KqpPg::PgUpdate [GOOD] >> KqpPg::Returning [GOOD] >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> AnalyzeColumnshard::AnalyzeDeadline 
[GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> KqpPg::DeleteWithQueryService >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> KqpPg::ExplainColumnsReorder [GOOD] >> PgCatalog::PgDatabase [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> KqpPg::PgUpdateCompoundKey >> KqpPg::SelectIndex >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> KqpPg::DeleteWithQueryService [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2024-11-21T08:56:51.971873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:51.971899Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:51.971917Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:51.974355Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:51.974476Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:56:51.974531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:51.975221Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:51.982250Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:51.982377Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:56:51.982490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:56:51.982502Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:56:51.982507Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:56:51.982538Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:56:51.985037Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:56:51.985094Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:56:51.985130Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:56:51.985135Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:56:51.985138Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:56:51.985142Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2024-11-21T08:56:51.985224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:51.985230Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:51.985252Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:56:51.985269Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:56:51.985327Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:51.985333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:56:51.985340Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:56:51.985345Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:56:51.985347Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:56:51.985352Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:56:51.985356Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:51.991511Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:51.991528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:51.991535Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:56:51.991842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:56:51.991849Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:56:51.991866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:56:51.991885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:56:51.991892Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:56:51.991899Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:56:51.991904Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:51.991907Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:56:51.991910Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:56:51.991912Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:51.991963Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:56:51.991966Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit 
StoreSchemeTx 2024-11-21T08:56:51.991969Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:56:51.991971Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:51.991977Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:56:51.991979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:56:51.991982Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:56:51.991984Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:51.991987Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:56:52.013132Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:56:52.013165Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:52.013173Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:52.013189Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:56:52.013206Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:56:52.013339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:52.013347Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:52.013356Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:56:52.013379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:56:52.013384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:56:52.013434Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:52.013445Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:52.013449Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:56:52.013454Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:56:52.014170Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:56:52.014188Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:52.014244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:52.014248Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:52.014255Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:52.014260Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:56:52.014264Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:56:52.014270Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:56:52.014274Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:56:52.014279Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:52.014282Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:56:52.014285Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:56:52.014288Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:56:52.014326Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:56:52.014328Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:52.014330Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:56:52.014333Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:56:52.014335Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:56:52.014343Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:52.014345Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:56:52.014347Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:56:52.014349Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:56:52.014361Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:56:52.014363Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:56:52.014365Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:56:52.014369Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:52.014371Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:56:52.014374Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2024-11-21T08:57:00.494745Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:00.494747Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T08:57:00.494749Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T08:57:00.494751Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T08:57:00.494804Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T08:57:00.494809Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:57:00.494815Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:00.494817Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T08:57:00.494819Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T08:57:00.494821Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:57:00.494855Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T08:57:00.494858Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T08:57:00.494860Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T08:57:00.494862Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T08:57:00.494865Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:00.494866Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T08:57:00.494869Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T08:57:00.494871Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:00.494874Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:57:00.494876Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T08:57:00.494878Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T08:57:00.494900Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:228:2223], Recipient [6:228:2223]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:00.494903Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:00.494906Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:00.494908Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:57:00.494924Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:00.494927Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2024-11-21T08:57:00.494929Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2024-11-21T08:57:00.494932Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.494933Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2024-11-21T08:57:00.494935Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:57:00.494938Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2024-11-21T08:57:00.495014Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T08:57:00.495018Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495020Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:57:00.495022Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T08:57:00.495024Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T08:57:00.495027Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495029Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T08:57:00.495031Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:57:00.495033Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:57:00.495037Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2024-11-21T08:57:00.495039Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2024-11-21T08:57:00.495040Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2024-11-21T08:57:00.495044Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495046Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:57:00.495047Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T08:57:00.495050Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2024-11-21T08:57:00.495054Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495056Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T08:57:00.495058Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T08:57:00.495062Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2024-11-21T08:57:00.495064Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495065Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T08:57:00.495067Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T08:57:00.495070Z node 6 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2024-11-21T08:57:00.495072Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495074Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T08:57:00.495076Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T08:57:00.495078Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2024-11-21T08:57:00.495080Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495082Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T08:57:00.495084Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:57:00.495086Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T08:57:00.495116Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T08:57:00.495119Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:57:00.495123Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495125Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:57:00.495127Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T08:57:00.495130Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:57:00.495156Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T08:57:00.495158Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T08:57:00.495160Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T08:57:00.495174Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T08:57:00.495177Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:00.495179Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T08:57:00.495181Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T08:57:00.495183Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:00.495185Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:00.495186Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:57:00.495188Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:57:00.506299Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T08:57:00.506326Z node 6 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T08:57:00.506342Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:00.506353Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:57:00.506376Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:00.506388Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:00.506459Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T08:57:00.506464Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T08:57:00.506472Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:57:00.506476Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:57:00.506484Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:00.506489Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2024-11-21T08:55:15.719939Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1732179315719931 2024-11-21T08:55:15.848059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653511802226395:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.848122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:15.850814Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653512889893730:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:15.892485Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003337/r3tmp/tmpdnFY6L/pdisk_1.dat 2024-11-21T08:55:15.904466Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:15.905741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:15.946011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64178, node 1 2024-11-21T08:55:15.972418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003337/r3tmp/yandexczZ2Qx.tmp 2024-11-21T08:55:15.972432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003337/r3tmp/yandexczZ2Qx.tmp 
2024-11-21T08:55:15.972495Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003337/r3tmp/yandexczZ2Qx.tmp 2024-11-21T08:55:15.972543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:15.976760Z INFO: TTestServer started on Port 23974 GrpcPort 64178 TClient is connected to server localhost:23974 PQClient connected to localhost:64178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:55:15.997797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.997832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:15.998359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:15.998374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:16.000919Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:16.000959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:16.001057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:16.004183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:16.012431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:55:16.218606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653517184861165:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.218640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.218921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653517184861178:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:16.222451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:55:16.236195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653517184861180:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T08:55:16.297036Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653516097194538:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:16.297140Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjgwZWJjNmQtYjFiYmVmZjItZTNjZjEzN2EtYTU1ZjBmYzA=, ActorId: [1:7439653516097194520:2299], ActorState: ExecuteState, TraceId: 01jd6yxkgs8tbj8frp4p4s34f7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:16.297414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.297650Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:16.330336Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653517184861223:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:16.330478Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmMwMzg2YzUtZDAxMzYwYWQtZjI1ZDEzMGUtMjg4ZDY3MzQ=, ActorId: [2:7439653517184861162:2277], ActorState: ExecuteState, TraceId: 01jd6yxkfsejmathpz0ptvd1by, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:16.330789Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:16.364407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:16.439370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:64178", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:16.481642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd6yxkqbek007ya91wrvbzgt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY5MmVkMDYtZjFiOWRjNDgtZWM5Yjg4MDUtZTY4MjVjZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653516097194975:2946] 2024-11-21T08:55:20.847445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653511802226395:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.847485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:20.852351Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653512889893730:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:20.852403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:21.538272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:64178 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:21.577830Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:64178 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ... session shared/user_3_3_17777381065812192416_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc1 Database path: /Root Database id: account-dc1 CommittedOffset: 0 EndOffset: 0 } 2024-11-21T08:56:58.054516Z :INFO: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054530Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2024-11-21T08:56:58.054537Z :INFO: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 293 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054555Z :NOTICE: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:56:58.054562Z :DEBUG: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] [] Abort session to cluster 2024-11-21T08:56:58.054664Z :INFO: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054669Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:56:58.054672Z :INFO: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 292 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054676Z :NOTICE: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:56:58.054679Z :DEBUG: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] [] Abort session to cluster 2024-11-21T08:56:58.054695Z :INFO: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054699Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:56:58.054701Z :INFO: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Counters: { Errors: 0 CurrentSessionLifetimeMs: 292 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054704Z :NOTICE: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:56:58.054706Z :DEBUG: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] [] Abort session to cluster 2024-11-21T08:56:58.054721Z :INFO: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054722Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:56:58.054723Z :INFO: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Counters: { Errors: 0 CurrentSessionLifetimeMs: 292 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054726Z :NOTICE: [/Root] [/Root] [c9e09b1a-9f9c0dec-21a4e8b5-7754c964] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:56:58.054744Z :INFO: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054745Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T08:56:58.054746Z :INFO: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 292 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054748Z :NOTICE: [/Root] [/Root] [cf81847a-16bd2d3a-4f61b92d-599f68e0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:56:58.054754Z :INFO: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Closing read session. Close timeout: 0.000000s 2024-11-21T08:56:58.054756Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2024-11-21T08:56:58.054757Z :INFO: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 293 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:56:58.054761Z :NOTICE: [/Root] [/Root] [3492c756-7029edb7-d493d97-2e45c87e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:56:58.054803Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_17777381065812192416_v1 grpc read done: success# 0, data# { } 2024-11-21T08:56:58.054810Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_17777381065812192416_v1 grpc read failed 2024-11-21T08:56:58.054812Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_17777381065812192416_v1 grpc closed 2024-11-21T08:56:58.054825Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_17777381065812192416_v1 is DEAD 2024-11-21T08:56:58.054925Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:58.054935Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_17777381065812192416_v1 2024-11-21T08:56:58.054946Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439653954793243797:2518] destroyed 2024-11-21T08:56:58.054946Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243790:2511] disconnected; active server actors: 1 2024-11-21T08:56:58.054952Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243790:2511] client user disconnected session shared/user_3_3_17777381065812192416_v1 2024-11-21T08:56:58.054955Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_17777381065812192416_v1 2024-11-21T08:56:58.054961Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T08:56:58.054973Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T08:56:58.054986Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_3_2_14976625254625871473_v1" (Sender=[3:7439653954793243782:2510], Pipe=[3:7439653954793243794:2510], Partitions=[], ActiveFamilyCount=0) 2024-11-21T08:56:58.054991Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] consumer user family 1 status Active partitions [0] session "shared/user_3_2_14976625254625871473_v1" sender [3:7439653954793243782:2510] lock partition 0 for ReadingSession "shared/user_3_2_14976625254625871473_v1" (Sender=[3:7439653954793243782:2510], Pipe=[3:7439653954793243794:2510], Partitions=[], ActiveFamilyCount=1) generation 1 step 2 2024-11-21T08:56:58.054999Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2024-11-21T08:56:58.055001Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000022s 2024-11-21T08:56:58.055004Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_1889016610537008295_v1 grpc read done: success# 0, data# { } 2024-11-21T08:56:58.055006Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_14976625254625871473_v1 grpc read done: success# 0, data# { } 2024-11-21T08:56:58.055007Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_14976625254625871473_v1 grpc read failed 2024-11-21T08:56:58.055008Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_14976625254625871473_v1 grpc closed 2024-11-21T08:56:58.055010Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_14976625254625871473_v1 is DEAD 2024-11-21T08:56:58.055024Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1889016610537008295_v1 grpc read failed 2024-11-21T08:56:58.055028Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1889016610537008295_v1 grpc closed 2024-11-21T08:56:58.055031Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1889016610537008295_v1 is DEAD 2024-11-21T08:56:58.055111Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243794:2510] disconnected; active server actors: 1 2024-11-21T08:56:58.055118Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243794:2510] client user disconnected session shared/user_3_2_14976625254625871473_v1 2024-11-21T08:56:58.055121Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T08:56:58.055124Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T08:56:58.055127Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_3_1_1889016610537008295_v1" (Sender=[3:7439653954793243781:2509], Pipe=[3:7439653954793243791:2509], Partitions=[], ActiveFamilyCount=0) 2024-11-21T08:56:58.055130Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] consumer user family 1 status Active partitions [0] session "shared/user_3_1_1889016610537008295_v1" sender [3:7439653954793243781:2509] lock partition 0 for ReadingSession "shared/user_3_1_1889016610537008295_v1" (Sender=[3:7439653954793243781:2509], Pipe=[3:7439653954793243791:2509], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2024-11-21T08:56:58.055133Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T08:56:58.055134Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000009s 2024-11-21T08:56:58.055302Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243791:2509] disconnected; active server actors: 1 2024-11-21T08:56:58.055310Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439653954793243791:2509] client user disconnected session shared/user_3_1_1889016610537008295_v1 |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> PgCatalog::PgRoles |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpPg::PgUpdateCompoundKey [GOOD] >> KqpPg::SelectIndex [GOOD] >> PgCatalog::PgRoles [GOOD] >> KqpPg::PgAggregate >> KqpPg::TableDeleteAllData >> KqpPg::CreateTableIfNotExists_GenericQuery >> KqpPg::PgAggregate [GOOD] >> KqpPg::MkqlTerminate >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> PgCatalog::PgTables >> KqpPg::NoSelectFullScan [GOOD] >> PgCatalog::PgTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2024-11-21T08:56:03.549351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653716844750219:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:03.549621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00386d/r3tmp/tmpanKGzV/pdisk_1.dat 2024-11-21T08:56:03.581560Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:56:03.597671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9764, node 1 2024-11-21T08:56:03.610841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00386d/r3tmp/yandexA9yp65.tmp 2024-11-21T08:56:03.610856Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00386d/r3tmp/yandexA9yp65.tmp 2024-11-21T08:56:03.610920Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00386d/r3tmp/yandexA9yp65.tmp 2024-11-21T08:56:03.610965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:56:03.615949Z INFO: TTestServer started on Port 23217 GrpcPort 9764 TClient is connected to server localhost:23217 PQClient connected to localhost:9764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:03.640865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:56:03.650408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:03.650433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:03.650473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:03.651637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:56:03.858221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653716844750978:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.858244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653716844750953:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.858312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.859068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:56:03.859464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653716844751011:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.859524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:03.860993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653716844750982:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:56:03.897771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:56:03.940560Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653716844751119:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:03.940650Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RlZTUzM2YtYmY5ZjgzZTQtMmUwMmNhNTktZDM4NzcxYzA=, ActorId: [1:7439653716844750950:2304], ActorState: ExecuteState, TraceId: 01jd6yz20h2g93zess0q2mwpb6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:56:03.941165Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:56:03.958498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:56:04.020385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439653721139718622:2594] 2024-11-21T08:56:08.549624Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653716844750219:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:08.549664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:56:09.074956Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:56:09.078504Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:56:09.078867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439653742614555391:2751], Recipient [1:7439653716844750638:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:09.078877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:09.078879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:56:09.078885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439653742614555387:2748], Recipient [1:7439653716844750638:2200]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T08:56:09.078887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:09.084409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:56:09.084506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:56:09.084570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:56:09.084588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:56:09.084597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:56:09.084605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:56:09.084616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 7 ... eIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.527668Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.527687Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.527693Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.527697Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.528646Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653932715597936:2742], Partition 1, Sender [0:0:0], Recipient [5:7439653932715598012:2751], Cookie: 0 2024-11-21T08:57:03.528656Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653932715598012:2751]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.528657Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.528660Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.528665Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.528666Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.528667Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.530695Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653954190435129:2913], Partition 4, Sender [0:0:0], Recipient [5:7439653954190435223:2924], Cookie: 0 2024-11-21T08:57:03.530702Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653954190435223:2924]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.530703Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.530706Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.530711Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.530713Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.530714Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T08:57:03.530726Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653954190435125:2912], Partition 3, Sender [0:0:0], Recipient [5:7439653954190435224:2925], Cookie: 0 2024-11-21T08:57:03.530735Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653954190435224:2925]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.530738Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.530745Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.530758Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.530760Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.530764Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.556328Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653911240760232:2427], Partition 0, Sender [0:0:0], Recipient [5:7439653911240760293:2431], Cookie: 0 2024-11-21T08:57:03.556348Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653911240760293:2431]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.556352Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.556363Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.556383Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.556387Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.556391Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T08:57:03.602920Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7439653885470955349:2137]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:03.602935Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:03.602950Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7439653885470955349:2137], Recipient [5:7439653885470955349:2137]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:03.602952Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:03.628014Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653932715597932:2741], Partition 2, Sender [0:0:0], Recipient [5:7439653932715598010:2749], Cookie: 0 2024-11-21T08:57:03.628035Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653932715598010:2749]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.628039Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.628050Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.628068Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.628070Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.628074Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.628959Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653932715597936:2742], Partition 1, Sender [0:0:0], Recipient [5:7439653932715598012:2751], Cookie: 0 2024-11-21T08:57:03.628966Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653932715598012:2751]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.628968Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.628972Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.628976Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.628977Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.628979Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T08:57:03.631046Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653954190435129:2913], Partition 4, Sender [0:0:0], Recipient [5:7439653954190435223:2924], Cookie: 0 2024-11-21T08:57:03.631053Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653954190435223:2924]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.631056Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.631058Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.631062Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.631063Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.631065Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.631078Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653954190435125:2912], Partition 3, Sender [0:0:0], Recipient [5:7439653954190435224:2925], Cookie: 0 2024-11-21T08:57:03.631094Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653954190435224:2925]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.631097Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.631110Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.631127Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.631133Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.631136Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:57:03.656688Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439653911240760232:2427], Partition 0, Sender [0:0:0], Recipient [5:7439653911240760293:2431], Cookie: 0 2024-11-21T08:57:03.656711Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439653911240760293:2431]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.656715Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:57:03.656729Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:57:03.656752Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:57:03.656759Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:57:03.656763Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2024-11-21T08:54:09.518256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:09.518289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:09.518297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00271c/r3tmp/tmpqmTnD7/pdisk_1.dat 2024-11-21T08:54:09.582359Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17436, node 1 2024-11-21T08:54:09.670877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:09.670890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:09.670893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:09.670947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:09.674691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.748171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.748193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.759127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28631 2024-11-21T08:54:10.156112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:10.887172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:10.887194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:10.919485Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:10.920102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:10.972880Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:10.981978Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:10.982005Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:10.988129Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:10.988321Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:10.988345Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:10.988349Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:10.988353Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:10.988357Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:10.988361Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:10.988366Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:10.988475Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:11.160155Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.160177Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:11.161059Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:11.162519Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:11.162586Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:11.163048Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:11.166033Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:11.166043Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:11.166051Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:11.167187Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:11.167205Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:11.168125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:11.169217Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:11.169237Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:11.171204Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:11.182625Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:11.204111Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:11.321531Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:11.477311Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:12.193117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.193146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:12.195664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:12.216015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:12.216052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:12.216075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:12.216091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:12.216102Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:12.216117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:12.216129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:12.216141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:12.216154Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:12.216166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:12.216180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:12.216192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:12.220303Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:12.220323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:12.220336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:12.220341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:12.220354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:12.220358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:12.220366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... StatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:56:54.267967Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T08:56:54.268004Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 227.000000s, at schemeshard: 72075186224037889 2024-11-21T08:56:54.268114Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2024-11-21T08:56:54.279545Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:56:55.833586Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:55.833616Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:55.833622Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T08:56:55.833633Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T08:56:55.833638Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:55.833729Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:56:55.834723Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T08:56:55.835592Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7289:5513], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:55.835611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7300:5518], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:55.835629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:55.837799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T08:56:55.848597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7303:5521], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T08:56:56.034034Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7420:5583]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:56:56.034081Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:56:56.034092Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7422:5585] 2024-11-21T08:56:56.034102Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7422:5585] 2024-11-21T08:56:56.034186Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7423:5586] 2024-11-21T08:56:56.034210Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7422:5585], server id = [2:7423:5586], tablet id = 72075186224037897, status = OK 2024-11-21T08:56:56.034232Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7423:5586], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:56:56.034242Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T08:56:56.034264Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:56:56.034275Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7420:5583], StatRequests.size() = 1 2024-11-21T08:56:56.049370Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODAwMGZjYzUtNjQyNGIwMGQtNmJlODNjODAtNzFkYTIxMmU=, TxId: 2024-11-21T08:56:56.049394Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODAwMGZjYzUtNjQyNGIwMGQtNmJlODNjODAtNzFkYTIxMmU=, TxId: 2024-11-21T08:56:56.049512Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:56:56.060958Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T08:56:56.060980Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T08:56:56.164316Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:56:56.164353Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:56:56.247374Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7422:5585], schemeshard count = 1 2024-11-21T08:56:57.317929Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:56:57.317964Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:57.318707Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:57.330580Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:57.330734Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:57.330743Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T08:56:57.342058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:56:58.787593Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:56:58.787630Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T08:56:58.787637Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T08:56:58.787646Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:56:58.787651Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:56:58.787960Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:56:58.799217Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T08:56:58.799254Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:56:58.799359Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:56:58.799374Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2024-11-21T08:56:58.799639Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:56:59.179657Z node 2 :STATISTICS ERROR: [72075186224037897] Delete long analyze operation, OperationId=operationId 2024-11-21T08:56:59.333384Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:56:59.333455Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:57:01.069527Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:57:01.069555Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:57:01.069558Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:57:01.069561Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:57:03.178666Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T08:57:03.178688Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 195.000000s, at schemeshard: 72075186224037889 2024-11-21T08:57:03.178734Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2024-11-21T08:57:03.293163Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:57:03.293200Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2674:3169] 2024-11-21T08:57:03.293209Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T08:57:03.293227Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T08:57:03.293358Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7690:5698], server id = [2:7691:5699], tablet id = 72075186224037899, status = OK 2024-11-21T08:57:03.293402Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7690:5698], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:57:03.293587Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:57:03.293595Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:57:03.293639Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7690:5698], server id = [2:7691:5699], tablet id = 72075186224037899 2024-11-21T08:57:03.293642Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:57:03.293652Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:57:03.293668Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:57:03.293707Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:57:03.294198Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:57:03.297210Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7722:5718]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:57:03.297243Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:57:03.297248Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7722:5718], StatRequests.size() = 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 17553, MsgBus: 61658 2024-11-21T08:56:45.142326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653897916405681:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.142481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0033d6/r3tmp/tmpnt2MRj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17553, node 1 2024-11-21T08:56:45.190130Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:45.193873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:45.193883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:45.193885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:45.193911Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61658 TClient is connected to server localhost:61658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:45.240150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:45.243778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.243805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.245747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:45.398006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897916406293:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.398026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.421184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:45.460434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897916407773:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.460458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.460490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653897916407778:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:45.461137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:45.462612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653897916407780:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:56:50.142709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653897916405681:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:50.142786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.029959s took: 0.029950s took: 0.029971s took: 0.030655s took: 0.030687s took: 0.030671s took: 0.030742s took: 0.030715s took: 0.030782s took: 0.030923s took: 0.087002s took: 0.086994s took: 0.087041s took: 0.087103s took: 0.087344s took: 0.087331s took: 0.087451s took: 0.087500s took: 0.087501s took: 0.088120s took: 0.039049s took: 0.039090s took: 0.039059s took: 0.039088s took: 0.039119s took: 0.039110s took: 0.039208s took: 0.039310s took: 0.039386s took: 0.039380s took: 0.004402s took: 0.004724s took: 0.004745s took: 0.004798s took: 0.004862s took: 0.004882s took: 0.004870s took: 0.005406s took: 0.005790s took: 0.005802s took: 0.013412s took: 0.014981s took: 0.022596s took: 0.024038s took: 0.024578s took: 0.025819s took: 0.025918s took: 0.034422s took: 0.034411s took: 0.034646s 2024-11-21T08:56:55.861021Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2024-11-21T08:56:55.861611Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2024-11-21T08:56:55.861624Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2024-11-21T08:56:55.861625Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2024-11-21T08:56:55.861626Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T08:56:55.861627Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T08:56:55.861628Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T08:56:55.861629Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2024-11-21T08:56:55.861630Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2024-11-21T08:56:55.861631Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2024-11-21T08:56:55.861632Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2024-11-21T08:56:55.861634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2024-11-21T08:56:55.861635Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2024-11-21T08:56:55.861636Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T08:56:55.861637Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037904 not found 2024-11-21T08:56:55.861638Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2024-11-21T08:56:55.861639Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T08:56:55.861640Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2024-11-21T08:56:55.861641Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2024-11-21T08:56:55.861642Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2024-11-21T08:56:55.861643Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T08:56:55.861644Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2024-11-21T08:56:55.861645Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2024-11-21T08:56:55.861646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T08:56:55.861647Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2024-11-21T08:56:55.861648Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T08:56:55.861650Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T08:56:55.861651Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2024-11-21T08:56:55.861677Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2024-11-21T08:56:55.861678Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2024-11-21T08:56:55.862999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2024-11-21T08:56:55.863696Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2024-11-21T08:56:55.863707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T08:56:55.863868Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2024-11-21T08:56:55.863873Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2024-11-21T08:56:55.863875Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:56:55.863876Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2024-11-21T08:56:55.863877Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037907 not found 2024-11-21T08:56:55.863878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2024-11-21T08:56:55.863879Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 64852, MsgBus: 7433 2024-11-21T08:56:51.905197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653924748374444:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c4/r3tmp/tmprCJtlU/pdisk_1.dat 2024-11-21T08:56:52.208509Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64852, node 1 2024-11-21T08:56:52.220402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.220430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.221612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.224293Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224353Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.469687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7433 TClient is connected to server localhost:7433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:53.000692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.141952Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2024-11-21T08:56:53.209175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.217615Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2024-11-21T08:56:53.226836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.283453Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2024-11-21T08:56:53.290474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.303505Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2024-11-21T08:56:53.310911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.324323Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2024-11-21T08:56:53.331474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.345531Z node 1 :READ_TABLE_API WARN: 
ForgetAction occurred, send TEvPoisonPill f f t t 2024-11-21T08:56:53.352822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.366512Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2024-11-21T08:56:53.373161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.388237Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2024-11-21T08:56:53.395307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.409826Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.416171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.473695Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.486696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.499569Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T08:56:53.506927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.520658Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T08:56:53.528219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.541689Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.549953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.607026Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.619452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.632786Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 
2024-11-21T08:56:53.640244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.653103Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T08:56:53.661971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.719822Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.731275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.744456Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T08:56:53.751541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710730:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.767130Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T08:56:53.780620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboper ... raceId: 01jd6z0taj29hvpv6xe7dsn0zd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:57:01.525566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:01.526694Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037889 not found 2024-11-21T08:57:01.532334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715665:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18165, MsgBus: 1987 2024-11-21T08:57:01.638301Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653967197666980:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:01.638437Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c4/r3tmp/tmprJEDmS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18165, node 7 2024-11-21T08:57:01.651684Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:01.659482Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:01.659508Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:01.659511Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:01.659559Z node 7 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:1987 TClient is connected to server localhost:1987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:01.738613Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:01.738643Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:01.739680Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:01.740422Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:01.885283Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653967197667579:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.885334Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.886379Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:01.894130Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653967197667681:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.894155Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.894158Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653967197667686:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.894796Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:01.903159Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653967197667688:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 8959, MsgBus: 21188 2024-11-21T08:57:02.104822Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439653970115517461:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:02.104881Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c4/r3tmp/tmpU4b4dM/pdisk_1.dat 2024-11-21T08:57:02.111998Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8959, node 8 2024-11-21T08:57:02.126889Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:02.126904Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:02.126906Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:02.126941Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21188 TClient is connected to server localhost:21188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:02.205046Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:02.205075Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:02.206158Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:02.206827Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:02.327897Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653970115518044:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.327918Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.327950Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653970115518071:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.328482Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:02.329987Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439653970115518073:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:02.412169Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:02.419634Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439653970115518239:2318], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Table name conflict: db.[/Root/test] is used to reference multiple tables. 2024-11-21T08:57:02.419693Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=NmY5N2RiNDctNDIwNzAwM2QtM2EwN2FmYTUtYWI0MmU1MA==, ActorId: [8:7439653970115518232:2314], ActorState: ExecuteState, TraceId: 01jd6z0v6hdevt90vmqz0bsxyh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:57:02.435904Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2024-11-21T08:57:02.436779Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439653970115518344:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:57:02.436840Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=NjAwOGJmYTItZDQxYzBlZmUtZjIzZGUzNWItODgzM2Y3Y2I=, ActorId: [8:7439653970115518338:2343], ActorState: ExecuteState, TraceId: 01jd6z0v734zh5r6gkf46a2vzc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:229:2060] recipient: [1:212:2140] 2024-11-21T08:56:04.577805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:04.577822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:04.577826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:04.577829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:04.577832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:04.577835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:04.577840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:04.577895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:04.585339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:04.585355Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:04.587335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:04.587357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:04.587376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:04.588865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:04.588916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:04.589009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:04.589065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:04.589572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:04.589783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T08:56:04.589790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:04.589814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:04.589819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:04.589823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:04.589836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.590786Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:339:2060] recipient: [1:17:2064] 2024-11-21T08:56:04.601643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:04.601711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.601769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:04.601804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:04.601809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.602413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:04.602432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:04.602475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.602482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:04.602485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:04.602488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:04.602776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.602783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:04.602787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:04.603029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.603035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.603039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.603045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.603411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:04.603676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:04.603713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:04.603838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:04.603854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:04.603861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.603894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:04.603898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:04.603917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:04.603925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:04.604225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:04.604234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:04.604266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:04.604269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:306:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:04.604344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:04.604348Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:04.604357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:04.604359Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.604364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:04.604367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:04.604370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:04.604372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:04.604381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:04.604385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:04.604387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:04.604573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:04.604581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:04.604585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:04.604588Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:04.604591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:04.604601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
chemeshard: 72057594046678944, txId: 106 2024-11-21T08:56:55.341564Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T08:56:55.341568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:56:55.341574Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T08:56:55.341576Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:55.341972Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:55.342050Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T08:56:55.342054Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T08:56:55.342250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T08:56:55.342254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T08:56:55.342299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T08:56:55.342303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T08:56:55.342346Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:666:2497], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.342350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.342353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T08:56:55.342369Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2024-11-21T08:56:55.342372Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T08:56:55.342380Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T08:56:55.342391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T08:56:55.342394Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:664:2495] 2024-11-21T08:56:55.342407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:666:2497], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.342410Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.342412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2024-11-21T08:56:55.342463Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 
2024-11-21T08:56:55.342466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:55.342891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:55.342924Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T08:56:55.342939Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, at schemeshard: 72057594046678944 2024-11-21T08:56:55.342970Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:55.343287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:55.343309Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T08:56:55.343313Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T08:56:55.343360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T08:56:55.343363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T08:56:55.343395Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:672:2503], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.343398Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.343401Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T08:56:55.343411Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2024-11-21T08:56:55.343414Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T08:56:55.343419Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
2024-11-21T08:56:55.343429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T08:56:55.343432Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:670:2501] 2024-11-21T08:56:55.343444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:672:2503], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.343446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.343448Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T08:56:55.343485Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 2024-11-21T08:56:55.343487Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:56:55.343811Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:55.343831Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T08:56:55.343835Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2024-11-21T08:56:55.343854Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T08:56:55.344142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:55.344160Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T08:56:55.344165Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T08:56:55.344234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T08:56:55.344239Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T08:56:55.344281Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:678:2509], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.344286Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:55.344289Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T08:56:55.344305Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2024-11-21T08:56:55.344308Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T08:56:55.344315Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T08:56:55.344330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T08:56:55.344334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:676:2507] 2024-11-21T08:56:55.344350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:678:2509], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.344355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:56:55.344358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:02.594997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:02.595038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:02.595044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:02.595049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:02.595074Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:02.595078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:02.595087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:02.595195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:02.634918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:02.634940Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:02.637218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:02.637742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:02.637770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:02.646606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:02.646813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:02.662398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:02.669236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:02.678982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:02.700697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:02.700730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:02.700778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:02.700793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:02.700800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:02.700819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.702711Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:57:02.730869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:02.738729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.738834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:02.738882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:02.738893Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.739766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:02.739797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:02.739839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.739846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:02.739849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:02.739852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:02.740154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.740169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:02.740172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:02.740485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.740492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.740496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:02.740500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:02.741036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:02.741404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:02.750100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:02.750434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:02.750487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:02.750507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:02.750602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:02.750610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:02.750646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:02.750660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:02.751397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:02.751407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:02.751451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:02.751456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:02.751544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:02.751552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:02.751564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:02.751569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:02.751575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:02.751579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:02.751584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:02.751588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:02.751600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:02.751606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:02.751611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:02.751969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:02.751990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:02.751995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:02.752000Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2024-11-21T08:57:02.752006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:02.752020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... T08:57:03.209547Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T08:57:03.209594Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type FileStore, boot OK, tablet id 72075186233409549 2024-11-21T08:57:03.209606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T08:57:03.209609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 110, shardIdx: 72057594046678944:4, partId: 0 2024-11-21T08:57:03.209618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T08:57:03.209622Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T08:57:03.209625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T08:57:03.209635Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 2 -> 3 2024-11-21T08:57:03.209789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T08:57:03.210008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T08:57:03.210270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T08:57:03.210287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T08:57:03.210291Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:03.210696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2024-11-21T08:57:03.210715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2024-11-21T08:57:03.211328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2024-11-21T08:57:03.211349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2024-11-21T08:57:03.211354Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 
72057594046678944 2024-11-21T08:57:03.211358Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 3 -> 128 2024-11-21T08:57:03.211837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T08:57:03.211954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T08:57:03.211959Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:03.211964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2024-11-21T08:57:03.211982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:03.212241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2024-11-21T08:57:03.212258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2024-11-21T08:57:03.212301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212318Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212328Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 128 -> 240 2024-11-21T08:57:03.212346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:57:03.212352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2024-11-21T08:57:03.212759Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:03.212788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T08:57:03.212800Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, 
path id: 1 2024-11-21T08:57:03.212808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 5 2024-11-21T08:57:03.212853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T08:57:03.212857Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 110:0 ProgressState 2024-11-21T08:57:03.212866Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2024-11-21T08:57:03.212869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T08:57:03.212873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2024-11-21T08:57:03.212876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T08:57:03.212879Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 110:0 2024-11-21T08:57:03.212881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 110:0 2024-11-21T08:57:03.212897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:57:03.212901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2024-11-21T08:57:03.212903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T08:57:03.212905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2024-11-21T08:57:03.212977Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T08:57:03.212983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T08:57:03.212986Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2024-11-21T08:57:03.212988Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T08:57:03.212991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:57:03.213061Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T08:57:03.213066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T08:57:03.213068Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2024-11-21T08:57:03.213071Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T08:57:03.213073Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:57:03.213077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 110, subscribers: 0 2024-11-21T08:57:03.213631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T08:57:03.213667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2024-11-21T08:57:03.213708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2024-11-21T08:57:03.213712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2024-11-21T08:57:03.213756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2024-11-21T08:57:03.213768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2024-11-21T08:57:03.213771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:669:2620] TestWaitNotification: OK eventTxId 110 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 19038, MsgBus: 29819 2024-11-21T08:56:51.905216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653923386571909:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c9/r3tmp/tmpfoD2ct/pdisk_1.dat 2024-11-21T08:56:52.208527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19038, node 1 2024-11-21T08:56:52.220406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.220429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.221362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.223858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:56:52.224170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653927681539500:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:56:52.224567Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224573Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.469649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469662Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29819 TClient is connected to server localhost:29819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2024-11-21T08:56:53.000872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgbool; DECLARE $key1 as pgbool; DECLARE $value0 as pgbool; DECLARE $value1 as pgbool; INSERT INTO `Pg16_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:53.139151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653931976507005:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.139173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653931976506994:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.139194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.139848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:53.141019Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.141407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653931976507008:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } f f t t 2024-11-21T08:56:53.909517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgbool; DECLARE $value1 as _pgbool; INSERT INTO `Pg1000_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:53.915200Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {t,t} 18 2024-11-21T08:56:53.961159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgchar; DECLARE $key1 as pgchar; DECLARE $value0 as pgchar; DECLARE $value1 as pgchar; INSERT INTO `Pg18_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:53.968113Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T08:56:54.010853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgchar; DECLARE $value1 as _pgchar; INSERT INTO `Pg1002_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.066623Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 21 2024-11-21T08:56:54.106130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as pgint2; DECLARE $value1 as pgint2; INSERT INTO `Pg21_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.112989Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T08:56:54.152884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint2; DECLARE $value1 as _pgint2; INSERT INTO `Pg1005_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.208440Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 23 2024-11-21T08:56:54.249240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint4; DECLARE $key1 as pgint4; DECLARE $value0 as pgint4; DECLARE $value1 as pgint4; INSERT INTO `Pg23_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.256159Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T08:56:54.296984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; 
DECLARE $value0 as _pgint4; DECLARE $value1 as _pgint4; INSERT INTO `Pg1007_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.303705Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 20 2024-11-21T08:56:54.352504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint8; DECLARE $key1 as pgint8; DECLARE $value0 as pgint8; DECLARE $value1 as pgint8; INSERT INTO `Pg20_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.359192Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T08:56:54.396518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint8; DECLARE $value1 as _pgint8; INSERT INTO `Pg1016_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.402886Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 700 2024-11-21T08:56:54.443591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgfloat4; DECLARE $key1 as pgfloat4; DECLARE $value0 as pgfloat4; DECLARE $value1 as pgfloat4; INSERT INTO `Pg700_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T08:56:54.449798Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2024-11-21T08:56:54.492125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... oadService] [TPoolFetcherActor] ActorId: [8:7439653972450467642:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.018106Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.018651Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:02.019877Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439653972450467652:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 11628, MsgBus: 4803 2024-11-21T08:57:02.236370Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439653969318920936:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:02.236384Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c9/r3tmp/tmpqfyEYn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11628, node 9 2024-11-21T08:57:02.250455Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:02.256452Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:02.256462Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:02.256463Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:02.256491Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4803 TClient is connected to server localhost:4803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:02.336918Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:02.336944Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:02.337959Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:02.338126Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:02.448320Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439653969318921534:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.448335Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439653969318921545:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.448339Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.448853Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:02.450103Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439653969318921548:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 19030, MsgBus: 24109 2024-11-21T08:57:02.701125Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439653969307408664:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:02.701154Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c9/r3tmp/tmpWorxfL/pdisk_1.dat 2024-11-21T08:57:02.711772Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19030, node 10 2024-11-21T08:57:02.722005Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:02.722017Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:02.722018Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:02.722046Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24109 TClient is connected to server localhost:24109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:02.801298Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:02.801322Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:02.802397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:02.803495Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:02.919209Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439653969307409269:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.919227Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439653969307409262:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.919273Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:02.919690Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:02.920911Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439653969307409276:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:03.001281Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:03.006528Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:03.198600Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2024-11-21T08:57:03.199908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:03.268018Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7439653973602377125:2401], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=ZWUyZjE1MGQtNjBiMDdhM2EtYjBmNzA5OTYtMmZjNzBlMjc=. TraceId : 01jd6z0w0p4638xkv16ds8y7t5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2024-11-21T08:57:03.268125Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7439653973602377126:2402], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=10&id=ZWUyZjE1MGQtNjBiMDdhM2EtYjBmNzA5OTYtMmZjNzBlMjc=. TraceId : 01jd6z0w0p4638xkv16ds8y7t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [10:7439653973602377122:2398], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T08:57:03.268410Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZWUyZjE1MGQtNjBiMDdhM2EtYjBmNzA5OTYtMmZjNzBlMjc=, ActorId: [10:7439653973602377116:2398], ActorState: ExecuteState, TraceId: 01jd6z0w0p4638xkv16ds8y7t5, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::DropTableIfExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 3460, MsgBus: 2302 2024-11-21T08:56:51.905198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653923268925145:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c7/r3tmp/tmpiy1QWd/pdisk_1.dat 2024-11-21T08:56:52.208516Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3460, node 1 2024-11-21T08:56:52.224314Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224347Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.249311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.249350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.250363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.469670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2302 TClient is connected to server localhost:2302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:56:52.990085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653927563892850:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.990086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653927563892859:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.990104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.998041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:52.999304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653927563892864:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 19802, MsgBus: 10152 2024-11-21T08:56:53.759868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653933224732716:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:53.759916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c7/r3tmp/tmpfzeraj/pdisk_1.dat 2024-11-21T08:56:53.770967Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19802, node 2 2024-11-21T08:56:53.786729Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:53.786745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:53.786748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:53.786787Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10152 TClient is connected to server localhost:10152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:53.859987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:53.860013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:53.861175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:53.863363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:53.992350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653933224733315:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.992366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653933224733323:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.992371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.992969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:53.994472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653933224733329:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 2802, MsgBus: 8558 2024-11-21T08:56:54.250517Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653936844486171:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c7/r3tmp/tmpWGc5dk/pdisk_1.dat 2024-11-21T08:56:54.253178Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 2802, node 3 2024-11-21T08:56:54.263681Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:54.273693Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:54.273705Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:54.273707Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:54.273737Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8558 TClient is connected to server localhost:8558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:54.350444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:54.350471Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:54.351567Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:54.352802Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:54.359856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperat ... 
_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653945200111854:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:56.765701Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c7/r3tmp/tmp4ymPtA/pdisk_1.dat 2024-11-21T08:56:56.775818Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7347, node 7 2024-11-21T08:56:56.781461Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:56.781472Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:56.781473Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:56.781511Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4650 TClient is connected to server localhost:4650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:56.865884Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:56.865909Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:56.866984Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:56.868166Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:57.027442Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653949495079748:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.027467Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.029936Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653949495079776:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.029954Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.031757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:57.038644Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653949495079865:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.038660Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.038674Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653949495079870:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.039139Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:56:57.045568Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653949495079872:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:56:57.122530Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2024-11-21T08:56:57.123884Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439653949495080020:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:57.123979Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ODY0MGUwMmEtNjg2NWQxNDgtYjI0NWQzOGItOTM0OTJmNzU=, ActorId: [7:7439653949495080017:2342], ActorState: ExecuteState, TraceId: 01jd6z0p122ee6r3bcf2erhmk0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 7958, MsgBus: 21167 2024-11-21T08:56:57.256141Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439653948337592546:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:57.256462Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c7/r3tmp/tmpld5Lmu/pdisk_1.dat 2024-11-21T08:56:57.267510Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7958, node 8 2024-11-21T08:56:57.276539Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:57.276558Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:57.276560Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:57.276598Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21167 TClient is connected to server localhost:21167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:57.356436Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:57.356469Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:57.357566Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:57.358748Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:57.508110Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653948337593128:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.508132Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.508190Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439653948337593155:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.508995Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:57.510925Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439653948337593157:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:56:57.576019Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:56:57.595023Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2024-11-21T08:56:57.595669Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439653948337593406:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:56:57.595720Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=YjdlY2JkMGUtYjUzZjNjNzItNGRiMDJjZjYtOTZkMTc0YjY=, ActorId: [8:7439653948337593403:2336], ActorState: ExecuteState, TraceId: 01jd6z0pft5nt5bhac419xy33z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropTableAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 28763, MsgBus: 29217 2024-11-21T08:56:14.898097Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653765913982824:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:14.898348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040f1/r3tmp/tmpJFPdrc/pdisk_1.dat 2024-11-21T08:56:14.948863Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28763, node 1 2024-11-21T08:56:14.965601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:14.965616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:14.965618Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:14.965650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29217 2024-11-21T08:56:14.998057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:14.998082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29217 2024-11-21T08:56:14.999113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:15.010080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =16); 2024-11-21T08:56:15.206909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653770208950717:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.206951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:15.234455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:15.246644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:15.246684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:15.246690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:15.246713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:15.246725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:15.246725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:15.246736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:15.246754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:15.246765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:15.246771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:15.246784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:15.246802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:15.246804Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:15.246815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:15.246837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:15.246840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:15.246873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7439653770208950889:2311];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:56:15.246882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:15.246913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:15.246932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:15.246950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:15.246965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:15.246981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:15.246996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439653770208950885:2307];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:56:15.247247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:15.247260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:15.247269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:15.247281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:15.247295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:15.247306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:15.247313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:15.247322Z node 1 :TX_COLUMNSHARD WARN: t ... escription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:57.865289Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:57.865318Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:57.866417Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:57.866994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, int_column Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:56:58.013755Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653954270834676:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:58.013818Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:58.014908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:58.020366Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:58.020392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:58.020425Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:58.020443Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:56:58.020459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:58.020477Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:58.020495Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:58.020513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:58.020531Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:58.020547Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:58.020564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:58.020581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439653954270834722:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:56:58.020947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:58.020956Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:58.020964Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:58.020967Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:58.020976Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:58.020979Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:56:58.020984Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:56:58.020987Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:56:58.020992Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:56:58.021003Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:56:58.021009Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:56:58.021011Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:56:58.021045Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:56:58.021054Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:56:58.021065Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:56:58.021071Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
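For reference, the column-store DDL exercised by the KqpOlapScheme::DropTableAfterInsert output above, restated as a standalone YQL sketch (both statements are copied from the log lines above; only line breaks and spacing are added, nothing else is assumed):

CREATE TABLE `/Root/ColumnTableTest` (
    id Int32 NOT NULL,
    id_second Int32 NOT NULL,
    level Int32,
    created_at Timestamp NOT NULL,
    PRIMARY KEY (id, id_second)
)
PARTITION BY HASH(id)
WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);

CREATE TABLE `/Root/ColumnTableTest` (
    id Uint64 NOT NULL,
    int_column Int32,
    PRIMARY KEY (id)
)
WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);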
2024-11-21T08:56:58.021077Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:56:58.021083Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:56:58.021093Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:56:58.021099Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:56:58.021105Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:56:58.021111Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:56:58.022115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:56:58.069401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1392;columns=2; 2024-11-21T08:56:58.082285Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439653954270834813:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:58.082310Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:58.084371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:56:58.087418Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:56:58.089083Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T08:57:02.765498Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439653949975866783:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:02.765535Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::NoSelectFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 29668, MsgBus: 19179 2024-11-21T08:56:51.905192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653922514882035:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044af/r3tmp/tmpyEn0qr/pdisk_1.dat 2024-11-21T08:56:52.208538Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29668, node 1 2024-11-21T08:56:52.221470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.221515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.222473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.224278Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224331Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.469690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19179 TClient is connected to server localhost:19179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:52.990080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653926809849750:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.990087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653926809849739:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.990120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.998047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:52.999641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653926809849753:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 24504, MsgBus: 25699 2024-11-21T08:56:53.769162Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653930048457577:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044af/r3tmp/tmpTJunM5/pdisk_1.dat 2024-11-21T08:56:53.772833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 24504, node 2 2024-11-21T08:56:53.784719Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:53.792653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:53.792666Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:53.792667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:53.792709Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25699 TClient is connected to server localhost:25699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:53.869049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:53.869070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:53.870201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:53.871353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 --!syntax_pg CREATE TABLE Pg16 ( key bool PRIMARY KEY, value bool ); 2024-11-21T08:56:54.035001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653934343425464:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.035022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.037079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'false'::bool, 'false'::bool ) 2024-11-21T08:56:54.096490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653934343425567:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.096501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653934343425572:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.096515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.097084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:56:54.105552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653934343425574:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'true'::bool, 'true'::bool ) f f t t --!syntax_pg CREATE TABLE Pg16array ( key int2 PRIMARY KEY, value _bool ); 2024-11-21T08:56:54.251229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 21 --!syntax_pg CREATE TABLE Pg21 ( key int2 PRIMARY KEY, value int2 ); 2024-11-21T08:56:54.329665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 --!syntax_pg CREATE TABLE Pg21array ( key int2 PRIMARY KEY, value _int2 ); 2024-11-21T08:56:54.423290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21array (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg21array (k ... st_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044af/r3tmp/tmpfMFHWa/pdisk_1.dat 2024-11-21T08:57:04.149122Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22236, node 6 2024-11-21T08:57:04.162340Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:04.162350Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:04.162351Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:04.162380Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5851 TClient is connected to server localhost:5851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
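The pg-syntax statements captured in the KqpPg test output above, restated for readability (copied from the log; only line breaks are added, no statements are invented):

--!syntax_pg
CREATE TABLE Pg16 ( key bool PRIMARY KEY, value bool );

--!syntax_pg
INSERT INTO Pg16 (key, value) VALUES ( 'false'::bool, 'false'::bool );

--!syntax_pg
INSERT INTO Pg16 (key, value) VALUES ( 'true'::bool, 'true'::bool );

--!syntax_pg
CREATE TABLE Pg16array ( key int2 PRIMARY KEY, value _bool );

--!syntax_pg
INSERT INTO Pg16array (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] );

--!syntax_pg
INSERT INTO Pg16array (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] );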
2024-11-21T08:57:04.243203Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:04.243230Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:04.244264Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:04.244937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:04.364791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653977145658749:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:04.364806Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653977145658730:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:04.364822Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:04.365223Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:04.366283Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653977145658759:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:04.461022Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:04.535297Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YmE4ZTgyZmMtZTY3ODNlMDItM2E2MGI0YmYtNjM1Y2MwNTU=, ActorId: [6:7439653977145658918:2314], ActorState: ExecuteState, TraceId: 01jd6z0x6j8ba672v7ww435xar, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: ydb/core/kqp/query_data/kqp_query_data.cpp:517: Terminate was called, reason(56): ERROR: invalid byte sequence for encoding "UTF8": 0x00 Trying to start YDB, gRPC: 1351, MsgBus: 14078 2024-11-21T08:57:04.807870Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653980670472668:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:04.807907Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044af/r3tmp/tmpwi0GdN/pdisk_1.dat 2024-11-21T08:57:04.818627Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1351, node 7 2024-11-21T08:57:04.826817Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:04.826827Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:04.826829Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:04.826858Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14078 TClient is connected to server localhost:14078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:04.908298Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:04.908324Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:04.909318Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:04.909450Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:05.024525Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653984965440544:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:05.024542Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653984965440571:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:05.024546Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:05.025013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:05.026168Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653984965440573:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:05.120225Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 5169, MsgBus: 29806 2024-11-21T08:56:51.905200Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653921429757420:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e8/r3tmp/tmpKF7hcO/pdisk_1.dat 2024-11-21T08:56:52.208508Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5169, node 1 2024-11-21T08:56:52.222577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.222612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.223777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.224384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:56:52.224422Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224428Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439653925724725037:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:56:52.469648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29806 TClient is connected to server localhost:29806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2024-11-21T08:56:53.000774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.141957Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.142679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.149124Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.170761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653930019692596:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.170761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653930019692588:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.170773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.171336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:56:53.172809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653930019692602:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } f f t t 18 2024-11-21T08:56:53.686953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.695783Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.696527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.708622Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2024-11-21T08:56:53.762759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.771505Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.772220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.778884Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2024-11-21T08:56:53.825659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.833324Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.834017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.840480Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2024-11-21T08:56:53.882869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.939717Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:53.940454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.947153Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2024-11-21T08:56:53.995793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.003946Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:54.004837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.016191Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 
8.5 8.5 9.5 9.5 701 2024-11-21T08:56:54.064231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.072386Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:54.073201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2024-11-21T08:56:54.128449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.136476Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:54.137292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710724:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.149626Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2024-11-21T08:56:54.201016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710731:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.209749Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:54.210451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710733:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.219283Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2024-11-21T08:56:54.267189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710740:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.276343Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:56:54.277139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710742:0, at ... rd: 72057594046644480 2024-11-21T08:57:00.049543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:00.141507Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653960945444742:2346], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2024-11-21T08:57:00.141595Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MWI5NDIyZTktODBjY2Q1ZGQtZGJmZjY3Mi1lOWY2YjlhZA==, ActorId: [6:7439653960945444740:2345], ActorState: ExecuteState, TraceId: 01jd6z0rzadaytqg5pj245eqnh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:57:00.159833Z node 6 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T08:57:00.161517Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653960945444753:2351], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
: Error: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" , code: 2029 2024-11-21T08:57:00.161628Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZTBmMzJmZDItMzVhYjY1ODMtMjc3NzJiZjUtMjIxMTUzZDA=, ActorId: [6:7439653960945444751:2350], ActorState: ExecuteState, TraceId: 01jd6z0rze3yvtmgjff1q219a1, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T08:57:00.164924Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653960945444765:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/nopg] 2024-11-21T08:57:00.165316Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ODg4Y2M1NmEtOGNiODI1OWItYzU5ZTAxNWUtNmZlYmZlNzg=, ActorId: [6:7439653960945444763:2356], ActorState: ExecuteState, TraceId: 01jd6z0s028ppd6sqwrptf82qn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15115, MsgBus: 7064 2024-11-21T08:57:00.396604Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653962872031423:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:00.396623Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e8/r3tmp/tmp43xMsC/pdisk_1.dat 2024-11-21T08:57:00.406908Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15115, node 7 2024-11-21T08:57:00.418132Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:00.418141Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:00.418143Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:00.418172Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7064 TClient is connected to server localhost:7064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:00.497018Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:00.497048Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:00.498063Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:00.498739Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:00.629076Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653962872032004:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:00.629093Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653962872032030:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:00.629099Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:00.629666Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:00.630837Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653962872032033:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:00.699632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:00.733795Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:01.123532Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:295:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:01.123583Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:01.123599Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044e8/r3tmp/tmpRVCSAj/pdisk_1.dat 2024-11-21T08:57:01.209730Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:57:01.223375Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:01.267008Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:01.267049Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:01.277697Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:01.382395Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:609:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.382425Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:618:2523], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.382435Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:01.383279Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T08:57:01.518158Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:623:2526], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } PreparedQuery: "da8de54e-1a4312cf-6de54955-def00b6b" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"b7e0f25e-d9eb38e6-9afeb890-a06a84b6\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 17870, MsgBus: 14001 2024-11-21T08:56:51.905194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653922800260920:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:51.905216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b3/r3tmp/tmprDojgv/pdisk_1.dat 2024-11-21T08:56:52.208515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17870, node 1 2024-11-21T08:56:52.220421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:52.220447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:52.221593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:52.224314Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.224341Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:56:52.469648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:52.469669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:52.469671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:52.469718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14001 TClient is connected to server localhost:14001 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:52.870071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:52.971540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653927095228621:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.971547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653927095228632:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.971560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:52.998092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:52.999728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653927095228635:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T08:56:53.338376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15651, MsgBus: 13610 2024-11-21T08:56:53.949527Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653933849581391:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:53.949682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b3/r3tmp/tmpDqzlE3/pdisk_1.dat 2024-11-21T08:56:53.956014Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15651, node 2 2024-11-21T08:56:53.970122Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:53.970135Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:53.970138Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:53.970173Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13610 TClient is connected to server localhost:13610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:54.049863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:54.049894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:54.050958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:54.051171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:54.187970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653938144549283:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.187984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653938144549291:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.187989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:54.188492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:54.189633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439653938144549297:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:56:54.269050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21143, MsgBus: 6583 2024-11-21T08:56:54.640056Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653934133719784:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:54.640394Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b3/r3tmp/tmp9XPMPT/pdisk_1.dat 2024-11-21T08:56:54.651145Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21143, node 3 2024-11-21T08:56:54.669246Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:54.669265Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:54.669268Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:54.669312Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6583 TClient is connected to server localhost:6583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:54.740600Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:54.740634Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:54.741677Z node 3 :HIVE WA ... ol default not found or you don't have access permissions } 2024-11-21T08:56:56.087620Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:56.088193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:56.089496Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439653943891400543:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:56:56.192519Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19936, MsgBus: 11678 2024-11-21T08:56:56.486404Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653943079382921:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:56.486423Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b3/r3tmp/tmpQK4nFx/pdisk_1.dat 2024-11-21T08:56:56.496906Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19936, node 6 2024-11-21T08:56:56.507644Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:56.507654Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:56.507655Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:56.507685Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11678 TClient is connected to server localhost:11678 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:56.586821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:56.586849Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:56.587918Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:56.588549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:56.706743Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653943079383522:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:56.706757Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439653943079383503:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:56.706762Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:56.707348Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:56.709046Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439653943079383532:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:56:56.952874Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439653943079383600:2306], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" 2024-11-21T08:56:56.952976Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGRlNTNmMzUtNTgxNDc1NDYtODhiYTAyZjYtNWIxMTE3ZTk=, ActorId: [6:7439653943079383501:2297], ActorState: ExecuteState, TraceId: 01jd6z0ngfav6nb22vxmawgzgw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 13540, MsgBus: 26193 2024-11-21T08:56:57.165900Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653951086903240:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:57.166089Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b3/r3tmp/tmpM2kIXx/pdisk_1.dat 2024-11-21T08:56:57.176369Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13540, node 7 2024-11-21T08:56:57.192328Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:57.192345Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:57.192347Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:57.192383Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26193 TClient is connected to server localhost:26193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:57.266276Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:57.266318Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:57.267381Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:57.269085Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:57.455474Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653951086903843:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.455508Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439653951086903824:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.455554Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:57.456063Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:56:57.457298Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439653951086903853:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:56:57.514712Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:56:57.526485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:56:57.538473Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439653951086904134:2328], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2024-11-21T08:56:57.538559Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=M2FiMmMyZS00NjFiNTM4Ny1mMzI3YTZjYi1mN2ZjMTQ3, ActorId: [7:7439653951086904132:2327], ActorState: ExecuteState, TraceId: 01jd6z0pe0cdbraeapev6hemcj, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |90.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |90.7%| [LD] {RESULT} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> KqpScheme::FamilyColumnTest >> KqpConstraints::DropCreateSerial >> KqpScheme::CreateAndAlterTableWithPartitionBy >> KqpScheme::InvalidationAfterDropCreate |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest 
|90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::AlterIndexImplTable >> KqpConstraints::AddSerialColumnForbidden >> KqpScheme::CreateTableWithCompactionPolicyUncompat >> KqpConstraints::CreateTableSerialTypeForbidden >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat >> KqpScheme::DropChangefeedNegative >> KqpScheme::DropNonExistingExternalDataSource >> KqpScheme::AlterTableWithDecimalColumn |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat >> KqpScheme::CreateAndAlterTableWithPartitionBy [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat >> KqpConstraints::DropCreateSerial [GOOD] >> KqpConstraints::DefaultsAndDeleteAndUpdate >> KqpScheme::FamilyColumnTest [GOOD] >> KqpScheme::Int8Int16 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] Test command err: 2024-11-21T08:56:26.800747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:26.800772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:26.800776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:26.800779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:26.800790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:26.800793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:26.800800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:26.800869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:26.803214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:26.803229Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:26.804679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:26.804768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:26.804785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:26.805693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:26.805725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2024-11-21T08:56:26.805786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:26.805904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:26.806416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:26.806680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:26.806687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:26.806704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:26.806708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:26.806712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:26.806746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.847430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:26.847504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.847563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:26.847595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:26.847601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.848421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:26.848446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:26.848486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.848494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:26.848497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:26.848500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:26.848790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.848796Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:26.848799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:26.849032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.849037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:26.849041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:26.849046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:26.849444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:26.849753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:26.849795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:26.849978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:26.849982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:26.849985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 665 RawX2: 4294969525 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.334377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:27.334394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.334416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.334424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.334801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.334843Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:674:2236], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:27.334888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.334894Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:27.334902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:27.334904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.334908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:27.334911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.334914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:27.334917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:27.334922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:27.334926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:27.334929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:27.335221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.335231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.335234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:27.335238Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:27.335241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.335250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:27.335253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:531:2138] 2024-11-21T ... 
ode 123 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:57:07.019391Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [123:237:2227], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:57:07.019807Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:57:07.019819Z node 123 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:07.020033Z node 123 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:07.020041Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:07.020050Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:07.020371Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:07.020382Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:07.020446Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:07.020463Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:57:07.020469Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:57:07.020473Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:57:07.021520Z node 123 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:57:07.021583Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:57:07.021588Z node 123 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:57:07.021594Z node 123 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:57:07.021601Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:57:07.021620Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:57:07.021625Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [123:95:2130] 2024-11-21T08:57:07.027129Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T08:57:07.027226Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] Bootstrap 2024-11-21T08:57:07.028732Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] Become StateWork (SchemeCache [123:293:2275]) 2024-11-21T08:57:07.029015Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 
2024-11-21T08:57:07.031715Z node 123 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:57:07.034778Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:07.035849Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:07.036132Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:07.036535Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:07.037138Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:07.037151Z node 123 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:07.037195Z node 123 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:07.038991Z node 123 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:07.039057Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:07.039090Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:07.039137Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:07.039154Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:07.039288Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:07.060515Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:07.060575Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:07.071581Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:07.071635Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:07.071654Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:07.071668Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:07.071698Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:07.071707Z node 123 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:07.071713Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:07.071738Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:07.082922Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:07.082981Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:07.083186Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:07.083194Z node 123 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:07.083242Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:07.083386Z node 123 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/003131/r3tmp/tmpzeNx4N/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T08:57:07.083446Z node 123 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 123:1 Path# /home/runner/.ya/build/build_root/jptk/003131/r3tmp/tmpzeNx4N/pdisk_1.dat 2024-11-21T08:57:07.094912Z node 123 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:07.094991Z node 123 :CONFIGS_DISPATCHER DEBUG: TConfigsDispatcher Bootstrap 2024-11-21T08:57:07.095066Z node 123 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:07.095078Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:07.095112Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:07.095151Z node 123 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:07.095180Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:379:2337], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.095187Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.095270Z node 123 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:07.095278Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:07.095283Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:07.095297Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[123:386:2341] 2024-11-21T08:57:07.095324Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:381:2335], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.095328Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 
2024-11-21T08:57:07.095818Z node 123 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:07.095829Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [123:379:2337] 2024-11-21T08:57:07.096029Z node 123 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[123:386:2341] 2024-11-21T08:57:07.096040Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:07.096047Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:07.096050Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:07.101361Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:412:2348], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.101383Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.114998Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:436:2374], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T08:57:07.115023Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest >> KqpConstraints::CreateTableSerialTypeForbidden [GOOD] >> KqpConstraints::CreateTableWithDefaultForbidden >> KqpScheme::InvalidationAfterDropCreate [GOOD] >> KqpScheme::InvalidationAfterDropCreateCompatSchema >> KqpScheme::DropNonExistingExternalDataSource [GOOD] >> KqpScheme::DropResourcePool >> KqpScheme::CreateTableWithVectorIndex >> KqpScheme::CreateTableWithCompactionPolicyUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyCompat >> KqpScheme::CreateAndDropUser >> KqpConstraints::AddSerialColumnForbidden [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden >> KqpScheme::AlterTableWithDecimalColumn [GOOD] >> KqpScheme::AlterTableWithPgColumn >> KqpOlapScheme::InvalidColumnInTieringRule >> KqpScheme::CreateDroppedTable >> KqpAcl::FailNavigate >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace >> KqpScheme::AlterTableAddImplicitSyncIndex >> KqpScheme::CreateTableWithTtlSettingsUncompat >> KqpOlapScheme::CreateTableWithTtl >> KqpScheme::DropChangefeedNegative [GOOD] >> KqpScheme::DropDependentExternalDataSource >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> KqpScheme::AlterIndexImplTable [GOOD] >> KqpScheme::AlterIndexImplTableUsingPublicAPI >> KqpScheme::AlterTableAlterIndex >> KqpScheme::SchemaVersionMissmatchWithRead >> KqpScheme::AlterCompressionLevelInColumnFamily >> KqpOlapTypes::Timestamp |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScheme::SchemaVersionMissmatchWithWrite >> KqpScheme::DescribeIndexTable >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> KqpScheme::ChangefeedAwsRegion >> KqpOlapScheme::CreateTableWithTtl [GOOD] >> KqpOlapScheme::CreateTableWithoutTtl >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] >> 
KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat >> KqpScheme::Int8Int16 [GOOD] >> KqpScheme::Int8Int16Olap >> KqpConstraints::CreateTableWithDefaultForbidden [GOOD] >> KqpConstraints::DefaultValuesForTable >> KqpScheme::InvalidationAfterDropCreateCompatSchema [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2 >> KqpScheme::DropResourcePool [GOOD] >> KqpScheme::DropNonExistingResourcePool >> KqpScheme::CreateTableWithVectorIndex [GOOD] >> KqpScheme::CreateTableWithVectorIndexCovered >> KqpScheme::CreateAndDropUser [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden [GOOD] >> KqpScheme::CreateAndDropGroup >> KqpConstraints::AlterTableAddColumnWithDefaultValue >> KqpScheme::CreateTableWithCompactionPolicyCompat [GOOD] >> KqpScheme::CreateTableWithDefaultFamily >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat >> KqpAcl::FailNavigate [GOOD] >> KqpAcl::FailResolve >> KqpConstraints::DefaultsAndDeleteAndUpdate [GOOD] >> KqpConstraints::DefaultValuesForTableNegative4 >> KqpScheme::DropDependentExternalDataSource [GOOD] >> KqpScheme::DropAsyncReplication >> KqpPg::TableDeleteAllData [GOOD] >> KqpPg::TableDeleteWhere >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::DropColumnAfterAdd >> KqpScheme::SchemaVersionMissmatchWithRead [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexRead >> KqpScheme::CreateDroppedTable [GOOD] >> KqpScheme::CreateDropTableViaApiMultipleTime >> KqpOlapTypes::Timestamp [GOOD] >> KqpOlapTypes::Decimal35 >> KqpScheme::CreateTableWithTtlSettingsUncompat [GOOD] >> KqpScheme::CreateTableWithTtlSettingsCompat >> KqpScheme::AlterTableWithPgColumn [GOOD] >> KqpScheme::AlterUser >> KqpScheme::CreateTableWithUniformPartitionsUncompat >> KqpScheme::AlterCompressionLevelInColumnFamily [GOOD] >> KqpScheme::AlterGroup >> KqpScheme::DescribeIndexTable [GOOD] >> KqpScheme::DisableCreateExternalDataSource >> KqpScheme::AlterTableAlterIndex [GOOD] >> KqpScheme::AlterTableAlterVectorIndex >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveTableRead >> KqpOlapScheme::CreateTableWithoutTtl [GOOD] >> KqpOlapScheme::AddPgColumnWithStore >> KqpScheme::AlterTableAddImplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncIndex >> KqpScheme::SchemaVersionMissmatchWithWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpScheme::CreateTableWithVectorIndexCovered [GOOD] >> KqpScheme::CreateTableWithVectorIndexCaseIncentive >> KqpScheme::CreateDropTableMultipleTime >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat >> KqpConstraints::DefaultValuesForTableNegative4 [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumn >> KqpScheme::Int8Int16Olap [GOOD] >> KqpScheme::DropResourcePoolClassifier >> KqpConstraints::DefaultValuesForTable [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |90.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpScheme::InvalidationAfterDropCreateTable2 [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx >> KqpScheme::DropNonExistingResourcePool [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier >> KqpScheme::ChangefeedAwsRegion [GOOD] >> KqpScheme::ChangefeedRetentionPeriod >> KqpScheme::AddColumnFamilyWithCompressionLevel >> KqpScheme::CreateAndDropGroup [GOOD] >> KqpScheme::CreateAsyncReplication >> KqpConstraints::AlterTableAddColumnWithDefaultValue [GOOD] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError >> KqpScheme::CreateTableWithDefaultFamily [GOOD] >> KqpScheme::CreateTableWithDecimalColumn >> KqpOlapScheme::DropColumnAfterAdd [GOOD] >> KqpOlapScheme::DropColumnErrors >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat >> KqpAcl::FailResolve [GOOD] >> KqpAcl::ReadSuccess >> KqpScheme::AlterIndexImplTableUsingPublicAPI [GOOD] >> KqpScheme::AlterResourcePool >> KqpScheme::SchemaVersionMissmatchWithIndexRead [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexWrite >> KqpScheme::CreateTableWithTtlSettingsCompat [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn >> KqpScheme::AlterUser [GOOD] >> KqpScheme::AsyncReplicationConnectionString >> KqpScheme::CreateTableWithUniformPartitionsUncompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat >> KqpScheme::DisableCreateExternalDataSource [GOOD] >> KqpScheme::DisableDropExternalDataSource >> KqpScheme::AlterGroup [GOOD] >> KqpScheme::AlterColumnTableTtl >> KqpScheme::TouchIndexAfterMoveTableRead [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite >> KqpOlapScheme::InvalidColumnInTieringRule [GOOD] >> KqpOlapScheme::NullColumnError >> KqpScheme::AlterTableAlterVectorIndex [GOOD] >> KqpScheme::AlterTableAlterMissedIndex >> KqpScheme::TouchIndexAfterMoveIndexRead [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag >> KqpOlapScheme::DropColumnErrors [GOOD] >> KqpOlapScheme::DropColumnTableStoreErrors >> KqpOlapTypes::Decimal35 [GOOD] >> KqpOlapTypes::DecimalCsv >> KqpOlapScheme::AddPgColumnWithStore [GOOD] >> KqpOlapScheme::BulkError |90.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2024-11-21T08:56:38.094369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:38.094396Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:38.094437Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:56:38.097903Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:56:38.098072Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:56:38.098152Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:38.099229Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:56:38.108545Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:38.108701Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:56:38.108871Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:56:38.108887Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:56:38.108895Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:56:38.108948Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:56:38.112733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:56:38.112804Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:56:38.112856Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:56:38.112862Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:56:38.112867Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:56:38.112873Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:38.112977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:38.112984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:38.113014Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:56:38.113037Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:56:38.113102Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:38.113110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:56:38.113119Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:56:38.113125Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:56:38.113129Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:56:38.113134Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:56:38.113139Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:56:38.120623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:38.120647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:38.120654Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:56:38.120962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:56:38.120970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2024-11-21T08:56:38.120990Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:56:38.121016Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:56:38.121026Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:56:38.121034Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:56:38.121040Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:38.121043Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:56:38.121046Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:56:38.121049Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:38.121105Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:56:38.121107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:56:38.121109Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:56:38.121111Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:38.121119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:56:38.121121Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:56:38.121124Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:56:38.121126Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:56:38.121129Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:56:38.142378Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:56:38.142411Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:56:38.142418Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:56:38.142432Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:56:38.142448Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:56:38.142587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:38.142594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:56:38.142602Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:56:38.142622Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:56:38.142627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:56:38.142680Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 
on unit WaitForPlan 2024-11-21T08:56:38.142690Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:38.142694Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:56:38.142699Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:56:38.143438Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:56:38.143456Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:56:38.143519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:38.143525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:56:38.143533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:56:38.143541Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:56:38.143546Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:56:38.143556Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:56:38.143561Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:56:38.143567Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:38.143572Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:56:38.143576Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:56:38.143580Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:56:38.143633Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:56:38.143637Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:38.143640Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:56:38.143644Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:56:38.143647Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:56:38.143659Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:56:38.143663Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:56:38.143666Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:56:38.143669Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:56:38.143684Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:56:38.143688Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:56:38.143691Z node 1 :TX_DATASHARD TRACE: Activated 
operation [1000001:1] at 9437184 2024-11-21T08:56:38.143696Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:56:38.143699Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:56:38.143704Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... -11-21T08:57:09.317479Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317515Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317519Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317525Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317529Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317551Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317556Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317561Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317565Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317587Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317591Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317597Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317600Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317627Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317633Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317638Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317642Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317665Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317669Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317674Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317678Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317702Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317706Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317712Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client 
[32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317716Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317742Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317746Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317752Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317756Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317779Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317783Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317789Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317793Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317816Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317821Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317827Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317832Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317859Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317863Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317868Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317873Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317907Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317911Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317917Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317924Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317949Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317954Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317959Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.317963Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.317986Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.317990Z node 32 :TX_DATASHARD TRACE: Complete 
execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.317995Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.318000Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.318029Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:09.318033Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T08:57:09.318040Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:09.318044Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:09.318112Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T08:57:09.318118Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318124Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T08:57:09.318153Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T08:57:09.318156Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318160Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T08:57:09.318170Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T08:57:09.318174Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318177Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T08:57:09.318191Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T08:57:09.318194Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318198Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T08:57:09.318206Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T08:57:09.318212Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318215Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T08:57:09.318224Z node 32 :TX_DATASHARD TRACE: StateWork, received 
event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T08:57:09.318227Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318230Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T08:57:09.318241Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T08:57:09.318244Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318247Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T08:57:09.318257Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2024-11-21T08:57:09.318260Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:09.318263Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 29 31 25 27 11 30 29 31 29 31 21 10 26 17 28 28 17 10 17 13 14 28 2 14 15 10 28 28 14 14 - - actual 29 31 25 27 11 30 29 31 29 31 21 10 26 17 28 28 17 10 17 13 14 28 2 14 15 10 28 28 14 14 - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx [GOOD] >> KqpScheme::AddColumnFamilyWithCompressionLevel [GOOD] >> KqpScheme::AddDropColumn >> KqpScheme::AlterTableAddExplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat >> KqpAcl::ReadSuccess [GOOD] >> KqpAcl::WriteSuccess >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat >> KqpConstraints::DefaultValuesForTableNegative2 [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> KqpScheme::AlterResourcePool [GOOD] >> KqpScheme::AlterNonExistingResourcePool >> KqpScheme::ChangefeedRetentionPeriod [GOOD] >> KqpScheme::ChangefeedAttributes >> KqpScheme::AlterColumnTableTtl [GOOD] >> KqpScheme::AlterColumnTableTiering >> KqpScheme::CreateTableWithDecimalColumn [GOOD] >> KqpScheme::CreateTableStoreNegative >> KqpScheme::CreateAsyncReplication [GOOD] >> KqpScheme::CreateAsyncReplicationWithSecret >> KqpScheme::DropAsyncReplication [GOOD] >> KqpScheme::DropAsyncReplicationCascade >> KqpOlapScheme::DropColumnTableStoreErrors [GOOD] >> KqpOlapScheme::DropColumnAfterInsert >> KqpOlapScheme::NullColumnError [GOOD] >> KqpOlapScheme::DropTtlColumn >> KqpOlapScheme::BulkError [GOOD] >> KqpOlapScheme::DropColumn >> KqpConstraints::IndexedTableAndNotNullColumn [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn >> KqpScheme::CreateTableWithUniformPartitionsCompat [GOOD] >> 
KqpScheme::CreateTableWithUniformPartitionsUuid >> KqpScheme::DisableDropExternalDataSource [GOOD] >> KqpScheme::DisableCreateExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx [GOOD] Test command err: Trying to start YDB, gRPC: 11358, MsgBus: 19612 2024-11-21T08:57:06.886517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653988425929386:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.886593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dc/r3tmp/tmpiHqShF/pdisk_1.dat 2024-11-21T08:57:06.967710Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11358, node 1 2024-11-21T08:57:06.985880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.985912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.987032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:06.988371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.988380Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.988382Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.988415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19612 TClient is connected to server localhost:19612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.077325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.082743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.086242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:07.116095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.172750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.186322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.254991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653992720898079:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.255017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.285291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.290339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.300885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.307939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.315468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.329285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.338108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653992720898581:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.338139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653992720898586:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.338145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.338777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.342055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653992720898588:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:07.540338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.540504Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found Trying to start YDB, gRPC: 20890, MsgBus: 19399 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dc/r3tmp/tmpNJE6VF/pdisk_1.dat 2024-11-21T08:57:07.758144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:07.758672Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20890, node 2 2024-11-21T08:57:07.769026Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.769040Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.769042Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.769073Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19399 TClient is connected to server localhost:19399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.849169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.849202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.850270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.851938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.856533Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.882365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.892919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.915903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.933357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.080313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653994476591050:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.080341Z node 2 :K ... pId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.802565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.813905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.978699Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653995983514046:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.978744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.980897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.988120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.043432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.050798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.058983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.072903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.085860Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654000278481848:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.085891Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.085899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654000278481853:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.086432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.092889Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654000278481855:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:09.307100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.309039Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found Trying to start YDB, gRPC: 26335, MsgBus: 30464 2024-11-21T08:57:09.580234Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439654002427913854:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dc/r3tmp/tmpSemmjQ/pdisk_1.dat 2024-11-21T08:57:09.585315Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:09.588080Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26335, node 4 2024-11-21T08:57:09.595956Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.595968Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.595970Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.596007Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30464 TClient is connected to server localhost:30464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.680766Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.680795Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.682878Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.683199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.686235Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.691739Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.706863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.729905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.744519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.935025Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002427915246:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.935053Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.938985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.945973Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.954579Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.961264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.968201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.986162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.107023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654006722883067:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.107061Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.107100Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654006722883072:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.107974Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.109767Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654006722883074:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.321152Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037911 not found 2024-11-21T08:57:10.321607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpScheme::AsyncReplicationConnectionString [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl >> KqpScheme::SchemaVersionMissmatchWithIndexWrite [GOOD] >> KqpScheme::ResourcePoolsValidation >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] >> KqpScheme::TwoSimilarFamiliesTest >> KqpScheme::AlterTableAlterMissedIndex [GOOD] >> KqpScheme::AlterTableRenameIndex >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi >> KqpScheme::CreateTableStoreNegative [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2024-11-21T08:57:08.168637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:08.168671Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:08.168733Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:08.173851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:08.174014Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:57:08.174082Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:08.175067Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:08.183134Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:08.183303Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:57:08.183456Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:57:08.183472Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:57:08.183478Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:57:08.183524Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:57:08.187509Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:57:08.187591Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:57:08.187638Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:57:08.187645Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:57:08.187649Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:57:08.187655Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:08.187762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2024-11-21T08:57:08.187771Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:08.187800Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:57:08.187827Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:57:08.187896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:08.187905Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:08.187913Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:57:08.187919Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:57:08.187923Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:57:08.187928Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:57:08.187933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:08.196092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:08.196124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:08.196136Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:57:08.196610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:57:08.196631Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:57:08.196660Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:57:08.196698Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:57:08.196709Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:57:08.196720Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:57:08.196728Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:57:08.196733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:57:08.196739Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:57:08.196743Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:57:08.196822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:57:08.196828Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:57:08.196832Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:57:08.196835Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2024-11-21T08:57:08.196847Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:57:08.196851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:57:08.196854Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:57:08.196857Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:57:08.196862Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:57:08.224540Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:57:08.224575Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:57:08.224584Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:57:08.224598Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:57:08.224615Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:57:08.224762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:08.224772Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:08.224780Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:57:08.224803Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T08:57:08.224807Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:57:08.224862Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T08:57:08.224872Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:57:08.224876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:57:08.224881Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:57:08.226201Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:57:08.226232Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:08.226314Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:08.226320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:08.226331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:08.226340Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:57:08.226346Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:08.226355Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 
9437184 2024-11-21T08:57:08.226360Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T08:57:08.226369Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:57:08.226373Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:57:08.226378Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:57:08.226382Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:57:08.226434Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T08:57:08.226439Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:57:08.226442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:57:08.226445Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:57:08.226449Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:57:08.226463Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:57:08.226466Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:57:08.226470Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:57:08.226474Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:57:08.226488Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T08:57:08.226492Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T08:57:08.226495Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T08:57:08.226501Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T08:57:08.226505Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:57:08.226508Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit MakeSnapshot 2024-11-21T08:57:08.226512Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit MakeSnapshot 2024-11-21T08:57:08.226533Z node 1 :TX_DATASHARD ... 
7186 2024-11-21T08:57:10.672782Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2024-11-21T08:57:10.672786Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T08:57:10.672788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T08:57:10.672822Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T08:57:10.672827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672831Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T08:57:10.672846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T08:57:10.672849Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672852Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T08:57:10.672862Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T08:57:10.672864Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672865Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T08:57:10.672872Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T08:57:10.672874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672876Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T08:57:10.672883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T08:57:10.672885Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672887Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T08:57:10.672896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T08:57:10.672898Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672900Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T08:57:10.672905Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T08:57:10.672906Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672908Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T08:57:10.672918Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T08:57:10.672919Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672921Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T08:57:10.672926Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T08:57:10.672928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672930Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T08:57:10.672952Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T08:57:10.672955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672957Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T08:57:10.672966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T08:57:10.672968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672970Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T08:57:10.672975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T08:57:10.672977Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672978Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T08:57:10.672987Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T08:57:10.672989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.672990Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T08:57:10.673000Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:10.673003Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2024-11-21T08:57:10.673009Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:57:10.673012Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:57:10.673015Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:10.673032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:10.673036Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2024-11-21T08:57:10.673042Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:57:10.673048Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:57:10.673050Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:10.673069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:10.673072Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2024-11-21T08:57:10.673076Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:57:10.673080Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:57:10.673083Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:10.673094Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:10.673097Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2024-11-21T08:57:10.673101Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T08:57:10.673104Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:57:10.673106Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:10.673124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T08:57:10.673126Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.673128Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T08:57:10.673139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 151 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T08:57:10.673141Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.673143Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T08:57:10.673152Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T08:57:10.673154Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.673155Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T08:57:10.673163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T08:57:10.673165Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T08:57:10.673166Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexReadReplace >> KqpOlapTypes::DecimalCsv [GOOD] >> KqpOlapTypes::TimestampCmpErr >> KqpScheme::AlterColumnTableTiering [GOOD] >> KqpScheme::AlterAsyncReplication >> KqpScheme::AddDropColumn [GOOD] >> KqpScheme::AddChangefeed >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 [GOOD] >> KqpOlapScheme::DropTtlColumn [GOOD] |90.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |90.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableStoreNegative [GOOD] Test command err: Trying to start YDB, gRPC: 4244, MsgBus: 29874 2024-11-21T08:57:06.879476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653988882478937:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.879493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e5/r3tmp/tmpSOFClD/pdisk_1.dat 2024-11-21T08:57:06.944837Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4244, node 1 2024-11-21T08:57:06.967463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.967475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.967477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.967506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:29874 2024-11-21T08:57:06.979900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.979928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.981051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:57:07.022343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.024732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.026030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.052070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.073103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:07.086182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.206666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993177447759:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.206701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.230826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.237573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.249324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.259177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.266302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.281973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993177448274:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.282003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993177448279:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.282002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.282589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.286628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653993177448281:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:07.449264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithCompactionPolicy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithCompactionPolicy" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179427503 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithCompactionPolicy" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) 2024-11-21T08:57:07.459291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithCompactionPolicy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithCompactionPolicy" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179427503 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithCompactionPolicy" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) Trying to start YDB, gRPC: 31532, MsgBus: 5457 2024-11-21T08:57:07.790686Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653993408807559:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e5/r3tmp/tmph6UTzt/pdisk_1.dat 2024-11-21T08:57:07.792754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.802767Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31532, node 2 2024-11-21T08:57:07.809062Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.809076Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.809078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.809115Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5457 TClient is connected to server localhost:5457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 Schemes ... IVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.709241Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.714472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.716642Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.724072Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.731596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.750751Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:09.761798Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.993508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653999666318261:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.993542Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.999027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.005737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.017752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.031031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.038083Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.053152Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.229203Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654003961286077:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.229234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.229237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654003961286082:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.229859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.231570Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654003961286084:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.422224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.436937Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.457290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.474670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.492653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13959, MsgBus: 32298 2024-11-21T08:57:10.818978Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654005914288907:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e5/r3tmp/tmpw702V5/pdisk_1.dat 2024-11-21T08:57:10.826525Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 13959, node 5 2024-11-21T08:57:10.841172Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.841191Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.841193Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.841229Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:10.841522Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32298 TClient is connected to server localhost:32298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:10.919192Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.919225Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:57:10.924594Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:10.924927Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.926175Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.155418Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010209256653:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.155441Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.160796Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010209256666:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.160815Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.163903Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010209256670:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.163926Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpOlapScheme::InsertAddInsertDrop >> KqpScheme::DropResourcePoolClassifier [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat >> KqpAcl::WriteSuccess [GOOD] >> KqpAcl::RecursiveCreateTableShouldSuccess >> KqpOlapScheme::DropColumn [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex >> KqpScheme::AlterNonExistingResourcePool [GOOD] >> KqpScheme::AlterNonExistingResourcePoolClassifier >> KqpScheme::ChangefeedAttributes [GOOD] >> KqpScheme::ChangefeedOnIndexTable >> KqpScheme::DisableCreateExternalTable [GOOD] >> KqpScheme::DisableDropExternalTable >> KqpScheme::CreateExternalDataSourceWithSa ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 2852, MsgBus: 10691 2024-11-21T08:57:06.863333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653986822832539:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.863474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e9/r3tmp/tmpenfZWd/pdisk_1.dat 2024-11-21T08:57:06.923166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2852, node 1 2024-11-21T08:57:06.943911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.943920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.943921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.943946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:06.962349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.962379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:10691 2024-11-21T08:57:06.963813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.013472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.019082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.043580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.070367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.079693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.187309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991117801229:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.187344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.236283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.243148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.252939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.267309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.280348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.294534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.303527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991117801743:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.303557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.303619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991117801748:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.304310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.308189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653991117801750:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11219, MsgBus: 22027 2024-11-21T08:57:07.729035Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653991635224746:2172];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e9/r3tmp/tmpimGk1f/pdisk_1.dat 2024-11-21T08:57:07.734124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.741150Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11219, node 2 2024-11-21T08:57:07.748136Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.748152Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.748154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.748194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22027 TClient is connected to server localhost:22027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.828392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.828424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.829592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.831262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.847042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.854917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.872885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.883658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.055050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653995930193455:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.055083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.064384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.075791Z node 2 :FLAT_TX_SCHEMESHAR ... LOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439653997157693972:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.969777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.974265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.981218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.989929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.045828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.060730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.073100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.087523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654001452661782:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.087546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.087549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654001452661787:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.087959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.092485Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654001452661789:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 7751, MsgBus: 31484 2024-11-21T08:57:09.569783Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439654000264989921:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.569900Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e9/r3tmp/tmpsWVb5r/pdisk_1.dat 2024-11-21T08:57:09.589032Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7751, node 4 2024-11-21T08:57:09.602085Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.602103Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.602104Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.602149Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31484 TClient is connected to server localhost:31484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.675527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.675553Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.675930Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.676344Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.680599Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.684480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.717503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.745367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.758057Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.886049Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654000264991457:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.886078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.892620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.899243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.912970Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.918952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.927574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.940342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.954908Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654000264991969:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.954926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.954949Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654000264991974:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.955526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.960391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654000264991976:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.572847Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:10.638134Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.701254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:10.793967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.945657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.082023Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::DefaultValuesForTableNegative3 [GOOD] Test command err: Trying to start YDB, gRPC: 28559, MsgBus: 4049 2024-11-21T08:57:06.871680Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653987255784221:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.871859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e3/r3tmp/tmpJqtyAh/pdisk_1.dat 2024-11-21T08:57:06.961129Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28559, node 1 2024-11-21T08:57:06.971985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.972010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.973105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:06.983201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.983215Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.983217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.983263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4049 TClient is connected to server localhost:4049 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.054659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.057510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.069287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.134648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.164637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.173865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.196543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991550753045:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.196582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.236231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.242189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.252563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.266120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.281762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.295627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991550753538:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.295651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653991550753543:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.295656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.296229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.301538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653991550753545:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 29158, MsgBus: 26140 2024-11-21T08:57:07.732137Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653990468531264:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e3/r3tmp/tmppNA9oc/pdisk_1.dat 2024-11-21T08:57:07.734743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.740157Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29158, node 2 2024-11-21T08:57:07.750772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.750789Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.750791Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.750821Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26140 TClient is connected to server localhost:26140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.831681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.831711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.832725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.833856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.849083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.857916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.877954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.897088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.060606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653994763499941:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.060687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.063560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... 72057594046644480 2024-11-21T08:57:09.664428Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.678046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.687443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.708606Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.766559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.899557Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653998920674698:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.899583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.904964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.911778Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.919078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.926522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.940567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.953694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.964367Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653998920675205:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.964394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.964435Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439653998920675210:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.965029Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.967355Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439653998920675212:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.355892Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5042, MsgBus: 8371 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e3/r3tmp/tmpxrJb6A/pdisk_1.dat 2024-11-21T08:57:10.720374Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:10.720995Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5042, node 5 2024-11-21T08:57:10.736595Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.736607Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.736609Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.736651Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8371 2024-11-21T08:57:10.804513Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.804547Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.804830Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.815431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.816824Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:10.825222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.848090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:10.890861Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.904550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.078651Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654009999232808:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.078670Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.084125Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.096774Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.116845Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.134688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.149622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.163807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.188890Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654009999233311:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.188919Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.188965Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654009999233316:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.189820Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.193885Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.193929Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654009999233318:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> KqpScheme::CreateTableWithUniformPartitionsUuid [GOOD] >> KqpScheme::CreateTableWithUniqConstraint >> KqpScheme::ResourcePoolsValidation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat [GOOD] Test command err: Trying to start YDB, gRPC: 2421, MsgBus: 14912 2024-11-21T08:57:06.845457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653986556215052:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.845511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e0/r3tmp/tmpxi2TGF/pdisk_1.dat 2024-11-21T08:57:06.909913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2421, node 1 2024-11-21T08:57:06.932370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.932382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.932385Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.932421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14912 2024-11-21T08:57:06.945689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.945718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.946717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:06.995132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.008438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.072545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.089874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.105387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.140907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990851183882:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.140942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.171482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.176450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.188933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.195806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.210302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.225573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.240793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990851184375:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.240818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.240863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990851184380:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.241444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.245951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653990851184382:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:07.453261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1481, MsgBus: 15626 2024-11-21T08:57:07.716027Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653993159975835:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.716181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e0/r3tmp/tmpX3VYyS/pdisk_1.dat 2024-11-21T08:57:07.724122Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1481, node 2 2024-11-21T08:57:07.734523Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.734532Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.734534Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.734564Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15626 TClient is connected to server localhost:15626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.816604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.816638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.817678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.818966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.822082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.833824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.854077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.863575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.004620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653997454944674:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.004639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.009996Z node 2 :FLAT_TX_SCHEMESHARD ... EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) Trying to start YDB, gRPC: 2318, MsgBus: 11310 2024-11-21T08:57:10.655464Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654006642160802:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:10.657456Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e0/r3tmp/tmphOAS7H/pdisk_1.dat 2024-11-21T08:57:10.671136Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2318, node 5 2024-11-21T08:57:10.680791Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.680806Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.680809Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.680858Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11310 TClient is connected to server localhost:11310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.731531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.733055Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:10.750921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.753462Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.753517Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.754645Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:10.817295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.848522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.864847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.036675Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010937129466:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.036696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.044776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.068656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.081135Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.089349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.105612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.118178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.148366Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010937129968:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.148455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.148717Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010937129973:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.149464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.153231Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.153321Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654010937129977:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.350268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179431402 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2024-11-21T08:57:11.366930Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179431402 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... (TRUNCATED) 2024-11-21T08:57:11.380011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithBloomFilter TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithBloomFilter" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179431402 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithBloomFilter" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key"... 
(TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 11162, MsgBus: 20338 2024-11-21T08:57:06.867864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653985487737209:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.868098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e6/r3tmp/tmpUaiXXG/pdisk_1.dat 2024-11-21T08:57:06.954390Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11162, node 1 2024-11-21T08:57:06.968131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.968155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.969057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:06.971088Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.971099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.971101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.971144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20338 TClient is connected to server localhost:20338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.026851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.030354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.048490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:07.079585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.091995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.217207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989782706035:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.217235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.254997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.261689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.279619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.287188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.295121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.311533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989782706547:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.311562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.311583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989782706552:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.312307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.314555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653989782706554:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:07.524394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21406, MsgBus: 22553 2024-11-21T08:57:07.734668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653990849489371:2158];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e6/r3tmp/tmp9K7fLo/pdisk_1.dat 2024-11-21T08:57:07.741394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.746416Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21406, node 2 2024-11-21T08:57:07.758212Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.758224Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.758227Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.758260Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22553 TClient is connected to server localhost:22553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.837033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.837073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.837260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.837952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.860661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.868961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:07.886946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.896813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.032894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653995144458090:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.032953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.035726Z node 2 :FLAT_TX_SCHEMESH ... malizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:09.236589Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:09.236598Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:09.236618Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:09.236638Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:09.236649Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:09.236654Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:09.236673Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:09.236678Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:09.236689Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:09.236692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=368;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=368;columns=4; 2024-11-21T08:57:09.324531Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179429330, txId: 18446744073709551615] shutting down Trying to start YDB, gRPC: 12026, MsgBus: 3018 2024-11-21T08:57:09.540182Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439654002006678633:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e6/r3tmp/tmp4s3Hj7/pdisk_1.dat 2024-11-21T08:57:09.542391Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:09.553734Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12026, node 4 2024-11-21T08:57:09.563814Z 
node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.563842Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.563844Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.563880Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3018 2024-11-21T08:57:09.642925Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.642961Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.643430Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.656154Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.662785Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.669058Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.680680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.702877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:09.712265Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.846651Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002006680017:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.846694Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.852471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.907982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.920686Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.934094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.949421Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.957304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.970296Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002006680535:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.970320Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002006680540:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.970327Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.970976Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.983415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654002006680542:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.543902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:10.607920Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.677148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:10.753922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.819664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.882379Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.225969Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654010596616317:2670], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:11.226001Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] >> KqpScheme::TwoSimilarFamiliesTest [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumn [GOOD] Test command err: Trying to start YDB, gRPC: 25722, MsgBus: 12473 2024-11-21T08:57:07.900276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653991831985415:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.945539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c7/r3tmp/tmpBYKUh7/pdisk_1.dat 2024-11-21T08:57:07.991889Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25722, node 1 2024-11-21T08:57:08.010447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.010474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.010475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.010514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12473 TClient is connected to server localhost:12473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:08.063491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.063519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.068834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.080118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.082966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1, TTL = Interval("PT1H") ON created_at); 2024-11-21T08:57:08.307125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653996126953166:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.308568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.311405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.319795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:08.319841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:08.319889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:08.319907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:08.319925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:08.319941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:08.319955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:08.319973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:08.319989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:08.320003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:08.320018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:08.320033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653996126953242:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:08.321743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:08.321758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:08.321769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:08.321774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:08.321791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:08.321796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:08.321805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:08.321811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:08.321820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:08.321825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:08.321832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:08.321841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:08.321916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:08.321927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:08.321944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:08.321948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:08.321959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:08.321963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:08.321981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:08.321985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:08.321995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Ex ... 3569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:11.301806Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:11.301840Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:11.301864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:11.301877Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:11.301893Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:11.301905Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:11.301918Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:11.301931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:11.301944Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:11.301958Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:11.301970Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010123569631:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:11.305561Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:11.305590Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:11.305604Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:11.305609Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:11.305642Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:11.305647Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:11.305656Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:11.305662Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:11.305674Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:11.305680Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:11.305688Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:11.305694Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:11.305755Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:11.305762Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:11.305782Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:11.305787Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:11.305798Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:11.305803Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:11.305823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:11.305828Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:11.305841Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:11.305845Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=320;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=320;columns=3; 2024-11-21T08:57:11.368393Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010123569723:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.368422Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.368579Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010123569728:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.369387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.374866Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:57:11.374959Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654010123569730:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:11.506172Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179431423, txId: 18446744073709551615] shutting down 2024-11-21T08:57:11.509524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.562956Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179431563, txId: 18446744073709551615] shutting down 2024-11-21T08:57:11.568294Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7439654010123570170:2491], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: resource_id 2024-11-21T08:57:11.568383Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZTJmNjE4NGItYjk5ZDI2YzAtMTU0NTYwYzgtYTQ3YTBhZDQ=, ActorId: [5:7439654010123570168:2490], ActorState: ExecuteState, TraceId: 01jd6z144ddjmzgxh2er0pghxp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T08:57:11.577076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.613373Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179431633, txId: 18446744073709551615] shutting down |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest |90.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapTypes::TimestampCmpErr [GOOD] >> KqpOlapTypes::JsonImport ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] Test command err: Trying to start YDB, gRPC: 28283, MsgBus: 11465 2024-11-21T08:57:07.008422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653990923341515:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.008466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d9/r3tmp/tmpLIvCgX/pdisk_1.dat 2024-11-21T08:57:07.082994Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28283, node 1 2024-11-21T08:57:07.101708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.101719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.101722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.101756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:07.108002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.108031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.109120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11465 TClient is connected to server localhost:11465 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.156768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.160455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.193316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.257231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.274264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.283483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.321323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990923342909:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.321351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.353828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.409399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.420149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.434892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.448497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.455143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.463476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990923343415:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.463501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.463504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990923343420:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.464151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.468315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653990923343422:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:07.602233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.614883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.621840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.634066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.648296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.661580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64978, MsgBus: 32756 2024-11-21T08:57:07.904035Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653993055726350:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.904054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d9/r3tmp/tmpHowXfN/pdisk_1.dat 2024-11-21T08:57:07.919662Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64978, node 2 2024-11-21T08:57:07.930911Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.930929Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.930932Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.930970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32756 TClient is connected to server localhost:32756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.989331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.994184Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.007749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.007773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.008826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:08.010116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... ed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.143707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.307866Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002836642734:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.307904Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.310540Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.318853Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.334019Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.348235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.361397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.375170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.485731Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002836643254:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.485758Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.485766Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654002836643259:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.486385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.487935Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654002836643261:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.684886Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.699616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14605, MsgBus: 8219 2024-11-21T08:57:11.129625Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654008937314885:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:11.131828Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d9/r3tmp/tmpUJoDkZ/pdisk_1.dat 2024-11-21T08:57:11.144068Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14605, node 5 2024-11-21T08:57:11.154423Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.154436Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.154439Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.154480Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8219 TClient is connected to server localhost:8219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.228849Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.228897Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.229834Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:57:11.232427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.233866Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.237426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.297512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.329350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.340968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.482052Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008937316279:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.483829Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.484634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.504701Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.518106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.529279Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.536968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.552113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.570466Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008937316791:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.570492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.570605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008937316796:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.571440Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.578801Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654008937316798:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.788252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.805624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolsValidation [GOOD] Test command err: Trying to start YDB, gRPC: 7926, MsgBus: 1178 2024-11-21T08:57:08.113450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653994706812511:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.113517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c1/r3tmp/tmpnzAD2C/pdisk_1.dat 2024-11-21T08:57:08.187926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7926, node 1 2024-11-21T08:57:08.205733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.205746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.205748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.205793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.212842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.212878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.213923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1178 TClient is connected to server localhost:1178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.271355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.273662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.284861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.360876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.425074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.435873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.484752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994706813901:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.484787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.512793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.518642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.526401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.532948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.540051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.547089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.555277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994706814394:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.555300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.555388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994706814399:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.555967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.560123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653994706814401:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.737139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5357, MsgBus: 19643 2024-11-21T08:57:09.005005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654000981457824:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.005248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c1/r3tmp/tmpYDKqQ4/pdisk_1.dat 2024-11-21T08:57:09.014799Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5357, node 2 2024-11-21T08:57:09.022199Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.022214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.022216Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.022255Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19643 TClient is connected to server localhost:19643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.105283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.105310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.106311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.107004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.109022Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.120863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.131670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.153621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.166691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.300765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654000981459353:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissio ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:10.058605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.198459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005409570622:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.198482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.202898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.208991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.220427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.227087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.234466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.290705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.357210Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005409571140:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.357234Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.357356Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005409571145:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.358056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.360039Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:10.360135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654005409571147:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.552123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:10.567973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.648988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 3037, MsgBus: 7715 2024-11-21T08:57:11.052755Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439654011216859468:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c1/r3tmp/tmpBK56FZ/pdisk_1.dat 2024-11-21T08:57:11.058396Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:11.073519Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3037, node 4 2024-11-21T08:57:11.084648Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.084658Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.084659Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.084692Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7715 TClient is connected to server localhost:7715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.155963Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.155997Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.156328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.157650Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:11.157984Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.163522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.176830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.213774Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.241128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.406929Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011216860867:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.406952Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.413126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.421995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.438857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.448247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.460280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.520585Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.595279Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011216861384:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.595313Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.595361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011216861389:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.596169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.598385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654011216861391:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScheme::AlterTableRenameIndex [GOOD] >> KqpScheme::AlterTableReplaceIndex >> TableWriter::Restore [GOOD] >> KqpAcl::RecursiveCreateTableShouldSuccess [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexReadReplace [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TwoSimilarFamiliesTest [GOOD] Test command err: Trying to start YDB, gRPC: 18161, MsgBus: 20532 2024-11-21T08:57:07.920194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653991180332209:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.920225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cb/r3tmp/tmpMRxAIv/pdisk_1.dat 2024-11-21T08:57:07.981579Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18161, node 1 2024-11-21T08:57:08.004409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.004419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.004420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.004450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20532 2024-11-21T08:57:08.020499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.020523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.021547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.083981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.088441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.097897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.165267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.203618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.218424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.324361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995475301039:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.324398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.460818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.466413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.477282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.484930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.539844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.548347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.555434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995475301563:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.555454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.555455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995475301568:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.556011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.560447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653995475301570:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.753678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:08.771088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.805184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2024-11-21T08:57:08.823168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.922099Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found Trying to start YDB, gRPC: 26039, MsgBus: 1789 2024-11-21T08:57:09.223020Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654000669257977:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.224254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cb/r3tmp/tmpGz7Rph/pdisk_1.dat 2024-11-21T08:57:09.237562Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26039, node 2 2024-11-21T08:57:09.243610Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.243625Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.243627Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.243664Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1789 TClient is connected to server localhost:1789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:09.322369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.322396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.323468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.325261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.327541Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.334376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.348425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose it ... suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.251514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.273405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:10.284814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.449438Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005320845877:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.449460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.455455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.462921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.471983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.479818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.486172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.493241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.505168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005320846379:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.505197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.505277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654005320846384:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.506186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.514319Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654005320846386:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.714292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:10.728189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12032, MsgBus: 2372 2024-11-21T08:57:11.106163Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439654007904698595:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cb/r3tmp/tmphRxpNY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12032, node 4 2024-11-21T08:57:11.129670Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:11.131753Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.131757Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.131759Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.131801Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:11.145029Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2372 TClient is connected to server localhost:2372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.210637Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.210673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.211151Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.212650Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:57:11.214091Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.217601Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.231381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.259271Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.292884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.476337Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007904699988:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.476382Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.482033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.505261Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.517193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.529623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.536926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.551553Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.570545Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007904700491:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.570574Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.570650Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007904700496:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.571339Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.578023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654007904700498:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] Test command err: Trying to start YDB, gRPC: 11107, MsgBus: 6628 2024-11-21T08:57:07.835010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653994031476010:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.835045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d6/r3tmp/tmpSlSCha/pdisk_1.dat 2024-11-21T08:57:07.893160Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11107, node 1 2024-11-21T08:57:07.924410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.924420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.924421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.924455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:07.936398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.936423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.939049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6628 TClient is connected to server localhost:6628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.013390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.015536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.023157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.043299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.066186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.075756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.192059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998326444833:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.192084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.236826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.253358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.262231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.276819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.295479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.313477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.325067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998326445349:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.325086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.325120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998326445354:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.325746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.332154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653998326445356:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:08.511133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6669, MsgBus: 28933 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d6/r3tmp/tmplSeMkW/pdisk_1.dat 2024-11-21T08:57:08.736425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:08.737960Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6669, node 2 2024-11-21T08:57:08.749135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.749147Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.749149Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.749188Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28933 TClient is connected to server localhost:28933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.826717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.826749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.827006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.828020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:08.834549Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.835577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.844323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.860813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.872138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.017767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653999443881233:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.017793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource poo ... de 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:10.465395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.475704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.494032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.506313Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.699829Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654006110317239:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.699915Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.702294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.712123Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.725913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.739640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.754450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.767336Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.850309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654006110317743:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.850353Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.850562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654006110317748:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.851468Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.855776Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654006110317750:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 26655, MsgBus: 62937 2024-11-21T08:57:11.300788Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654010157857585:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:11.301858Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d6/r3tmp/tmp1uHUrA/pdisk_1.dat 2024-11-21T08:57:11.323556Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26655, node 5 2024-11-21T08:57:11.334989Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.335003Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.335005Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.335053Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62937 TClient is connected to server localhost:62937 2024-11-21T08:57:11.399132Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.399167Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:11.400433Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.407912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.417060Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.429615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.441532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.476096Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.489658Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.644233Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010157858984:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.644259Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.650696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.656944Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.669157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.676486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.683378Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.690140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.709347Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010157859477:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.709392Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010157859484:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.709403Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.710184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.718211Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654010157859486:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.941956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:56:50.797372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:50.797401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:50.797405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:50.797409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:50.797418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:50.797421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:50.797427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:50.797534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:50.861720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:50.861739Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:56:50.863381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:50.863448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:50.863480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:50.879413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:50.879587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:50.914002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:56:50.921097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:50.929488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:50.964705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:50.964713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:50.964736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:56:50.966344Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:56:50.997627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:51.008052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.008149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:51.008192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:51.008226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:51.009256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:51.009270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2024-11-21T08:56:51.009273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:51.009685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:51.009948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.009972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.010500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:51.010841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:51.020073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:51.020483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:51.020526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:51.020535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.020631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:51.020642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.020675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:51.020691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:51.021596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021609Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:51.021659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:56:51.021747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021755Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:51.021768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:51.021773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.021780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:51.021786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.021791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:51.021795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:51.021814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:51.021820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:51.021824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
tToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:12.063757Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:12.063805Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 58us result status StatusSuccess 2024-11-21T08:57:12.063947Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:12.074299Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T08:57:12.074343Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T08:57:12.074401Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179432060766 Step: 5000003 TxId: 
18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732179432060766 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179432060766 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:57:12.075474Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T08:57:12.075527Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] >> KqpScheme::DisableDropExternalTable [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] >> KqpScheme::DropAsyncReplicationCascade [GOOD] >> KqpScheme::ChangefeedOnIndexTable [GOOD] >> KqpScheme::AsyncReplicationEndpointAndDatabase |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::RecursiveCreateTableShouldSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 9300, MsgBus: 4241 2024-11-21T08:57:07.888847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653990487983126:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.888881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d3/r3tmp/tmppaQW7g/pdisk_1.dat 2024-11-21T08:57:07.943949Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9300, node 1 2024-11-21T08:57:07.964438Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.964454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.964456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.964490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:07.989303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.989331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.993851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:4241 TClient is connected to server localhost:4241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:57:08.039163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.041065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.046904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.065759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.129757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.142866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.227398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994782951948:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.227424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.276836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.286432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.295490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.302856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.309132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.316545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.328487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994782952461:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.328508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994782952466:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.328515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.329306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.410365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653994782952468:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:08.578377Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439653994782952766:3475], for# user0@builtin, access# DescribeSchema 2024-11-21T08:57:08.578397Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439653994782952766:3475], for# user0@builtin, access# DescribeSchema 2024-11-21T08:57:08.579722Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653994782952763:2460], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/TwoShard]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:57:08.579793Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzQwN2FhNGEtZjIyMDgyODUtOGQ2ZjdlMzgtYWFjYTQxYzI=, ActorId: [1:7439653994782952754:2455], ActorState: ExecuteState, TraceId: 01jd6z117078hrb47kwx5svssr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13834, MsgBus: 24107 2024-11-21T08:57:08.769192Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653994412053989:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.769257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d3/r3tmp/tmp6ywwye/pdisk_1.dat 2024-11-21T08:57:08.780646Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13834, node 2 2024-11-21T08:57:08.794430Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.794444Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.794446Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.794492Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24107 TClient is connected to server localhost:24107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.869564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.869593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.869961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.870491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:08.870752Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.875085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.887372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... shard: 72057594046644480 waiting... 2024-11-21T08:57:10.873195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.099714Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008655178642:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.099735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.104568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.113364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.126082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.142763Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.159972Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.172619Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.244309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008655179157:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.244338Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.244485Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008655179162:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.246859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.249040Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.249277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654008655179164:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:11.466111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.482794Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd6z141g7gfyma0yaqkrzz8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzRlMTc4ZTktZmRjMDEzYjgtYzE2NjBlYWEtYWMwN2RjZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trying to start YDB, gRPC: 28061, MsgBus: 25282 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d3/r3tmp/tmptxZ8OH/pdisk_1.dat 2024-11-21T08:57:11.804292Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:11.811291Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28061, node 5 2024-11-21T08:57:11.820813Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.820826Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.820828Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.820867Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25282 TClient is connected to server localhost:25282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.899923Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.899953Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:57:11.900273Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.901257Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:11.911704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.926313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.950346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.962000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.170495Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654013851038502:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.170520Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.179638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.192478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.206333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.222958Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.243373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.257247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.276419Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654013851038995:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.276449Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.276627Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654013851039000:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.277208Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.279033Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654013851039002:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.503300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.514281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.525050Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:3, at schemeshard: 72057594046644480 >> TableWriter::Backup [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveIndexReadReplace [GOOD] Test command err: Trying to start YDB, gRPC: 21554, MsgBus: 28184 2024-11-21T08:57:08.190618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653996288018381:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.190686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c2/r3tmp/tmpOSGwMf/pdisk_1.dat 2024-11-21T08:57:08.240668Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21554, node 1 2024-11-21T08:57:08.267775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.267805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.267807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.267841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28184 2024-11-21T08:57:08.295075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.295099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.300714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.331437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.344857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.447764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.465624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.473984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.583906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653996288019780:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.583934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.613201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.619054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.634055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.689839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.701460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.716692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.741837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653996288020297:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.741864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.741907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653996288020302:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.742543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.744218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653996288020304:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.929270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16017, MsgBus: 14916 2024-11-21T08:57:09.298890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654002393222266:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.299248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c2/r3tmp/tmp4xZtsN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16017, node 2 2024-11-21T08:57:09.313128Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:09.314799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.314807Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.314809Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.314843Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14916 TClient is connected to server localhost:14916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.398793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.398829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.399980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.401027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.405605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.419686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.434473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.443803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.616304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654002393223778:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.616351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.620597Z node 2 :FLAT_TX_SCHEMESHA ... d: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.537221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.544079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.557116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.611780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.618964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.626270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.646029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654006416553081:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.646057Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.646059Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654006416553086:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.646741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.657989Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654006416553088:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.881282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:10.947172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 25410, MsgBus: 25779 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c2/r3tmp/tmpZgjNUn/pdisk_1.dat 2024-11-21T08:57:11.545301Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:11.545351Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25410, node 4 2024-11-21T08:57:11.555413Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.555426Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.555428Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.555461Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25779 TClient is connected to server localhost:25779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.638329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.638358Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.638565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.639433Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:11.639909Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.641946Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.702247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.725680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.783051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.863062Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654009815913432:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.863096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.867106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.874988Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.888173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.901917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.917947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.930191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.072523Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654014110881257:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.072540Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.072663Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654014110881262:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.073347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.075419Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:12.075510Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654014110881264:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.274817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:12.370461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.407038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2024-11-21T08:57:12.427340Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.482114Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715674, at schemeshard: 72057594046644480 2024-11-21T08:57:12.485729Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037920 not found >> KqpScheme::CreateTableWithUniqConstraint [GOOD] >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> KqpOlapTypes::JsonImport [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> KqpPg::TableDeleteWhere [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] Test command err: Trying to start YDB, gRPC: 28455, MsgBus: 18901 2024-11-21T08:57:07.687585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653990751595218:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.687625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d7/r3tmp/tmpcu0S7x/pdisk_1.dat 2024-11-21T08:57:07.742181Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28455, node 1 2024-11-21T08:57:07.757377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.757393Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.757395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.757434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18901 2024-11-21T08:57:07.788000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.788030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.789222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18901 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.801387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.813408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.877817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.910904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.929045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.048450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995046564047:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.048489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.158079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.179849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.195417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.205316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.219426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.233392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.260608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995046564575:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.260637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.260672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995046564580:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.261259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.264899Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T08:57:08.264946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653995046564582:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:08.541044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.551834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15725, MsgBus: 20622 2024-11-21T08:57:08.774735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d7/r3tmp/tmpCfBcID/pdisk_1.dat 2024-11-21T08:57:08.780466Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15725, node 2 2024-11-21T08:57:08.788733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.788747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.788749Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.788793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20622 TClient is connected to server localhost:20622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.870288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.870315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.871087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.871450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:08.872323Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.880977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.891785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.910918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.921840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.109539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653999823063181:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or yo ... EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) Trying to start YDB, gRPC: 28636, MsgBus: 3956 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d7/r3tmp/tmpTvrVhO/pdisk_1.dat 2024-11-21T08:57:11.798469Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:11.808729Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28636, node 5 2024-11-21T08:57:11.832991Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.833006Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.833008Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.833058Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3956 2024-11-21T08:57:11.893276Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.893311Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.896115Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.911176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.913786Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.919725Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.942539Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:11.965088Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.977661Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.154520Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654012121418516:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.154555Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.160322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.169521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.180929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.197900Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.219957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.231170Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.255255Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654012121419018:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.255297Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.255467Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654012121419023:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.256372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.258934Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:12.259020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654012121419025:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.505994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179432613 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) 2024-11-21T08:57:12.573173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179432613 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) 2024-11-21T08:57:12.603520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732179432613 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... 
(TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableDropExternalTable [GOOD] Test command err: Trying to start YDB, gRPC: 3358, MsgBus: 19438 2024-11-21T08:57:08.214637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653994078162904:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.214651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b9/r3tmp/tmpoHMujJ/pdisk_1.dat 2024-11-21T08:57:08.278435Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3358, node 1 2024-11-21T08:57:08.298091Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.298108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.298110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.298145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.314789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.314811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:19438 2024-11-21T08:57:08.315950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.461154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.467855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.483414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.499649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.508876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.611433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994078164437:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.611464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.640745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.647386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.659931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.675096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.687164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.694311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.705465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994078164950:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.705517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.705579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994078164955:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.706229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.714881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653994078164957:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.903765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26853, MsgBus: 17112 2024-11-21T08:57:09.112302Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654002125173909:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.112318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b9/r3tmp/tmpQ8iPrA/pdisk_1.dat 2024-11-21T08:57:09.128175Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26853, node 2 2024-11-21T08:57:09.139906Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.139920Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.139922Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.139981Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17112 TClient is connected to server localhost:17112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.212313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.212339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.213407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.221787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.224608Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.233449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.292314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.310775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.322067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.458503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654002125175465:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.458579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: N ... 8:57:11.129723Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.132472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:11.133086Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.145104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.166723Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.231246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.297442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.458794Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008764043473:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.458819Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.465264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.476621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.489719Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.502063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.515741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.530062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.589234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008764043985:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.589259Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.589267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654008764043990:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.589969Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.591660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654008764043992:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 28254, MsgBus: 25856 2024-11-21T08:57:11.954003Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654010601083812:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:11.954019Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b9/r3tmp/tmpylE4A3/pdisk_1.dat 2024-11-21T08:57:11.967947Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28254, node 5 2024-11-21T08:57:11.976865Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.976880Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.976882Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.976926Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25856 TClient is connected to server localhost:25856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:12.054426Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:12.054459Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:12.055533Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:12.057801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.112807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.121510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:12.140674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.150228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.347726Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014896052677:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.347752Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.350271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.367205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.426735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.432952Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.449325Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.462936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.481629Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014896053194:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.481664Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.481718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014896053199:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.482621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.491380Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654014896053201:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropAsyncReplicationCascade [GOOD] Test command err: Trying to start YDB, gRPC: 4076, MsgBus: 22556 2024-11-21T08:57:06.987289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653985511854496:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.987369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dd/r3tmp/tmpjDCmr4/pdisk_1.dat 2024-11-21T08:57:07.044300Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4076, node 1 2024-11-21T08:57:07.060639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.060650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.060652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.060686Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22556 2024-11-21T08:57:07.089049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.089078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.091107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.124131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.126085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.134861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.157076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.176281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.190523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.315178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989806823199:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.315198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.352648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.359302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.371871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.426883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.481888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.490234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.498693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989806823716:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.498734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.498753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653989806823721:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.499356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.503467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653989806823723:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:07.683454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29823, MsgBus: 6677 2024-11-21T08:57:07.891023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653991156209191:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.891061Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dd/r3tmp/tmpojcVw9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29823, node 2 2024-11-21T08:57:07.909692Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:07.910092Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.910100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.910103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.910129Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6677 TClient is connected to server localhost:6677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.992623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.992650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.994844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.995147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.002061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.013186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.039590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.050691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.185716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653995451178020:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.187480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_ ... 1T08:57:09.240941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654000504832836:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:09.424318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.437780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.629241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.637871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.496515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropReplication, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.498614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715679:2, at schemeshard: 72057594046644480 2024-11-21T08:57:10.500704Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,35) wasn't found 2024-11-21T08:57:10.500722Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,34) wasn't found 2024-11-21T08:57:10.500844Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found 2024-11-21T08:57:10.500859Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2024-11-21T08:57:10.505384Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found Trying to start YDB, gRPC: 6516, MsgBus: 4753 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040dd/r3tmp/tmp8W1GXE/pdisk_1.dat 2024-11-21T08:57:10.832623Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:10.833164Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6516, node 4 2024-11-21T08:57:10.848856Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.848874Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.848876Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.848922Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4753 TClient is connected to server localhost:4753 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.917654Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.917684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.917964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.918597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:10.921275Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:10.924015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.935724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.958741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.977839Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.129926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007261064006:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.129956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.133911Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.143038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.157385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.168377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.181346Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.196935Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.281170Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007261064513:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.281194Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.281274Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007261064518:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.282038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.286741Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.286847Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654007261064520:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.525831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.537523Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.612498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.624581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.562440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropReplicationCascade, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.565086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715678:2, at schemeshard: 72057594046644480 2024-11-21T08:57:12.567213Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037921 not found 2024-11-21T08:57:12.567231Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037922 not found 2024-11-21T08:57:12.567908Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,35) wasn't found 2024-11-21T08:57:12.567920Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,34) wasn't found 2024-11-21T08:57:12.585203Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037920 not found 2024-11-21T08:57:12.585218Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037923 not found |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] Test command err: Trying to start YDB, gRPC: 17590, MsgBus: 5343 2024-11-21T08:57:06.855299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653989325244751:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.855588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e8/r3tmp/tmpjflTcc/pdisk_1.dat 2024-11-21T08:57:06.916450Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17590, node 1 2024-11-21T08:57:06.939305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.939318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T08:57:06.939320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.939353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:06.956151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.956177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.958122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5343 TClient is connected to server localhost:5343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:06.996578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:06.998536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.004927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.024147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.043605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.054709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.193681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993620213579:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.193703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.226587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.232877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.245596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.265988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.277311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.287106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.296189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993620214090:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.296235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.296253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993620214095:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.296930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.300165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653993620214097:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:07.478687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.571414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710675:1, at schemeshard: 72057594046644480 2024-11-21T08:57:07.577518Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:57:07.580309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11742, MsgBus: 8383 2024-11-21T08:57:07.729727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653990826190796:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e8/r3tmp/tmpLKEGP6/pdisk_1.dat 2024-11-21T08:57:07.734335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.740521Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11742, node 2 2024-11-21T08:57:07.752344Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.752360Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.752362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.752394Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8383 TClient is connected to server localhost:8383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:07.829286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.829317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.830348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.833647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.834492Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.853069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.864988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.880192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... 39654004514805614:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.225337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15124, MsgBus: 2105 2024-11-21T08:57:10.938677Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e8/r3tmp/tmp3j8npT/pdisk_1.dat 2024-11-21T08:57:10.945581Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15124, node 5 2024-11-21T08:57:10.960446Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.960461Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.960463Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.960507Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2105 TClient is connected to server localhost:2105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:11.027958Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:11.027986Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:11.029068Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:11.032344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.037562Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.048708Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.078178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.099868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.122422Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.307825Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008483904411:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.307852Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.313424Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.320742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.393512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.413766Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.426971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.439150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.465197Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008483904927:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.465219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.465363Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654008483904932:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.466111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.469375Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654008483904934:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.665758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.848968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.875604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.930758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.956565Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710765:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.997406Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710767:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.030287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710770:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.071486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710772:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.093555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710775:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.134324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710777:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.156355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710780:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.225051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710782:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.247725Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710785:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.305075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710787:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.326031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710790:0, at schemeshard: 72057594046644480 
2024-11-21T08:57:12.401713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710792:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.452962Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710795:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.495638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710797:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.518639Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710800:0, at schemeshard: 72057594046644480 >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapTypes::JsonImport [GOOD] Test command err: Trying to start YDB, gRPC: 8602, MsgBus: 26755 2024-11-21T08:57:08.139077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653994432393121:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.139093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040be/r3tmp/tmp87MU3R/pdisk_1.dat 2024-11-21T08:57:08.208577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8602, node 1 2024-11-21T08:57:08.227772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.227782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.227784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.227814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.239851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.239871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.240553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26755 TClient is connected to server localhost:26755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.305296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.311980Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, timestamp Timestamp NOT NULL, ui64_type Uint64 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:08.556834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994432393719:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.556855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.591811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.598315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:08.598354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:08.598401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:08.598424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:08.598441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:08.598465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:08.598488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:08.598508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:08.598535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:08.598552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:08.598574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:08.598594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653994432393795:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:08.599113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:08.599138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:08.599155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:08.599167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:08.599194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:08.599212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:08.599223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:08.599237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:08.599249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:08.599260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:08.599267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:08.599272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:08.599362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:08.599377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:08.599396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:08.599401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:08.599412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:08.599418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:08.599437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:08.599448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:08.599460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Res ... 594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:12.709217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.711884Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, json Json, json_doc JsonDocument, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:12.954061Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654015286799146:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.954095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.956240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.965156Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:12.965192Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:12.965272Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:12.965308Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:12.965338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:12.965371Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:12.965400Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:12.965428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:12.965459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:12.965482Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:12.965517Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:12.965538Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654015286799192:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:12.966248Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:12.966257Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:12.966270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:12.966277Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:12.966295Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:12.966299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:12.966317Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:12.966323Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:12.966334Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:12.966339Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:12.966350Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:12.966355Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:12.966415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:12.966421Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:12.966439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:12.966444Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:12.966458Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:12.966462Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:12.966480Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:12.966484Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:12.966496Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:12.966500Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400;columns=3; Bulk upsert to table '/Root/ColumnTableTest'bad batch in data: Invalid UTF8 sequence at string index 0; order:id, json, json_doc |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere [GOOD] Test command err: Trying to start YDB, gRPC: 23107, MsgBus: 18196 2024-11-21T08:56:53.127011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653933343623593:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:53.127180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044aa/r3tmp/tmpySuJWF/pdisk_1.dat 2024-11-21T08:56:53.147182Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23107, node 1 
2024-11-21T08:56:53.162868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:53.162881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:53.162882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:53.162915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18196 TClient is connected to server localhost:18196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:56:53.227117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:53.227144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:53.228294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:53.230054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:56:53.372248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.431348Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 0'::bytea, 'bytea 0'::bytea ) 2024-11-21T08:56:53.433536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653933343624297:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.433539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653933343624289:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.433551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:56:53.434184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:56:53.440395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653933343624303:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 1'::bytea, 'bytea 1'::bytea ) --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 2'::bytea, 'bytea 2'::bytea ) \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 2024-11-21T08:56:53.714908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.721694Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '0'::int2, '{a0, b10}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '1'::int2, '{a1, b11}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '2'::int2, '{a2, b12}'::_bytea ) {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} 16 2024-11-21T08:56:53.805586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.812316Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'false'::bool, 'false'::bool ) --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'true'::bool, 'true'::bool ) f f t t 18 2024-11-21T08:56:53.872069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T08:56:53.878894Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '0'::"char", '0'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '1'::"char", '1'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '2'::"char", '2'::"char" ) 0 0 1 1 2 2 21 2024-11-21T08:56:53.963867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.019610Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 23 2024-11-21T08:56:54.117735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.125377Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '0'::int4, '0'::int4 ) --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '1'::int4, '1'::int4 ) --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '2'::int4, '2'::int4 ) 0 0 1 1 2 2 20 2024-11-21T08:56:54.214492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715729:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.270697Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send 
TEvPoisonPill --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '0'::int8, '0'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '1'::int8, '1'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '2'::int8, '2'::int8 ) 0 0 1 1 2 2 700 2024-11-21T08:56:54.356106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715741:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.362925Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '0.5'::float4, '0.5'::float4 ) --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '1.5'::float4, '1.5'::float4 ) --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '2.5'::float4, '2.5'::float4 ) 0.5 0.5 1.5 1.5 2.5 2.5 701 2024-11-21T08:56:54.455224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715753:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.461943Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '0.5'::float8, '0.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '1.5'::float8, '1.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '2.5'::float8, '2.5'::float8 ) 0.5 0.5 1.5 1.5 2.5 2.5 25 2024-11-21T08:56:54.559364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715765:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.566868Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 0'::text, 'text 0'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 1'::text, 'text 1'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 2'::text, 'text 2'::text ) text 0 text 0 text 1 text 1 text 2 text 2 1042 2024-11-21T08:56:54.674611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715777:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.682328Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1042_b (key, value) VALUES ( 'bpchar 0'::bpchar, 'bpchar 0'::bpchar ) --!syntax_pg INSERT INTO Pg1042_b (key, value) VALUES ( 'bpchar 1'::bpchar, 'bpchar 1' ... 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715824:0, at schemeshard: 72057594046644480 628 2024-11-21T08:57:12.328707Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.337510Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715826:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.391193Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.398219Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715828:0, at schemeshard: 72057594046644480 601 2024-11-21T08:57:12.423971Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.429041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715830:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.445231Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.457197Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715832:0, at schemeshard: 72057594046644480 603 2024-11-21T08:57:12.471907Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.473949Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715834:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.489001Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.492441Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715836:0, at schemeshard: 72057594046644480 602 2024-11-21T08:57:12.503057Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.505367Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715838:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.518397Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.521421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715840:0, at schemeshard: 72057594046644480 604 2024-11-21T08:57:12.535957Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.538753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715842:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.551139Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.556965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715844:0, at schemeshard: 72057594046644480 718 2024-11-21T08:57:12.572690Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.575406Z node 6 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715846:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.599529Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.602742Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715848:0, at schemeshard: 72057594046644480 869 2024-11-21T08:57:12.625574Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.628277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715850:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.652593Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.655081Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715852:0, at schemeshard: 72057594046644480 650 2024-11-21T08:57:12.686324Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.688567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715854:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.707805Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.710472Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715856:0, at schemeshard: 72057594046644480 829 2024-11-21T08:57:12.744847Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.757562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715858:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.773085Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.776114Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715860:0, at schemeshard: 72057594046644480 774 2024-11-21T08:57:12.790723Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.792980Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715862:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.805152Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.807282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715864:0, at schemeshard: 72057594046644480 2950 2024-11-21T08:57:12.820402Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.822985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715866:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.838443Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.840110Z 
node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715868:0, at schemeshard: 72057594046644480 114 2024-11-21T08:57:12.854832Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.859474Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715870:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.872306Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.875307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715872:0, at schemeshard: 72057594046644480 3802 2024-11-21T08:57:12.891023Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.893733Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715874:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.907521Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.909915Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715876:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.920444Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 4072 2024-11-21T08:57:12.922734Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715878:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.933572Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.935776Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715880:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.946424Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 142 2024-11-21T08:57:12.948548Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715882:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.961518Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.963588Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715884:0, at schemeshard: 72057594046644480 3615 2024-11-21T08:57:12.978119Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:12.989456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715886:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.004766Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:13.007448Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715888:0, at schemeshard: 72057594046644480 3614 2024-11-21T08:57:13.019274Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send 
TEvPoisonPill 2024-11-21T08:57:13.023898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715890:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.044478Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:13.047113Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715892:0, at schemeshard: 72057594046644480 22 2024-11-21T08:57:13.070165Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:13.072933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715894:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.095491Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T08:57:13.098328Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715896:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.115642Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |90.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> TUniqueIndexTests::CreateTable >> KqpScheme::AlterTableReplaceIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex [GOOD] >> KqpScheme::AlterResourcePoolClassifier >> DataShardTxOrder::ZigZag |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScheme::CreateAsyncReplicationWithSecret [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] >> KqpScheme::CreateExternalDataSourceWithSa [GOOD] >> KqpScheme::CreateExternalTable >> TUniqueIndexTests::CreateTable [GOOD] |90.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 6069, MsgBus: 16884 2024-11-21T08:57:06.888376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653986499697257:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.888438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040da/r3tmp/tmpU4Li9B/pdisk_1.dat 2024-11-21T08:57:06.968868Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6069, node 1 2024-11-21T08:57:06.987962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.987973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.987975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.988005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:06.988700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:06.988726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:06.989708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16884 TClient is connected to server localhost:16884 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.046472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.053002Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.060396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:07.123580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.147526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.157573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.218855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990794665954:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.218897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.256263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.263006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.288259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.302005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.317474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.331346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990794666457:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.331369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653990794666462:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.331377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.331982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.335400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653990794666464:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:07.513440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.519363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.532432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17983, MsgBus: 11825 2024-11-21T08:57:08.029366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653994165131101:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.031681Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040da/r3tmp/tmpsAkYnq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17983, node 2 2024-11-21T08:57:08.053904Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.053917Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.053919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.053955Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.054048Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11825 TClient is connected to server localhost:11825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.128774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.128819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.129870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:08.133095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.137163Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.145524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.166843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.192631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.204063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propose ... AD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654009203083406:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.229075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.230794Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.239516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.250685Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.266753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.280167Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.292933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.357444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654009203083921:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.357475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.359557Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654009203083926:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.360534Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.363419Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654009203083928:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 15900, MsgBus: 26756 2024-11-21T08:57:11.927322Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654010064951202:2261];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:11.927356Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040da/r3tmp/tmpHrrxdQ/pdisk_1.dat 2024-11-21T08:57:11.939366Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15900, node 5 2024-11-21T08:57:11.952494Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:11.952507Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:11.952509Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:11.952543Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26756 TClient is connected to server localhost:26756 2024-11-21T08:57:12.029531Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:12.029581Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:12.031216Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:12.037713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.040116Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:12.115836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.185657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:12.224100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.236030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.324846Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014359919816:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.324871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.336855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.394573Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.404241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.421475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.436359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.448066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.469351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014359920330:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.469394Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.469454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654014359920335:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.470285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.473925Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654014359920337:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.928541Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:12.997107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.062893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:13.133206Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.205636Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.281399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableReplaceIndex [GOOD] Test command err: Trying to start YDB, gRPC: 7589, MsgBus: 16949 2024-11-21T08:57:08.119975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653994556047918:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.120046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040bd/r3tmp/tmp4nTmsn/pdisk_1.dat 2024-11-21T08:57:08.188221Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7589, node 1 2024-11-21T08:57:08.205244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.205261Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.205262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.205301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16949 TClient is connected to server localhost:16949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:08.265107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.265139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.266136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.272779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.289055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.365778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.480353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.492146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.544957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994556049342:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.545000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.573022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.579220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.634327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.645119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.652713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.659231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.668036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994556049846:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.668059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.668092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994556049851:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.668632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.672559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653994556049853:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.862839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.870850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.888255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.066951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:09.077978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61967, MsgBus: 18714 2024-11-21T08:57:09.213538Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654000519964517:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040bd/r3tmp/tmpCZwZ4u/pdisk_1.dat 2024-11-21T08:57:09.219346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:09.231270Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61967, node 2 2024-11-21T08:57:09.238573Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.238585Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.238587Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.238629Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18714 TClient is connected to server localhost:18714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:09.312984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.313024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.315337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.315704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.320799Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.326902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.336348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:09. ... ration part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.707570Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.719524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.733895Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.747333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.926432Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007436338560:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.926466Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.926708Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654007436338565:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.927589Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.934503Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.934632Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654007436338568:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.153624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.161301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.173877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15723, MsgBus: 23453 2024-11-21T08:57:12.655707Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654012339397225:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:12.655911Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040bd/r3tmp/tmpup52RE/pdisk_1.dat 2024-11-21T08:57:12.683088Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15723, node 5 2024-11-21T08:57:12.706962Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:12.706976Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:12.706978Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:12.707103Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23453 2024-11-21T08:57:12.759063Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:12.759113Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:12.760193Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:12.769573Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:12.771170Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:12.778822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.794649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.819360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.831014Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.985296Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654012339398760:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.985317Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.992909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.000729Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.014042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.027282Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.034711Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.051659Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.072304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654016634366568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.072360Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.072563Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654016634366573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.073517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:13.077265Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:13.077340Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654016634366575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:13.286798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.300363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.366610Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.637454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:13.660137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAsyncReplicationWithSecret [GOOD] Test command err: Trying to start YDB, gRPC: 25049, MsgBus: 14962 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e2/r3tmp/tmpBsCbeA/pdisk_1.dat 2024-11-21T08:57:06.964932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:06.967153Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25049, node 1 2024-11-21T08:57:06.988490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.988512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.988514Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.988572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14962 TClient is connected to server localhost:14962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:07.047431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.047454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.047691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.048618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.049461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.055346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.118966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.141178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.164348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.221585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993329121916:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.221610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.253324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.260913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.327929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.336600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.350639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.367812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993329122431:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.367842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993329122436:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.367842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.368687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.377918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653993329122438:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:07.571990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.582594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9450, MsgBus: 24457 2024-11-21T08:57:07.765547Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653989965211781:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e2/r3tmp/tmpLTKwri/pdisk_1.dat 2024-11-21T08:57:07.769139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:07.774359Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9450, node 2 2024-11-21T08:57:07.785449Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.785466Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.785469Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.785522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24457 TClient is connected to server localhost:24457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.865080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.865104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.866740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.867041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.868711Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.895289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:07.912439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.944523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.964349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.096695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653994260180441:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you ... opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.110524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654003919607819:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.110557Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.110572Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654003919607824:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.111168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.114273Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654003919607826:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.513441Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.526620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9588, MsgBus: 62391 2024-11-21T08:57:10.878879Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040e2/r3tmp/tmph5VeHP/pdisk_1.dat 2024-11-21T08:57:10.887351Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9588, node 5 2024-11-21T08:57:10.901151Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.901166Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.901168Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.901208Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62391 2024-11-21T08:57:10.977687Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.977714Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.979580Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.998281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.000275Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:11.007666Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:11.030345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.053344Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.074321Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.261001Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654011063547001:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.261048Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.267018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.323059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.334644Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.349414Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.362700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.382064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.405403Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654011063547516:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.405431Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.405597Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654011063547524:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.406599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:11.413323Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:11.413434Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654011063547526:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:11.662450Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.880988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2024-11-21T08:57:11.954171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.010960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 2024-11-21T08:57:12.070323Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.133392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.192938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.269720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:12.335315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.626209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.833298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.841523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] |90.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 
72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:14.127430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:14.127455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:14.127460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:14.127464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:14.127470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:14.127474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:14.127482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:14.127559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:14.136871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:14.136896Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:14.139388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:14.140169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:14.140227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:14.141682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:14.141879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:14.141976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:14.142062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:14.143115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:14.143382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:14.143395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:14.143432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:14.143437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:14.143441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:14.143453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.144540Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 
2024-11-21T08:57:14.161027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:14.161111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.161178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:14.161224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:14.161232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.162054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:14.162082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:14.162138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.162147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:14.162151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:14.162156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:14.162582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.162593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:14.162597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:14.162992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.163009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.163016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:14.163023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:14.163712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:14.164138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:14.164190Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:14.164395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:14.164418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:14.164426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:14.164486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:14.164493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:14.164531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:14.164544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:14.164947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:14.164956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:14.164996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:14.165001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:14.165090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.165097Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:14.165110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:14.165115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:14.165121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:14.165127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:14.165132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:14.165135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:14.165146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:14.165151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:14.165155Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:14.165444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:14.165458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:14.165463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:14.165468Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:57:14.165473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:14.165499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... .248695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:57:14.248705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248709Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T08:57:14.248777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:57:14.248782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T08:57:14.248790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:57:14.248793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:57:14.248800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:57:14.248805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: 
ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248808Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:57:14.248814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T08:57:14.249641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:57:14.249661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:57:14.249682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:57:14.251088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:57:14.251127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251285Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T08:57:14.251300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T08:57:14.251321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T08:57:14.251327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T08:57:14.251358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:57:14.251364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:57:14.251369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T08:57:14.251372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:57:14.251376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T08:57:14.251396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T08:57:14.251401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:57:14.251407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:57:14.251412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:57:14.251479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:57:14.251484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T08:57:14.251486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T08:57:14.251490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:57:14.251497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T08:57:14.251500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T08:57:14.251506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:57:14.252118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:57:14.252129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T08:57:14.252267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:14.252342Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 88us result status StatusSuccess 2024-11-21T08:57:14.252593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] Test command err: Trying to start YDB, gRPC: 23996, MsgBus: 6276 2024-11-21T08:57:08.491633Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653998186496751:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.491883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b3/r3tmp/tmpLxj6ln/pdisk_1.dat 2024-11-21T08:57:08.538114Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23996, node 1 2024-11-21T08:57:08.554235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.554246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.554248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.554285Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:6276 TClient is connected to server localhost:6276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:08.591871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.591894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.592993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.615782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.619530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.682422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.701206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.712949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.784963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998186498292:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.784987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.824819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.836429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.850776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.907872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.920627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.933705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.948415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998186498809:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.948439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.948477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998186498814:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.949157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.952346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653998186498816:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:09.122664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3299, MsgBus: 3858 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b3/r3tmp/tmpWmoY0U/pdisk_1.dat 2024-11-21T08:57:09.635404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:09.635885Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3299, node 2 2024-11-21T08:57:09.652686Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.652698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.652700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.652736Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3858 TClient is connected to server localhost:3858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.716689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.716713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.718980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.724737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.732596Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.739404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:09.753857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.778845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.793377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.932044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654002284363803:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.932066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.937060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose it ... suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.101769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.125421Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.136772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.248671Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011870624654:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.248703Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.252262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.268176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.278330Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.290643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.303099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.314613Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.333529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011870625157:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.333555Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.333679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654011870625162:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.334556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.342005Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654011870625164:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:12.620191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21741, MsgBus: 21607 2024-11-21T08:57:13.142820Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b3/r3tmp/tmpvQOy4E/pdisk_1.dat 2024-11-21T08:57:13.158456Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21741, node 5 2024-11-21T08:57:13.172442Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:13.172460Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:13.172462Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:13.172512Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21607 TClient is connected to server localhost:21607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:13.244560Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:13.244600Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:13.244930Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.245609Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:13.253827Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:13.259195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.282392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:13.312986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.328872Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.556171Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018534833387:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.556225Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.564735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.598142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.608266Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.618349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.675895Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.698626Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.716867Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018534833905:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.716899Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.716940Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018534833910:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.717704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:13.719402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654018534833912:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:13.932393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.947842Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 |90.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |90.8%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |90.8%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> KikimrIcGateway::TestLoadTableMetadata >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column [GOOD] >> KqpScheme::CreateTableWithStoreExternalBlobs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] Test command err: Trying to start YDB, gRPC: 62838, MsgBus: 11651 2024-11-21T08:57:09.057327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653999941123455:2156];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.057427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ad/r3tmp/tmpWEUNVY/pdisk_1.dat 2024-11-21T08:57:09.117095Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62838, node 1 2024-11-21T08:57:09.134448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.134461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.134463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.134498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11651 2024-11-21T08:57:09.156853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.156885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.157997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.182241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.184168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.192186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.255515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.313688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.325515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.354509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999941124889:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.354530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.391754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.398243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.408199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.414552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.422249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.429154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.438665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999941125381:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.438686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.438691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999941125386:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.439374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.442250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653999941125388:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:09.624664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28952, MsgBus: 11033 2024-11-21T08:57:09.972537Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653998847369209:2073];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ad/r3tmp/tmp60LNHT/pdisk_1.dat 2024-11-21T08:57:09.976873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:09.984777Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28952, node 2 2024-11-21T08:57:09.993955Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.993965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.993975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.994009Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11033 TClient is connected to server localhost:11033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.071262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.071292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.072427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:10.074048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.081134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:10.089228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.108359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.121675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.339240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654003142338023:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.339260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: ... 57:12.300885Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.337533Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.405421Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.509402Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654012600317260:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.509429Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.514986Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.522863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.530654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.545925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.559015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.577111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.594355Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654012600317754:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.594389Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.594494Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654012600317759:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.595321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.610693Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654012600317761:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:13.061029Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9153, MsgBus: 2256 2024-11-21T08:57:13.364317Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654018642886647:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ad/r3tmp/tmpbE6soy/pdisk_1.dat 2024-11-21T08:57:13.368485Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:13.383499Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9153, node 5 2024-11-21T08:57:13.417040Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:13.417054Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:13.417056Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:13.417105Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2256 TClient is connected to server localhost:2256 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:13.473765Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:13.473805Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:13.474675Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:13.483901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.485664Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:13.488695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.521212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.580420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.606844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:13.700680Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018642888048:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.700711Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.706933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.718654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.780363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.847509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.862193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.875876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.954655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018642888582:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.954681Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.954830Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654018642888587:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:13.955603Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:13.958569Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:13.958648Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654018642888589:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:14.161693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpScheme::CreateExternalTable [GOOD] >> KqpScheme::CreateExternalTableCheckPrimaryKey >> KikimrIcGateway::TestListPath |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> KqpConstraints::AddNonColumnDoesnotReturnInternalError [GOOD] >> KqpConstraints::AlterTableAddNotNullWithDefault >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata |90.8%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] >> KqpScheme::CreateExternalTableCheckPrimaryKey [GOOD] >> KqpScheme::CreateExternalTableValidation >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> KqpScheme::CreateDropTableViaApiMultipleTime [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] Test command err: Trying to start YDB, gRPC: 3499, MsgBus: 18074 2024-11-21T08:57:07.912662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653993726191040:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.912677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cc/r3tmp/tmpRYXnPF/pdisk_1.dat 2024-11-21T08:57:07.973445Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3499, node 1 2024-11-21T08:57:07.994275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.994291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.994293Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.994334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18074 TClient is connected to server localhost:18074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.047469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.047494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.048275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.049813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:08.072361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.080946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.143158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.160952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.201606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.286297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998021159883:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.286327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.319699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.327876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.388726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.408763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.419250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.435893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.447840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998021160399:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.447890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.447979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998021160404:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.448777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.455687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653998021160406:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.667037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1732179428721 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2024-11-21T08:57:08.680579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1732179428721 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2024-11-21T08:57:08.713419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715676 CreateStep: 1732179428721 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2024-11-21T08:57:08.725134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.740219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9211, MsgBus: 16653 2024-11-21T08:57:09.007482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654001673892011:2053];send_to=[0:7307 ... 72057594046644480 Trying to start YDB, gRPC: 9172, MsgBus: 18551 2024-11-21T08:57:14.220283Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439654021505675689:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:14.220790Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cc/r3tmp/tmpU1Ya73/pdisk_1.dat 2024-11-21T08:57:14.252301Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9172, node 7 2024-11-21T08:57:14.272420Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:14.272486Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:14.272489Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:14.272535Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18551 2024-11-21T08:57:14.320682Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:14.320743Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:14.321955Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:14.403000Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:14.405014Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --!syntax_v1 CREATE TABLE `/Root/TableWithTtlSettings` ( Key Uint64, Datetime64Column Datetime64, PRIMARY KEY (Key) ) WITH ( TTL = Interval("P1D") ON Datetime64Column ) 2024-11-21T08:57:14.636136Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439654021505676283:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.636160Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.639638Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4038, MsgBus: 6151 2024-11-21T08:57:14.946522Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439654020527918745:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:14.946554Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cc/r3tmp/tmp4sBRHT/pdisk_1.dat 2024-11-21T08:57:14.965025Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4038, node 8 2024-11-21T08:57:14.973402Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:14.973417Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:14.973418Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:14.973465Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6151 TClient is connected to server localhost:6151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:15.046491Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.046526Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.047592Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:15.049799Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.053119Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.088967Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:15.103369Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.126882Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.140919Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.267360Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439654024822887581:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.267411Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.273439Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.284953Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.295219Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.302711Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.370547Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.382533Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.416313Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439654024822888098:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.416339Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.416490Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439654024822888103:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.417264Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:15.422235Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T08:57:15.422327Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439654024822888105:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:15.602112Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpScheme::CreateExternalDataSource >> KqpScheme::AlterResourcePoolClassifier [GOOD] |90.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> TSchemeShardMoveTest::TwoTables |90.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> KqpOlapScheme::DropColumnAfterInsert [GOOD] >> BsControllerConfig::MoveGroups >> BsControllerConfig::Basic >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::SelectAllGroups >> BsControllerConfig::OverlayMap >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::PDiskCreate >> BsControllerConfig::OverlayMap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 2974, MsgBus: 24582 2024-11-21T08:57:07.922461Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653990531285985:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.922543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d0/r3tmp/tmpUvyBWT/pdisk_1.dat 2024-11-21T08:57:07.984639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2974, node 1 2024-11-21T08:57:08.002489Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.002499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.002501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.002526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24582 2024-11-21T08:57:08.021319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.021344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.022495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24582 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.053807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.064639Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.085219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.110055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.147004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.160677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.280294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994826254670:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.280325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.316982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.324488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.431404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.439330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.450391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.507777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.520713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994826255198:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.520744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.520768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653994826255203:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.521400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.525547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653994826255205:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:08.675707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.685090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.742382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.954742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:08.972533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.004404Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T08:57:09.015699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2024-11-21T08:57:09.032978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7855, MsgBus: 15554 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d0/r3tmp/tmp8UGTpj/pdisk_1.dat 2024-11-21T08:57:09.218328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:09.220533Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7855, node 2 2024-11-21T08:57:09.228721Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.228733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.228735Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.228770Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15554 TClient is connected to server localhost:15554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.309964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.309989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.310203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.310876Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11 ... 08:57:13.461802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710784:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.475018Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037941 not found 2024-11-21T08:57:13.518370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710786:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.525546Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037942 not found 2024-11-21T08:57:13.625949Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710787:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.636608Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037943 not found 2024-11-21T08:57:13.636626Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037944 not found Trying to start YDB, gRPC: 22495, MsgBus: 19933 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d0/r3tmp/tmpkdtRFJ/pdisk_1.dat 2024-11-21T08:57:14.064851Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:14.067877Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22495, node 5 2024-11-21T08:57:14.080372Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T08:57:14.080390Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:14.080393Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:14.080434Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19933 TClient is connected to server localhost:19933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:14.143311Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:14.143348Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:14.143581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:14.144373Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:14.146274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:14.154729Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:14.173604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:14.183439Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:14.351934Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654023468779456:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.351972Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.357788Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.366811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.379084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.393939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.406948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.421800Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.437480Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654023468779957:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.437516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.437532Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654023468779962:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:14.438131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:14.440395Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654023468779964:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:15.066556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:15.130399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.206039Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:15.290531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.378489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.454504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.760282Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654027763748432:2670], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:15.760315Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:15.899527Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654027763748624:2732], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:15.899559Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:15.998975Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654027763748784:2786], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:15.999015Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:16.152547Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654032058716288:2859], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T08:57:16.152591Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } >> KqpScheme::CreateExternalDataSource [GOOD] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod >> TSchemeShardMoveTest::TwoTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumnAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 6188, MsgBus: 7698 2024-11-21T08:57:08.251080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653995073919642:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:08.251168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040b8/r3tmp/tmp57bang/pdisk_1.dat 2024-11-21T08:57:08.302882Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6188, node 1 2024-11-21T08:57:08.326249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.326260Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.326262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.326307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.350409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.350428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.354307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7698 TClient is connected to server localhost:7698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.484261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.486422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:08.661773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995073920096:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.661804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.692004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.700115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:08.700169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:08.700279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:08.700304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:08.700329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:08.700347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:08.700367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:08.700390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:08.700416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:08.700437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:08.700462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:08.700483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995073920172:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:08.700984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:08.700998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:08.701028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:08.701036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:08.701051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:08.701054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:08.701085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:08.701096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:08.701123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:08.701130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:08.701135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:08.701143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:08.701251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:08.701285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:08.701305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:08.701313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:08.701323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:08.701331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:08.701347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:08.701351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:08.701361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-2 ... EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.990344Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.990374Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.991153Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:10.997871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:11.001891Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, int_column Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:11.227498Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010572734560:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.227519Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.277627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.289166Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:11.289195Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:11.289240Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:11.289258Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:11.289276Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:11.289293Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:11.289313Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:11.289331Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:11.289354Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:11.289370Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:11.289392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:11.289410Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439654010572734617:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:11.291386Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:11.291400Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:11.291413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:11.291418Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:11.291441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:11.291445Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:11.291455Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:11.291461Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:11.291472Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:11.291476Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:11.291482Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:11.291488Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:11.291540Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:11.291547Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:11.291564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:11.291570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:11.291581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:11.291586Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:11.291605Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:11.291609Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:11.291622Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:11.291626Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:11.292701Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:57:11.300267Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1392;columns=2; 2024-11-21T08:57:11.328258Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654010572734709:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.328316Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:11.329158Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:11.340476Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KqpScheme::CreateExternalTableValidation [GOOD] >> KqpScheme::CreateExternalTableWithSettings |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:16.590147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:16.590170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:16.590173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:16.590177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:16.590182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:16.590185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:16.590191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:16.590285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:16.609467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:16.609505Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:16.612362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:16.613077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:16.613106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:16.614983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:16.615267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:16.615374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:16.615450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:16.616662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:16.616952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:16.616964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:16.617002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:16.617009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:16.617015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:16.617028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.618088Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:57:16.632377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:16.632447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.632500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:16.632537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:16.632541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.633285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:16.633309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:16.633348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.633356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:16.633359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:16.633363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:16.633809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.633820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at 
schemeshard: 72057594046678944 2024-11-21T08:57:16.633824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:16.634139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.634147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.634152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:16.634156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:16.634515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:16.634798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:16.634840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:16.634974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:16.634991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:16.634997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:16.635037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:16.635041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:16.635060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:16.635070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:16.635404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:16.635410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:16.635441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:16.635445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:16.635497Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:16.635502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:16.635509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:16.635512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:16.635516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:16.635519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:16.635522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:16.635524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:16.635531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:16.635535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:16.635538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:16.635741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:16.635750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:16.635753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:16.635757Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:57:16.635760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:16.635772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:57:16.783399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T08:57:16.783401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T08:57:16.783407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:57:16.783411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:57:16.783466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:57:16.783471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:57:16.783483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T08:57:16.783489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:57:16.783497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:57:16.784048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:57:16.784065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:492:2456] TestWaitNotification: OK eventTxId 103 2024-11-21T08:57:16.784167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784198Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 35us result status StatusPathDoesNotExist 2024-11-21T08:57:16.784253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784333Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 32us result status StatusSuccess 2024-11-21T08:57:16.784401Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784490Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784503Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 14us result status StatusPathDoesNotExist 2024-11-21T08:57:16.784516Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784564Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 15us result status StatusSuccess 2024-11-21T08:57:16.784596Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:16.784639Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 2024-11-21T08:57:16.784676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TSchemeShardMoveTest::ResetCachedPath |90.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |90.9%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 1058, MsgBus: 3550 2024-11-21T08:57:15.193269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654027686640418:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:15.193326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c8/r3tmp/tmpbZ6hQe/pdisk_1.dat 2024-11-21T08:57:15.276499Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1058, node 1 2024-11-21T08:57:15.294262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.294297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.294748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:15.294751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:15.294753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:15.294796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2024-11-21T08:57:15.295460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3550 TClient is connected to server localhost:3550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:15.368071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.374309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:15.390279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:57:15.551075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027686640929:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.551112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.588138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.612046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.620987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.680568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.694648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027686641235:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.694691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.694709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027686641240:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.695758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T08:57:15.701780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654027686641242:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } Trying to start YDB, gRPC: 25604, MsgBus: 29602 2024-11-21T08:57:16.060732Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654029934345782:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:16.060762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c8/r3tmp/tmptsnncf/pdisk_1.dat 2024-11-21T08:57:16.071052Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25604, node 2 2024-11-21T08:57:16.077892Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:16.077902Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:16.077903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:16.077930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29602 TClient is connected to server localhost:29602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:16.161115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:16.161146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:16.162237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:16.162992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:16.164019Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:16.384812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654029934346421:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.384841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.387336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.405624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.415504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.423006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.440144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654029934346724:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.440169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.440185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654029934346729:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.440857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T08:57:16.444781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654029934346731:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking }
: Info: Success, code: 4 2024-11-21T08:57:16.571792Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 27010, MsgBus: 21163 2024-11-21T08:57:16.759756Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439654030576276041:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:16.759775Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c8/r3tmp/tmpr1pgqC/pdisk_1.dat 2024-11-21T08:57:16.770617Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27010, node 3 2024-11-21T08:57:16.779501Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:16.779515Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:16.779518Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:16.779564Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21163 TClient is connected to server localhost:21163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:16.859994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:16.860031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:16.861026Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:16.862228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:16.868140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod [GOOD] >> KqpScheme::CreateExternalDataSourceValidationLocation >> TSchemeShardMoveTest::MoveTableForBackup >> KqpScheme::CreateExternalTableWithSettings [GOOD] >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> DataShardTxOrder::ZigZag [GOOD] >> KqpOlap::PredicatePushdownNulls ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:17.448765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:17.448792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:17.448797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:17.448802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:17.448808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:17.448812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:17.448820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:17.448969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:17.459856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:17.459880Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:17.467376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:17.468269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:17.468324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:17.469915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:17.470109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:17.470219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:17.470293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T08:57:17.471187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:17.471485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:17.471497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:17.471543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:17.471552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:17.471558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:17.471573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.473030Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:57:17.492601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:17.492695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.492772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:17.492817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:17.492826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.494125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:17.494153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:17.494209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.494220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:17.494225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:17.494230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:17.494714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.494726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:17.494731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 3 -> 128 2024-11-21T08:57:17.495096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.495107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.495113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:17.495121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:17.495754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:17.496127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:17.496182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:17.496405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:17.496429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:17.496436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:17.496496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:17.496502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:17.496534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:17.496546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:17.497002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:17.497011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:17.497056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:17.497062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:17.497143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T08:57:17.497149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:17.497161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:17.497165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:17.497171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:17.497176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:17.497181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:17.497185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:17.497196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:17.497203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:17.497207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:17.497534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:17.497551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:17.497556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:17.497562Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:57:17.497566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:17.497579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 63 } } 2024-11-21T08:57:17.775263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T08:57:17.775284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 63 } } 2024-11-21T08:57:17.775294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 63 } } 2024-11-21T08:57:17.775297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:57:17.775326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.775330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2024-11-21T08:57:17.775873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.775915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.775922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:17.775932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T08:57:17.775962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:17.776637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T08:57:17.776677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2024-11-21T08:57:17.776757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:57:17.776772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:17.776777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T08:57:17.776845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2024-11-21T08:57:17.776867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T08:57:17.778022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:57:17.778074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T08:57:17.778179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T08:57:17.778466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:57:17.778478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T08:57:17.778481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T08:57:17.778484Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2024-11-21T08:57:17.778491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:57:17.778506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T08:57:17.778638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 218 } } 2024-11-21T08:57:17.778644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T08:57:17.778658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 218 } } 2024-11-21T08:57:17.778669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 218 } } 2024-11-21T08:57:17.778791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T08:57:17.778795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T08:57:17.778803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T08:57:17.778807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:57:17.778811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T08:57:17.778818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778821Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:57:17.778827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2024-11-21T08:57:17.779386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T08:57:17.779454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.779472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.779512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T08:57:17.779517Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T08:57:17.779525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T08:57:17.779528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:57:17.779532Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2024-11-21T08:57:17.779541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 105 2024-11-21T08:57:17.779544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T08:57:17.779548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T08:57:17.779551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T08:57:17.779566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:57:17.779891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T08:57:17.779899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:815:2739] TestWaitNotification: OK eventTxId 105 >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> KqpOlap::PredicatePushdown_LikePushedDownForStringType >> KqpOlapIndexes::IndexesModificationError >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalTableWithSettings [GOOD] Test command err: Trying to start YDB, gRPC: 23313, MsgBus: 31805 2024-11-21T08:57:12.120486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654014803851634:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:12.120506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004099/r3tmp/tmpSXoc8L/pdisk_1.dat 2024-11-21T08:57:12.181608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23313, node 1 2024-11-21T08:57:12.221087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:12.221116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:12.222262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:12.229326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:12.229340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:12.229342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:12.229379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31805 TClient is connected to server localhost:31805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:12.302883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.306172Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:12.321408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.393786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:12.424396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:12.443484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.490470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654014803853165:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.490497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.531666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.538159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.551680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.609802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.622798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.684035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:12.702502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654014803853685:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.702528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.702647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654014803853690:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:12.703389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:12.714167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654014803853692:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:13.122160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:13.303373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.376187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:13.459882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.567358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.648816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:13.711421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:13.721769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:14.059724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14795, MsgBus: 15964 2024-11-21T08:57:14.228170Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654022007276176:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:14.228424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004099/r3tmp/tmp0m675x/pdisk_1.dat 2024-11-21T08:57:14.246279Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14795, node 2 2024-11-21T08:57:14.254180Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:14.254193Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:14.254195Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:14.254237Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15964 TClient is connected to server localhost:15964 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:57:14.328760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:14.328786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:14.331118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDes ... t schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:16.152238Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:16.168609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:16.192364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:16.209836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.346536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654028806192987:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.346583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.352489Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.362760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.378085Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.396863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.405221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.419354Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.431064Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654028806193501:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.431078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654028806193506:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.431088Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.431754Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:16.435565Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654028806193508:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 4597, MsgBus: 21527 2024-11-21T08:57:17.198380Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654033928320462:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004099/r3tmp/tmp3m8yyS/pdisk_1.dat 2024-11-21T08:57:17.207335Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:17.211993Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4597, node 5 2024-11-21T08:57:17.222264Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:17.222278Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:17.222280Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:17.222322Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21527 TClient is connected to server localhost:21527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:17.304954Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:17.304990Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:17.305276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.306061Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:17.315717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.324809Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:17.342316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.352285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.529782Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654033928321857:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.529837Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.535533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.544078Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.556734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.570725Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.584643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.591681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.650811Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654033928322359:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.650837Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.650844Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654033928322364:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.651646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:17.660611Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654033928322366:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:17.832679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.834566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 2939, MsgBus: 3649 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ca/r3tmp/tmpENR1C8/pdisk_1.dat 2024-11-21T08:57:14.916226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654023088816260:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:14.974793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:14.983682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2939, node 1 2024-11-21T08:57:14.994097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:14.994117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:14.994119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:14.994158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3649 TClient is connected to server localhost:3649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:15.054106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.054142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.055312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:15.055767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:15.087030Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T08:57:15.237936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027383784059:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.237968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.272524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.290796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.303357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.309014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.318415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027383784362:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.318454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654027383784367:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.318455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.319140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T08:57:15.322232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654027383784369:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } Trying to start YDB, gRPC: 64564, MsgBus: 9059 2024-11-21T08:57:15.663016Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654024678044820:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:15.663031Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ca/r3tmp/tmpywYI7l/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64564, node 2 2024-11-21T08:57:15.681147Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:15.683381Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:15.683390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:15.683392Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:15.683437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9059 TClient is connected to server localhost:9059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:15.763237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.763270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.764388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:15.765032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.771109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.784573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:15.803070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.814434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:16.015713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654028973013669:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.015754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.018509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.024377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.037298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.044657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.058507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.065985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.073396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654028973014159:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.073421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.073421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654028973014164:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:16.073984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:16.078212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654028973014166:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:16.664655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:16.727337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.775128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:16.829695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.893581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.953070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.005801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:17.017822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.273536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16860, MsgBus: 12111 2024-11-21T08:57:17.617432Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439654033068792749:2139];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ca/r3tmp/tmpwVnKX4/pdisk_1.dat 2024-11-21T08:57:17.621823Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:17.630597Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16860, node 3 2024-11-21T08:57:17.636705Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:17.636717Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:17.636719Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:17.636771Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12111 TClient is connected to server localhost:12111 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:17.717638Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:17.717680Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:17.718691Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:17.719926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.721662Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:17.728195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.746385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.768349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.777178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.927970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654033068794214:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.928013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.934869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.946344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.959835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.974444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.987585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.997381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.006430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654037363762021:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.006456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.006462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654037363762026:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.007076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:18.010041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654037363762028:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> BsControllerConfig::SelectAllGroups [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2024-11-21T08:57:14.154100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:14.154117Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:14.154133Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:14.156870Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:14.156973Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T08:57:14.157038Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:14.157716Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:14.165277Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:14.165374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T08:57:14.165509Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T08:57:14.165523Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T08:57:14.165529Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T08:57:14.165564Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T08:57:14.168077Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T08:57:14.168129Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T08:57:14.168158Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T08:57:14.168161Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T08:57:14.168165Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T08:57:14.168168Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:14.168258Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:14.168266Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:14.168290Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T08:57:14.168309Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T08:57:14.168345Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:14.168352Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:14.168358Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T08:57:14.168363Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:57:14.168366Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:57:14.168371Z node 1 
:TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T08:57:14.168376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:14.174845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:14.174863Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:14.174870Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T08:57:14.175252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T08:57:14.175270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T08:57:14.175292Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T08:57:14.175321Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T08:57:14.175331Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T08:57:14.175339Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T08:57:14.175346Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:57:14.175350Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T08:57:14.175355Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T08:57:14.175359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:57:14.175419Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T08:57:14.175423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T08:57:14.175426Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T08:57:14.175429Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T08:57:14.175438Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T08:57:14.175441Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T08:57:14.175445Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T08:57:14.175448Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T08:57:14.175453Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T08:57:14.196466Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T08:57:14.196491Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T08:57:14.196498Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit 
FinishPropose 2024-11-21T08:57:14.196509Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T08:57:14.196522Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T08:57:14.196628Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:14.196635Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:14.196643Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T08:57:14.196661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T08:57:14.196665Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T08:57:14.196726Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T08:57:14.196736Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:57:14.196741Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T08:57:14.196745Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T08:57:14.197418Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T08:57:14.197431Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:14.197495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:14.197501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:14.197508Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:14.197515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:57:14.197518Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:14.197525Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T08:57:14.197529Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T08:57:14.197535Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:57:14.197540Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T08:57:14.197543Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:57:14.197547Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T08:57:14.197585Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T08:57:14.197588Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
2024-11-21T08:57:14.197591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:57:14.197594Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T08:57:14.197598Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T08:57:14.197607Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T08:57:14.197610Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T08:57:14.197614Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:57:14.197618Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:57:14.197629Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T08:57:14.197632Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T08:57:14.197636Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T08:57:14.197641Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T08:57:14.197644Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:57:14.197648Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... aitInRS 2024-11-21T08:57:18.001201Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:18.001203Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T08:57:18.001205Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T08:57:18.001207Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T08:57:18.001266Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T08:57:18.001272Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:57:18.001279Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:18.001282Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T08:57:18.001286Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T08:57:18.001289Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:57:18.001334Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T08:57:18.001338Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T08:57:18.001340Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T08:57:18.001342Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T08:57:18.001346Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T08:57:18.001348Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T08:57:18.001350Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T08:57:18.001353Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:18.001354Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T08:57:18.001356Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T08:57:18.001358Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T08:57:18.001393Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:231:2226], Recipient [2:231:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:18.001398Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T08:57:18.001402Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T08:57:18.001404Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T08:57:18.001407Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:18.001409Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2024-11-21T08:57:18.001411Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2024-11-21T08:57:18.001414Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001416Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2024-11-21T08:57:18.001418Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2024-11-21T08:57:18.001420Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2024-11-21T08:57:18.001509Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T08:57:18.001513Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001515Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2024-11-21T08:57:18.001517Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T08:57:18.001519Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T08:57:18.001521Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001523Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T08:57:18.001525Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T08:57:18.001528Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T08:57:18.001532Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2024-11-21T08:57:18.001534Z node 2 
:TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2024-11-21T08:57:18.001536Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2024-11-21T08:57:18.001539Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001541Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T08:57:18.001543Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T08:57:18.001545Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2024-11-21T08:57:18.001549Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001551Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T08:57:18.001553Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T08:57:18.001555Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2024-11-21T08:57:18.001557Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001559Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T08:57:18.001563Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T08:57:18.001565Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2024-11-21T08:57:18.001567Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001569Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T08:57:18.001571Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T08:57:18.001573Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2024-11-21T08:57:18.001575Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001577Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T08:57:18.001579Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T08:57:18.001581Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T08:57:18.001614Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T08:57:18.001618Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T08:57:18.001622Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001624Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T08:57:18.001626Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T08:57:18.001629Z 
node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:57:18.001657Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T08:57:18.001659Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T08:57:18.001661Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T08:57:18.001663Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T08:57:18.001665Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T08:57:18.001667Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T08:57:18.001669Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T08:57:18.001671Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T08:57:18.001673Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T08:57:18.001675Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T08:57:18.001677Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T08:57:18.013074Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T08:57:18.013100Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T08:57:18.013113Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T08:57:18.013120Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T08:57:18.013141Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:57:18.013150Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T08:57:18.013246Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T08:57:18.013250Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T08:57:18.013255Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T08:57:18.013258Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T08:57:18.013264Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T08:57:18.013268Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> KqpDecimalColumnShard::TestJoinByDecimal >> TxOrderInternals::OperationOrder [GOOD] >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] |90.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> KqpOlap::PredicatePushdownNulls [GOOD] |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |90.9%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |90.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2024-11-21T08:57:16.991010Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.013137Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.039801Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.039946Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.040087Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040094Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.092821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.092869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.109046Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.109110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109127Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] >> KqpOlapIndexes::IndexesModificationError [GOOD] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownNulls [GOOD] Test command err: Trying to start YDB, gRPC: 4442, MsgBus: 10035 2024-11-21T08:57:18.466132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654037745275387:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:18.466847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/0048ea/r3tmp/tmpI6DK9m/pdisk_1.dat 2024-11-21T08:57:18.585532Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:18.588167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:18.588191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:18.589025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4442, node 1 2024-11-21T08:57:18.597979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:18.597993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:18.597995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:18.598035Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10035 TClient is connected to server localhost:10035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:18.644139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:18.653173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:18.664005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.664075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.664124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:18.664147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:18.664169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:18.664182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:18.664204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:18.664237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:18.664256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:18.664274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.664301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:18.664323Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654037745276025:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.664854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.664872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.664884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.664889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.664906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.664917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.664926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.664933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.664941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.664949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.664955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:18.664963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:18.665034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:18.665044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:18.665064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:18.665072Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.665083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:18.665091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:18.665108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:18.665116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.665126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.665134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:18.668055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654037745276026:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.668075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654037745276026:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.668119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654037745276026:2289];tablet_id=720751862240 ... 
8.672735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.672746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.672754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:18.675502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.675531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.675564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:18.675583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:18.675602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:18.675621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:18.675640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:18.675659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:18.675677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:18.675695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.675720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2024-11-21T08:57:18.675739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654037745276043:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.676339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.676354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.676364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.676368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.676383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.676392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.676400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.676407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.676415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.676423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.676429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:18.676437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:18.676475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:18.676485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:18.676501Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:18.676510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.676521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:18.676529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:18.676544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:18.676552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.676563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.676571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:18.710794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11224;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11224;columns=5; 2024-11-21T08:57:18.827684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654037745276335:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.827709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.827859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654037745276362:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.828791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:18.831837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654037745276364:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:18.964617Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T08:57:18.964648Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654037745276450:2384] TxId: 281474976715663. Ctx: { TraceId: 01jd6z1b4c5q0p3xvpyn07093q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U2ZDFjNTItMmM3MTdmMjItOTE4MzZiYWEtNGNhNzQ4OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T08:57:18.964765Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439654037745276507:2449], TxId: 281474976715663, task: 53. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2U2ZDFjNTItMmM3MTdmMjItOTE4MzZiYWEtNGNhNzQ4OGQ=. CustomerSuppliedId : . TraceId : 01jd6z1b4c5q0p3xvpyn07093q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439654037745276450:2384], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T08:57:18.965753Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2U2ZDFjNTItMmM3MTdmMjItOTE4MzZiYWEtNGNhNzQ4OGQ=, ActorId: [1:7439654037745276333:2384], ActorState: ExecuteState, TraceId: 01jd6z1b4c5q0p3xvpyn07093q, Create QueryResponse for error on request, msg: 2024-11-21T08:57:18.966318Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179438885, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] Test command err: Trying to start YDB, gRPC: 17253, MsgBus: 23230 2024-11-21T08:57:07.917233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653993736482190:2142];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.917316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d4/r3tmp/tmpedjYuc/pdisk_1.dat 2024-11-21T08:57:07.997654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17253, node 1 2024-11-21T08:57:08.014813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:08.014825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:08.014836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:08.014868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:08.019279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.019297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.023244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23230 TClient is connected to server localhost:23230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:08.081120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:08.083470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:08.090243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.120906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.141211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:08.161367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.324786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998031450930:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.324815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.466220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.471980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.484391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.490833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.502364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.512059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.573477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998031451451:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.573520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.573568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653998031451456:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.574240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.581679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653998031451458:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.808171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T08:57:08.815017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64170, MsgBus: 14641 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d4/r3tmp/tmpr3QPZ0/pdisk_1.dat 2024-11-21T08:57:09.011512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:09.011715Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64170, node 2 2024-11-21T08:57:09.023036Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.023049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.023050Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.023096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14641 TClient is connected to server localhost:14641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.104535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.104565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.105279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.105559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:09.108099Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.110572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.127708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.149386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.160503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.345835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654001012428761:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.345881Z node 2 :K ... 97 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:17.024134Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:17.025201Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:17.025866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.032175Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.040825Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.059185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.068684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:17.221727Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654034656951015:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.221755Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.226964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.236648Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.293903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.304438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.318642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.325132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.333010Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654034656951529:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.333031Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.333050Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654034656951534:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.333574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:17.338741Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654034656951536:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 16101, MsgBus: 29256 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040d4/r3tmp/tmpequwl0/pdisk_1.dat 2024-11-21T08:57:18.156312Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:18.184805Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16101, node 5 2024-11-21T08:57:18.198908Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:18.198922Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:18.198925Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:18.198971Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29256 2024-11-21T08:57:18.260590Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:18.260619Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:18.264528Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:18.290742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:18.296487Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:18.309031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:18.321033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:18.340833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:18.369838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:18.553423Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654040639202063:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.553450Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.578702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.589828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.599662Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.606572Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.623750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.634543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.646356Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654040639202567:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.646389Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.646486Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654040639202572:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:18.647268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:18.654656Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654040639202574:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:18.382495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:18.382528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:18.382533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:18.382538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:18.382546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:18.382550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:18.382559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:18.382675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:18.395223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:18.395253Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:18.399125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:18.400061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:18.400103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:18.402844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:18.403077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:18.403208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:18.403311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:18.404429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:18.404753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:18.404765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:18.404807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:18.404814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T08:57:18.404821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:18.404838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.406302Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:57:18.425310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:18.425408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.425483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:18.425549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:18.425559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.426612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:18.426640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:18.426693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.426702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:18.426707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:18.426712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:18.427186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.427198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:18.427202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:18.427567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.427576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.427582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:18.427589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:18.428243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:18.428645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:18.428704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:18.428888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:18.428913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:18.428920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:18.428978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:18.428984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:18.429018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:18.429030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:18.429474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:18.429483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:18.429540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:18.429546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:18.429622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:18.429628Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:18.429639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:18.429643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:18.429648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:18.429653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:18.429658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T08:57:18.429662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:18.429672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:18.429678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:18.429682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:18.429976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:18.429992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:18.429998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:18.430003Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:57:18.430007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:18.430020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... equenceShard::TEvDropSequenceResult> execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 102 TxPartId: 1 2024-11-21T08:57:19.093961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2024-11-21T08:57:19.094008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:57:19.094032Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2024-11-21T08:57:19.094801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2024-11-21T08:57:19.094849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2024-11-21T08:57:19.094857Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:19.094864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:57:19.094876Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2024-11-21T08:57:19.094880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2024-11-21T08:57:19.094887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2024-11-21T08:57:19.094902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:380:2343] message: TxId: 102 2024-11-21T08:57:19.094908Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2024-11-21T08:57:19.094915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:57:19.094920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:57:19.094971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:57:19.094976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:57:19.094980Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T08:57:19.094983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T08:57:19.094991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:57:19.094995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:57:19.095102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:57:19.095108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:57:19.095123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:57:19.095129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:57:19.095135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:57:19.095755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:57:19.095768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:465:2421] 2024-11-21T08:57:19.095841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T08:57:19.096500Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096554Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 63us result status StatusPathDoesNotExist 2024-11-21T08:57:19.096593Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" 
LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096641Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096650Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 9us result status StatusPathDoesNotExist 2024-11-21T08:57:19.096659Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096684Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096729Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 47us result status StatusSuccess 2024-11-21T08:57:19.096808Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 
InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096849Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:19.096860Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 11us result status StatusSuccess 2024-11-21T08:57:19.096883Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] Test command err: Trying to start YDB, gRPC: 15278, MsgBus: 4095 2024-11-21T08:57:18.714152Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654039026830569:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:18.714321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d8/r3tmp/tmpZ2tr8a/pdisk_1.dat 2024-11-21T08:57:18.766317Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15278, node 1 2024-11-21T08:57:18.778134Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:18.778149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:18.778152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:18.778194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4095 TClient is connected to server localhost:4095 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:18.815518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:18.815544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:57:18.816578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:18.823817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:18.845231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:18.854513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.854597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.854646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:18.854674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:18.854698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:18.854727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:18.854742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:18.854760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:18.854778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:18.854801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.854824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:18.854851Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654039026831209:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.855445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.855453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.855465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.855469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.855490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.855498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.855509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.855516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.855526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.855530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.855539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:18.855543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:18.855608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:18.855614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:18.855629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:18.855634Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.855645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:18.855650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:18.855667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:18.855671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.855682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.855685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:18.859026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654039026831214:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.859049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654039026831214:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.859078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654039026831214:2290];tablet_id=7207518622403 ... 
rmalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.866732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654039026831233:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:18.866753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654039026831233:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.867381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.867399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.867411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.867414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.867426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.867428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.867434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.867437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.867444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.867446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.867450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:18.867453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:18.867502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:18.867513Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:18.867525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:18.867528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.867536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:18.867544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:18.867561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:18.867571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.867583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.867594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:18.867655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.867666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.867671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.867675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.867684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.867686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.867691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.867694Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.867698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.867701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.867704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:18.867707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:18.867722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:18.867731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:18.867739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:18.867747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.867754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:18.867757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:18.867770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:18.867772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:18.867783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:18.867785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T08:57:19.025904Z node 1 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043321798801:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.025924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.026020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043321798813:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.026763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:19.028953Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:57:19.029035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654043321798815:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesModificationError [GOOD] Test command err: Trying to start YDB, gRPC: 7567, MsgBus: 15882 2024-11-21T08:57:18.842309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654038955993639:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:18.842395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d5/r3tmp/tmpv021tL/pdisk_1.dat 2024-11-21T08:57:18.897717Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7567, node 1 2024-11-21T08:57:18.907874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:18.907888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:18.907890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:18.907939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15882 TClient is connected to server localhost:15882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:18.942118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:18.942149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:18.943315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:18.973563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:18.982885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.994144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.994199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.994252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:18.994269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:18.994285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:18.994299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:18.994318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:18.994338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:18.994358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:18.994373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.994389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:18.994406Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654038955994145:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.997615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:18.997642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:18.997680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:18.997695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:18.997716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:18.997736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:18.997756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:18.997777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:18.997799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:18.997824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:18.997849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:18.997869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654038955994147:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:18.998345Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:18.998363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:18.998380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:18.998391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:18.998409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:18.998417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:18.998426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:18.998435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:18.998444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:18.998452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:18.998457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890 ... 
11-21T08:57:19.005157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:19.005163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:19.005166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:19.005169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:19.005173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:19.005192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:19.005195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:19.005206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:19.005209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:19.005217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:19.005220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:19.005231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:19.005234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:19.005241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:19.005243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:19.006002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2024-11-21T08:57:19.006010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:19.006019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:19.006022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:19.006036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:19.006040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:19.006048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:19.006051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:19.006058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:19.006061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:19.006066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:19.006069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:19.006101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:19.006105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:19.006118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:19.006121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:19.006129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:19.006132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:19.006146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:19.006149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:19.006157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:19.006160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:19.050131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:19.184426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043250961734:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.189449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.190037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.199156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043250961788:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.199175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.223828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043250961804:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.223858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.229974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043250961812:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.229999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.230827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.250874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654043250961859:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.250879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.250919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] >> KqpOlap::SimpleQueryOlap >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2024-11-21T08:56:27.134823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653821841359632:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:27.134841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpxoqDTj/pdisk_1.dat 2024-11-21T08:56:27.176791Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:27.179950Z node 1 :HTTP ERROR: (#30,[::1]:24309) connection closed with error: Connection refused 2024-11-21T08:56:27.180049Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:27.236068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:27.236101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:27.237221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:29.379968Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653828057013754:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:29.380103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmprUKlDw/pdisk_1.dat 2024-11-21T08:56:29.388399Z node 2 :HTTP ERROR: (#32,[::1]:3553) connection closed with error: Connection refused 2024-11-21T08:56:29.388484Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:29.394266Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:29.480183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:29.480238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:29.481281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:31.634547Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439653837425439818:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:31.634564Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpMSzPvL/pdisk_1.dat 2024-11-21T08:56:31.641685Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:31.645212Z node 3 :HTTP ERROR: (#30,[::1]:4356) connection closed with error: 
Connection refused 2024-11-21T08:56:31.645283Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:31.735038Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:31.735070Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:31.736118Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:33.937683Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439653846997266579:2194];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:33.945495Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpYJTS32/pdisk_1.dat 2024-11-21T08:56:33.953437Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:33.957281Z node 4 :HTTP ERROR: (#32,[::1]:27636) connection closed with error: Connection refused 2024-11-21T08:56:33.957321Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:34.040231Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:34.040268Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:34.041581Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:36.227364Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439653859458464147:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:36.227621Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpDyrGfE/pdisk_1.dat 2024-11-21T08:56:36.238304Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:36.242373Z node 5 :HTTP ERROR: (#34,[::1]:13327) connection closed with error: Connection refused 2024-11-21T08:56:36.242458Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:36.328189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:36.328242Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:36.329403Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:38.497718Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439653867740572517:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:38.497740Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpj4qoeH/pdisk_1.dat 2024-11-21T08:56:38.505950Z node 6 :HTTP 
ERROR: (#33,[::1]:29658) connection closed with error: Connection refused 2024-11-21T08:56:38.506032Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:38.514079Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:38.597866Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:38.597893Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:38.598891Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:40.769304Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439653874379758245:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:40.769330Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmp22OH6Q/pdisk_1.dat 2024-11-21T08:56:40.780966Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:40.783643Z node 7 :HTTP ERROR: (#38,[::1]:2799) connection closed with error: Connection refused 2024-11-21T08:56:40.783772Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:40.869585Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:40.869614Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:40.870749Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:43.057709Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439653890592111431:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:43.057732Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpYgcDxG/pdisk_1.dat 2024-11-21T08:56:43.068974Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:43.075307Z node 8 :HTTP ERROR: (#40,[::1]:21908) connection closed with error: Connection refused 2024-11-21T08:56:43.075424Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:43.158233Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:43.158264Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:43.159329Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:45.322456Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439653896245045443:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:45.322634Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpEs6Fgb/pdisk_1.dat 2024-11-21T08:56:45.332200Z node 9 :HTTP ERROR: (#42,[::1]:3126) connection closed with error: Connection refused 2024-11-21T08:56:45.332353Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:45.336550Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:45.422803Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:45.422844Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:45.423929Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:47.580772Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439653907104664587:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:56:47.581065Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpU1ojYw/pdisk_1.dat 2024-11-21T08:56:47.587935Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:47.590552Z node 10 :HTTP ERROR: (#30,[::1]:5199) connection closed with error: Connection refused 2024-11-21T08:56:47.590629Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:47.681253Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: ... eme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpwxBxnt/pdisk_1.dat 2024-11-21T08:56:56.666897Z node 14 :HTTP ERROR: (#38,[::1]:14687) connection closed with error: Connection refused 2024-11-21T08:56:56.667182Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:56.672074Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:56.758523Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:56.758557Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:56.759654Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:58.967673Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439653953483476958:2057];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpiVD89r/pdisk_1.dat 2024-11-21T08:56:58.970394Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:56:58.977156Z node 15 :HTTP ERROR: (#40,[::1]:16669) connection closed with error: Connection refused 2024-11-21T08:56:58.977218Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:56:58.981110Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:59.070128Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2024-11-21T08:56:59.070153Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:59.071207Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:01.202713Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439653964190782092:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:01.202740Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpWJwloA/pdisk_1.dat 2024-11-21T08:57:01.209615Z node 16 :HTTP ERROR: (#42,[::1]:22195) connection closed with error: Connection refused 2024-11-21T08:57:01.210642Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:01.216543Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:01.302964Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:01.302988Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:01.304023Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:03.452135Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7439653976103504247:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:03.452278Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpNPK3mL/pdisk_1.dat 2024-11-21T08:57:03.459939Z node 17 :HTTP ERROR: (#30,[::1]:16564) connection closed with error: Connection refused 2024-11-21T08:57:03.460000Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:03.464821Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:03.552688Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:03.552710Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:03.553781Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:05.709051Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7439653985084766046:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:05.709216Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpxNbVOC/pdisk_1.dat 2024-11-21T08:57:05.717681Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:05.721236Z node 18 :HTTP ERROR: (#32,[::1]:22480) connection closed with error: Connection refused 2024-11-21T08:57:05.721368Z node 18 :CMS_CONFIGS ERROR: 
NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:05.809182Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:05.809220Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:05.810311Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:08.010236Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7439653997061159402:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmp2EoP44/pdisk_1.dat 2024-11-21T08:57:08.012538Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:08.021085Z node 19 :HTTP ERROR: (#34,[::1]:64838) connection closed with error: Connection refused 2024-11-21T08:57:08.021201Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:08.024455Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:08.109626Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:08.109681Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:08.110944Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:10.282493Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7439654003245201193:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpnDHkcH/pdisk_1.dat 2024-11-21T08:57:10.293744Z node 20 :HTTP ERROR: (#36,[::1]:25312) connection closed with error: Connection refused 2024-11-21T08:57:10.293887Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:10.295739Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:10.303707Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:10.382357Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.382405Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.384184Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:12.583768Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7439654013165106962:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:12.583802Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpwIKGEg/pdisk_1.dat 2024-11-21T08:57:12.604613Z node 21 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T08:57:12.608045Z node 21 :HTTP ERROR: (#38,[::1]:26160) connection closed with error: Connection refused 2024-11-21T08:57:12.608162Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:12.681902Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:12.681927Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:12.682775Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:14.882157Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439654022512625589:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:14.884971Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpognWGG/pdisk_1.dat 2024-11-21T08:57:14.895012Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:14.899031Z node 22 :HTTP ERROR: (#40,[::1]:12528) connection closed with error: Connection refused 2024-11-21T08:57:14.899152Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:14.984821Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:14.984849Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:14.985738Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:17.179053Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7439654035468215322:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00311b/r3tmp/tmpolAeBi/pdisk_1.dat 2024-11-21T08:57:17.183284Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:17.189433Z node 23 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:17.194242Z node 23 :HTTP ERROR: (#39,[::1]:1451) connection closed with error: Connection refused 2024-11-21T08:57:17.195057Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T08:57:17.279458Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:17.279497Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:17.280538Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2024-11-21T08:54:07.909124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:54:07.909165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:54:07.909174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a32/r3tmp/tmpSzJXST/pdisk_1.dat 2024-11-21T08:54:07.979399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24641, node 1 2024-11-21T08:54:08.069800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:54:08.069816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:54:08.069820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:54:08.069894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:54:08.075306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:54:08.149728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:08.149752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:08.160928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6670 2024-11-21T08:54:08.554492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:54:09.295638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.295662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.328279Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:54:09.329186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.382019Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:54:09.390538Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:54:09.390559Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:54:09.396784Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:54:09.396924Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:54:09.396944Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:54:09.396949Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:54:09.396955Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:54:09.396960Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:54:09.396965Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:54:09.396971Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:54:09.397073Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:54:09.569309Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.569327Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:54:09.570210Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:54:09.571602Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:54:09.571670Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:54:09.572071Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:54:09.574850Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:54:09.574859Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:54:09.574866Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:54:09.576088Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:54:09.576107Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:54:09.577238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:54:09.578424Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:54:09.578444Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:54:09.580370Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:54:09.591740Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:54:09.613166Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:54:09.718186Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:54:09.882364Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:54:10.585051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.585078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:54:10.587587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:54:10.607338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:54:10.607366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:54:10.607395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:54:10.607410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:54:10.607422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:54:10.607435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:54:10.607447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:54:10.607458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:54:10.607469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:54:10.607480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:54:10.607493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:54:10.607505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:54:10.611413Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:54:10.611428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:54:10.611440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:54:10.611444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:54:10.611455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:54:10.611459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:54:10.611466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... O `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:57:10.594305Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODQzNDM1NjQtZmQzYmVjMjktNzAxMzUzZjgtMjg5MjE0MDQ=, TxId: 2024-11-21T08:57:10.594336Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODQzNDM1NjQtZmQzYmVjMjktNzAxMzUzZjgtMjg5MjE0MDQ=, TxId: 2024-11-21T08:57:10.594492Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:57:10.617523Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:57:10.617552Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2672:3169] 2024-11-21T08:57:11.105188Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2024-11-21T08:57:11.105226Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:57:11.582579Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:57:11.582614Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2024-11-21T08:57:11.582620Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2024-11-21T08:57:11.582623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2024-11-21T08:57:11.582706Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2024-11-21T08:57:11.582712Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T08:57:12.569669Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:57:12.611464Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:57:13.681288Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:57:13.681325Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2024-11-21T08:57:13.681331Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2024-11-21T08:57:13.681336Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:57:14.648489Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:57:14.648595Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:57:14.676641Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:57:14.676721Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T08:57:14.676726Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:57:14.676989Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:57:14.695198Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:57:14.695358Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:57:14.695377Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:57:14.695534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:57:14.724926Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:57:14.725028Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2024-11-21T08:57:14.725250Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9868:6981], server id = [2:9869:6982], tablet id = 72075186224037899, status = OK 2024-11-21T08:57:14.725297Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9868:6981], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:57:14.725502Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:57:14.725514Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:57:14.725624Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:57:14.725667Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:57:14.725707Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9868:6981], server id = [2:9869:6982], tablet id = 72075186224037899 2024-11-21T08:57:14.725711Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:57:14.725764Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T08:57:14.726712Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:57:14.741841Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDhjNzg3MWYtNDVlYmY3OC03ZWZmM2RlMy1iZDhmOTMyYw==, TxId: 2024-11-21T08:57:14.741889Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDhjNzg3MWYtNDVlYmY3OC03ZWZmM2RlMy1iZDhmOTMyYw==, TxId: 2024-11-21T08:57:14.742065Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 2 2024-11-21T08:57:14.742092Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:57:14.764803Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:57:14.764837Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2672:3169] 2024-11-21T08:57:15.294477Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2024-11-21T08:57:15.294514Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T08:57:15.758104Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2024-11-21T08:57:15.758143Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T08:57:15.758184Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:57:15.758191Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2024-11-21T08:57:15.758196Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:57:16.717382Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:57:17.695101Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:57:17.727680Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T08:57:17.727718Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2024-11-21T08:57:17.727724Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T08:57:18.789478Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:57:18.789560Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T08:57:18.789566Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:57:18.789826Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T08:57:18.811512Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T08:57:18.811637Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T08:57:18.811652Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T08:57:18.811778Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T08:57:18.834641Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T08:57:18.834720Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2024-11-21T08:57:18.834933Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10049:7084], server id = [2:10050:7085], tablet id = 72075186224037899, status = OK 2024-11-21T08:57:18.834978Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:10049:7084], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T08:57:18.835172Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T08:57:18.835183Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T08:57:18.835240Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T08:57:18.835275Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T08:57:18.835349Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T08:57:18.835415Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:10049:7084], server id = [2:10050:7085], tablet id = 72075186224037899 2024-11-21T08:57:18.835420Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T08:57:18.836294Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T08:57:18.850525Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTFhODIxMzYtMTZmOGNhYTAtMjJkMGFiY2ItN2YzYTliMDY=, TxId: 2024-11-21T08:57:18.850550Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTFhODIxMzYtMTZmOGNhYTAtMjJkMGFiY2ItN2YzYTliMDY=, TxId: 2024-11-21T08:57:18.850692Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T08:57:18.872584Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T08:57:18.872608Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2672:3169] >> KqpDatetime64ColumnShard::UseDate32AsPrimaryKey >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> BsControllerConfig::PDiskCreate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] Test command err: Trying to start YDB, gRPC: 23611, MsgBus: 62904 2024-11-21T08:57:06.896798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653988999942748:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:06.896838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040df/r3tmp/tmps16FP7/pdisk_1.dat 2024-11-21T08:57:06.962729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23611, node 1 2024-11-21T08:57:06.984290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:06.984306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:06.984308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:06.984354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62904 2024-11-21T08:57:07.043002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.043035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.044330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:07.070810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.074644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.077811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:07.094787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.113297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.123563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.221805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993294911449:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.221830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.259563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.266565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.273331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.280484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.294534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.301814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:07.316974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993294911961:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.317004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.317035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653993294911966:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:07.317661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:07.321451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653993294911968:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:07.507658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26923, MsgBus: 63714 2024-11-21T08:57:07.812022Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653991134416450:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.812154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040df/r3tmp/tmpoW5CKC/pdisk_1.dat 2024-11-21T08:57:07.822629Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26923, node 2 2024-11-21T08:57:07.828970Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.828983Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.828984Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.829013Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63714 TClient is connected to server localhost:63714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:07.911989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.912021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.913072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:07.914341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.917586Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:07.927538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:07.935015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.951547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:07.960497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:08.108781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439653995429385275:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... >:2:37: Error: At function: Member
:2:37: Error: Member not found: Value3. Did you mean Value? 2024-11-21T08:57:14.688331Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWYyYjViYWUtNTk4NjIyNjEtODUxNzg0Yy01MjFlODg1ZQ==, ActorId: [4:2587:4034], ActorState: ExecuteState, TraceId: 01jd6z175w3x772v9f19p9358m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: GENERIC_ERROR:
: Error: Type annotation, code: 1030
:2:17: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:2:37: Error: At function: Member
:2:37: Error: Member not found: Value3. Did you mean Value? 2024-11-21T08:57:14.704143Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:3065:4421], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2024-11-21T08:57:14.704673Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWYyYjViYWUtNTk4NjIyNjEtODUxNzg0Yy01MjFlODg1ZQ==, ActorId: [4:2587:4034], ActorState: ExecuteState, TraceId: 01jd6z176cfpgmkzbmhkppjaka, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2024-11-21T08:57:14.731883Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:3074:4430], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2024-11-21T08:57:14.732372Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWYyYjViYWUtNTk4NjIyNjEtODUxNzg0Yy01MjFlODg1ZQ==, ActorId: [4:2587:4034], ActorState: ExecuteState, TraceId: 01jd6z17788tnyw9mw7qmh6cx2, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2024-11-21T08:57:14.749102Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:3083:4439], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2024-11-21T08:57:14.749505Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWYyYjViYWUtNTk4NjIyNjEtODUxNzg0Yy01MjFlODg1ZQ==, ActorId: [4:2587:4034], ActorState: ExecuteState, TraceId: 01jd6z177sc66wgk6d7cffkpmj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: GENERIC_ERROR:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2024-11-21T08:57:14.824294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20073, MsgBus: 11574 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040df/r3tmp/tmpMeWmIQ/pdisk_1.dat 2024-11-21T08:57:15.548372Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:15.571254Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20073, node 5 2024-11-21T08:57:15.587277Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:15.587291Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:15.587294Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:15.587345Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11574 2024-11-21T08:57:15.641595Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.641624Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.643892Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:15.655604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.656804Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:15.665751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.681865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:57:15.709066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.721394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.866357Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654026140082934:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.866401Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.868623Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.875591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.882959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.890202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.897211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.952485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.962068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654026140083447:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.962096Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654026140083452:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.962099Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.962698Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:15.966067Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654026140083454:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:16.322019Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.369532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.389503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::DeleteStoragePool >> BsControllerConfig::AddDriveSerialMassive >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 >> KqpOlapAggregations::Aggregation_Some_NullGroupBy >> KqpOlap::SimpleQueryOlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T08:57:16.991004Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.013096Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.039830Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.039982Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.040144Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040149Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.092822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.092870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.108841Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.108904Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.108922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.108930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:232:2066] recipient: [1:20:2067] 
2024-11-21T08:57:17.137295Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.137352Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.147723Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.147777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.147792Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.147801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.147833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.147840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.147845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.147853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.158439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.158503Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.158714Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.158721Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.158747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.161283Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } 
} Command { QueryBaseConfig { } } } 2024-11-21T08:57:17.161475Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2024-11-21T08:57:17.161483Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T08:57:17.161502Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T08:57:17.161507Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T08:57:17.161511Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T08:57:17.161514Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T08:57:17.161518Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T08:57:17.161522Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T08:57:17.161526Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T08:57:17.161531Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T08:57:17.161535Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T08:57:17.161540Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T08:57:17.161544Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2024-11-21T08:57:17.161548Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1001 Path# /dev/disk3 2024-11-21T08:57:17.161558Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1002 Path# /dev/disk1 2024-11-21T08:57:17.161562Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2024-11-21T08:57:17.161566Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T08:57:17.161570Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1001 Path# /dev/disk1 2024-11-21T08:57:17.161576Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2024-11-21T08:57:17.161580Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1002 Path# /dev/disk1 2024-11-21T08:57:17.161584Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1002 Path# /dev/disk2 2024-11-21T08:57:17.161588Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk3 2024-11-21T08:57:17.161592Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1002 Path# /dev/disk1 2024-11-21T08:57:17.161596Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2024-11-21T08:57:17.161600Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk2 2024-11-21T08:57:17.161603Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1001 Path# /dev/disk1 2024-11-21T08:57:17.161607Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1002 Path# /dev/disk1 2024-11-21T08:57:17.161611Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1002 Path# /dev/disk2 2024-11-21T08:57:17.161615Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1001 Path# /dev/disk3 2024-11-21T08:57:17.161619Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1002 Path# /dev/disk1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T08:57:18.935272Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:18.935443Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:18.935668Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:18.935748Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:18.935850Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.935856Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.935889Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:18.936768Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:18.936799Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:18.936825Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:18.936841Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.936851Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.936859Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:232:2066] recipient: [11:20:2067] 2024-11-21T08:57:18.947345Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:18.947398Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.957932Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.957989Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.958007Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.958019Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.958048Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.958057Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.958064Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.958072Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.968392Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.968470Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:18.968665Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:18.968672Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:18.968697Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:18.968878Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2024-11-21T08:57:18.969017Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T08:57:18.969024Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk1 2024-11-21T08:57:18.969028Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create 
new pdisk PDiskId# 18:1000 Path# /dev/disk2 2024-11-21T08:57:18.969033Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk3 2024-11-21T08:57:18.969037Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T08:57:18.969041Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk1 2024-11-21T08:57:18.969046Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2024-11-21T08:57:18.969052Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T08:57:18.969057Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk1 2024-11-21T08:57:18.969061Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T08:57:18.969066Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T08:57:18.969071Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk3 2024-11-21T08:57:18.969076Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1002 Path# /dev/disk2 2024-11-21T08:57:18.969080Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk3 2024-11-21T08:57:18.969085Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2024-11-21T08:57:18.969089Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2024-11-21T08:57:18.969096Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk2 2024-11-21T08:57:18.969101Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T08:57:18.969105Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1001 Path# /dev/disk3 2024-11-21T08:57:18.969109Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk1 2024-11-21T08:57:18.969114Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1002 Path# /dev/disk1 2024-11-21T08:57:18.969119Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1002 Path# /dev/disk2 2024-11-21T08:57:18.969124Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2024-11-21T08:57:18.969129Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk2 2024-11-21T08:57:18.969134Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk3 2024-11-21T08:57:18.969139Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk2 2024-11-21T08:57:18.969144Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1001 Path# /dev/disk3 
2024-11-21T08:57:18.969148Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1002 Path# /dev/disk1 2024-11-21T08:57:18.969153Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1002 Path# /dev/disk2 2024-11-21T08:57:18.969158Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1002 Path# /dev/disk1 >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TGRpcCmsTest::SimpleTenantsTest >> TGRpcCmsTest::AuthTokenTest >> KqpOlapSysView::StatsSysViewFilter >> TGRpcCmsTest::AlterRemoveTest >> TGRpcCmsTest::DescribeOptionsTest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::DisabledTxTest |90.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest |90.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> KqpOlapIndexes::SchemeActualizationOnceOnStart >> KqpDecimalColumnShard::TestJoinByDecimal [GOOD] >> KqpOlap::OlapRead_GenericQuery >> KqpDatetime64ColumnShard::UseDate32AsPrimaryKey [GOOD] >> KqpDatetime64ColumnShard::Csv >> KqpOlapSysView::StatsSysViewBytesDictActualization |90.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28509, MsgBus: 8603 2024-11-21T08:57:19.927535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654042748392291:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:19.927655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ce/r3tmp/tmphBqT0k/pdisk_1.dat 2024-11-21T08:57:19.980725Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28509, node 1 2024-11-21T08:57:19.993017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:19.993028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:19.993030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:19.993058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8603 2024-11-21T08:57:20.028283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.028334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.029368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8603 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.059223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:20.064617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:20.084529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:20.106583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.111609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.111715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:20.111741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:20.111759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:20.111779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:20.111797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:20.111816Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:20.111845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:20.111869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.111896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:20.111925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654047043360111:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:20.115891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.115911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.115955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:20.115973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:20.115990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:20.116007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:20.116021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:20.116038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:20.116056Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:20.116080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.116102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:20.116118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654047043360113:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:20.120923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.120948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.120961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.120967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.120989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.120994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.121004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.121021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.121031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.121040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_swi ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:20.130287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:20.130407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.130412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.130422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.130426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.130443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.130447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.130455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.130459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.130467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.130471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.130477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.130481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:20.130514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:20.130518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:20.130535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:20.130539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.130550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:20.130553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:20.130568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:20.130583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:20.130594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:20.130596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:20.131570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.131583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.131593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.131598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.131613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.131617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.131626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.131630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.131638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.131642Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.131648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.131652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:20.131690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:20.131694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:20.131710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:20.131714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.131723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:20.131727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:20.131742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:20.131746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:20.131757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:20.131761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:20.144444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:20.258224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654047043360423:2377], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.258255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.258382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654047043360435:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.259071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:20.260714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654047043360437:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:20.405609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179440313, txId: 18446744073709551615] shutting down >> KqpOlap::TableSinkWithOlapStore >> KqpOlapIndexes::IndexesInBS |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestJoinByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 4522, MsgBus: 16296 2024-11-21T08:57:19.022405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654042901123441:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:19.022472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048cf/r3tmp/tmpFT5J4f/pdisk_1.dat 2024-11-21T08:57:19.091554Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4522, node 1 2024-11-21T08:57:19.102112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:19.102127Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:19.102129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:19.102167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16296 2024-11-21T08:57:19.125725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:19.125758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:19.126385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:19.163166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:19.168961Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:19.383494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654042901124057:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.383533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.411274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.418589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:19.418635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:19.418664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:19.418689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:19.418704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:19.418717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:19.418732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:19.418748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:19.418767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:19.418784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:19.418799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:19.418815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654042901124133:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:19.419186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:19.419205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:19.419220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:19.419224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:19.419248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:19.419258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:19.419270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:19.419279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:19.419293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:19.419301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:19.419310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:19.419317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:19.419368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:19.419376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:19.419387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:19.419394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:19.419403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:19.419412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:19.419432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:19.419441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:19.419456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57 ... ormalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:19.988404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:19.988428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:19.988452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:19.988474Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:19.988497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:19.988524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:19.988547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:19.988572Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:19.988593Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[2:7439654042853775615:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:19.990018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:19.990037Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:19.990049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:19.990053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:19.990070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:19.990075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:19.990087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:19.990095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:19.990113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:19.990118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:19.990125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:19.990131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:19.990187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:19.990194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:19.990211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:19.990215Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:19.990225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:19.990230Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:19.990248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:19.990253Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:19.990264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:19.990268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=352;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=3; 2024-11-21T08:57:20.056585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654047196091678:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.056609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.056724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654047196091683:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.057431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:20.058964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654047196091685:2396], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:20.254893Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179440110, txId: 18446744073709551615] shutting down 2024-11-21T08:57:20.256660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654047148742964:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.256695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.256718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654047148742969:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.257396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:20.259017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654047148742971:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:20.400189Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179440313, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:56:50.797373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:50.797400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:50.797404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:50.797408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:50.797418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:50.797432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:50.797438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:50.797546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:50.861750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:50.861767Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:56:50.863382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:50.863456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:50.863485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:50.879489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:50.879567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:50.913994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T08:56:50.921364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:50.929520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:50.964635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:50.964652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:50.964659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:50.964705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:56:50.966267Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:56:50.997624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:51.008052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.008149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:51.008192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:51.008199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:51.009215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:51.009231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T08:56:51.009236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:51.009780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:51.009809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:51.010330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.010346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.010353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.010361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.011074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:51.011599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:51.020087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:51.020486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:51.020539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:51.020550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.020647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:51.020658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:51.020693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:51.020708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:51.021457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:51.021532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:51.021635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:51.021644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:51.021659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:51.021664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.021671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:51.021676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:51.021682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:51.021686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:51.021699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:51.021705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:51.021709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
schemeshard: 72057594046678944 2024-11-21T08:57:20.652702Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 112us result status StatusSuccess 2024-11-21T08:57:20.652933Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 
4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:20.653907Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:20.653967Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 67us result status StatusSuccess 2024-11-21T08:57:20.654132Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> KqpDatetime64ColumnShard::Csv [GOOD] >> KqpOlap::OlapRead_GenericQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:269:2068] recipient: [1:243:2077] 2024-11-21T08:57:16.991020Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.013122Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.039829Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.039940Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.040097Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040103Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.092820Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.092870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.108330Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.108424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.108445Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.108454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:294:2068] recipient: [1:22:2069] 2024-11-21T08:57:17.131102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.131134Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.141705Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.141762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.141789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.141802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.141831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.141840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.141847Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.141855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.152461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.152519Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.152716Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.152725Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.152772Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.157911Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } 
Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:17.158712Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk 2024-11-21T08:57:17.158730Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2024-11-21T08:57:17.158735Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2024-11-21T08:57:17.158739Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2024-11-21T08:57:17.158743Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2024-11-21T08:57:17.158748Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2024-11-21T08:57:17.158752Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2024-11-21T08:57:17.158756Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2024-11-21T08:57:17.158761Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2024-11-21T08:57:17.158765Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2024-11-21T08:57:17.158769Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2024-11-21T08:57:17.158778Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2024-11-21T08:57:17.220601Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:269:2068] recipient: [13:243:2077] 2024-11-21T08:57:19.190165Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:19.190326Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:19.190547Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:19.190618Z node 13 :BS_CONTROLLER DEBUG: 
{BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:19.190722Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:19.190728Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:19.190762Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:19.191669Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:19.191711Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:19.191739Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:19.191753Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:19.191764Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:19.191771Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:294:2068] recipient: [13:22:2069] 2024-11-21T08:57:19.203288Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:19.203342Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:19.215612Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:19.215677Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:19.215698Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:19.215710Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:19.215739Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:19.215747Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:19.215753Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:19.215760Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:19.228430Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:19.228481Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:19.228661Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:19.228668Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:19.228690Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:19.228857Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:19.228957Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2024-11-21T08:57:19.228965Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2024-11-21T08:57:19.228969Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2024-11-21T08:57:19.228974Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2024-11-21T08:57:19.228977Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2024-11-21T08:57:19.228981Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2024-11-21T08:57:19.228985Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2024-11-21T08:57:19.228990Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2024-11-21T08:57:19.228995Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2024-11-21T08:57:19.229000Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2024-11-21T08:57:19.229006Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2024-11-21T08:57:19.229011Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true 
ConfigTxSeqNo: 1 2024-11-21T08:57:19.244187Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] >> KqpOlap::TableSinkWithOlapStore [GOOD] >> test.py::test[pg-range_function_multi-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 23437, MsgBus: 64376 2024-11-21T08:57:15.398996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654026498596451:2072];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b0/r3tmp/tmp3yFaht/pdisk_1.dat 2024-11-21T08:57:15.431609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:15.455758Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23437, node 1 2024-11-21T08:57:15.478015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:15.478027Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:15.478029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:15.478066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64376 2024-11-21T08:57:15.529702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:15.529735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:15.532676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:15.546745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.549172Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:15.557315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.582115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.613037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.633201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:15.747963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654026498597975:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.748098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.755392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.762095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.816661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.827109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.882506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.890684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:15.906165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654026498598492:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.906182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654026498598497:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.906203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:15.906769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:15.910423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654026498598499:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:16.432254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:16.498467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.544774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:16.616408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.668730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.725303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:16.774411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:16.787051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:17.007489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63263, MsgBus: 24621 2024-11-21T08:57:17.319208Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654034851702107:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b0/r3tmp/tmp8JbkXD/pdisk_1.dat 2024-11-21T08:57:17.324464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:17.327777Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63263, node 2 2024-11-21T08:57:17.337201Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:17.337218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:17.337220Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:17.337266Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24621 TClient is connected to server localhost:24621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:17.421232Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:17.421264Z node 2 :HIVE WARN: HIVE#72057 ... status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:17.745755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:17.751440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654034851703987:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:18.326298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:18.419919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.481336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:18.533806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.603547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.664258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:18.718291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:18.725543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.206200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7218, MsgBus: 15379 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b0/r3tmp/tmpg8FsXk/pdisk_1.dat 2024-11-21T08:57:19.453554Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439654044362534098:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:19.453804Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 7218, node 3 2024-11-21T08:57:19.468988Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:19.471244Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:19.471257Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:19.471259Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:19.471300Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15379 TClient is connected to server localhost:15379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:19.553645Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:19.553684Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:19.554712Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:19.555408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:19.557620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:19.566007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:19.587792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:19.598849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:19.763775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654044362535635:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.763805Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.769347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.777200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.789953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.804192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.817313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.832649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:19.848520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654044362536138:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.848566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.848713Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654044362536143:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:19.849734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:19.858866Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654044362536145:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:20.455387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T08:57:20.516264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T08:57:20.572074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T08:57:20.688136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:20.747025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:20.800093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:20.855365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T08:57:20.868522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.340608Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715714:0, at schemeshard: 72057594046644480 >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDatetime64ColumnShard::Csv [GOOD] Test command err: Trying to start YDB, gRPC: 17672, MsgBus: 14333 2024-11-21T08:57:20.048342Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654045773343407:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.048408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048cd/r3tmp/tmpZPAIk5/pdisk_1.dat 2024-11-21T08:57:20.106920Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17672, node 1 2024-11-21T08:57:20.124818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.124833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.124834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.124879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14333 2024-11-21T08:57:20.147898Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.147929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.149253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.184462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:20.190433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (date Date32 NOT NULL, interval Interval64, PRIMARY KEY (date)) PARTITION BY HASH(date) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:20.383190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654045773343863:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.383240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.412489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:20.420754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.420835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.420887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:20.420911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:20.420933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:20.420961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:20.420983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:20.421005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:20.421028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:20.421054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.421076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:20.421098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654045773343939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:20.421671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.421689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.421703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.421712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.421734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.421742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.421753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.421762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.421772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.421780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.421786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.421790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:20.421855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:20.421870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:20.421887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:20.421898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:20.421912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:20.421921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:20.421937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:20.421946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:20.421959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11 ... 86224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.250325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.250344Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.250367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.250387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.250406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.250425Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654052034647297:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.252750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.252774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.252789Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.252794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.252813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.252818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.252829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.252837Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.252847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.252852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.252859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.252865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.252921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.252928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.252945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.252950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.252962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.252967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.252985Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.252989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.253001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.253004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.267930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654052034647389:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.267980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.268092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654052034647394:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.268804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:21.270729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654052034647396:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:21.417149Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:16;event=register_group;external_process_id=281474976715662;external_group_id=7;size=1;external_scope_id=7; 2024-11-21T08:57:21.417268Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=25;stage=FO::ACCESSORS; 2024-11-21T08:57:21.417413Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=26;stage=FO::FETCHING; 2024-11-21T08:57:21.417752Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=27;stage=FO::FETCHING; 2024-11-21T08:57:21.417821Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=28;stage=FO::MERGE; 2024-11-21T08:57:21.417985Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=27;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T08:57:21.417992Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=26;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T08:57:21.417999Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418002Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418005Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=25;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T08:57:21.418007Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418010Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418024Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418026Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T08:57:21.418102Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:25;event=unregister_group;external_process_id=281474976715662;external_group_id=7;size=1; 2024-11-21T08:57:21.418106Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:139;event=remove_group;external_group_id=7;internal_group_id=1; 2024-11-21T08:57:21.418110Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=28;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T08:57:21.418113Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2024-11-21T08:57:21.418115Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: 
fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2024-11-21T08:57:21.418262Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:68;event=scope_cleaned;process_id=281474976715662;external_scope_id=7; 2024-11-21T08:57:21.422907Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179441321, txId: 18446744073709551615] shutting down >> TConsoleTests::TestNotifyOperationCompletion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 13858, MsgBus: 4175 2024-11-21T08:57:20.896328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654046635450428:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.896349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c0/r3tmp/tmpv5CdCY/pdisk_1.dat 2024-11-21T08:57:20.962344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13858, node 1 2024-11-21T08:57:20.973729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.973745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.973747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.973785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4175 2024-11-21T08:57:20.998065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.998093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.999020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:21.031680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:21.037393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:21.049084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.131887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.131940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.131975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.131992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.132006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.132018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.132038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.132055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.132069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.132083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.132100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.132118Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654050930418358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.140754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.140783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.140829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.140845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.140861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.140884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.140899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.140913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.140930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.140946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.140965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.140979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050930418360:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.144321Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.144349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.144397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.144414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.144430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.144460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.144475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.144499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050930418361:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
tion=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.210569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.210573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.210696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.210701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.210709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.210712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.210727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.210730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.210737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.210740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.210747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.210751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.210756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.210759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.210785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.210788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.210801Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.210805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.210818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.210821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.210834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.210837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.210844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.210847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.210910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.210913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.210920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.210924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.210936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.210939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.210946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.210949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.210955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.210958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.210967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.210971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.210995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.210998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.211012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.211015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.211024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.211027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.211039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.211042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.211051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.211053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.218484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T08:57:21.302740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050930418858:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.302762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050930418866:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.302768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.303413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:21.304729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654050930418872:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoin >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean >> KqpOlap::PredicatePushdownWithParameters |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] Test command err: Trying to start YDB, gRPC: 25633, MsgBus: 15606 2024-11-21T08:57:20.871937Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654045999739162:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.871955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c3/r3tmp/tmpuWNz0E/pdisk_1.dat 2024-11-21T08:57:20.924612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25633, node 1 2024-11-21T08:57:20.942737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.942751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.942752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.942778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15606 2024-11-21T08:57:20.976496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.976523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.979465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:21.004998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:21.009474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:21.019641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.031122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.031194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.031238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.031256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.031270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.031289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.031304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.031319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.031334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.031355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.031370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.031383Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654050294707115:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.032385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.032405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.032418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.032423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.032442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.032452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.032462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.032477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.032493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.032497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.032503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.032507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.032571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.032577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.032594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.032599Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.032610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.032614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.032632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.032636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.032647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.032650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.035804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050294707118:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.035830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050294707118:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T08:57:21.340680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=9e5e2d56-a7e611ef-8b9907ae-af80f684;fline=with_appended.cpp:80;portions=3,;task_id=9e5e2d56-a7e611ef-8b9907ae-af80f684; 2024-11-21T08:57:21.341073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=9e5e2cca-a7e611ef-a818eccb-57687614;fline=with_appended.cpp:80;portions=3,;task_id=9e5e2cca-a7e611ef-a818eccb-57687614; 2024-11-21T08:57:21.373897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707474:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.376130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.377025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.378848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=9e5e5a06-a7e611ef-9c3560d9-e190ac98;fline=with_appended.cpp:80;portions=3,;task_id=9e5e5a06-a7e611ef-9c3560d9-e190ac98; 2024-11-21T08:57:21.395677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.396162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707535:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.396335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.402300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707581:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.402322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.403990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.417139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707629:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.417164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.418753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.431689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707676:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.431705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.434400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.448399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707723:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.448422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.448883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.458278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707770:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.458300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.459828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.474144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707817:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.474167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.475657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.485884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707864:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.485917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.487541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.502042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707911:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.502048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.502072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.520954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707965:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.520992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.521048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050294707988:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.522863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2024-11-21T08:57:21.524469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050294707981:2460];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:57:21.524586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050294707967:2459];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:57:21.529214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050294708042:2465];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:57:21.529996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2024-11-21T08:57:21.530197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050294708027:2464];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:57:21.530207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654050294707995:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2024-11-21T08:57:21.770271Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179441580, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::TableSinkWithOlapStore [GOOD] Test command err: Trying to start YDB, gRPC: 10638, MsgBus: 14475 2024-11-21T08:57:21.160623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654050089367119:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.160841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048bb/r3tmp/tmpjyegEh/pdisk_1.dat 2024-11-21T08:57:21.231115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10638, node 1 2024-11-21T08:57:21.244679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:21.244694Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:21.244696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:21.244729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14475 2024-11-21T08:57:21.261784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.261810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.263344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:21.301209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.303430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.313910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.326549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.326730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.326786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.326821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.326841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.326860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.326883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.326906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.326935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.326957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.326976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.326996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050089367760:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.331006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.331245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.331295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.331314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.331332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.331350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.331369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.331389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.331409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.331440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.331459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.331476Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654050089367761:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.335392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.335412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.335451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.335470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.335493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.335511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.335530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.335550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050089367762:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
OLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.352227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.352235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.352239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.352897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.352910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.352919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.352924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.352940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.352945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.352953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.352957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.352964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.352969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.352975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.352979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.353017Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.353023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.353038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.353043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.353054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.353058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.353073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.353077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.353086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.353090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:21.386357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.395675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2616;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2616;columns=5; 2024-11-21T08:57:21.544102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050089368112:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.544127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654050089368123:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.544134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:21.544924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:21.546643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654050089368126:2385], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:57:21.745732Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654050089368219:2380] TxId: 281474976715663. Ctx: { TraceId: 01jd6z1drcd1n39zs9e2nw3xck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiOWM4OS01OGE5NTNjLTU4NDc5NjgxLWExMWNjNDQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:21.771290Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654050089368506:2380] TxId: 281474976715664. Ctx: { TraceId: 01jd6z1drcd1n39zs9e2nw3xck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiOWM4OS01OGE5NTNjLTU4NDc5NjgxLWExMWNjNDQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:21.788365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=9;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715663}],"finishes":[{"inc":{"count_include":1},"id":281474976715663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000002;uid_1000002;"}}]}; 2024-11-21T08:57:21.788371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=8;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715663}],"finishes":[{"inc":{"count_include":1},"id":281474976715663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000000;uid_1000000;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715663}],"finishes":[{"inc":{"count_include":1},"id":281474976715663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000001;uid_1000001;"}}]}; 2024-11-21T08:57:21.789736Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654050089368989:2380] TxId: 281474976715665. Ctx: { TraceId: 01jd6z1drcd1n39zs9e2nw3xck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiOWM4OS01OGE5NTNjLTU4NDc5NjgxLWExMWNjNDQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:21.799710Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654050089369264:2380] TxId: 281474976715666. Ctx: { TraceId: 01jd6z1drcd1n39zs9e2nw3xck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdiOWM4OS01OGE5NTNjLTU4NDc5NjgxLWExMWNjNDQ2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T08:57:21.822208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=10;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715663}],"starts":[{"inc":{"count_not_include":2},"id":281474976715663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715663}]},"p":{"include":2147483647}}]}; 2024-11-21T08:57:21.822351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=7;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715663}],"starts":[{"inc":{"count_not_include":2},"id":281474976715663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715663}]},"p":{"include":2147483647}}]}; >> KqpOlapAggregations::Blocks_NoAggPushdown |91.0%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapAggregations::Json_GetValue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 25337, MsgBus: 14974 2024-11-21T08:57:20.593501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654048085340690:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.593614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c8/r3tmp/tmp7e50hc/pdisk_1.dat 2024-11-21T08:57:20.650453Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25337, node 1 2024-11-21T08:57:20.664398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.664414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.664416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.664452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14974 2024-11-21T08:57:20.693717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.693753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.694811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14974 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.727499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:20.731576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:20.740488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.740560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.740616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:20.740644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:20.740667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:20.740688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:20.740713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:20.740742Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:20.740770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:20.740799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.740823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:20.740845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654048085341318:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:20.741317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.741332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.741343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.741353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.741369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.741378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.741387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.741400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.741413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.741421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.741428Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.741436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:20.741502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:20.741512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:20.741528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:20.741538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.741554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:20.741563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:20.741584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:20.741592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:20.741607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:20.741616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:20.744664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654048085341319:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.744695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654048085341319:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.744737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654048085341319:2289];tablet_id=7207518622 ... 
=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:20.753114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.753128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.753139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.753148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.753165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.753174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.753182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.753192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.753204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.753212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.753218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.753227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:20.753264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:20.753276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:20.753292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:20.753300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.753317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:20.753325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:20.753340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:20.753344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:20.753354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:20.753362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:20.895414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654048085341622:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.895434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654048085341597:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.895479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:20.896060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:20.897728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654048085341626:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:21.964819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179441000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '829) '('"_id" '"e41f6232-a4a097b9-5ee07e9c-bb90da38") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '1)) (let $24 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $23) $24))) (RangeCreate (AsList '($24 '((Just (Int32 '"7")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '888) '('"_id" '"80aedb56-383afad1-e2d48c81-bd553a3a") 
'('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1311) '('"_id" '"52b4dda9-e8908df7-d02f2008-ff13776f") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1323) '('"_id" '"986d2303-fa8776cf-817aa822-319b39eb")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL >> KqpOlap::OlapRead_UsesScanOnJoin [GOOD] >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TGRpcCmsTest::DisabledTxTest [GOOD] >> KqpOlap::PredicatePushdownWithParameters [GOOD] >> KqpOlapAggregations::Aggregation_ResultCountExpr >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> TGRpcCmsTest::AuthTokenTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullGroupBy >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> KqpOlapAggregations::Json_GetValue [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2024-11-21T08:57:21.512821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654052836707235:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.512837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d2e/r3tmp/tmpDbB90o/pdisk_1.dat 2024-11-21T08:57:21.915147Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.954392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.954418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.956968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13546, node 1 2024-11-21T08:57:22.151583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2024-11-21T08:57:22.151601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.151603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.151655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.468508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.484686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.484708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.488017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T08:57:22.489169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.494993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:57:22.606449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/users, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.609959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.609982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/users/user-1, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.610081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.610087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.610627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/users/user-1 2024-11-21T08:57:22.610680Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.610747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.610765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.610775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 3 2024-11-21T08:57:22.620362Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.620415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.620428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.620436Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T08:57:22.620470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.620486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.620492Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T08:57:22.620507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.620509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.620511Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T08:57:22.620530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T08:57:22.621168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.621179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 3 -> 128 2024-11-21T08:57:22.621876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.622774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442672, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.622800Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732179442672, at schemeshard: 72057594046644480 2024-11-21T08:57:22.622831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T08:57:22.622855Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:1, at tablet 72057594046644480 2024-11-21T08:57:22.622899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2024-11-21T08:57:22.623597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.623655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.623671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2024-11-21T08:57:22.623685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 1/2 2024-11-21T08:57:22.623716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T08:57:22.623721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 2/2 2024-11-21T08:57:22.623727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:57:22.623738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2024-11-21T08:57:22.623748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 3, subscribers: 1 2024-11-21T08:57:22.623940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.623954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.623957Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:57:22.623985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.623987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.623989Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T08:57:22.624001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.624008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.624010Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T08:57:22.624014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T08:57:22.635486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/users/user-1, opId: 281474976710659:0, at schemeshard: 
72057594046644480 2024-11-21T08:57:22.635559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.635563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.636374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/users/user-1 2024-11-21T08:57:22.636438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.636442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 2 -> 3 2024-11-21T08:57:22.636650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:57:22.638668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.638680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 3 -> 128 2024-11-21T08:57:22.639243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.640047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442686, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.640056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710659:0, at tablet 72057594046644480 2024-11-21T08:57:22.640111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2024-11-21T08:57:22.640473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.640514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.640523Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2024-11-21T08:57:22.640532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:57:22.640540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:57:22.640551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T08:57:22.640878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:57:22.640885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:57:22.640889Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T08:57:22.640900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, 
subscribers: 1 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2024-11-21T08:57:21.514719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654050947137198:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.514734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d02/r3tmp/tmp1QMSQB/pdisk_1.dat 2024-11-21T08:57:21.905524Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.955836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.955877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.958495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13916, node 1 2024-11-21T08:57:22.151337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.151353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.151354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.151411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:22.468512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.484549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.484571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2024-11-21T08:57:22.489196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494972Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.494985Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:25439 2024-11-21T08:57:22.521100Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2024-11-21T08:57:22.521112Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2024-11-21T08:57:22.531440Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2024-11-21T08:57:22.569601Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7439654055242105326:2293], Recipient [1:7439654050947137679:2195]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2024-11-21T08:57:22.569624Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2024-11-21T08:57:22.570066Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownWithParameters [GOOD] Test command err: Trying to start YDB, gRPC: 17777, MsgBus: 30049 2024-11-21T08:57:22.392096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654057665048702:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:22.392147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b9/r3tmp/tmp7AQH2l/pdisk_1.dat 2024-11-21T08:57:22.452587Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17777, node 1 2024-11-21T08:57:22.475280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.475294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.475297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.475346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30049 2024-11-21T08:57:22.492903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.492926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.494041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.522515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:22.532840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:22.544441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.544519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.544573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.544600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.544623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:22.544648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.544670Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.544692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:22.544718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:22.544735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.544752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:22.544769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654057665049216:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:22.545290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:22.545299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:22.545319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:22.545327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.545348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.545352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.545361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.545366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.545375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.545379Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.545385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.545389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.545453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.545459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.545476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.545480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.545506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.545510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.545527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.545535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.545546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.545549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.548545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057665049214:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.548569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057665049214:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.548603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057665049214:2288];tablet_id=7207518622 ... 
:22.554901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.554915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.554918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.554926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.554929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.554937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.554941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.554945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.554949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.554976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.554980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.554994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.554998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.555008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.555019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.555034Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.555037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.555046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.555049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.555104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:22.555107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:22.555115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:22.555118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.555129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.555132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.555138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.555148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.555155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.555158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.555163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.555166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.555189Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.555193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.555206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.555209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.555217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.555220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.555231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.555235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.555247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.555249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.593160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 2024-11-21T08:57:22.708871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654057665049525:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.708905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.709015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654057665049552:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.709931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:22.712598Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:57:22.712800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654057665049554:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:22.920164Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179442763, txId: 18446744073709551615] shutting down 2024-11-21T08:57:22.985914Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179443001, txId: 18446744073709551615] shutting down Query: --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = "false"; DECLARE $in_uid AS Utf8; DECLARE $in_level AS Int32; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE uid > $in_uid AND level > $in_level ORDER BY `timestamp`; Expected: [[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] Received: [[3000983u];[3000984u];[3000988u];[3000989u];[3000993u];[3000994u];[3000998u];[3000999u]] >> KqpOlap::SimpleQueryOlapStats >> KqpOlapAggregations::Aggregation_Some_GroupByNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8362, MsgBus: 16202 2024-11-21T08:57:22.371324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654057471936972:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:22.371421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ba/r3tmp/tmpfWFT8B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8362, node 1 2024-11-21T08:57:22.436624Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:22.438288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.438300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.438303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.438336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16202 TClient is connected to server localhost:16202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:22.503833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.503858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.504268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.504828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:22.515222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:22.522714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.522770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.522803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.522823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.522834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:22.522844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.522861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.522876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:22.522891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:22.522907Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.522922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:22.522937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057471937478:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:22.523318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:22.523330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:22.523337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:22.523340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.523349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.523355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.523361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.523368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.523373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.523379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.523383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.523389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.523436Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.523443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.523454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.523460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.523467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.523473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.523484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.523491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.523501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.523511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.525803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057471937474:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.525821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057471937474:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.525847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654057471937474:2288];tablet_id=720751862240 ... 
37895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.564452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.564455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.564463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.564467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.564474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.564477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.564483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.564486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.564513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.564517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.564532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.564536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.564546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.564549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.564564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.564567Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.564576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.564594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.564654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:22.564658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:22.564665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:22.564669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.564681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.564684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.564691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.564695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.564700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.564704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.564709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.564712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:22.564735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:22.564738Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:22.564752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:22.564755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.564765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:22.564768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:22.564782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:22.564785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:22.564794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:22.564797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:22.571447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:22.722793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654057471937990:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.722844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.736332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654057471938011:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.736358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.736849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654057471938016:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:22.737729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:22.740083Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T08:57:22.740167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654057471938018:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:57:22.962650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179442791, txId: 18446744073709551615] shutting down >> KqpOlapStatistics::StatsUsageNotPK ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2024-11-21T08:57:21.513568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654053372773080:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.513585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c75/r3tmp/tmpXh9z12/pdisk_1.dat 2024-11-21T08:57:21.917999Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.938247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.938276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.955297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:21.955474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:21.964723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439654053372773720:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 11370, node 1 2024-11-21T08:57:22.151531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.151543Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.151545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.151583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.468758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.485560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.485581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T08:57:22.489226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494495Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494972Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.494990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:16792 2024-11-21T08:57:22.513397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.513508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.513515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.513537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T08:57:22.513572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T08:57:22.513581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T08:57:22.514350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add 
access: +F:user-1@builtin 2024-11-21T08:57:22.514406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.514469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.514622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T08:57:22.514638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T08:57:22.514643Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T08:57:22.514665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T08:57:22.569502Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439654057667741205:2292], Recipient [1:7439654053372773547:2198]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2024-11-21T08:57:22.569519Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T08:57:22.569525Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.569528Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.569550Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2024-11-21T08:57:22.569585Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732179442569523) 2024-11-21T08:57:22.569672Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732179442569523 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T08:57:22.569758Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T08:57:22.571138Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T08:57:22.571282Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569523&action=1" } } } 2024-11-21T08:57:22.571332Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.571352Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 
2024-11-21T08:57:22.585775Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.585923Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T08:57:22.585957Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/us ... with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:10 2024-11-21T08:57:22.853016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2024-11-21T08:57:22.853027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:9 2024-11-21T08:57:22.853037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2024-11-21T08:57:22.853047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2024-11-21T08:57:22.853056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T08:57:22.853065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.853067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.853073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T08:57:22.853075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 
2024-11-21T08:57:22.853077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.854263Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T08:57:22.854301Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.854305Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.854941Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654053372773448:2194], Recipient [1:7439654053372773547:2198]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.854944Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.854951Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.854953Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.854961Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T08:57:22.854971Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732179442830181 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2024-11-21T08:57:22.856565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.856572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.857012Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T08:57:22.857707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T08:57:22.857711Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T08:57:22.857713Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T08:57:22.857716Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T08:57:22.857718Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T08:57:22.857720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T08:57:22.857735Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T08:57:22.857738Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2024-11-21T08:57:22.857741Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T08:57:22.857736Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:22.858275Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T08:57:22.858295Z node 1 
:CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.858305Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.858344Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.858514Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2024-11-21T08:57:22.858524Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2024-11-21T08:57:22.860692Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2024-11-21T08:57:22.860724Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439654057667741960:2198], Recipient [1:7439654053372773547:2198]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.860743Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.860748Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.860750Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.860760Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T08:57:22.860768Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T08:57:22.866325Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.866341Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.866343Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.866345Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.866370Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732179442830181 2024-11-21T08:57:22.866374Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442830181 issue=AccessDenied: Access denied for request 2024-11-21T08:57:22.866377Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732179442830181 issue=AccessDenied: Access denied for request 2024-11-21T08:57:22.866378Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T08:57:22.866411Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732179442830181 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2024-11-21T08:57:22.867290Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T08:57:22.867306Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.904259Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654057667741976:2363], Recipient [1:7439654053372773547:2198]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442830181&action=2" } UserToken: "" } 2024-11-21T08:57:22.904273Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.904327Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442830181&action=2" ready: true status: SUCCESS } } 2024-11-21T08:57:22.908919Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:57:22.909019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:57:23.012299Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:23.012463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654061603163450:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:23.090564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654061603163450:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:23.167287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654061603163450:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |91.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2024-11-21T08:57:21.513414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654052410875519:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.513433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c7d/r3tmp/tmpNlir1o/pdisk_1.dat 2024-11-21T08:57:21.918635Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.955224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.955256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.957176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16795, node 1 2024-11-21T08:57:22.151961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.151976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.151977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.152013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:22.468543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.485439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.485462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T08:57:22.489129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:22.492820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.495002Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:57:22.569413Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439654056705843641:2292], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T08:57:22.569426Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T08:57:22.569434Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.569437Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.569459Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T08:57:22.569524Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732179442569185) 2024-11-21T08:57:22.569597Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732179442569185 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T08:57:22.569642Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T08:57:22.572637Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T08:57:22.572831Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569185&action=1" } } } 2024-11-21T08:57:22.572878Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.572896Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.585775Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.585923Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T08:57:22.585958Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:22.600840Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T08:57:22.600872Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.600892Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439654056705843646:2193], Recipient 
[1:7439654052410876013:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.600895Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.600900Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.600903Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.600916Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T08:57:22.600924Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T08:57:22.600945Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T08:57:22.603465Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.603480Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.603482Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.603483Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.603497Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2024-11-21T08:57:22.603503Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1732179442569185 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.603942Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654056705843649:2293], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569185&action=1" } UserToken: "" } 2024-11-21T08:57:22.603949Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.603985Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569185&action=1" } } 2024-11-21T08:57:22.605094Z node 1 :CMS_TE ... 
: 281474976710660 2024-11-21T08:57:22.699863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T08:57:22.699866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T08:57:22.699905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T08:57:22.699908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T08:57:22.699909Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:57:22.704890Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654056705843931:2299], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } UserToken: "" } 2024-11-21T08:57:22.704904Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.704942Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } } 2024-11-21T08:57:22.745607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T08:57:22.745640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2024-11-21T08:57:22.745654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T08:57:22.751609Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2024-11-21T08:57:22.751621Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T08:57:22.751637Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.751657Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439654056705843893:2193], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.751661Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.751670Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.751673Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.751689Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T08:57:22.751698Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732179442693198 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.751719Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442693198 issue= 2024-11-21T08:57:22.754437Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete 
/Root/users/user-1 2024-11-21T08:57:22.754467Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.754471Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.754789Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654052410875890:2194], Recipient [1:7439654052410876013:2193]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.754792Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.754798Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.754799Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.754805Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T08:57:22.754811Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732179442693198 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.757322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037893, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037890, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037895, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037889, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037894, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037891, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037897, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037888, at schemeshard: 72057594046644480 2024-11-21T08:57:22.757674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037896, at schemeshard: 72057594046644480 2024-11-21T08:57:22.758239Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T08:57:22.758251Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.758260Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.758300Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.758513Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T08:57:22.758522Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T08:57:22.760962Z 
node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654056705844072:2301], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } UserToken: "" } 2024-11-21T08:57:22.760974Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.761011Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } } 2024-11-21T08:57:22.772884Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T08:57:22.772922Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439654056705844064:2193], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.772926Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.772931Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.772933Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.772948Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T08:57:22.772955Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T08:57:22.774330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.774338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.774368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037892, at schemeshard: 72057594046644480 2024-11-21T08:57:22.812593Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.812620Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.812622Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.812624Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.812648Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732179442693198 2024-11-21T08:57:22.812652Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442693198 issue= 2024-11-21T08:57:22.812655Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732179442693198 issue= 2024-11-21T08:57:22.812657Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T08:57:22.812679Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732179442693198 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.818888Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654056705844087:2303], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } UserToken: "" } 2024-11-21T08:57:22.818901Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.818939Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } } 2024-11-21T08:57:22.820799Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T08:57:22.820853Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.872015Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654056705844092:2305], Recipient [1:7439654052410876013:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" } UserToken: "" } 2024-11-21T08:57:22.872027Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.872069Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442693198&action=2" ready: true status: SUCCESS } } |91.0%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 6801, MsgBus: 7674 2024-11-21T08:57:22.798379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654057026122539:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:22.798863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b2/r3tmp/tmpF2MS4U/pdisk_1.dat 2024-11-21T08:57:22.940755Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:22.941040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.941051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.941830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6801, node 1 2024-11-21T08:57:22.956480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.956499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.956501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.956540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7674 TClient is connected to server localhost:7674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.004048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:23.006384Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.017062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:23.025248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.025315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.025371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.025387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.025402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.025431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.025449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.025464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.025516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.025532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.025552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.025566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654061321090330:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.029058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.029084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.029118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.029133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.029143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.029152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.029160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.029171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.029183Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.029193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.029205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.029214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654061321090331:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.032270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.032282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.032329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.032350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.032364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.032377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.032389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.032402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654061321090329:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.035493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.035495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.035507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.035509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.035517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.035518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.035524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.035526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.035533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.035535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.035540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.035541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1") = "val1" AND id = 1; 2024-11-21T08:57:23.126887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654061321090626:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.126910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654061321090607:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.126927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.127569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:23.129215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654061321090636:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:23.291113Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179443183, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1") = "val1" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType 
(DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1286) '('"_id" '"d3a1e7de-ecb30855-6b796ad-4449f31d") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '1)) (let $20 (Just $19)) (let $21 '($20 $19)) (let $22 (If (== $19 (Int32 '2147483647)) $21 '((+ $20 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $28 (ResourceType '"JsonNode")) (let $29 (OptionalType $28)) (let $30 '((ResourceType '"JsonPath"))) (let $31 (DictType $10 $28)) (let $32 '($31)) (let $33 (CallableType '() $27 '($29) $30 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($54) (block '( (let $55 '((DataType 'Json) '"" '1)) (let $56 (CallableType '() '($28) $55)) (let $57 (Udf '"Json2.Parse" (Void) (VoidType) '"" $56 (VoidType) '"" '())) (return (Just (Apply $57 $54))) )))) (let $37 (Nothing $29)) (let $38 (CallableType '() $30 '($10))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.col1"))) (let $41 (Dict $31)) (let $42 (lambda '($58) (Nothing $11))) (let $43 (lambda '($59) $59)) (let $44 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (String '"val1"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($60 $61 $62) (block '( (let $63 (IfPresent $62 $36 $37)) (let $64 (Apply $35 $63 $40 $41)) (let $65 (Visit $64 '0 $42 '1 $43)) (let $66 (CallableType '() $27 '($26) $30 $32)) (let $67 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $66 (VoidType) '"" $34)) (let $68 (Apply $67 $61 $40 $41)) (let $69 (Visit $68 '0 $42 '1 $43)) (return (AsStruct '('"column1" $65) '('"column2" $69) '('"id" $60))) )))))) ))) '('('"_logical_id" '1357) '('"_id" '"4d6e7915-263cd81e-3e4922ce-c88ae242")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($70) $70) '('('"_logical_id" '2217) '('"_id" '"8eb9d857-115a3444-e6cb4c76-3bdd7a81")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2024-11-21T08:57:21.513022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654050396407826:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.513037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d3c/r3tmp/tmpr1XnYi/pdisk_1.dat 2024-11-21T08:57:21.918036Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.954157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.954190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.955069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25344, node 1 2024-11-21T08:57:22.150610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.150625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.150627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.150688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.468507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.484873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.484900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T08:57:22.489180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.494998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:57:22.569550Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439654054691375950:2292], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T08:57:22.569564Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T08:57:22.569568Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.569571Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.569586Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T08:57:22.569619Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732179442569385) 2024-11-21T08:57:22.569698Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732179442569385 subdomainversion=1 
confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T08:57:22.569758Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T08:57:22.572933Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T08:57:22.573090Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569385&action=1" } } } 2024-11-21T08:57:22.573129Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.573146Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.585817Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.585936Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T08:57:22.585961Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:22.597254Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T08:57:22.597288Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597305Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439654054691375955:2191], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597309Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597315Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.597317Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.597329Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T08:57:22.597337Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T08:57:22.597353Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T08:57:22.598871Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.598880Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.598881Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.598883Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.598894Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 
2024-11-21T08:57:22.598901Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1732179442569385 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.600348Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654054691375958:2293], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569385&action=1" } UserToken: "" } 2024-11-21T08:57:22.600361Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.600400Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569385&action=1" } } 2024-11-21T08:57:22.601247Z node 1 :CMS_T ... 94037968897 2024-11-21T08:57:22.819614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.819621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T08:57:22.819622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.819623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.819626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T08:57:22.819628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.819629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.821953Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2024-11-21T08:57:22.821969Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T08:57:22.821983Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.821997Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439654054691376532:2191], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.822001Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T08:57:22.822008Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.822010Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.822023Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T08:57:22.822032Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732179442777063 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.822055Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442777063 issue= 2024-11-21T08:57:22.823715Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete 
/Root/users/user-1 2024-11-21T08:57:22.823743Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.823747Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.823808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.823813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.823897Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654050396408204:2198], Recipient [1:7439654050396408345:2191]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.823900Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.823907Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.823908Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.823914Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T08:57:22.823919Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732179442777063 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.825082Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T08:57:22.825093Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T08:57:22.825097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2024-11-21T08:57:22.825099Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T08:57:22.825101Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T08:57:22.825104Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T08:57:22.825106Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T08:57:22.825108Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T08:57:22.825110Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T08:57:22.825113Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T08:57:22.825018Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:22.832639Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T08:57:22.832662Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.832678Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.832735Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { 
Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.832943Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T08:57:22.832952Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T08:57:22.836010Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T08:57:22.836047Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439654054691376749:2191], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.836050Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.836056Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.836059Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.836071Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T08:57:22.836078Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T08:57:22.837932Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.837945Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.837947Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.837949Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.837962Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732179442777063 2024-11-21T08:57:22.837966Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442777063 issue= 2024-11-21T08:57:22.837968Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732179442777063 issue= 2024-11-21T08:57:22.837969Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T08:57:22.837997Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732179442777063 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.838059Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654054691376762:2384], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442777063&action=2" } UserToken: "" } 2024-11-21T08:57:22.838069Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.838113Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442777063&action=2" } } 2024-11-21T08:57:22.839184Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 
2024-11-21T08:57:22.839211Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.892608Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654054691376769:2386], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442777063&action=2" } UserToken: "" } 2024-11-21T08:57:22.892623Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T08:57:22.892686Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442777063&action=2" ready: true status: SUCCESS } } 2024-11-21T08:57:22.893752Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439654054691376772:2388], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2024-11-21T08:57:22.893762Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T08:57:22.893798Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2024-11-21T08:57:22.894504Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7439654054691376775:2389], Recipient [1:7439654050396408345:2191]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2024-11-21T08:57:22.894507Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2024-11-21T08:57:22.894565Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2024-11-21T08:57:22.904561Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:57:22.904655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2024-11-21T08:57:21.512894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654052985863276:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.512918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003cff/r3tmp/tmpO9JevH/pdisk_1.dat 2024-11-21T08:57:21.918077Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.938887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.938929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.956901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22566, node 1 2024-11-21T08:57:22.150663Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.150679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.150681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.150751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.472722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.484523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.484543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.488199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.488228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T08:57:22.489213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.494977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.494979Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.494991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:57:22.569725Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439654057280831395:2292], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T08:57:22.569738Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T08:57:22.569743Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.569745Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.569765Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2024-11-21T08:57:22.569800Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732179442569557) 2024-11-21T08:57:22.569879Z 
node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732179442569557 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T08:57:22.569915Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T08:57:22.573534Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T08:57:22.573735Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569557&action=1" } } } 2024-11-21T08:57:22.573782Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.573800Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.585780Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.585923Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T08:57:22.585957Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:22.597356Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T08:57:22.597388Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597410Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439654057280831400:2192], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597414Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.597420Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.597422Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.597441Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T08:57:22.597448Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T08:57:22.597475Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T08:57:22.602159Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.603302Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.603316Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 
2024-11-21T08:57:22.603318Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.603351Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2024-11-21T08:57:22.603360Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1732179442569557 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.603459Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439654057280831404:2293], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569557&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T08:57:22.603472Z node 1 :CMS_TENANTS TRA ... 1-21T08:57:22.780016Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654052985863643:2191], Recipient [1:7439654052985863753:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.780018Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.780149Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T08:57:22.781434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442826, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.781446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710659:0, at tablet 72057594046644480 2024-11-21T08:57:22.781554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2024-11-21T08:57:22.781605Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3], Generation: 1, ActorId:[3:7439654054778772604:2275], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 72075186224037888, TenantSysViewProcessor: 72075186224037896, TenantStatisticsAggregator: 72075186224037897, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186224037888, tenantSysViewProcessor: 72075186224037896, at schemeshard: 72057594046644480 2024-11-21T08:57:22.782076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.782119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.782129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TSyncHive, operationId 281474976710659:0, ProgressState, NeedSyncHive: 0 2024-11-21T08:57:22.782132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 240 -> 240 2024-11-21T08:57:22.782526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T08:57:22.782535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T08:57:22.782540Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T08:57:22.782995Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439654057280831950:2359], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T08:57:22.783001Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T08:57:22.783009Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.783090Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654052985863643:2191], Recipient [1:7439654052985863753:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.783091Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.783182Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T08:57:22.783228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2024-11-21T08:57:22.783243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T08:57:22.783257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T08:57:22.783784Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2024-11-21T08:57:22.783791Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T08:57:22.783799Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2024-11-21T08:57:22.783810Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7439654057280831498:2192], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2024-11-21T08:57:22.783812Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2024-11-21T08:57:22.783816Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.783818Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.783825Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2024-11-21T08:57:22.783831Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1732179442569557 errorcode=STATUS_CODE_UNSPECIFIED issue= 
2024-11-21T08:57:22.783848Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2024-11-21T08:57:22.784701Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439654057280831969:2360], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T08:57:22.784714Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T08:57:22.784725Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.784747Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654052985863643:2191], Recipient [1:7439654052985863753:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.784749Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.784874Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T08:57:22.784949Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2024-11-21T08:57:22.784955Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.785940Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439654057280831975:2361], Recipient [1:7439654052985863753:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T08:57:22.785949Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T08:57:22.785958Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.785977Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654052985863643:2191], Recipient [1:7439654052985863753:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.785979Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.786083Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T08:57:22.786011Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28487 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 
72075186224037889 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037889 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanReso... (TRUNCATED) 2024-11-21T08:57:22.826225Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:22.828151Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:57:22.828396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:57:23.022310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654059073740234:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:23.022368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:23.119162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654059073740234:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:23.174427Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654059073740234:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2024-11-21T08:57:21.528226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654053863309874:2059];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.528283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003c7a/r3tmp/tmpxOiY7J/pdisk_1.dat 2024-11-21T08:57:21.903782Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:21.938240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.938277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.956993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61073, node 1 2024-11-21T08:57:22.151738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.151751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.151751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.151779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:22.468788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.484633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:57:22.484659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:57:22.487392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:57:22.487399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T08:57:22.487877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:57:22.487890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2024-11-21T08:57:22.489384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.492805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179442539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:57:22.493832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T08:57:22.493900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T08:57:22.494437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:57:22.494489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T08:57:22.494499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T08:57:22.494507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T08:57:22.494517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T08:57:22.494981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T08:57:22.495011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T08:57:22.495022Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:57:22.495044Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T08:57:22.569618Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439654058158277977:2292], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T08:57:22.569647Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T08:57:22.569651Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.569654Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.569678Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T08:57:22.569713Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732179442569608) 2024-11-21T08:57:22.569806Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732179442569608 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T08:57:22.569878Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T08:57:22.572435Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T08:57:22.572534Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569608&action=1" } } } 2024-11-21T08:57:22.572574Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.572590Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.585775Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.585931Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7439654058158277977:2292], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569608&action=1" } UserToken: "" } 2024-11-21T08:57:22.585938Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2024-11-21T08:57:22.596399Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7439654058158277977:2292] 2024-11-21T08:57:22.596465Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442569608&action=1" } } 
2024-11-21T08:57:22.596522Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T08:57:22.596582Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:22.598711Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T08:57:22.598730Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.598744Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439654058158277982:2193], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.598748Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T08:57:22.598751Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.598753Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.598764Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T08:57:22.598768Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T08:57:22.598781Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T08:57:22.601173Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.601189Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.601191Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.601193Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.601209Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_ ... 
to 72057594037968897 2024-11-21T08:57:22.809478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2024-11-21T08:57:22.809478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.809483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:9 2024-11-21T08:57:22.809483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.809504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2024-11-21T08:57:22.809504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.809509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2024-11-21T08:57:22.809509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.809514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T08:57:22.809514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.809519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T08:57:22.809519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T08:57:22.809520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T08:57:22.810772Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T08:57:22.810791Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.810795Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.811806Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439654053863310234:2192], Recipient [1:7439654053863310352:2193]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T08:57:22.811808Z node 1 :CMS_TENANTS TRACE: StateWork, processing event 
TEvTenantSlotBroker::TEvTenantState 2024-11-21T08:57:22.811815Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.811817Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.811823Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T08:57:22.811828Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732179442776995 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.813237Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:22.815188Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T08:57:22.815201Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T08:57:22.815204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T08:57:22.815206Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2024-11-21T08:57:22.815208Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T08:57:22.815211Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T08:57:22.815213Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T08:57:22.815215Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T08:57:22.815217Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T08:57:22.815219Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T08:57:22.815519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.815524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T08:57:22.816848Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T08:57:22.816865Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.816874Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T08:57:22.816909Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T08:57:22.817036Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T08:57:22.817042Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T08:57:22.818408Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T08:57:22.818439Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439654058158278765:2193], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.818442Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T08:57:22.818447Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.818449Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.818459Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T08:57:22.818465Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T08:57:22.819887Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T08:57:22.819898Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T08:57:22.819900Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.819902Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T08:57:22.819915Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732179442776995 2024-11-21T08:57:22.819919Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732179442776995 issue= 2024-11-21T08:57:22.819921Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732179442776995 issue= 2024-11-21T08:57:22.819922Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T08:57:22.819940Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732179442776995 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T08:57:22.823491Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T08:57:22.823538Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7439654058158278593:2382]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732179442776995&action=2" ready: true status: SUCCESS } } 2024-11-21T08:57:22.823567Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T08:57:22.824412Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439654058158278784:2383], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2024-11-21T08:57:22.824415Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T08:57:22.824442Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2024-11-21T08:57:22.824904Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7439654058158278787:2384], Recipient [1:7439654053863310352:2193]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 
2024-11-21T08:57:22.824908Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2024-11-21T08:57:22.824958Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2024-11-21T08:57:22.828402Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T08:57:22.828454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T08:57:23.070544Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:23.070754Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654061028741268:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:23.138222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439654061028741268:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |91.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |91.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> KqpOlap::SelectLimit1ManyShards >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> KqpOlap::SimpleQueryOlapStats [GOOD] >> KqpScheme::CreateDropTableMultipleTime [GOOD] >> KqpScheme::CreateDropColumnTable >> KqpOlapStatistics::StatsUsageNotPK [GOOD] >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas |91.0%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlap::OlapRead_FailsOnDataQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T08:57:16.991021Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.013122Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.039855Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.039988Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.040133Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040140Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.092821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.092870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.109120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.109165Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T08:57:17.131133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.131174Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.142158Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.142200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.142218Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.142225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.142244Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.142252Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.142258Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.142265Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.152531Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.152582Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.152789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.152795Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.152812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.158082Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T08:57:17.164689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T08:57:17.164814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T08:57:18.936004Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:18.936118Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# 
NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:18.936333Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:18.936407Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:18.936498Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.936503Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.936528Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:18.937147Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:18.937166Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:18.937183Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:18.937195Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.937205Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.937212Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T08:57:18.947495Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:18.947528Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.957846Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.957907Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.957927Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.957941Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.957973Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.957982Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.957990Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.957999Z node 11 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.968441Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.968490Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:18.968617Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:18.968622Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:18.968640Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:18.968726Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T08:57:18.968869Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T08:57:18.968921Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] Leader for TabletID 72057594037932033 is [21:206:2077] sender: [21:207:2066] recipient: [21:183:2075] 2024-11-21T08:57:20.558933Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:20.559065Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:20.559244Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:20.559310Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:20.559386Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NAct ... 
01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T08:57:20.592223Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2024-11-21T08:57:20.592298Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T08:57:20.592364Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T08:57:20.592428Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T08:57:20.592494Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T08:57:20.592570Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T08:57:20.592647Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T08:57:20.592714Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T08:57:20.592806Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T08:57:20.592868Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T08:57:20.592936Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T08:57:20.592999Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2024-11-21T08:57:20.593063Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T08:57:20.593131Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T08:57:20.593201Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T08:57:20.593266Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T08:57:20.593332Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] Leader for TabletID 72057594037932033 is 
[31:206:2077] sender: [31:207:2066] recipient: [31:188:2075] 2024-11-21T08:57:22.581843Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:22.582010Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:22.582218Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:22.582289Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:22.582387Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:22.582392Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:22.582424Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:22.583254Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:22.583278Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:22.583306Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:22.583320Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:22.583330Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:22.583339Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:229:2066] recipient: [31:20:2067] 2024-11-21T08:57:22.593724Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:22.593792Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:22.604154Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:22.604224Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:22.604246Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:22.604260Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:22.604288Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:22.604296Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:22.604302Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:22.604309Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:22.614669Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:22.614721Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:22.614867Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:22.614872Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:22.614892Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:22.614984Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2024-11-21T08:57:22.615169Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2024-11-21T08:57:22.615225Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T08:57:22.615303Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2024-11-21T08:57:22.615354Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T08:57:22.615423Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T08:57:22.615473Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T08:57:22.615534Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T08:57:22.615605Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T08:57:22.615677Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T08:57:22.615745Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T08:57:22.615822Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T08:57:22.615891Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command 
{ RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T08:57:22.615961Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T08:57:22.616037Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2024-11-21T08:57:22.616116Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T08:57:22.616189Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T08:57:22.616292Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T08:57:22.616372Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T08:57:22.616445Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapStats [GOOD] Test command err: Trying to start YDB, gRPC: 25523, MsgBus: 22033 2024-11-21T08:57:23.664357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654059965734545:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.664428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a7/r3tmp/tmpgY5mrY/pdisk_1.dat 2024-11-21T08:57:23.730866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25523, node 1 2024-11-21T08:57:23.750040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.750053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.750055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.750093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:23.767337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.767367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:22033 2024-11-21T08:57:23.768598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22033 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.806071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:23.808920Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:23.813639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.828452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.828492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.828535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.828553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.828573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.828596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.828613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.828634Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.828652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.828668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.828682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.828702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059965735059:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.831998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.832014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.832053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.832068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.832085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.832102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.832116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.832131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.832151Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.832169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.832185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.832201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059965735060:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.832852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:23.832865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.832874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.832878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.832890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.832893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.832900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.832905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.832910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.832913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... 
"DstStageId":1}],"WaitInputTimeUs":3520,"TaskId":62}],"CpuTimeUs":103,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":17,"NodeId":1,"OutputChannels":[{"ChannelId":39,"DstStageId":1}],"WaitInputTimeUs":3091,"TaskId":39}],"CpuTimeUs":89,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":45,"DstStageId":1}],"WaitInputTimeUs":3545,"TaskId":45}],"CpuTimeUs":101,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":53,"DstStageId":1}],"WaitInputTimeUs":2776,"TaskId":53}],"CpuTimeUs":104,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444180,"ComputeTimeUs":9,"NodeId":1,"OutputChannels":[{"ChannelId":56,"DstStageId":1}],"WaitInputTimeUs":2437,"TaskId":56}],"CpuTimeUs":112,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":63,"DstStageId":1}],"WaitInputTimeUs":3440,"TaskId":63}],"CpuTimeUs":98,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444178,"ComputeTimeUs":9,"NodeId":1,"OutputChannels":[{"ChannelId":46,"DstStageId":1}],"WaitInputTimeUs":4037,"TaskId":46}],"CpuTimeUs":110,"DurationUs":6000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":34,"NodeId":1,"OutputChannels":[{"ChannelId":54,"DstStageId":1}],"WaitInputTimeUs":2675,"TaskId":54}],"CpuTimeUs":121,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444180,"ComputeTimeUs":11,"NodeId":1,"OutputChannels":[{"ChannelId":57,"DstStageId":1}],"WaitInputTimeUs":2344,"TaskId":57}],"CpuTimeUs":96,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":64,"DstStageId":1}],"WaitInputTimeUs":3359,"TaskId":64}],"CpuTimeUs":100,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":50,"DstStageId":1}],"WaitInputTimeUs":3098,"TaskId":50}],"CpuTimeUs":121,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444180,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":55,"DstStageId":1}],"WaitInputTimeUs":2594,"TaskId":55}],"CpuTimeUs":97,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":10,"NodeId":1,"OutputChannels":[{"ChannelId":58,"DstStageId":1}],"WaitInputTimeUs":2804,"TaskId":58}],"CpuTimeUs":88,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444178,"ComputeTimeUs":15,"NodeI
d":1,"OutputChannels":[{"ChannelId":48,"DstStageId":1}],"WaitInputTimeUs":3709,"TaskId":48}],"CpuTimeUs":115,"DurationUs":6000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444178,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":49,"DstStageId":1}],"WaitInputTimeUs":3620,"TaskId":49}],"CpuTimeUs":105,"DurationUs":6000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":59,"DstStageId":1}],"WaitInputTimeUs":2711,"TaskId":59}],"CpuTimeUs":112,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":34,"DstStageId":1}],"WaitInputTimeUs":3545,"TaskId":34}],"CpuTimeUs":92,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":9,"NodeId":1,"OutputChannels":[{"ChannelId":60,"DstStageId":1}],"WaitInputTimeUs":2627,"TaskId":60}],"CpuTimeUs":102,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732179444184,"Host":"ghrun-qcxhsi27zq","StartTimeMs":1732179444179,"ComputeTimeUs":11,"NodeId":1,"OutputChannels":[{"ChannelId":51,"DstStageId":1}],"WaitInputTimeUs":2945,"TaskId":51}],"CpuTimeUs":108,"DurationUs":5000}],"UseLlvm":false,"DurationUs":{"Count":64,"Max":7000,"Sum":338000,"Min":4000},"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"3","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"PauseMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"WaitTimeUs":{"Count":64,"Max":4319,"Sum":178988,"Min":1752},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"MaxMemoryUsage":{"Count":64,"Max":1048576,"Sum":67108864,"Min":1048576},"Tasks":64,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":8000,"BaseTimeMs":1732179444176,"NodesScanShards":[{"node_id":1,"shards_count":3}],"WaitInputTimeUs":{"Count":64,"Max":4468,"Sum":187828,"Min":1848},"CpuTimeUs":{"Count":64,"Max":201,"Sum":4506,"Min":36},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16}}}],"Node Type":"Merge","SortColumns":["resource_id (Asc)","timestamp (Asc)"],"PlanNodeType":"Connection"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"PeakMemoryUsageBytes":65536,"Tasks":[{"InputBytes":16,"FinishTimeMs":1732179444185,"Host":"ghrun-qcxhsi27zq","ResultRows":2,"ResultBytes":16,"OutputRows":2,"StartTimeMs":1732179444182,"InputRows":2,"InputChannels":[{"ChannelId":1,"WaitTimeUs":3364,"Rows":2,"Bytes":16,"SrcStageId":0},{"ChannelId":2,"SrcStageId":0},{"ChannelId":3,"SrcStageId":0},{"ChannelId":4,"SrcStageId":0},{"ChannelId":5,"SrcStageId":0},{"ChannelId":6,"SrcStageId":0},{"ChannelId":7,"SrcStageId":0},{"ChannelId":8,"SrcStageId":0},{"ChannelId":9,"SrcStageId":0},{"ChannelId":10,"SrcStageId":0},{"ChannelId":11,"SrcStageId":0},{"ChannelId":12,"SrcStageId":0},{"ChannelId":13,"SrcStageId":0},{"ChannelId":14,"SrcStageId":0},{"ChannelId":15,"SrcStageId":0},{"ChannelId":16,"SrcStageId":0},{"ChannelId":17,"SrcStageId":0},{"ChannelId":18,"SrcStageId":0},{"ChannelId":19,"SrcStageId":0},{"ChannelId":20,"SrcStageId":0},{"ChannelId":21,"SrcStageId":0},{"ChannelId":22,"SrcStageId":0},{"ChannelId":23,"SrcStageId":0},{"ChannelId":24,"SrcStageId":0},{"ChannelId":25,"SrcStageId":0},{"ChannelId":26,"SrcStageId":0},{"ChannelId":27,"SrcStageId":0},{"ChannelId":28,"SrcStageId":0},{"ChannelId":29,"SrcStageId":0},{"ChannelId":30,"SrcStageId":0},{"ChannelId":31,"SrcStageId":0},{"ChannelId":32,"SrcStageId":0},{"ChannelId":33,"SrcStageId":0},{"ChannelId":34,"SrcStageId":0},{"ChannelId":35,"SrcStageId":0},{"ChannelId":36,"SrcStageId":0},{"ChannelId":37,"SrcStageId":0},{"ChannelId":38,"SrcStageId":0},{"ChannelId":39,"SrcStageId":0},{"ChannelId":40,"SrcStageId":0},{"ChannelId":41,"SrcStageId":0},{"ChannelId":42,"SrcStageId":0},{"ChannelId":43,"SrcStageId":0},{"ChannelId":44,"SrcStageId":0},{"ChannelId":45,"SrcStageId":0},{"ChannelId":46,"SrcStageId":0},{"ChannelId":47,"SrcStageId":0},{"ChannelId":48,"SrcStageId":0},{"ChannelId":49,"SrcStageId":0},{"ChannelId":50,"SrcStageId":0},{"ChannelId":51,"SrcStageId":0},{"ChannelId":52,"SrcStageId":0},{"ChannelId":53,"SrcStageId":0},{"ChannelId":54,"SrcStageId":0},{"ChannelId":55,"SrcStageId":0},{"ChannelId":56,"SrcStageId":0},{"ChannelId":57,"SrcStageId":0},{"ChannelId":58,"SrcStageId":0},{"ChannelId":59,"SrcStageId":0},{"ChannelId":60,"SrcStageId":0},{"ChannelId":61,"SrcStageId":0},{"ChannelId":62,"SrcStageId":0},{"ChannelId":63,"SrcStageId":0},{"ChannelId":64,"SrcStageId":0}],"ComputeTimeUs":84,"NodeId":1,"OutputChannels":[{"ChannelId":65,"DstStageId":0,"Rows":2,"Bytes":16}],"WaitInputTimeUs":3338,"TaskId":65,"OutputBytes":16}],"CpuTimeUs":367,"DurationUs":3000}],"UseLlvm":false,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":1,"InputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"DurationUs":{"Count":1,"Max":3000,"Sum":3000,"Min":3000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"Min":1048576},"BaseTimeMs":1732179444176,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"PauseMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"WaitTimeUs":{"Count":1,"Max":3402,"Sum":3402,"Min":3402},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":3}}}]
,"CpuTimeUs":{"Count":1,"Max":136,"Sum":136,"Min":136},"StageDurationUs":3000,"WaitInputTimeUs":{"Count":1,"Max":3338,"Sum":3338,"Min":3338},"ResultRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResultBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"1","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"PauseMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"WaitTimeUs":{"Count":1,"Max":3364,"Sum":3364,"Min":3364},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":3}}}],"Tasks":1,"InputRows":{"Count":1,"Max":2,"Sum":2,"Min":2}}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":32889,"ProcessCpuTimeUs":92,"Compilation":{"FromCache":false,"CpuTimeUs":18266,"DurationUs":18856}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":2,"A-Cpu":0.201,"SortBy":"","Name":"Sort"}],"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["resource_id","timestamp"],"E-Cost":"No estimate","SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":2},{"Id":1}]}}]}}],"Node Type":"TableFullScan"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy [GOOD] >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix >> KqpOlapAggregations::Aggregation_Avg >> KqpOlap::ScanFailedSnapshotTooOld >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] >> KqpOlap::BlockChannelScalar >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsageNotPK [GOOD] Test command err: Trying to start YDB, gRPC: 7797, MsgBus: 12675 2024-11-21T08:57:23.858619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654058977236825:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.858920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a5/r3tmp/tmpfmIaxy/pdisk_1.dat 2024-11-21T08:57:23.925626Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7797, node 1 2024-11-21T08:57:23.942335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.942353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.942355Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2024-11-21T08:57:23.942398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12675 2024-11-21T08:57:23.958932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.958961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.960049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.986920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:23.989653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:23.997712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:24.008154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:24.008189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:24.008244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:24.008263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:24.008279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:24.008291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:24.008305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:24.008329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:24.008344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:24.008362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:24.008376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:24.008392Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654063272204760:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:24.010834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:24.010864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:24.010899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:24.010920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:24.010939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:24.010961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:24.010989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:24.011004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:24.011020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:24.011035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:24.011056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:24.011075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654063272204761:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:24.011611Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:24.011635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:24.011653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:24.011664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:24.011679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:24.011683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:24.011692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:24.011702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:24.011711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:24.011721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_sw ... 
186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:24.017696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:24.017704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:24.017712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:24.017720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:24.017729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:24.017737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:24.017742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:24.017750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:24.017789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:24.017798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:24.017814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:24.017822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:24.017833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:24.017842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:24.017859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:24.017867Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:24.017876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:24.017884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:24.018039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:24.018051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:24.018059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:24.018062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:24.018075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:24.018078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:24.018085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:24.018089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:24.018095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:24.018103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:24.018107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:24.018110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:24.018134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:24.018143Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:24.018155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:24.018162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:24.018171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:24.018179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:24.018192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:24.018201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:24.018211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:24.018218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:24.056735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:24.214135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654063272205054:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.214178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.221868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654063272205088:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.221893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.223939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:57:24.232365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654063272205135:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.232432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.233572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:24.253469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654063272205181:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.253499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> KqpScheme::CreateDropColumnTable [GOOD] >> KqpScheme::CreateDropColumnTableNegative >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable >> KqpOlapAggregations::Aggregation_MinL >> KqpOlapAggregations::Aggregation_Avg_NullMix >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 >> KqpOlap::CheckEarlyFilterOnEmptySelect >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] >> KqpOlap::OlapRead_FailsOnDataQuery [GOOD] >> KqpScheme::CreateDropColumnTableNegative [GOOD] >> KqpScheme::CreateBackupCollectionDisabledByDefault >> KqpScheme::AddChangefeed [GOOD] >> KqpScheme::AddChangefeedWhenDisabled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 18238, MsgBus: 18442 2024-11-21T08:57:23.462000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654060275054409:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.462366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ab/r3tmp/tmpoWVXy9/pdisk_1.dat 2024-11-21T08:57:23.510789Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18238, node 1 2024-11-21T08:57:23.524054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.524068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.524070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.524106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18442 2024-11-21T08:57:23.564518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.564543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.565431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:23.590828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:23.592992Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.597054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:23.609163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.609235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.609276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.609297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.609319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.609340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.609358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.609377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.609398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.609422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.609444Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.609464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060275055047:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.613373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.613399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.613443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.613468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.613506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.613527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.613552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.613574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.613604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.613627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.613650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.613675Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654060275055046:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.617439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.617468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.617523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.617546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.617577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.617600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.617621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.617643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060275055045:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:23.623847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.623855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.623859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.623873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.623876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.623884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.623888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.623895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.623898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.623903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.623906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.623936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.623940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.623954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.623957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.623967Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.623970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.623984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.623988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.623996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.623999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:23.819995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654060275055334:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.820013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.820046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654060275055346:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.820605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:23.822233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:57:23.822276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654060275055348:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:24.599289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179444002, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '829) '('"_id" '"6351c14a-85e56d7e-8eeacb0e-2c85ef8d") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '0)) (let $24 '((Nothing $2) $23)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $23) $24))) (RangeCreate (AsList '($24 '((Just (Int32 '"7")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '888) '('"_id" '"fcc39869-36103475-56320517-8677bd61") '('"_wide_channels" 
(StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1311) '('"_id" '"3fb37b56-5508ee07-d9a6eac2-780d3bc4") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1323) '('"_id" '"2eb6a0f2-c1930846-c0b7a3f1-712d8ca8")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) |91.0%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 5377, MsgBus: 9525 2024-11-21T08:57:23.486292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654059066466248:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.486512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a8/r3tmp/tmp5cdb2v/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5377, node 1 2024-11-21T08:57:23.562064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.562077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.562079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.562122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:23.562618Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9525 2024-11-21T08:57:23.587475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.587499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.589298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.621976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:23.624920Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:23.636934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.653029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.653098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.653137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.653180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.653197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.653213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.653229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.653247Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.653261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.653274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.653291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.653310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059066466892:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.660592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.660620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.660666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.660685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.660702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.660718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.660737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.660760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.660778Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.660798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.660820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.660835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059066466893:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.664623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.664641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.664684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.664705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.664722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.664738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.664753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.664775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654059066466894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
ss=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:23.670610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:23.670619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.670628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.670633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.670647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.670652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.670661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.670665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.670673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.670677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.670682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.670686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.670716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.670721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.670736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.670740Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.670749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.670752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.670767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.670770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.670781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.670784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:23.829076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654059066467184:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.829104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.829182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654059066467196:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.829921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:23.831550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654059066467198:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:24.675539Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179444000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '836) '('"_id" '"c97f760a-b22aa8ad-99069298-d2e09cf1") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '895) '('"_id" 
'"21df0f06-9e4569be-10434d0f-8004e71") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1318) '('"_id" '"cbb3739a-8167c6d5-b1c29c18-2a09ab2f") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1330) '('"_id" '"51731a80-18bcee85-45c5aee3-bc9cfb98")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 11852, MsgBus: 25827 2024-11-21T08:57:22.782639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654054974476696:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b0/r3tmp/tmpeifirK/pdisk_1.dat 2024-11-21T08:57:22.843506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:22.875866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11852, node 1 2024-11-21T08:57:22.920379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.920392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.920394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.920431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:22.936445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.936468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.940506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25827 TClient is connected to server localhost:25827 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.988450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:22.995369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.010423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.010495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.010545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.010573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.010595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.010615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.010629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.010652Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.010666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.010688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.010703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.010718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059269444489:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.011179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:23.011185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.011193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.011196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.011208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.011211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.011217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.011223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.011230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.011233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.011237Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.011239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.011279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.011283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.011293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.011296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.011304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.011307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.011321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.011325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.011335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.011338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:23.014071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059269444490:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.014081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059269444490:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.014105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059269444490:2289];tablet_id=7207518622 ... 
s=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.022041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.022045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.022059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.022062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.022071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.022075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.022083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.022086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.022092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.022095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.022126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.022131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.022145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.022149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.022159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.022163Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.022176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.022179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.022189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.022192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; 2024-11-21T08:57:23.145411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654059269444768:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.145428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654059269444787:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.145436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:23.146031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:23.147712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654059269444797:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:24.769549Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179443197, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (lambda '($26) $26)) (let $2 '('('"_logical_id" '601) '('"_id" '"ae3fd07f-a5873212-8189f3d2-75dd0486") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 (KqpBlockReadOlapTableRanges $23 (Void) '('"id") '() '() (lambda '($25) $25))) (return (FromFlow (WideCombiner (WideFromBlocks $24) '-1073741824 $1 (lambda '($27 $28) $27) (lambda '($29 $30 $31) $31) (lambda '($32 $33) 
$33)))) ))) $2)) (let $4 (DqCnHashShuffle (TDqOutput $3 '0) '('0))) (let $5 (Uint64 '1)) (let $6 (Bool 'false)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1309) '('"_id" '"579fbb2b-c41a5d67-b6c5eed9-30c4ac54") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($4) (lambda '($34) (block '( (let $35 (lambda '($38 $39))) (let $36 (WideCombiner (ToFlow $34) '"" $1 $35 $35 $1)) (let $37 (Condense1 (NarrowMap $36 (lambda '($40) (AsStruct '('"id" $40)))) (lambda '($41) $5) (lambda '($42 $43) $6) (lambda '($44 $45) (Inc $45)))) (return (FromFlow (ExpandMap $37 (lambda '($46) (Just $46))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($47) (block '( (let $48 (Condense (NarrowMap (WideFromBlocks (WideTakeBlocks (WideToBlocks (WideCondense1 (ToFlow $47) (lambda '($49) $49) (lambda '($50 $51) $6) (lambda '($52 $53) (IfPresent $52 (lambda '($54) (IfPresent $53 (lambda '($55) (Just (AggrAdd $54 $55))) $52)) $53)))) $5)) (lambda '($56) (AsStruct '('Count0 (Unwrap $56))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($57 $58) $6) (lambda '($59 $60) (Just $59)))) (return (FromFlow (Map $48 (lambda '($61) (AsList (AsStruct '('"column0" (Coalesce (Member $61 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1908) '('"_id" '"a48d174b-630a07c-69f96389-647f8079")))) (let $12 '($3 $9 $11)) (let $13 (DqCnValue (TDqOutput $11 '0))) (let $14 '('('"type" '"scan"))) (let $15 (KqpPhysicalTx $12 '($13) '() $14)) (let $16 '"%kqp%tx_result_binding_0_0") (let $17 (ListType (StructType '('"column0" $7)))) (let $18 '('('"_logical_id" '2011) '('"_id" '"f72d1f06-dd7da602-5f4da23-1fbba0fd") '('"_partition_mode" '"single"))) (let $19 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $18)) (let $20 (DqCnResult (TDqOutput $19 '0) '('"column0"))) (let $21 (KqpTxResultBinding $17 '0 '0)) (let $22 (KqpPhysicalTx '($19) '($20) '('($16 $21)) $14)) (return (KqpPhysicalQuery '($15 $22) '((KqpTxResultBinding $17 '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:229:2060] recipient: [1:212:2140] 2024-11-21T08:56:03.840957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:03.840978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:03.840981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:03.840984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:03.840988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:03.840990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:03.840997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:03.841076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:03.850346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:03.850366Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:03.852764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:03.852811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:03.852836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:03.854523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:03.854580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:03.854679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:03.854737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:03.855329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:03.855620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:03.855632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:03.855666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:03.855672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:03.855678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:03.855696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.856964Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:339:2060] recipient: [1:17:2064] 2024-11-21T08:56:03.871860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:03.871957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.872036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:03.872085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:03.872093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T08:56:03.872933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:03.872959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:03.873014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.873021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:03.873024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:03.873028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:03.873351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.873358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:03.873361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:03.873632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.873640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.873646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:03.873653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:03.874153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:03.874489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:03.874540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:03.874700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:03.874719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:03.874727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:03.874769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T08:56:03.874774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:03.874799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:03.874807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:03.875100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:03.875111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:03.875150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:03.875155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:306:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:03.875252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:03.875258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:03.875270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:03.875275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:03.875280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:03.875286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:03.875291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:03.875295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:03.875305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:03.875310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:03.875314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:56:03.875545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:03.875556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:56:03.875560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:56:03.875563Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:56:03.875566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T08:56:03.875577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... rocessing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:22.229261Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:22.229266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:22.486029Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:22.486070Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:22.486091Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:22.486096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:22.748406Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:22.748450Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:22.748470Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:22.748475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.000433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.000478Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.000503Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.000508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.260821Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.260848Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.261023Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.261030Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.496528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2024-11-21T08:57:23.496552Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.496564Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.496568Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.746365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.746391Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:23.746403Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:23.746407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.007739Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.007780Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.007798Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.007802Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.244083Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.244129Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.244147Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.244151Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.512345Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.512377Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T08:57:24.512395Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.512399Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T08:57:24.555188Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1059:2824], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true 
ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T08:57:24.555216Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:57:24.555243Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:24.555301Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 45us result status StatusPathDoesNotExist 2024-11-21T08:57:24.555343Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:24.555459Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1060:2825], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T08:57:24.555466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:57:24.555476Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:24.555493Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 15us result status StatusPathDoesNotExist 2024-11-21T08:57:24.555513Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:57:24.555562Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1061:2826], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: 
"/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2024-11-21T08:57:24.555569Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:57:24.555581Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:24.555611Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 26us result status StatusPathDoesNotExist 2024-11-21T08:57:24.555632Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 20891, MsgBus: 26817 2024-11-21T08:57:23.723513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654060553238406:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.724236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a6/r3tmp/tmp9zrkxa/pdisk_1.dat 2024-11-21T08:57:23.796894Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20891, node 1 2024-11-21T08:57:23.815448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.815459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.815462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.815496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:23.824420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.824450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.825159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26817 TClient is connected to server localhost:26817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.880519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:23.883180Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:23.888938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.900814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.900883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.900921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.900940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.900956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.900973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.900990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.901011Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.901034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.901052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.901073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.901089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654060553238911:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.908774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.908801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.908879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.908905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.908931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.908955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.908971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.908987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.909004Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.909017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.909028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.909042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654060553238914:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.912163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.912178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.912332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.912355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.912371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.912388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.912403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.912418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654060553238915:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.916113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.916116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.916129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.916132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.916138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.916142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.916149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.916153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.916158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:23.916161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:23.916185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:23.916190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:23.916447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:23.916462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.916476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:23.916485Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:23.916501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:23.916509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:23.916519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:23.916526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; 2024-11-21T08:57:24.035430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654064848206508:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.035458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654064848206500:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.035477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:24.036278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:24.038252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654064848206514:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:25.024266Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179444093, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1274) '('"_id" '"ef11ff65-ccb473ba-9c65803e-4ad3b8e") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $25 (Int32 '"6")) (let $26 (Just $25)) (let $27 (Int32 '1)) (let $28 '($26 $27)) (let $29 (If (== $25 (Int32 '2147483647)) $28 '((+ $26 $27) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($28 $29)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) 
(let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"level" $7)) (let $11 (StructType '('_yql_agg_0 $6) '('_yql_agg_1 $7) $10)) (let $12 '('('"_logical_id" '1333) '('"_id" '"db33f404-2a00f5e9-8a28443f-854431a9") '('"_wide_channels" $11))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $30 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $31 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $32 (KqpWideReadOlapTableRanges $30 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $31 (lambda '($33) (block '( (let $34 '('_yql_agg_0 'some '"id")) (let $35 '('_yql_agg_1 'some '"level")) (return (TKqpOlapAgg $33 '($34 $35) '('"level"))) ))))) (return (FromFlow $32)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('2))) (let $15 (StructType '('"column1" $6) '('"column2" $7) $10)) (let $16 '('('"_logical_id" '1871) '('"_id" '"a5cbcccd-429a1cec-a0459623-67e19361") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($36) (block '( (let $37 (lambda '($44 $45 $46 $47) $45 $46)) (let $38 (lambda '($48 $49 $50 $51 $52 $53) $52 (Coalesce $53 $50))) (let $39 (lambda '($54 $55 $56) $55 $56 $54)) (let $40 (WideCombiner (ToFlow $36) '"" (lambda '($41 $42 $43) $43) $37 $38 $39)) (return (FromFlow (WideSort $40 '('('2 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('2 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($57) (FromFlow (NarrowMap (ToFlow $57) (lambda '($58 $59 $60) (AsStruct '('"column1" $58) '('"column2" $59) '('"level" $60)))))) '('('"_logical_id" '1883) '('"_id" '"2c353ac9-12663429-9228a2c5-e77c0c68")))) (let $20 '($13 $17 $19)) (let $21 '('"level" '"column1" '"column2")) (let $22 (DqCnResult (TDqOutput $19 '0) $21)) (let $23 (KqpTxResultBinding $9 '0 '0)) (let $24 (KqpPhysicalTx $20 '($22) '('($5 $23)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $24) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::CountWhereColumnIsNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_FailsOnDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 64153, MsgBus: 20059 2024-11-21T08:57:24.905746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654065886567042:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:24.905809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489e/r3tmp/tmpK6nmKK/pdisk_1.dat 2024-11-21T08:57:24.964452Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64153, node 1 2024-11-21T08:57:24.974253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:24.974287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:24.974289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:24.974332Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20059 TClient is connected to server localhost:20059 2024-11-21T08:57:25.006287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.006319Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:25.007314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.021198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.027621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.036103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.036159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.036242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.036281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.036299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.036323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.036348Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.036380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.036409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.036438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.036461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.036487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654070181534829:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.040022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.040059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.040105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.040132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.040161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.040186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.040230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.040259Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.040294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.040312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.040330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.040349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654070181534827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.040874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.040887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.040901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.040904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.040918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.040922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.040932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.040939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.040948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.040951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.040957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... 7892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.111554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.111573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.111582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.111591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.111601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.111610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.111619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.111626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.111635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.111673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.111683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.111701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.111711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.111722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.111733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 
2024-11-21T08:57:25.111750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.111759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.111769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.111778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.111869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.111882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.111890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.111894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.111918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.111927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.111935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.111944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.111952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.111961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.111967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.111976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.112009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.112020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.112036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.112045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.112056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.112065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.112081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.112090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.112101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.112110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.147353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:25.177530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654070181535353:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.177559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654070181535364:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.177568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.178408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.180677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654070181535367:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:57:25.318191Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654070181535449:2409] TxId: 281474976710663. Ctx: { TraceId: 01jd6z1hds3kdvdqf074f33mj0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMyNDcyMjMtMjRlMmE3MzAtMTE1YmRlY2ItYjNiN2Q3MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2024-11-21T08:57:25.320809Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGMyNDcyMjMtMjRlMmE3MzAtMTE1YmRlY2ItYjNiN2Q3MWM=, ActorId: [1:7439654070181535350:2409], ActorState: ExecuteState, TraceId: 01jd6z1hds3kdvdqf074f33mj0, Create QueryResponse for error on request, msg: >> KqpOlapAggregations::CountAllPushdown+UseLlvm >> KqpOlapAggregations::Aggregation_Avg_NullMix [GOOD] >> KqpOlap::BlockChannelScalar [GOOD] >> KqpOlapWrite::TierDraftsGC ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 21099, MsgBus: 23847 2024-11-21T08:57:25.107625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654068497545917:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.107759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004897/r3tmp/tmp4RJLo7/pdisk_1.dat 2024-11-21T08:57:25.163294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21099, node 1 2024-11-21T08:57:25.175907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.175919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.175920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.175946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23847 2024-11-21T08:57:25.209065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.209097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.210196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.240607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.243946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:25.249777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.258797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.258855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.258882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.258894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.258910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.258920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.258932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.258960Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.258972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.258984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.258999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.259010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654068497546564:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.263509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.263534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.263570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.263587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.263604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.263620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.263635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.263655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.263677Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.263694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.263710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.263725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068497546562:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.265785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.265795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.265815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.265825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.265838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.265847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.265855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.265864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654068497546563:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.337135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.337145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.337148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.337159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.337162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.337170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.337172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.339765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.339787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.339816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.339835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.339847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.339860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.339873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.339886Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.339900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.339914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.339931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.339944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654068497546815:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.340814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.340826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.340835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.340839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.340860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.340863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.340870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.340874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.340881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.340884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.340889Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.340892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.340928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.340933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.340947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.340950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.340960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.340964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.340978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.340982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.340994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.340999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 2024-11-21T08:57:25.382403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:25.414249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654068497547077:2410], DatabaseId: /Root, PoolId: default, Failed 
to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.414271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.414379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654068497547089:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.414985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.416641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T08:57:25.416697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654068497547091:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } |91.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |91.0%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 11436, MsgBus: 16081 2024-11-21T08:57:25.174127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654067527197215:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.174197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004894/r3tmp/tmpzCz7uK/pdisk_1.dat 2024-11-21T08:57:25.235237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11436, node 1 2024-11-21T08:57:25.249667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.249684Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.249686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.249729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16081 2024-11-21T08:57:25.274628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.274653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.275960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.311915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:25.314620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:25.317325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.326826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.326880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.326925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.326947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.326975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.326995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.327013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.327036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.327058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.327080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.327099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.327120Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654067527197712:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.330269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.330295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.330331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.330346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.330359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.330373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.330390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.330404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.330418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.330433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.330448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.330464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067527197710:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.333718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.333741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.333772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.333789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.333809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.333823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.333843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.333868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067527197711:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
xecute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.337452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.337554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.337558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.337565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.337568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.337581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.337585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.337593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.337597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.337607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.337611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.337616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.337619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.337644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.337648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.337665Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.337669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.337679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.337682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.337695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.337699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.337709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.337712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls`; 2024-11-21T08:57:25.484015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654067527198000:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.484051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.484339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654067527198019:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.485038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.486803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654067527198021:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:25.771203Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179445542, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Double)))))) (let $1 (DataType 'Double)) (let $2 (OptionalType (TupleType $1 (DataType 'Uint64)))) (let $3 '('('"_logical_id" '1423) '('"_id" '"d464f723-65ed5f3a-d15d1892-714f8fae") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $18 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $19 (KqpWideReadOlapTableRanges $18 (Void) '('"level") '() '() (lambda '($20) (block '( (let $21 '('"_yql_agg_0_sum" '"sum" '"level")) (let $22 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $20 '($21 $22) '())) ))))) (return (FromFlow (WideMap $19 (lambda '($23 $24) (IfPresent $24 (lambda '($25) (Just '((Convert $25 'Double) $23))) (Nothing $2)))))) ))) $3)) (let $5 (DqCnUnionAll 
(TDqOutput $4 '0))) (let $6 (OptionalType $1)) (let $7 (DqPhyStage '($5) (lambda '($26) (block '( (let $27 (Bool 'false)) (let $28 (WideCondense1 (ToFlow $26) (lambda '($30) $30) (lambda '($31 $32) $27) (lambda '($33 $34) (IfPresent $33 (lambda '($35) (IfPresent $34 (lambda '($36) (Just '((AggrAdd (Nth $35 '0) (Nth $36 '0)) (AggrAdd (Nth $35 '1) (Nth $36 '1))))) $33)) $34)))) (let $29 (Condense (NarrowMap (Take $28 (Uint64 '1)) (lambda '($37) (block '( (let $38 (IfPresent $37 (lambda '($39) (Just (Div (Nth $39 '0) (Nth $39 '1)))) (Nothing $6))) (return (AsStruct '('Avg0 $38))) )))) (Nothing (OptionalType (StructType '('Avg0 $6)))) (lambda '($40 $41) $27) (lambda '($42 $43) (Just $42)))) (return (FromFlow (Map $29 (lambda '($44) (AsList (AsStruct '('"column0" (Member $44 'Avg0)))))))) ))) '('('"_logical_id" '3080) '('"_id" '"970bbdb8-dc111529-d3291658-cff9b82c")))) (let $8 (DqCnValue (TDqOutput $7 '0))) (let $9 '('('"type" '"scan"))) (let $10 (KqpPhysicalTx '($4 $7) '($8) '() $9)) (let $11 '"%kqp%tx_result_binding_0_0") (let $12 (ListType (StructType '('"column0" $6)))) (let $13 '('('"_logical_id" '3176) '('"_id" '"cf084f46-a1878c42-5303fe-436e7f9b") '('"_partition_mode" '"single"))) (let $14 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $13)) (let $15 (DqCnResult (TDqOutput $14 '0) '('"column0"))) (let $16 (KqpTxResultBinding $12 '0 '0)) (let $17 (KqpPhysicalTx '($14) '($15) '('($11 $16)) $9)) (return (KqpPhysicalQuery '($10 $17) '((KqpTxResultBinding $12 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpScheme::CreateBackupCollectionDisabledByDefault [GOOD] >> KqpScheme::CreateBackupCollection >> KqpScheme::AddChangefeedWhenDisabled [GOOD] >> KqpScheme::AddChangefeedNegative >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelScalar [GOOD] Test command err: Trying to start YDB, gRPC: 16333, MsgBus: 3241 2024-11-21T08:57:25.018986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654068701833659:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.019002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489d/r3tmp/tmpTsLZRw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16333, node 1 2024-11-21T08:57:25.078274Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:25.078606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.078620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.078622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.078661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3241 TClient is connected to server localhost:3241 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:25.120363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.120391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.121460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.147480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.152045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.169135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.228185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.239668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.318989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654068701835205:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.319044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.326785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.335819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.357086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.412387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.424523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.431284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.440486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654068701835720:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.440517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.440521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654068701835725:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.441128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.444548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654068701835727:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:25.582580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.595648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.595648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.595695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.595730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.595818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.595821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.595861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.595863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.595884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.595884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.595903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.595906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.595924Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.595926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.595957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654068701836077:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.595960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.595981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439654068701836081:2462];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-2 ... 19;self_id=[1:7439654068701836116:2463];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.600117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654068701836076:2460];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.600135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654068701836076:2460];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.600144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654068701836116:2463];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.600152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654068701836076:2460];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.600177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654068701836076:2460];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.600609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.600625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.600625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.600632Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.600636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.600641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.600644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.600649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.600655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.600665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.600666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.600670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.600675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.600679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.600680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.600684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.600688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.600691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.600692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.600696Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.600697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.600701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.600703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.600707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.600748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.600755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.600756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.600760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.600773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.600774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.600777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.600778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.600788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.600789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.600791Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.600793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.600809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.600809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.600812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.600814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.600822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.600825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.600826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.600829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.655796Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654068701836320:2535] TxId: 281474976715672. Ctx: { TraceId: 01jd6z1hwd54bq9nc8ka87n1b8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0NDAxZi1kNzE0NjhmYy02NTc0YzJmNi00MDViM2U3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:25.748295Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654068701836417:2555] TxId: 281474976715674. Ctx: { TraceId: 01jd6z1hya47prka8hcfeq21yn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNTdjMWEtZDBmYWVmYzItNmQzODFmODgtMTUwNGM3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL >> KqpOlap::ScanQueryOltpAndOlap >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 26427, MsgBus: 12357 2024-11-21T08:57:25.029421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654069136583675:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.029599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004898/r3tmp/tmp3IaQc5/pdisk_1.dat 2024-11-21T08:57:25.072607Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26427, node 1 2024-11-21T08:57:25.083885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.083896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.083898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.083934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12357 TClient is connected to server localhost:12357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.130172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.130204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.131324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:25.154056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:25.160022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.168572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.168641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.168698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.168726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.168748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.168770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.168789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.168814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.168838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.168860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.168883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.168904Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654069136584313:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.169324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.169337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.169349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.169352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.169367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.169371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.169381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.169396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.169410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.169417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.169424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.169431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.169501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.169510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.169528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.169539Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.169550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.169553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.169569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.169572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.169582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.169590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.171960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069136584312:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.171985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069136584312:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.172022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069136584312:2288];tablet_id=7207518622 ... 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.182928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.182933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.182949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.182953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.182963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.182969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.182977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.182983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.182989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.182998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.183036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.183054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.183072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.183082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.183093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.183102Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.183119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.183123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.183135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.183138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2024-11-21T08:57:25.306775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654069136584608:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.306796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.306809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654069136584617:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.307685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.309930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654069136584622:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:26.295981Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179445360, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1214) '('"_id" '"f36cae48-5daf1f53-b7d2df1-b5ae4399") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Int64)) (let $11 (OptionalType $10)) (let $12 '('"level" 
$2)) (let $13 (StructType '('_yql_agg_0 $10) '('_yql_agg_1 $11) $12)) (let $14 '('('"_logical_id" '1273) '('"_id" '"fa74295b-165e6398-b2a713c4-7532ea35") '('"_wide_channels" $13))) (let $15 (DqPhyStage '() (lambda '() (block '( (let $27 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $27 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (block '( (let $31 '('_yql_agg_0 'sum '"id")) (let $32 '('_yql_agg_1 'sum '"level")) (return (TKqpOlapAgg $30 '($31 $32) '('"level"))) ))))) (return (FromFlow $29)) ))) $14)) (let $16 (DqCnHashShuffle (TDqOutput $15 '0) '('2))) (let $17 (StructType '('"column1" $10) '('"column2" $11) $12)) (let $18 '('('"_logical_id" '2113) '('"_id" '"be9973be-edf285ed-fd2c0a02-412b95c1") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($16) (lambda '($33) (block '( (let $34 (lambda '($41 $42 $43 $44) $42 $43)) (let $35 (lambda '($45 $46 $47 $48 $49 $50) (AggrAdd $46 $49) (AggrAdd $47 $50))) (let $36 (lambda '($51 $52 $53) $52 $53 $51)) (let $37 (WideCombiner (ToFlow $33) '"" (lambda '($38 $39 $40) $40) $34 $35 $36)) (return (FromFlow (WideSort $37 '('('2 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('2 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($54) (FromFlow (NarrowMap (ToFlow $54) (lambda '($55 $56 $57) (AsStruct '('"column1" $55) '('"column2" $56) '('"level" $57)))))) '('('"_logical_id" '2125) '('"_id" '"c17dfbb3-567e8864-35a5cc23-3eb74995")))) (let $22 '($15 $19 $21)) (let $23 '('"level" '"column1" '"column2")) (let $24 (DqCnResult (TDqOutput $21 '0) $23)) (let $25 (KqpTxResultBinding $9 '0 '0)) (let $26 (KqpPhysicalTx $22 '($24) '('($7 $25)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $26) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpOlapSysView::StatsSysViewEnumStringBytes >> KqpOlapStats::AddRowsTableInTableStore >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] >> KqpScheme::CreateBackupCollection [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 4892, MsgBus: 26319 2024-11-21T08:57:25.994079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654069870560302:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.994136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488d/r3tmp/tmpq3sAXx/pdisk_1.dat 2024-11-21T08:57:26.059719Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4892, node 1 2024-11-21T08:57:26.071333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.071356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.071358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.071393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:26319 2024-11-21T08:57:26.095705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.095729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.096608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.139193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.141617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:26.148253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.160833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.160904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.160952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.160984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.161014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.161033Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.161050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.161073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.161096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.161114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.161133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.161155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074165528243:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.165067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.165088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.165129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.165146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.165162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.165178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.165193Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.165213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.165234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.165249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.165266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.165281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074165528244:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.168848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.168872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.168902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.168919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.168936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.168953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.168968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.168984Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654074165528246:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... Chunks; 2024-11-21T08:57:26.178307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.178312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.178331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.178335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.178343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.178347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.178356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.178360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.178366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:26.178370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:26.178401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:26.178405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:26.178423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.178427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.178441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2024-11-21T08:57:26.178446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.178462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.178472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.178483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.178486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:26.188269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:26.399184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654074165528687:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.399186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654074165528696:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.399206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.399801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:26.401189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654074165528701:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:26.807267Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179446452, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"9b6b615a-7e131b9b-e1c76ba5-55101d3") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"f454f75c-6e453520-8b5c7c33-64e9664d")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"2369cdaf-3df86634-64b799dc-564fa1d8") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpScheme::AddChangefeedNegative [GOOD] >> KqpOlap::ScanQueryOltpAndOlap [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollection [GOOD] Test command err: Trying to start YDB, gRPC: 28585, MsgBus: 22351 2024-11-21T08:57:09.524661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653999993199511:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.524728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409b/r3tmp/tmp3o2yMD/pdisk_1.dat 2024-11-21T08:57:09.584051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28585, node 1 2024-11-21T08:57:09.625243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.625267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.626788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.630758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.630768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.630770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.630794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22351 TClient is connected to server localhost:22351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.701019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.712525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.724777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.792095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:57:09.815738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.826931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.892068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999993201062:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.892098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.933257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.939786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.946850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.954879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.960738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.969974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:09.985137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999993201562:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.985152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653999993201567:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.985156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.985746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:09.988169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653999993201569:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.176798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:1, at schemeshard: 72057594046644480 2024-11-21T08:57:10.193259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.194952Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2024-11-21T08:57:10.213643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.215807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T08:57:10.233839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.237250Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T08:57:10.250890Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T08:57:10.252535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.279648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715734:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.282519Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T08:57:10.299499Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T08:57:10.309048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715742:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.329369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.330618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T08:57:10.355854Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T08:57:10.357507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715770:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.377234Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2024-11-21T08:57:10.377673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.409058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715799:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.410386Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2024-11-21T08:57:10.477134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2024-11-21T08:57:10.477433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715832:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.492851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715842:0, at schemesha ... o unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.733295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.751583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.760479Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.919292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654067619788493:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.919324Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.921996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.928254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.935381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.990017Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.000738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.012824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.025461Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654071914756299:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.025514Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.025582Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439654071914756304:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.026290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:26.033045Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439654071914756306:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 65102, MsgBus: 3358 2024-11-21T08:57:26.557941Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439654072584849135:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.558205Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409b/r3tmp/tmpxdxpAu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65102, node 5 2024-11-21T08:57:26.574918Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:26.578123Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.578128Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.578131Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.578209Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3358 TClient is connected to server localhost:3358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.658240Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.658281Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.659347Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:26.661105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.663755Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:26.672688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.679857Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:26.692668Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.706125Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.836733Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654072584850668:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.836762Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.842131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.848243Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.859269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.866093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.873304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.880093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.889302Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654072584851183:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.889332Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.889334Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439654072584851188:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.889853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:26.893324Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439654072584851190:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:27.089628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715672:2, at schemeshard: 72057594046644480 2024-11-21T08:57:27.095614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpOlap::CountWhereColumnIsNull [GOOD] >> KqpDecimalColumnShard::TestSimpleQueries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 24023, MsgBus: 20187 2024-11-21T08:57:26.762485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654074043545236:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.762620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004888/r3tmp/tmp03Uu2F/pdisk_1.dat 2024-11-21T08:57:26.809258Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24023, node 1 2024-11-21T08:57:26.817890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.817904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.817905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.817938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20187 TClient is connected to server localhost:20187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.858616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:26.863637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.863663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.864744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:26.871200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.878571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.878639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.878675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.878702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.878724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.878745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.878765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.878791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.878814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.878839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.878862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.878883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074043545883:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.881181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.881202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.881235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.881252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.881274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.881290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.881308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.881328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.881347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.881368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.881387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.881406Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654074043545930:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.881778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:26.881791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:26.881802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.881806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.881823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.881833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.881842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.881855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.881870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.881878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.881884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
ne=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.955235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.955248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.955257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.955261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.955263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.955273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.955275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.955278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:26.955284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:26.955300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:26.955306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:26.955315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.955321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.955327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:26.955332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.955341Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.955346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.955352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.955357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:26.955420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:26.955427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:26.955431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.955434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.955441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.955443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.955447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.955449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.955454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.955456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.955459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:26.955461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:26.955475Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:26.955481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:26.955489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.955491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.955498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:26.955504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.955512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.955514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.955520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.955522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:26.992061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:27.076944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654078338513691:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.077005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.089734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654078338513728:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.089759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.089757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654078338513733:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.090556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:27.092119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654078338513735:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:57:27.271828Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179447145, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanQueryOltpAndOlap [GOOD] Test command err: Trying to start YDB, gRPC: 62202, MsgBus: 21312 2024-11-21T08:57:26.792557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654071523315385:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.792579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004887/r3tmp/tmp1zIh91/pdisk_1.dat 2024-11-21T08:57:26.833174Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62202, node 1 2024-11-21T08:57:26.845019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.845028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.845029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.845061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21312 TClient is connected to server localhost:21312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.884156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:26.893825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.893850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.894940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:26.897207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.905728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.905797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.905842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.905866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.905888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.905914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.905936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.905958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.905983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.906006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.906031Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.906052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654071523316043:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.906492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:26.906507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:26.906519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.906523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.906538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.906546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.906560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.906569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.906579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.906587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.906594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:26.906603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:26.906669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:26.906679Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:26.906697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.906705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.906716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:26.906724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.906740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.906748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.906759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.906767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:26.909702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071523316041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.909725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071523316041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.909765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071523316041:2288];tablet_id=7207518622 ... 
tract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.914621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.914633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:26.914642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.914660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.914669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.914680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.914685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:26.917683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.917705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.917743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.917761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.917783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.917803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.917822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.917838Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.917860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.917880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.917901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.917936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654071523316075:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.918559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:26.918572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:26.918584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.918588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.918604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.918608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.918618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.918623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.918637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.918643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.918650Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:26.918655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:26.918703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:26.918713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:26.918732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:26.918741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.918753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:26.918759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:26.918777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:26.918785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:26.918797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:26.918801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:26.957695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3632;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3632;columns=5; 2024-11-21T08:57:26.978018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:27.085742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654075818283739:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.085746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654075818283728:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.085761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.086355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:27.087965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654075818283742:2390], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:57:27.293183Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179447320, txId: 281474976710664] shutting down |91.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AddChangefeedNegative [GOOD] Test command err: Trying to start YDB, gRPC: 7827, MsgBus: 11223 2024-11-21T08:57:09.623946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654001939739372:2072];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.624899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409a/r3tmp/tmp6x9nzL/pdisk_1.dat 2024-11-21T08:57:09.697824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7827, node 1 2024-11-21T08:57:09.732304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.732331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.734287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.740358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.740372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.740373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.740407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11223 TClient is connected to server localhost:11223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.810378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.816625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.883585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.904362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.915047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.995151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654001939740893:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:09.995177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.030714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.041803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.051932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.059170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.067410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.080650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.095793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654006234708694:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.095816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.095861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654006234708699:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.096372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:10.100333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654006234708701:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:10.300476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16156, MsgBus: 5787 2024-11-21T08:57:10.511998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654004675127469:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:10.512248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409a/r3tmp/tmpjhLbZ4/pdisk_1.dat 2024-11-21T08:57:10.522056Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16156, node 2 2024-11-21T08:57:10.532566Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:10.532582Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:10.532583Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:10.532618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5787 TClient is connected to server localhost:5787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:10.615011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:10.615049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:10.615287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.615759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:57:10.622194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:10.679145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.707170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.718138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:10.870232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654004675129000:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.870266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:10.874606Z node 2 :FLAT_TX_SCHEMESHARD W ... 46644480 waiting... 2024-11-21T08:57:25.749703Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.771217Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.783309Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.918426Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439654067305424193:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.918451Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.920974Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.928388Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.935564Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.949415Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.956277Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.963792Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.979588Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439654067305424704:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.979625Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.979629Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439654067305424709:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.980347Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:25.983257Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439654067305424711:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:26.204697Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9718, MsgBus: 2252 2024-11-21T08:57:26.589858Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7439654073435761908:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.590010Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00409a/r3tmp/tmpdTHaL9/pdisk_1.dat 2024-11-21T08:57:26.603177Z node 17 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9718, node 17 2024-11-21T08:57:26.610062Z node 17 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.610077Z node 17 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.610080Z node 17 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.610125Z node 17 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2252 TClient is connected to server localhost:2252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.689975Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.690005Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.691120Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:26.692200Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.699222Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:26.707103Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.722178Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.733991Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.872413Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439654073435763446:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.872446Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.877948Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.883731Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.894321Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.950313Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.957511Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.971885Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.989213Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439654073435763962:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.989246Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.989247Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439654073435763967:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:26.990079Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:27.000047Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:7439654073435763969:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:27.176349Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:27.194315Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CountWhereColumnIsNull [GOOD] Test command err: Trying to start YDB, gRPC: 2841, MsgBus: 15347 2024-11-21T08:57:25.967337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654071355919448:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.967354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488e/r3tmp/tmpOkbcAz/pdisk_1.dat 2024-11-21T08:57:26.016332Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2841, node 1 2024-11-21T08:57:26.028184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.028197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.028199Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.028252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15347 2024-11-21T08:57:26.068681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.068717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.069771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:26.088746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:26.090841Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:26.098134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.111733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.111795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.111833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.111850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.111875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.111893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.111907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.111925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.111941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.111961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.111976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.111989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654075650887394:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.115261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.115282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.115312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.115328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.115345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.115364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.115377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.115392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.115407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.115425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.115440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.115452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654075650887396:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.118494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.118515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.118544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.118564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.118581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.118595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.118609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.118625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654075650887397:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
ceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1732179447382653,"name":"_full_task","f":1732179447382653,"d_finished":0,"c":0,"l":1732179447384057,"d":1404},"events":[{"name":"bootstrap","f":1732179447382723,"d_finished":214,"c":1,"l":1732179447382937,"d":214},{"a":1732179447384034,"name":"ack","f":1732179447383243,"d_finished":11,"c":1,"l":1732179447383254,"d":34},{"a":1732179447384033,"name":"processing","f":1732179447383230,"d_finished":168,"c":8,"l":1732179447383944,"d":192},{"name":"ProduceResults","f":1732179447382866,"d_finished":125,"c":11,"l":1732179447384047,"d":125},{"a":1732179447384047,"name":"Finish","f":1732179447384047,"d_finished":0,"c":0,"l":1732179447384057,"d":10},{"name":"task_result","f":1732179447383231,"d_finished":148,"c":7,"l":1732179447383943,"d":148}],"id":"72075186224037889::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;) 2024-11-21T08:57:27.384101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855746:2701];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:27.382584Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=65368;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=65368;selected_rows=0; 2024-11-21T08:57:27.384103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855746:2701];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:27.384107Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654079945855746:2701];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:27.384112Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654079945855746:2701];TabletId=72075186224037889;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;; 2024-11-21T08:57:27.384121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:57:27.384130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2024-11-21T08:57:27.384132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2024-11-21T08:57:27.384134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654079945855736:2700] finished for tablet 72075186224037888 2024-11-21T08:57:27.384138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654079945855736:2700] send ScanData to [1:7439654075650888351:2692] txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037888 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:57:27.384171Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439654079945855736:2700] and sent to [1:7439654075650888351:2692] packs: 0 txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037888 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1732179447381728,"name":"_full_task","f":1732179447381728,"d_finished":0,"c":0,"l":1732179447384141,"d":2413},"events":[{"name":"bootstrap","f":1732179447382988,"d_finished":137,"c":1,"l":1732179447383125,"d":137},{"a":1732179447384119,"name":"ack","f":1732179447383127,"d_finished":16,"c":1,"l":1732179447383143,"d":38},{"a":1732179447384117,"name":"processing","f":1732179447383125,"d_finished":187,"c":8,"l":1732179447384031,"d":211},{"name":"ProduceResults","f":1732179447383079,"d_finished":135,"c":11,"l":1732179447384133,"d":135},{"a":1732179447384133,"name":"Finish","f":1732179447384133,"d_finished":0,"c":0,"l":1732179447384141,"d":8},{"name":"task_result","f":1732179447383206,"d_finished":160,"c":7,"l":1732179447384031,"d":160}],"id":"72075186224037888::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;) 2024-11-21T08:57:27.384180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:27.381655Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4552;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4552;selected_rows=0; 2024-11-21T08:57:27.384182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:27.384187Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:27.384189Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:7439654079945855736:2700];TabletId=72075186224037888;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;; 2024-11-21T08:57:27.384318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:57:27.384332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2024-11-21T08:57:27.384338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;); 2024-11-21T08:57:27.384342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654079945855731:2695] finished for tablet 72075186224037890 2024-11-21T08:57:27.384348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654079945855731:2695] send ScanData to [1:7439654075650888351:2692] txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037890 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:57:27.384399Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439654079945855731:2695] and sent to [1:7439654075650888351:2692] packs: 0 txId: 281474976710669 scanId: 3 gen: 1 tablet: 72075186224037890 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1732179447381185,"name":"_full_task","f":1732179447381185,"d_finished":0,"c":0,"l":1732179447384352,"d":3167},"events":[{"name":"bootstrap","f":1732179447382693,"d_finished":240,"c":1,"l":1732179447382933,"d":240},{"a":1732179447384314,"name":"ack","f":1732179447383133,"d_finished":12,"c":1,"l":1732179447383145,"d":50},{"a":1732179447384311,"name":"processing","f":1732179447383001,"d_finished":153,"c":8,"l":1732179447384016,"d":194},{"name":"ProduceResults","f":1732179447382866,"d_finished":126,"c":11,"l":1732179447384340,"d":126},{"a":1732179447384340,"name":"Finish","f":1732179447384340,"d_finished":0,"c":0,"l":1732179447384352,"d":12},{"name":"task_result","f":1732179447383003,"d_finished":128,"c":7,"l":1732179447384016,"d":128}],"id":"72075186224037890::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;;) 2024-11-21T08:57:27.384410Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:27.380984Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4352;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4352;selected_rows=0; 2024-11-21T08:57:27.384417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:27.384421Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:27.384426Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654079945855731:2695];TabletId=72075186224037890;ScanId=3;TxId=281474976710669;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=4;column_names=level;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=4;column_names=level;);;program_input=(column_ids=4;column_names=level;);;; 2024-11-21T08:57:27.407441Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179446557, txId: 18446744073709551615] shutting down >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> KqpOlapStats::AddRowsSomeTablesInTableStore >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 27445, MsgBus: 20985 2024-11-21T08:57:27.340914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654076812635538:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:27.340931Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004883/r3tmp/tmp0BEhoy/pdisk_1.dat 2024-11-21T08:57:27.397923Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27445, node 1 2024-11-21T08:57:27.408757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:27.408789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:27.408791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:27.408827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20985 TClient is connected to server localhost:20985 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:27.442139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:27.442171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:57:27.443316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:27.474806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:27.492545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:27.501222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.501298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.501363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.501386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.501406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.501423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.501438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.501454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.501470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.501499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.501515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.501535Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654076812636184:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:27.501871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:27.501892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:27.501901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:27.501904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:27.501914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.501917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.501923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.501929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.501934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.501939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.501943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.501945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.501999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.502013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.502024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.502031Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.502037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.502044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.502055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.502061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.502068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.502074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:27.503967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654076812636190:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.503982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654076812636190:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.504003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654076812636190:2291];tablet_id=7207518622 ... 
21T08:57:27.510914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.510921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.510926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.510953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.510961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.510970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.510977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.510983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.510985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.510995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.511000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.511007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.511009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; 2024-11-21T08:57:27.600976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654076812636479:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.600994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654076812636491:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.601000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.601712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:27.603202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654076812636493:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:27.778026Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179447656, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType 
(DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1286) '('"_id" '"731d58e5-4f88df9d-39cff6e6-e220e222") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '"6")) (let $20 (Just $19)) (let $21 (Int32 '1)) (let $22 '($20 $21)) (let $23 (If (== $19 (Int32 '2147483647)) $22 '((+ $20 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($22 $23)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $24 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $25 '('"id" '"jsondoc" '"jsonval")) (let $26 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $27 (OptionalType (DataType 'JsonDocument))) (let $28 (DataType 'Json)) (let $29 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $30 '((ResourceType '"JsonPath"))) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $10 $31)) (let $33 '($32)) (let $34 (CallableType '() $29 '($27) $30 $33)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $30 '($10))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (lambda '($51) (Nothing $11))) (let $42 (lambda '($52) $52)) (let $43 (KqpWideReadOlapTableRanges $24 %kqp%tx_result_binding_0_0 $25 '() $26 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $27) '('"jsonval" (OptionalType $28)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($49) (block '( (let $50 (Apply $36 $49 $39 $40)) (return (Visit $50 '0 $41 '1 $42)) ))))) (let $47 '('eq $46 (String '"val1"))) (let $48 '('?? 
$47 (Bool 'false))) (return (KqpOlapFilter $44 $48)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($53 $54 $55) (block '( (let $56 (OptionalType $31)) (let $57 (CallableType '() $29 '($56) $30 $33)) (let $58 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $57 (VoidType) '"" $35)) (let $59 (IfPresent $55 (lambda '($64) (block '( (let $65 '($28 '"" '1)) (let $66 (CallableType '() '($31) $65)) (let $67 (Udf '"Json2.Parse" (Void) (VoidType) '"" $66 (VoidType) '"" '())) (return (Just (Apply $67 $64))) ))) (Nothing $56))) (let $60 (Apply $58 $59 $39 $40)) (let $61 (Visit $60 '0 $41 '1 $42)) (let $62 (Apply $36 $54 $39 $40)) (let $63 (Visit $62 '0 $41 '1 $42)) (return (AsStruct '('"column1" $61) '('"column2" $63) '('"id" $53))) )))))) ))) '('('"_logical_id" '1357) '('"_id" '"b4cbd106-9c299368-8b7946dc-4a0d67ab")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($68) $68) '('('"_logical_id" '2174) '('"_id" '"2199a31c-25aeb07f-7a7b887b-3d09c52a")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::CompositeRangeOlap >> KqpOlapSysView::StatsSysViewBytesPackActualization ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 10621, MsgBus: 27773 2024-11-21T08:57:27.529233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654079636458456:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:27.529318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004881/r3tmp/tmp3mQ9xm/pdisk_1.dat 2024-11-21T08:57:27.570382Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10621, node 1 2024-11-21T08:57:27.583273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:27.583294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:27.583296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:27.583340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27773 TClient is connected to server localhost:27773 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:27.626370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:27.629782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:27.629809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:27.630912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:27.637397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:27.647203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.647243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.647273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.647294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.647310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.647326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.647339Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.647358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.647380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.647402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.647417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.647438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654079636458893:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:27.649689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.649710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.649735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.649747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.649762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.649777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.649791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.649808Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.649830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.649845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.649859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.649874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654079636458894:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:27.652009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.652029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.652049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.652064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.652078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.652092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.652105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.652123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654079636458892:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.652136Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:74396540796364588 ... ss=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.655569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.655576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.655579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.655585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.655589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.655594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.655602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.655627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.655636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.655650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.655658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.655669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.655676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.655690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.655698Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.655707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.655713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:27.655765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:27.655773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:27.655780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:27.655784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:27.655800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.655807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.655815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.655822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.655829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.655836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.655841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.655845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.655869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.655877Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.655891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.655898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.655909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.655916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.655929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.655936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.655945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.655952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:27.696075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; 
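The records around this point are the standard setup-and-load phase of these OLAP unit tests: ESchemeOpCreateColumnStore and ESchemeOpCreateColumnTable create the column store and a table in it, each columnshard tablet runs its normalizer chain during TTxInitSchema/TTxUpdateSchema, and the native.cpp parsing/serialize entries with columns=5 are the Arrow batches of the test data being written. The test's own DDL and table path are not shown in this fragment, so the names in the sketch below are assumptions; a minimal YQL equivalent of adding one such row:

-- Hypothetical path and column names (the real test writes pre-serialized Arrow
-- batches, which is what the event=parsing / event=serialize records show).
UPSERT INTO `/Root/olapStore/olapTable` (`timestamp`, resource_id, uid, level, message)
VALUES (CurrentUtcTimestamp(), "res_1"u, "uid_1"u, 1, "msg_1"u);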
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:27.909917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654079636459342:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.909919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654079636459331:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.909931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.910461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:27.911751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654079636459345:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL >> KqpOlapStats::AddRowsTableInTableStore [GOOD] >> KqpOlap::OlapUpsert [GOOD] >> KqpOlapWrite::TierDraftsGC [GOOD] >> KqpOlapAggregations::Json_GetValue_ToString >> KqpOlap::PredicatePushdown |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 24450, MsgBus: 30866 2024-11-21T08:57:27.116048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654078032148952:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:27.116154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004884/r3tmp/tmpSDjzNB/pdisk_1.dat 2024-11-21T08:57:27.160877Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24450, node 1 2024-11-21T08:57:27.176821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:27.176835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:27.176837Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:27.176879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30866 TClient is connected to server localhost:30866 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:27.215118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:27.215160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:27.216163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:27.240657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:27.395623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654078032149406:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.395683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.398712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:27.405583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.405652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.405705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.405727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.405748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.405769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.405791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.405813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.405837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.405860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.405881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.405901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078032149473:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
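For KqpOlapStats::AddRowsTableInTableStore the store DDL is visible above: CREATE TABLESTORE `/Root/TableStoreTest` with columns id Int32 NOT NULL, resource_id Utf8, level Int32 and PRIMARY KEY (id). The follow-up CREATE TABLE statement is truncated further down in this log, so the table name and values below are assumptions; a minimal YQL sketch of adding a table to the store and upserting rows into it:

-- Hypothetical table name; in these tests the table is created under the
-- TABLESTORE path with the same columns as the store.
CREATE TABLE `/Root/TableStoreTest/TestTable` (
    id Int32 NOT NULL,
    resource_id Utf8,
    level Int32,
    PRIMARY KEY (id)
);

UPSERT INTO `/Root/TableStoreTest/TestTable` (id, resource_id, level)
VALUES (1, "resource_1"u, 10);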
2024-11-21T08:57:27.406340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:27.406353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:27.406364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:27.406367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:27.406382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.406390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.406399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.406408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.406417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.406425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.406431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.406438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.406500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.406514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.406529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.406537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:27.406550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.406558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.406574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.406581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.406591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.406598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=24288;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TierDraftsGC [GOOD] Test command err: Trying to start YDB, gRPC: 63526, MsgBus: 6843 2024-11-21T08:57:26.174671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654074963833279:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.175480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488b/r3tmp/tmpKpio8r/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63526, node 1 2024-11-21T08:57:26.243925Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2024-11-21T08:57:26.245658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.245672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.245674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.245710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6843 2024-11-21T08:57:26.275554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.275584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:26.276623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:26.311189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:26.319992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.331265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.331343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.331387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.331411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.331434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.331457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.331479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.331506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.331533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.331554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.331571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.331591Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.335181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.335215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.335260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.335280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.335306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.335332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.335357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.335387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.335413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.335435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.335463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.335490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074963833796:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.336041Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:26.336060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:26.336073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:26.336083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:26.336100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:26.336111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:26.336121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:26.336134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:26.336142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:26.336146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:26.336153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... 
57:28.348008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:28.348012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:28.348015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.348017Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.348021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:28.348029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:28.348045Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179448000 at tablet 72075186224037890 2024-11-21T08:57:28.348053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:28.348056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:57:28.348058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:57:28.348061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:28.348064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:28.348066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.348068Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.348070Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:28.348074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654074963833830:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:28.348492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T08:57:28.348507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T08:57:28.348516Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:28.348519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:28.348524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T08:57:28.348532Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179448000 at tablet 72075186224037891 2024-11-21T08:57:28.348535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:28.348538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439654074963833797:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T08:57:28.348991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:57:28.349003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:57:28.349010Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
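The periodic wakeup records above show each columnshard evaluating its background work on every cycle: indexation and compaction report reason=disabled for this test, while cleanup and TTL report skip_reason=no_changes because nothing is eligible yet. The TTL pass only has work to do when a TTL setting exists on the table; this test's DDL is not shown here, so the path and column in the illustration below are assumptions about the kind of setting the StartTtl step would evaluate:

-- Hypothetical path and column; table-level TTL is what the background=ttl
-- step checks on each periodic wakeup.
ALTER TABLE `/Root/olapStore/olapTable` SET (TTL = Interval("PT1H") ON ts);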
2024-11-21T08:57:28.349013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:28.349020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:57:28.349022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:57:28.349025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:28.349029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:28.349033Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.349035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.349039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:28.349050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:28.349068Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179448000 at tablet 72075186224037889 2024-11-21T08:57:28.349074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:28.349076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:57:28.349078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:57:28.349080Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:28.349082Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:28.349084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.349086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:28.349087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:28.349091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654074963833800:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; >> KqpDecimalColumnShard::TestSimpleQueries [GOOD] >> KqpOlap::CompositeRangeOlap [GOOD] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TVectorIndexTests::CreateTableMultiColumn >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestSimpleQueries [GOOD] Test command err: Trying to start YDB, gRPC: 18552, MsgBus: 64842 2024-11-21T08:57:27.701272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654078267958858:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:27.701408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004880/r3tmp/tmpvPshcF/pdisk_1.dat 2024-11-21T08:57:27.743443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18552, node 1 2024-11-21T08:57:27.755674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:27.755685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:27.755687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:27.755713Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64842 TClient is connected to server localhost:64842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:27.803003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:27.803034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:27.804284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:27.826640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:27.963905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654078267959468:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.963927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:27.985386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:27.991030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.991064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.991094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.991113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.991127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.991143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.991157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.991170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.991187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.991203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.991218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.991236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654078267959544:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:27.991552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:27.991561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:27.991571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:27.991577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:27.991588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.991595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.991601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.991611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.991620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.991628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.991633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.991639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.991681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.991692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.991706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.991712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:27.991719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.991725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.991736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.991742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.991749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.991755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALL ... Id; 2024-11-21T08:57:28.421229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.421244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.421261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.421277Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.421295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.421311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.421327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654082273163415:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.421839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.421859Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.421878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.421891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.421911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.421924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.421936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.421950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.421968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.421980Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.421988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.421998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.422057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.422069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.422085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.422093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.422107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.422114Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.422127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.422136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.422142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.422148Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:28.493116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654082562926995:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.493152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654082562927000:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.493157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.493801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:28.495185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654082562927002:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:28.599045Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179448545, txId: 18446744073709551615] shutting down 2024-11-21T08:57:28.626374Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179448622, txId: 18446744073709551615] shutting down 2024-11-21T08:57:28.627844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654082273163516:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.627866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654082273163521:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.627872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.628519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:28.630107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654082273163523:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:28.761516Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179448685, txId: 18446744073709551615] shutting down 2024-11-21T08:57:28.787057Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179448797, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompositeRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 30914, MsgBus: 7241 2024-11-21T08:57:28.600198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654080154981791:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:28.600343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487a/r3tmp/tmpbooI61/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30914, node 1 2024-11-21T08:57:28.647388Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:28.651056Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.651069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.651070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.651097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7241 TClient is connected to server localhost:7241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:28.695436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:28.701776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:28.701800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:28.702917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:28.705559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:28.715217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.715271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.715301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.715316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.715329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.715348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.715361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.715380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.715401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.715416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.715430Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.715445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654080154982449:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.715767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.715780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.715791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.715799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.715809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.715815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.715820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.715827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.715835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.715839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.715846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.715848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.715886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.715896Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.715909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.715915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.715921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.715927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.715937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.715943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.715950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.715955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:28.717835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654080154982448:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.717849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654080154982448:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.717881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654080154982448:2290];tablet_id=7207518622403 ... 
1-21T08:57:28.722419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.722429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.722437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.722446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.722453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.722460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.722464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.722467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.722493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.722500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.722510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.722513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.722519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.722522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.722531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.722538Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.722549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.722556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:28.722682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.722692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.722698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.722701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.722712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.722720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.722727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.722735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.722741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.722750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.722755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.722757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.722784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.722792Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.722806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.722813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.722823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.722831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.722844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.722851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.722859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.722866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:28.761945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:28.835266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654080154982734:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.835282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654080154982752:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.835292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.835803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:28.837063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654080154982772:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:28.957681Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179448888, txId: 18446744073709551615] shutting down 2024-11-21T08:57:28.988576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:29.022120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449028, txId: 18446744073709551615] shutting down 2024-11-21T08:57:29.055342Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449063, txId: 18446744073709551615] shutting down 2024-11-21T08:57:29.090869Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449091, txId: 18446744073709551615] shutting down 2024-11-21T08:57:29.127587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449126, txId: 18446744073709551615] shutting down 2024-11-21T08:57:29.166975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449168, txId: 18446744073709551615] shutting down >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> KqpOlap::SelectLimit1ManyShards [GOOD] |91.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:57:29.473646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:29.473672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:29.473677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:29.473682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:29.473688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:29.473692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:29.473702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:29.473796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:29.483583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:29.483600Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2024-11-21T08:57:29.486143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:29.486834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:29.486876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:29.488243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:29.488477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:29.488569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.488660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:29.489647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.489889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:29.489900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.489935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:29.489941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:29.489947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:29.489959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.491117Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:57:29.505907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:29.505974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.506025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:29.506058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:29.506064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.506742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.506761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:29.506802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.506808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:29.506811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:29.506815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:29.507080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.507085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:29.507088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:29.507304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.507310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.507314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.507320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.507687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:29.507987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:29.508021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:29.508148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.508163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:29.508167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.508221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:29.508229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.508259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:29.508267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:29.508576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:29.508581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:29.508611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.508617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:57:29.508689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.508694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:29.508703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:29.508705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.508709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:29.508712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.508715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:29.508717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:29.508724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:29.508728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:29.508730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:57:29.508913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:29.508921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:57:29.508925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:57:29.508928Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:57:29.508930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:29.508939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
trsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:29.597477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:29.597519Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 44us result status StatusSuccess 2024-11-21T08:57:29.597591Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:29.601674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:29.601719Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 54us result status StatusSuccess 2024-11-21T08:57:29.601805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 
4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpOlapAggregations::Aggregation_Count_GroupBy >> KqpOlap::ManyColumnShards >> KqpOlapAggregations::Aggregation_Sum_Null >> KqpOlapSysView::StatsSysView >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> KqpOlapAggregations::Aggregation_Some_Null >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm >> KqpOlapClickbench::ClickBenchSmoke ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 21890, MsgBus: 14867 2024-11-21T08:57:28.907922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654080109225207:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:28.908080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004875/r3tmp/tmpQ33JL9/pdisk_1.dat 2024-11-21T08:57:28.955503Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21890, node 1 2024-11-21T08:57:28.960657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.960680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.960682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.960719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14867 TClient is connected to server localhost:14867 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:29.003299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:29.009094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:29.009112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:29.010199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:29.015846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:29.023743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:29.023793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:29.023823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:29.023839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:29.023853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:29.023871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:29.023885Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:29.023901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:29.023916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:29.023931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:29.023946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:29.023965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084404193143:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:29.024312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:29.024323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:29.024331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:29.024335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:29.024345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:29.024351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:29.024357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:29.024368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:29.024374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:29.024380Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:29.024384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:29.024386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:29.024429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:29.024437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:29.024448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:29.024454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:29.024463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:29.024469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:29.024480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:29.024486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:29.024493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:29.024501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:29.026339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654084404193144:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:29.026356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654084404193144:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:29.026374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654084404193144:2289];tablet_id=7207518622 ... 
GCCountersNormalizer;id=9; 2024-11-21T08:57:29.031260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:29.031272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:29.031278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:29.031286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:29.031292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:29.031297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:29.031299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:29.031307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:29.031310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:29.031315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:29.031321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; 2024-11-21T08:57:29.138545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654084404193447:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:29.138567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654084404193439:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:29.138590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:29.139324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:29.140963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654084404193453:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:29.279952Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179449196, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1" RETURNING String), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.col1" RETURNING String) = "val1" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1290) '('"_id" '"38701aa3-96fec5b0-c4fca019-61d630aa") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $21 (Int32 '1)) (let $22 (Just $21)) (let $23 '($22 $21)) (let $24 (If (== $21 (Int32 '2147483647)) $23 '((+ $22 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($23 $24)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'String)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (OptionalType $10)) (let $14 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id" '"jsondoc" '"jsonval")) (let $27 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $28 (OptionalType (DataType 'JsonDocument))) (let $29 '((VariantType (TupleType (TupleType (DataType 'Uint8) $10) $12)))) (let $30 (ResourceType '"JsonNode")) (let $31 (OptionalType $30)) (let $32 '((ResourceType '"JsonPath"))) (let $33 (DictType $11 $30)) (let $34 '($33)) (let $35 (CallableType '() $29 '($31) $32 $34)) (let $36 '('('"strict"))) (let $37 (Udf '"Json2.SqlValueUtf8" (Void) (VoidType) '"" $35 (VoidType) '"" $36)) (let $38 (lambda '($57) (block '( (let $58 '((DataType 'Json) '"" '1)) (let $59 (CallableType '() '($30) $58)) (let $60 (Udf '"Json2.Parse" (Void) (VoidType) '"" $59 (VoidType) '"" '())) (return (Just (Apply $60 $57))) )))) (let $39 (Nothing $31)) (let $40 (CallableType '() $32 '($11))) (let $41 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $40 (VoidType) '"" '())) (let $42 (Apply $41 (Utf8 '"$.col1"))) (let $43 (Dict $33)) (let $44 (lambda '($61) (block '( (let $62 (Nothing $13)) (return $62) )))) (let $45 (lambda '($63) (block '( (let $64 (IfPresent $63 (lambda '($65) (Just (SafeCast $65 $10))) (Nothing $13))) (return (If (Exists $63) $64 (Nothing $13))) )))) (let $46 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 $26 '() $27 (lambda '($47) (block '( (let $48 (DataType 'Json)) (let $49 (StructType $9 '('"jsondoc" $28) '('"jsonval" (OptionalType $48)))) (let $50 (KqpOlapApply $49 '('"jsonval") (lambda '($53) (block '( (let $54 (IfPresent $53 $38 $39)) (let $55 (Apply $37 $54 $42 $43)) (let $56 (Nothing $13)) (return (Visit $55 '0 $44 '1 $45)) ))))) (let $51 '('eq $50 (String '"val1"))) (let $52 '('?? 
$51 (Bool 'false))) (return (KqpOlapFilter $47 $52)) ))))) (return (FromFlow (NarrowMap $46 (lambda '($66 $67 $68) (block '( (let $69 (IfPresent $68 $38 $39)) (let $70 (Apply $37 $69 $42 $43)) (let $71 (Visit $70 '0 $44 '1 $45)) (let $72 (CallableType '() $29 '($28) $32 $34)) (let $73 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $72 (VoidType) '"" $36)) (let $74 (Apply $73 $67 $42 $43)) (let $75 (Visit $74 '0 (lambda '($76) (Nothing $12)) '1 (lambda '($77) $77))) (return (AsStruct '('"column1" $71) '('"column2" $75) '('"id" $66))) )))))) ))) '('('"_logical_id" '1361) '('"_id" '"6b10d641-c982372b-8e16242c-4c7a9b1d")))) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (DqPhyStage '($15) (lambda '($78) $78) '('('"_logical_id" '2416) '('"_id" '"542c0d19-e529fe55-2219d749-d822ef53")))) (let $17 '('"id" '"column1" '"column2")) (let $18 (DqCnResult (TDqOutput $16 '0) $17)) (let $19 (KqpTxResultBinding $8 '0 '0)) (let $20 (KqpPhysicalTx '($14 $16) '($18) '('($5 $19)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $20) '((KqpTxResultBinding (ListType (StructType '('"column1" $13) '('"column2" $12) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> KqpOlapSparsed::SwitchingMultiColumn >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] >> KqpOlapAggregations::Aggregation_Sum_Null [GOOD] |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SelectLimit1ManyShards [GOOD] Test command err: 2024-11-21T08:57:25.001162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:57:25.004447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:25.004584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:25.004628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:25.005021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:25.005041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a0/r3tmp/tmpngltvc/pdisk_1.dat 2024-11-21T08:57:25.095432Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:25.178822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.269453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.269503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.274248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.274278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.294738Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:57:25.294869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:25.294959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:25.634882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.684002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1286:2803];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:25.686786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1286:2803];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:25.686867Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T08:57:25.687433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.687470Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.687514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.687533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.687550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.687569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.687586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.687603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.687620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.687638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.687659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.687677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.691699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1292:2807];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:25.693654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1292:2807];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:25.693723Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037895 2024-11-21T08:57:25.694288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.694314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.694358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.694377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.694397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.694416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.694434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.694451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.694470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.694486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.694510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.694529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.700566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1298:2809];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:25.702617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1298:2809];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:25.702675Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037896 2024-11-21T08:57:25.703266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.703291Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.703336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.703357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.703376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.703393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... n-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449166,\"TaskId\":67,\"WaitInputTimeUs\":5605}]},{\"CpuTimeUs\":150,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449166,\"TaskId\":68,\"WaitInputTimeUs\":5670}]},{\"CpuTimeUs\":169,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449166,\"TaskId\":69,\"WaitInputTimeUs\":5733}]},{\"CpuTimeUs\":154,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":70,\"WaitInputTimeUs\":5775}]},{\"CpuTimeUs\":199,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":71,\"WaitInputTimeUs\":5813}]},{\"CpuTimeUs\":135,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":72,\"WaitInputTimeUs\":5847}]},{\"CpuTimeUs\":141,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":73,\"WaitInputTimeUs\":5882}]},{\"CpuTimeUs\":133,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":74,\"WaitInputTimeUs\":5915}]},{\"CpuTimeUs\":926,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":75,\"WaitInputTimeUs\":5962}]},{\"CpuTimeUs\":144,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":76,\"WaitInputTimeUs\":6821}]},{\"CpuTimeUs\":129,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":77,\"WaitInputTimeUs\":6863}]},{\"CpuTimeUs\":133,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":78,\"WaitInputTimeUs\":6942}]},{\"CpuTimeUs\":153,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":79,\"WaitInputTimeUs\":7006}]},{\"CpuTimeUs\":137,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":80,\"WaitInputTimeUs\":7046}]},{\"CpuTimeUs\":123,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449
167,\"TaskId\":81,\"WaitInputTimeUs\":7079}]},{\"CpuTimeUs\":121,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":82,\"WaitInputTimeUs\":7110}]},{\"CpuTimeUs\":154,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":83,\"WaitInputTimeUs\":7154}]},{\"CpuTimeUs\":121,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":84,\"WaitInputTimeUs\":7229}]},{\"CpuTimeUs\":153,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449167,\"TaskId\":85,\"WaitInputTimeUs\":7274}]},{\"CpuTimeUs\":134,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":86,\"WaitInputTimeUs\":7326}]},{\"CpuTimeUs\":147,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":87,\"WaitInputTimeUs\":7361}]},{\"CpuTimeUs\":199,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":88,\"WaitInputTimeUs\":7391}]},{\"CpuTimeUs\":716,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":89,\"WaitInputTimeUs\":7382}]},{\"CpuTimeUs\":154,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":90,\"WaitInputTimeUs\":6834}]},{\"CpuTimeUs\":124,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":91,\"WaitInputTimeUs\":6850}]},{\"CpuTimeUs\":142,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":92,\"WaitInputTimeUs\":6883}]},{\"CpuTimeUs\":135,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":93,\"WaitInputTimeUs\":6917}]},{\"CpuTimeUs\":130,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":94,\"WaitInputTimeUs\":6954}]},{\"CpuTimeUs\":125,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":95,\"WaitInputTimeUs\":7004}]},{\"CpuTimeUs\":121,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":96,\"WaitInputTimeUs\":7039}]},{\"CpuTimeUs\":2752,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":97,\"WaitInputTimeUs\":7072}]},{\"CpuTimeUs\":157,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":98,\"WaitInputTimeUs\":9804}]},{\"CpuTimeUs\":289,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449168,\"TaskId\":99,\"WaitInputTimeUs\":9828}]},{\"CpuTimeUs\":133,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":100,\"WaitInputTimeUs\":9851}]},{\"CpuTimeUs\":117,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":101,\"WaitInputTimeUs\":9865}]},{\"CpuTimeUs\":115,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":102,\"WaitInputTimeUs\":9875}]},{\"CpuTimeUs\":109,
\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":103,\"WaitInputTimeUs\":9883}]},{\"CpuTimeUs\":107,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":104,\"WaitInputTimeUs\":9888}]},{\"CpuTimeUs\":116,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":105,\"WaitInputTimeUs\":9904}]},{\"CpuTimeUs\":115,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":106,\"WaitInputTimeUs\":9912}]},{\"CpuTimeUs\":152,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":107,\"WaitInputTimeUs\":9920}]},{\"CpuTimeUs\":113,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":108,\"WaitInputTimeUs\":9925}]},{\"CpuTimeUs\":120,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":109,\"WaitInputTimeUs\":9936}]},{\"CpuTimeUs\":134,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":110,\"WaitInputTimeUs\":9958}]},{\"CpuTimeUs\":163,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":111,\"WaitInputTimeUs\":9979}]},{\"CpuTimeUs\":152,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":112,\"WaitInputTimeUs\":10017}]},{\"CpuTimeUs\":113,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":113,\"WaitInputTimeUs\":10044}]},{\"CpuTimeUs\":112,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449169,\"TaskId\":114,\"WaitInputTimeUs\":10054}]},{\"CpuTimeUs\":128,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":115,\"WaitInputTimeUs\":10066}]},{\"CpuTimeUs\":132,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":116,\"WaitInputTimeUs\":10076}]},{\"CpuTimeUs\":1451,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":117,\"WaitInputTimeUs\":8790}]},{\"CpuTimeUs\":150,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":118,\"WaitInputTimeUs\":8764}]},{\"CpuTimeUs\":141,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":119,\"WaitInputTimeUs\":8773}]},{\"CpuTimeUs\":1358,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":120,\"WaitInputTimeUs\":8777}]},{\"CpuTimeUs\":123,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":121,\"WaitInputTimeUs\":10090}]},{\"CpuTimeUs\":112,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":122,\"WaitInputTimeUs\":10119}]},{\"CpuTimeUs\":121,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":123,\"WaitInputTimeUs\":10156}]},{\"CpuTimeUs\":118,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host
\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":124,\"WaitInputTimeUs\":10175}]},{\"CpuTimeUs\":118,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":125,\"WaitInputTimeUs\":10189}]},{\"CpuTimeUs\":124,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":126,\"WaitInputTimeUs\":10220}]},{\"CpuTimeUs\":133,\"Tasks\":[{\"ComputeTimeUs\":12,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":127,\"WaitInputTimeUs\":10264}]},{\"CpuTimeUs\":130,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732179449170,\"TaskId\":128,\"WaitInputTimeUs\":10316}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"selectStore\\/selectTable\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"1db3b7e5-46548d68-edc98d22-951b651\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\n\022\014\010U\020\300\025\030\211\313\001 \201\001" } } 2024-11-21T08:57:29.333874Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3302:3884] TxId: 281474976715662. Ctx: { TraceId: 01jd6z1ms8e4ap9z3jy9wngbnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2MWQ5ZWYtMTE1YWY4ZmYtMjg3OGZlOTAtZDM1NGM0NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T08:57:29.333887Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3302:3884] TxId: 281474976715662. Ctx: { TraceId: 01jd6z1ms8e4ap9z3jy9wngbnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2MWQ5ZWYtMTE1YWY4ZmYtMjg3OGZlOTAtZDM1NGM0NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.025993s ReadRows: 2 ReadBytes: 2151 ru: 17 rate limiter was not found force flag: 1 2024-11-21T08:57:29.335269Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=16;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=20;stage=FO::FETCHING; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 18350, MsgBus: 5507 2024-11-21T08:57:30.073940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654091643719872:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.074255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004872/r3tmp/tmphe0YlF/pdisk_1.dat 2024-11-21T08:57:30.111234Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18350, node 1 2024-11-21T08:57:30.121747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.121756Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.121757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.121780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5507 TClient is connected to server localhost:5507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:30.175265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.175290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.176450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.197791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:30.205108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.215559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.215621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.215669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.215694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.215720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.215741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.215761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.215782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.215806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.215824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.215844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.215866Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654091643720530:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.216309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.216323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.216335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.216343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.216359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.216368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.216377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.216386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.216395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.216403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.216410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.216418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.216472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.216482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.216499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.216507Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.216518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.216530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.216550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.216558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.216568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.216576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:30.219429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654091643720528:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.219448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654091643720528:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.219486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654091643720528:2288];tablet_id=7207518622403 ... 
Chunks; 2024-11-21T08:57:30.226702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.226709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.226722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.226729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.226737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.226745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.226752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.226760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.226765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.226773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.226804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.226814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.226827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.226831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.226842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.226849Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.226863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.226871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.226880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.226887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:30.262378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:30.440594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654091643720980:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.440597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654091643720969:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.440611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.441290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:30.442779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654091643720983:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:30.808159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179450498, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"eaf725fb-18fd86c3-2904a678-925921c0") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"bc12ea7c-26b5cae8-573ce38-5d5f6586")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"eb8c8a07-6524ec3b-ca9d9adc-8a1c2c23") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) |91.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] Test command err: 2024-11-21T08:56:28.532021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:28.532044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:28.532047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:28.532050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:28.532060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:28.532063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:28.532071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:28.532142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:28.534121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:28.534134Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:28.535296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:28.535320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:28.535334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:28.535898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:28.535960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:28.536006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.536073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:28.536465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:28.536637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046578944 2024-11-21T08:56:28.536643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:28.536651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:28.536655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:28.536659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:28.536694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.579234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:28.579301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.579360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:28.579393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:28.579400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.579992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:28.580049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:28.580060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:28.580063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:28.580442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:28.580713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:28.580722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:28.580738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:28.581143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:28.581418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:28.581455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:28.581646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:28.581651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:28.581654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.066948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.067003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 662 RawX2: 4294969528 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:29.067016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:29.067102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:29.067112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:29.067140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:29.067149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:29.067801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:29.067815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:29.067861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:29.067866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:669:2237], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:29.067929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.067936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:29.067947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2024-11-21T08:56:29.067952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:29.067964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:29.067969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:29.067973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:29.067977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:29.067988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:29.067994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:29.067998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:29.068469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:29.068492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:29.068497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:29.068502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:29.068508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:29.068525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:29.068529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:525:2138] 2024-11-21T ... 
874608Z node 169 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:28.874639Z node 169 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:28.874644Z node 170 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:28.874680Z node 170 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:28.874685Z node 171 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:28.874718Z node 171 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:28.874722Z node 163 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:28.874749Z node 163 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:28.874760Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874787Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874800Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874805Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874809Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874817Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874821Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874825Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874833Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874837Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874841Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874850Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874854Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874858Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874866Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874871Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874876Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874883Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874887Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874891Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874898Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874902Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:28.874906Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:28.874915Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:28.874993Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875002Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875005Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 
2024-11-21T08:57:28.875021Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[169:970:2101] 2024-11-21T08:57:28.875064Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875068Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875071Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875077Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[170:972:2101] 2024-11-21T08:57:28.875127Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875133Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875137Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875145Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[171:974:2101] 2024-11-21T08:57:28.875192Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875198Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875201Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875211Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[164:976:2101] 2024-11-21T08:57:28.875267Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875274Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875278Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875286Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[165:978:2101] 2024-11-21T08:57:28.875338Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875342Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875345Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875350Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[166:980:2101] 2024-11-21T08:57:28.875390Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875395Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875397Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875404Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[167:982:2101] 2024-11-21T08:57:28.875465Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:28.875469Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:28.875472Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:28.875479Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[168:984:2101] 2024-11-21T08:57:28.878130Z node 170 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878152Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [170:942:2098] 
2024-11-21T08:57:28.878169Z node 171 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878174Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [171:943:2098] 2024-11-21T08:57:28.878193Z node 164 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878197Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [164:936:2098] 2024-11-21T08:57:28.878208Z node 165 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878212Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [165:937:2098] 2024-11-21T08:57:28.878222Z node 166 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878226Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [166:938:2098] 2024-11-21T08:57:28.878237Z node 167 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878243Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [167:939:2098] 2024-11-21T08:57:28.878254Z node 168 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878259Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [168:940:2098] 2024-11-21T08:57:28.878277Z node 169 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:28.878281Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [169:941:2098] 2024-11-21T08:57:28.878895Z node 165 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[165:978:2101] 2024-11-21T08:57:28.878957Z node 170 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[170:972:2101] 2024-11-21T08:57:28.878977Z node 166 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[166:980:2101] 2024-11-21T08:57:28.879045Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879052Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879054Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879061Z node 167 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[167:982:2101] 2024-11-21T08:57:28.879080Z node 164 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[164:976:2101] 2024-11-21T08:57:28.879096Z node 169 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[169:970:2101] 2024-11-21T08:57:28.879122Z node 168 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[168:984:2101] 2024-11-21T08:57:28.879150Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879153Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879155Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879159Z node 171 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[171:974:2101] 2024-11-21T08:57:28.879185Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879189Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879190Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 
2024-11-21T08:57:28.879233Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879236Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879238Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879254Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879257Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879258Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879276Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879279Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879283Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879305Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879307Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879309Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:28.879365Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:28.879370Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:28.879372Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 Test command err: Trying to start YDB, gRPC: 8529, MsgBus: 30951 2024-11-21T08:57:20.595504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654046231434573:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.595519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048cb/r3tmp/tmpmeWLbA/pdisk_1.dat 2024-11-21T08:57:20.653350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8529, node 1 2024-11-21T08:57:20.663030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.663047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.663050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.663092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30951 TClient is connected to server localhost:30951 2024-11-21T08:57:20.696144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.696173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:57:20.697255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.729900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:20.737764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:20.744050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:20.747223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:20.747275Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:57:20.747901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:20.747971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:20.748025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:20.748051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:20.748077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:20.748102Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:20.748124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:20.748147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:20.748171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:20.748200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:20.748237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:20.748260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654046231435222:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:20.748692Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T08:57:20.748715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:57:20.748724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:57:20.748741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:20.748765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:20.748777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:20.748785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:57:20.748792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:57:20.748802Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:20.748812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:20.748818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:57:20.748831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:20.748842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:20.748850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:20.748857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:57:20.748864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:57:20.748874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:20.748884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:20.748891Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:57:20.748899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:20.748908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:20.748915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:57:20.748922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:20.748931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Res ... 
NSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:25.774035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T08:57:25.774042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:25.774049Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:25.774058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:25.774062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.774064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.774066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:25.774077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:25.774096Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179445000 at tablet 72075186224037889 2024-11-21T08:57:25.774099Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:25.774102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:25.774104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:25.774107Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:25.774109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.774110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.774112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:25.774116Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654046231435223:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:25.778168Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:57:25.778203Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:25.778212Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:25.778230Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:25.778258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:25.778273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:25.778285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.778293Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.778301Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:25.778326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:25.778357Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179445000 at tablet 72075186224037890 2024-11-21T08:57:25.778361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:25.778370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:25.778374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:25.778386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:25.778389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.778391Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:25.778394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:25.778399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654046231435231:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:25.805287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654046231435224:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:25.805298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654046231435222:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:25.805310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654046231435223:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:25.805311Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654046231435231:2291];fline=actor.cpp:33;event=skip_flush_writing; 
2024-11-21T08:57:26.305485Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654046231435224:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:26.305515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654046231435222:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:26.305519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654046231435223:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:26.305523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654046231435231:2291];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:240: Execute_ @ 0x124D2F09 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FC13B1B5D8F 11. ??:0: ?? @ 0x7FC13B1B5E3F 12. ??:0: ?? @ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] Test command err: Trying to start YDB, gRPC: 1313, MsgBus: 30637 2024-11-21T08:57:30.131725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654092477314737:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.131834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004871/r3tmp/tmpCQPyAo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1313, node 1 2024-11-21T08:57:30.185008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:30.185432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.185445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.185447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.185502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30637 TClient is connected to server localhost:30637 WaitRootIsUp 'Root'... 
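Both KqpOlapAggregations runs in this stretch of the log (Aggregation_Some_Null, whose output starts just above, and Aggregation_Sum_Null further down) query a column-store table at /Root/tableWithNulls. As a hedged sketch of the kind of fixture involved (the real table is created by the test harness and, per the native.cpp serialization traces, has six columns, so everything here beyond the id and level columns named in the queries is an assumption, not the test's actual schema), a YDB column table touching only those two columns could be declared like this:

-- Illustrative DDL only; not the test's real schema. id and level are the
-- columns the queries below reference, all other settings are assumed.
CREATE TABLE `/Root/tableWithNulls` (
    id Int32 NOT NULL,       -- key column used in the WHERE id > 5 ranges
    level Int32,             -- nullable column fed to SOME()/SUM()
    PRIMARY KEY (id)
)
PARTITION BY HASH(id)        -- column tables are hash-partitioned on the key
WITH (STORE = COLUMN);       -- STORE = COLUMN selects the columnar (OLAP) engine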
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:30.232775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.232802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.233817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.262881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.270733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.278057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.278113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.278144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.278166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.278183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.278198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.278214Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.278230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.278245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.278259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.278278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.278288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092477315386:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.280248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.280274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.280310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.280332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.280354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.280375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.280396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.280420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.280444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.280459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.280476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.280498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092477315385:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.280936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.280952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.280964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.280974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.280991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.281000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.281010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.281024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.281037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.281047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.281058Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888 ... tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.285574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.285579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.285581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.285588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.285590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.285595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.285601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.285606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.285609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.285612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.285616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.285630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.285636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.285644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.285650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.285656Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.285662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.285669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.285675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.285680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.285686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 2024-11-21T08:57:30.365081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654092477315681:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.365103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.365113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654092477315692:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.365751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:30.367181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654092477315695:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:30.568757Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179450421, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '697) '('"_id" '"d6d29437-388101d6-c64c4388-5e24e7f2") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType 
$2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '755) '('"_id" '"968fd3a1-571e68b3-e969447-ac3c405d") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"level") '() $29 (lambda '($31) (TKqpOlapAgg $31 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $30)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($32) (block '( (let $33 (Bool 'false)) (let $34 (WideCondense1 (ToFlow $32) (lambda '($36) $36) (lambda '($37 $38) $33) (lambda '($39 $40) (Coalesce $40 $39)))) (let $35 (Condense (NarrowMap (Take $34 (Uint64 '1)) (lambda '($41) (AsStruct '('Some0 $41)))) (Nothing (OptionalType (StructType '('Some0 $2)))) (lambda '($42 $43) $33) (lambda '($44 $45) (Just $44)))) (return (FromFlow (Map $35 (lambda '($46) (AsList (AsStruct '('"column0" (Member $46 'Some0)))))))) ))) '('('"_logical_id" '1289) '('"_id" '"aa78ae35-30580ac5-e9ce025e-7d8754ca")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($8 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $2)))) (let $21 '('('"_logical_id" '1385) '('"_id" '"27bd082b-b1502255-b3af51bb-633f532c") $3)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($7 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_Null [GOOD] Test command err: Trying to start YDB, gRPC: 3397, MsgBus: 19822 2024-11-21T08:57:30.112121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654088553601449:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.112285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004870/r3tmp/tmpLEKyUc/pdisk_1.dat 2024-11-21T08:57:30.157977Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3397, node 1 2024-11-21T08:57:30.165911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.165927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.165928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.165965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19822 TClient is connected to server localhost:19822 WaitRootIsUp 'Root'... 
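The JSON plan and AST above show the aggregate being pushed down to the column shards: the KqpWideReadOlapTableRanges read carries a TKqpOlapAgg node computing 'some' over "level", and the follow-up stage only condenses the partial results and takes a single row. Restated as plain YQL (the request text is verbatim from the log; the only assumption, implied by the test name Aggregation_Some_Null, is that the fixture rows with id > 5 hold NULL in level), the query is:

--!syntax_v1
PRAGMA Kikimr.OptUseFinalizeByKey;
-- SOME() returns the value from an arbitrary row of the group, so over an
-- all-NULL input it yields NULL rather than failing.
SELECT SOME(level)
FROM `/Root/tableWithNulls`
WHERE id > 5;

The Aggregation_Sum_Null run whose output follows exercises the same shape with SUM(level); summing an input that contains only NULLs likewise yields NULL rather than 0.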
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:30.208163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.210879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.212948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.212964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.214117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.221473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.221533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.221560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.221579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.221590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.221612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.221628Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.221641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.221665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.221687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.221703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.221721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088553602107:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.223755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.223773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.223796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.223806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.223817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.223828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.223837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.223853Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.223876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.223889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.223904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.223926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088553602106:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.224234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.224248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.224257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.224259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.224274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.224279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.224285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.224292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.224297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.224300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.224310Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888 ... ; 2024-11-21T08:57:30.229692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.229696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.229699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.229707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.229709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.229713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.229716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.229719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.229722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.229726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.229728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.229742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.229746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.229754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.229759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.229765Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.229768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.229777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.229782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.229787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.229790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2024-11-21T08:57:30.338351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654088553602379:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.338370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.338411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654088553602406:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.338973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:30.340299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654088553602408:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:30.507543Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179450393, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int64)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '840) '('"_id" '"5953ea9b-90cdb397-26b74898-31ab1b18") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType 
$2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (OptionalType (DataType 'Int64))) (let $12 '('('"_logical_id" '898) '('"_id" '"2436082e-cdf6be6c-e0945a43-abae805f") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'sum '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Sum0 $42)))) (Nothing (OptionalType (StructType '('Sum0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Member $47 'Sum0)))))))) ))) '('('"_logical_id" '1576) '('"_id" '"ba87c983-c23f9271-ed6f994a-33df9b20")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1672) '('"_id" '"5b1c14e7-9eb29284-996d281b-b2436fb4") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean Test command err: Trying to start YDB, gRPC: 5378, MsgBus: 61869 2024-11-21T08:57:22.364226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654057016241319:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:22.364243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b4/r3tmp/tmpUXn7H6/pdisk_1.dat 2024-11-21T08:57:22.431070Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5378, node 1 2024-11-21T08:57:22.440252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.440266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.440268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.440308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61869 2024-11-21T08:57:22.468515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.468550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.469306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:22.504343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:22.511897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:22.518720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:22.521923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:22.521982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2024-11-21T08:57:22.522632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.522680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.522722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.522746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.522773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2024-11-21T08:57:22.522792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.522809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.522831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:22.522857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:22.522878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.522894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:22.522916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654057016241973:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:22.523364Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037891 2024-11-21T08:57:22.523388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:57:22.523398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:57:22.523416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:22.523440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:22.523452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:22.523460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:57:22.523468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:57:22.523484Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:22.523491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:22.523494Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:57:22.523507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:22.523518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:22.523523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:22.523530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:57:22.523537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:57:22.523547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:22.523554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:22.523563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:57:22.523572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:22.523581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:22.523589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:57:22.523596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:22.523606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Res ... 
ip_reason=no_changes; 2024-11-21T08:57:27.541798Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:57:27.541810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:57:27.541820Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:27.541837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:27.541894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:27.541920Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:27.541929Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:27.541936Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.541937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T08:57:27.541945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.541953Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:27.541954Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:57:27.541959Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:27.541973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:27.541990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:27.541999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:27.542014Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:27.542022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:27.542024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:27.542041Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179447000 at tablet 72075186224037889 2024-11-21T08:57:27.542048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:27.542052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:27.542055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:27.542061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 
2024-11-21T08:57:27.542061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:27.542063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542081Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179447000 at tablet 72075186224037890 2024-11-21T08:57:27.542083Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:27.542085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:27.542085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:27.542088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:27.542092Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:27.542094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654057016241972:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:27.542099Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542101Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:27.542102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:27.542106Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654057016242005:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:27.649418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654057016241972:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:27.650407Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654057016242005:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:27.650412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654057016241973:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:27.650429Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654057016241971:2288];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:257: Execute_ @ 0x124D34C4 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F4EBD160D8F 11. ??:0: ?? @ 0x7F4EBD160E3F 12. ??:0: ?? @ 0x11815028 |91.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> KqpOlapAggregations::Aggregation_Count_GroupBy [GOOD] |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 26415, MsgBus: 22752 2024-11-21T08:57:30.495593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654092576491441:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.495856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00486c/r3tmp/tmpag2GMO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26415, node 1 2024-11-21T08:57:30.546748Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:30.551427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.551439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.551440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.551474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22752 TClient is connected to server localhost:22752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:30.596837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.596863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.597913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.626535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.634392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.642535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.642601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.642633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.642651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.642668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.642680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.642696Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.642719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.642738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.642757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.642772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.642789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092576492086:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.644874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.644891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.644910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.644924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.644936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.644952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.644962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.644977Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.644986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.645000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.645010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.645024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092576492088:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.646953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.646968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.646988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.646997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.647008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.647018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.647031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.647040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092576492091:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.647056Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:74396540925764920 ... ess=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:30.650068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.650081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.650086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.650089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.650097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.650099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.650103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.650105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.650110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.650113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.650116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.650118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.650133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.650136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.650145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.650148Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.650155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.650163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.650172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.650174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.650181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.650188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2024-11-21T08:57:30.744073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654092576492382:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.744095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654092576492392:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.744102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:30.744755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:30.746435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654092576492396:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:31.406005Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179450799, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '836) '('"_id" '"72d0812b-a7614fab-f3705eb7-8f7f521") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"5")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '895) '('"_id" 
'"dd617b1b-906de287-68d37e10-a8c1e8c5") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1318) '('"_id" '"a32f775d-7f510e59-dd7941e7-9f39c4f8") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1330) '('"_id" '"ca0ed009-9921216d-8805668b-c5df2c7")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpDecimalColumnShard::TestFilterNulls >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL >> KqpOlapSysView::StatsSysViewBytesColumnActualization >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix >> KqpDecimalColumnShard::TestFilterCompare >> KqpOlapAggregations::Aggregation_Avg_GroupByNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 Test command err: Trying to start YDB, gRPC: 7858, MsgBus: 28356 2024-11-21T08:57:25.196069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654067504295275:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.196309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004891/r3tmp/tmpRKoDdr/pdisk_1.dat 2024-11-21T08:57:25.239708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7858, node 1 2024-11-21T08:57:25.259880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.259891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.259892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.259923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28356 TClient is connected to server localhost:28356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:25.297437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.297465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.298525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.323903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:25.326280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:25.336177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.344348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:25.347294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:25.347339Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:57:25.347956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.348021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.348085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.348108Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.348131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.348153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.348173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.348195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.348241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.348259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.348275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.348294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067504295923:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.348722Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T08:57:25.348749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:57:25.348754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:57:25.348774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:25.348797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.348811Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.348814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:57:25.348821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:57:25.348833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.348842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.348844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:57:25.348856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:25.348866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.348875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.348878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:57:25.348887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:57:25.348896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.348904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.348910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:57:25.348919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.348923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.348924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:57:25.348930Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.348934Z node 1 :TX_COLUMNSHARD WARN: tab ... ip_reason=no_changes; 2024-11-21T08:57:30.369185Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:57:30.369198Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:57:30.369208Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:30.369211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:30.369222Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:30.369230Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:30.369238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:30.369242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:30.369269Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:30.369282Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179450000 at tablet 72075186224037889 2024-11-21T08:57:30.369288Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:30.369290Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:30.369292Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:30.369294Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:30.369296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369297Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:30.369302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654067504295933:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:30.369617Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T08:57:30.369626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:57:30.369632Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:30.369635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:30.369639Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:30.369644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:30.369646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:30.369648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:30.369660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:30.369667Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179450000 at tablet 72075186224037890 2024-11-21T08:57:30.369672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:30.369674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:30.369675Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:30.369677Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:30.369678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:30.369679Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:30.369682Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654067504295929:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:30.390145Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654067504295929:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:30.390146Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654067504295936:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:30.391651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654067504295923:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:30.391654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654067504295933:2290];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:247: Execute_ @ 0x124D30D4 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7EFD7A851D8F 11. ??:0: ?? @ 0x7EFD7A851E3F 12. ??:0: ?? 
@ 0x11815028 >> KqpOlap::SimpleRangeOlap >> KqpDecimalColumnShard::TestFilterEqual >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> KqpOlap::ExtractRangesReverse >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] >> KqpOlap::SimpleRangeOlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] Test command err: Trying to start YDB, gRPC: 20914, MsgBus: 4967 2024-11-21T08:57:32.341120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654100081612591:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.341136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004867/r3tmp/tmpgn7IoC/pdisk_1.dat 2024-11-21T08:57:32.394687Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20914, node 1 2024-11-21T08:57:32.406055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.406075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.406077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.406115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4967 TClient is connected to server localhost:4967 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:32.442634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.442662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.443725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.472704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:32.474652Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:32.483651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.493953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.494027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.494080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.494107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.494134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.494154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.494172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.494190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.494210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.494225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.494247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.494263Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654100081613242:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.498135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.498165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.498203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.498223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.498249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.498268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.498286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.498304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.498324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.498342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.498365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.498382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100081613243:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.498910Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.498920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.498936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.498940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.498956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.498963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.498973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.498981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.498990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.498994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_swi ... 
ncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.507382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.507385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.507399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.507402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.507412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.507414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.508506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.508516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.508526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.508530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.508545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.508548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.508556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.508562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.508570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.508573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.508579Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.508582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.508618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.508623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.508639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.508642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.508653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.508657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.508671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.508675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.508684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.508687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2024-11-21T08:57:32.630352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100081613540:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.630377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100081613512:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.630444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.631180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:32.633026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654100081613550:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:32.748167Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452689, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? 
$10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '551) '('"_id" '"e09a0ea0-63bb3097-8120027b-1945aa1f")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '730) '('"_id" '"373636ab-e57466c9-6cb7f935-50aa719b")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlapSysView::StatsSysViewTable >> KqpOlap::ExtractRangesReverse [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 11935, MsgBus: 30832 2024-11-21T08:57:32.473923Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654098556348874:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.474165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485d/r3tmp/tmpNo63p1/pdisk_1.dat 2024-11-21T08:57:32.527241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11935, node 1 2024-11-21T08:57:32.540248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.540265Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.540267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.540311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30832 2024-11-21T08:57:32.574241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.574274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.575156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:32.587053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.598626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.610676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.610777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.610839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.610867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.610891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.610914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.610941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.610960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.610988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.611011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.611034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.611057Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654098556349513:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.611602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.611619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.611632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.611637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.611652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.611656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.611666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.611677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.611686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.611694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.611700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.611709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.611780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.611792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.611809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:32.611818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.611829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.611834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.611856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.611865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.611877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.611885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.615142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654098556349511:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.615177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654098556349511:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.615220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654098556349511:2288];tablet_id=7207518622 ... 
32.622953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654098556349517:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.623482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.623496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.623507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.623515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.623530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.623533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.623542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.623547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.623556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.623564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.623570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.623578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.623633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.623643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.623659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:32.623662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.623672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.623680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.623696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.623704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.623716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.623723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.623787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.623795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.623801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.623804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.623817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.623825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.623833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.623840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.623847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.623850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.623855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.623858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.623883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.623890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.623902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.623910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.623923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.623930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.623942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.623950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.623957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.623960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.657593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:32.741444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654098556349823:2377], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.741462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654098556349834:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.741468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.742145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:32.743718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654098556349837:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:32.884684Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452794, txId: 18446744073709551615] shutting down >> KqpDecimalColumnShard::TestFilterNulls [GOOD] >> BsControllerConfig::MoveGroups [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ExtractRangesReverse [GOOD] Test command err: Trying to start YDB, gRPC: 30722, MsgBus: 64590 2024-11-21T08:57:32.911677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654097938307725:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.911733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485c/r3tmp/tmpf6ABYv/pdisk_1.dat 2024-11-21T08:57:32.954876Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30722, node 1 2024-11-21T08:57:32.967177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.967190Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.967196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.967232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64590 TClient is connected to server localhost:64590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:33.009410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:33.011730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:33.011748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:33.012901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:33.020765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:33.031567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:33.031635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:33.031690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:33.031714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:33.031735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:33.031761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:33.031783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:33.031805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:33.031829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:33.031851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.031873Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:33.031898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654102233275667:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:33.032372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:33.032387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:33.032398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:33.032403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:33.032417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:33.032426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:33.032435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:33.032445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:33.032459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:33.032468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.032479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.032487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.032537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.032543Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.032558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.032562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.032573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.032576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.032591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.032598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.032616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.032619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:33.035609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654102233275669:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:33.035620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654102233275669:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:33.035653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654102233275669:2289];tablet_id=7207518622 ... 
:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.043271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.043275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.043298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.043301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.043315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.043319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.043331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.043335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.043348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.043352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.043360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.043363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:33.043418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:33.043421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:33.043428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:33.043431Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:33.043444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:33.043447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:33.043454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:33.043458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:33.043469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:33.043472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.043477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.043481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.043501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.043504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.043516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.043519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.043528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.043531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.043544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T08:57:33.043547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.043556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.043558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:33.078714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; ==================================== QUERY: SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp` < CAST(1000100 AS Timestamp) AND `timestamp` > CAST(1000095 AS Timestamp)) OR (`timestamp` < CAST(1000300 AS Timestamp) AND `timestamp` >= CAST(1000295 AS Timestamp)) OR (`timestamp` <= CAST(1000400 AS Timestamp) AND `timestamp` > CAST(1000395 AS Timestamp)) OR (`timestamp` <= CAST(1000500 AS Timestamp) AND `timestamp` >= CAST(1000495 AS Timestamp)) OR (`timestamp` <= CAST(1000505 AS Timestamp) AND `timestamp` >= CAST(1000499 AS Timestamp)) OR (`timestamp` < CAST(1000510 AS Timestamp) AND `timestamp` >= CAST(1000505 AS Timestamp)) OR (`timestamp` <= CAST(1001000 AS Timestamp) AND `timestamp` >= CAST(1000999 AS Timestamp)) OR (`timestamp` > CAST(1002000 AS Timestamp)) ORDER BY `timestamp` DESC LIMIT 1000; RESULT: 2024-11-21T08:57:33.179934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654102233275992:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.179949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654102233276003:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.179953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.180556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.182034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654102233276006:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:33.343397Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453235, txId: 18446744073709551615] shutting down timestamp: 1970-01-01T00:00:01.001000Z timestamp: 1970-01-01T00:00:01.000999Z timestamp: 1970-01-01T00:00:01.000509Z timestamp: 1970-01-01T00:00:01.000508Z timestamp: 1970-01-01T00:00:01.000507Z timestamp: 1970-01-01T00:00:01.000506Z timestamp: 1970-01-01T00:00:01.000505Z timestamp: 1970-01-01T00:00:01.000504Z timestamp: 1970-01-01T00:00:01.000503Z timestamp: 1970-01-01T00:00:01.000502Z timestamp: 1970-01-01T00:00:01.000501Z timestamp: 1970-01-01T00:00:01.000500Z timestamp: 1970-01-01T00:00:01.000499Z timestamp: 1970-01-01T00:00:01.000498Z timestamp: 1970-01-01T00:00:01.000497Z timestamp: 1970-01-01T00:00:01.000496Z timestamp: 1970-01-01T00:00:01.000495Z timestamp: 1970-01-01T00:00:01.000400Z timestamp: 1970-01-01T00:00:01.000399Z timestamp: 1970-01-01T00:00:01.000398Z timestamp: 1970-01-01T00:00:01.000397Z timestamp: 1970-01-01T00:00:01.000396Z timestamp: 1970-01-01T00:00:01.000299Z timestamp: 1970-01-01T00:00:01.000298Z timestamp: 1970-01-01T00:00:01.000297Z timestamp: 1970-01-01T00:00:01.000296Z timestamp: 1970-01-01T00:00:01.000295Z timestamp: 1970-01-01T00:00:01.000099Z timestamp: 1970-01-01T00:00:01.000098Z timestamp: 1970-01-01T00:00:01.000097Z timestamp: 1970-01-01T00:00:01.000096Z >> KqpDecimalColumnShard::TestFilterEqual [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterNulls [GOOD] Test command err: Trying to start YDB, gRPC: 4989, MsgBus: 15493 2024-11-21T08:57:32.268927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654100941509269:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.269144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004868/r3tmp/tmpy1TCin/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4989, node 1 2024-11-21T08:57:32.314404Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:32.314816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.314829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.314830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.314860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15493 TClient is connected to server localhost:15493 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.370212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.370248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.371370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:32.391628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.399469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:32.524796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100941509886:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.524820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.555208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:32.564373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.564423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.564491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.564511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.564529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.564548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.564565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.564582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.564599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.564617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.564639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.564656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100941509962:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:32.568424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.568448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.568473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.568485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.568508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.568514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.568524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.568536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.568558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.568574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.568578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.568582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.568651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.568660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.568674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.568680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:32.568697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.568714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.568730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.568737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.568746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57 ... ;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.920290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654100901747611:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.920305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654100901747611:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.920689Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.920704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.920713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.920716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.920725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.920728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.920737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.920743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.920749Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.920752Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.920757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.920759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.920795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.920805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.920815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.920817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.920825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.920828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.920838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.920846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.920853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.920856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=304;columns=3; 2024-11-21T08:57:33.013418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654105236477424:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.013445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.013446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654105236477429:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.014034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.015247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654105236477431:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:33.124428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453067, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.153397Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453151, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.154481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654105196715019:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.154510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.154515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654105196715024:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.155033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.156164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654105196715026:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:33.250963Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453207, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.278134Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453284, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T08:57:17.220538Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.221288Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.221731Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.221821Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.221957Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.221964Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.222007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.222921Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.222947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.222983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.222999Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.223013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.223021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T08:57:17.233732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.233812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.244174Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.244248Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from 
queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.244267Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.244280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.244311Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.244320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.244327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.244335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.254653Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.254709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.254916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.254923Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.254948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.257177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 100 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "second storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } } 2024-11-21T08:57:17.257383Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1000 Path# /dev/disk2 2024-11-21T08:57:17.257387Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1001 Path# /dev/disk1 2024-11-21T08:57:17.257390Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1000 Path# /dev/disk3 2024-11-21T08:57:17.257394Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1001 Path# /dev/disk2 2024-11-21T08:57:17.257396Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1002 Path# /dev/disk1 2024-11-21T08:57:17.257399Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1000 Path# /dev/disk2 2024-11-21T08:57:17.257402Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1001 Path# /dev/disk1 2024-11-21T08:57:17.257404Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1000 Path# /dev/disk2 2024-11-21T08:57:17.257407Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1001 Path# /dev/disk1 2024-11-21T08:57:17.257410Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1000 Path# /dev/disk2 2024-11-21T08:57:17.257412Z node 1 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1001 Path# /dev/disk1 2024-11-21T08:57:17.257414Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1000 Path# /dev/disk2 2024-11-21T08:57:17.257416Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1001 Path# /dev/disk1 2024-11-21T08:57:17.257419Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1000 Path# /dev/disk2 2024-11-21T08:57:17.257426Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1001 Path# /dev/disk1 2024-11-21T08:57:17.257428Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1000 Path# /dev/disk2 2024-11-21T08:57:17.257431Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1001 Path# /dev/disk1 2024-11-21T08:57:17.257433Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1000 Path# /dev/disk2 2024-11-21T08:57:17.257435Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1001 Path# /dev/disk1 2024-11-21T08:57:17.257437Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1000 Path# /dev/disk2 2024-11-21T08:57:17.257440Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1001 Path# /dev/disk1 2024-11-21T08:57:17.257442Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1000 Path# /dev/disk2 2024-11-21T08:57:17.257444Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1001 Path# /dev/disk1 2024-11-21T08:57:17.257446Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisk ... 
disks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk1 2024-11-21T08:57:25.176094Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2024-11-21T08:57:25.176098Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1000 Path# /dev/disk2 2024-11-21T08:57:25.176103Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1000 Path# /dev/disk3 2024-11-21T08:57:25.176107Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1001 Path# /dev/disk1 2024-11-21T08:57:25.176111Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1000 Path# /dev/disk1 2024-11-21T08:57:25.176115Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1000 Path# /dev/disk1 2024-11-21T08:57:25.176120Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1002 Path# /dev/disk2 2024-11-21T08:57:25.176124Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1000 Path# /dev/disk3 2024-11-21T08:57:25.176130Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1001 Path# /dev/disk3 2024-11-21T08:57:25.176134Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1001 Path# /dev/disk2 2024-11-21T08:57:25.176138Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1002 Path# /dev/disk1 2024-11-21T08:57:25.176141Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1000 Path# /dev/disk2 2024-11-21T08:57:25.176146Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T08:57:25.176151Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1000 Path# /dev/disk2 2024-11-21T08:57:25.176155Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1002 Path# /dev/disk2 2024-11-21T08:57:25.176158Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1001 Path# /dev/disk3 2024-11-21T08:57:25.176162Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1000 Path# /dev/disk3 2024-11-21T08:57:25.176165Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1001 Path# /dev/disk1 2024-11-21T08:57:25.176168Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T08:57:25.176171Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1000 Path# /dev/disk3 2024-11-21T08:57:25.176174Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1001 Path# /dev/disk3 2024-11-21T08:57:25.176177Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1001 Path# /dev/disk2 2024-11-21T08:57:25.176180Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1002 Path# /dev/disk1 2024-11-21T08:57:25.176183Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1002 
Path# /dev/disk2 2024-11-21T08:57:25.176187Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1001 Path# /dev/disk3 2024-11-21T08:57:25.176190Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1002 Path# /dev/disk1 2024-11-21T08:57:25.176193Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1002 Path# /dev/disk1 2024-11-21T08:57:25.176195Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1001 Path# /dev/disk3 2024-11-21T08:57:25.176199Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1002 Path# /dev/disk2 2024-11-21T08:57:25.176201Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 57:1001 Path# /dev/disk1 2024-11-21T08:57:25.176241Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk2 2024-11-21T08:57:25.176248Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1001 Path# /dev/disk3 2024-11-21T08:57:25.176252Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1001 Path# /dev/disk1 2024-11-21T08:57:25.176257Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1002 Path# /dev/disk1 2024-11-21T08:57:25.176262Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T08:57:25.176267Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1002 Path# /dev/disk3 2024-11-21T08:57:25.176273Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2024-11-21T08:57:25.176278Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1002 Path# /dev/disk1 2024-11-21T08:57:25.176282Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk2 2024-11-21T08:57:25.176287Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1001 Path# /dev/disk3 2024-11-21T08:57:25.176291Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1001 Path# /dev/disk1 2024-11-21T08:57:25.176296Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1002 Path# /dev/disk1 2024-11-21T08:57:25.176300Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk2 2024-11-21T08:57:25.176306Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1002 Path# /dev/disk3 2024-11-21T08:57:25.176312Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1001 Path# /dev/disk1 2024-11-21T08:57:25.176318Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1001 Path# /dev/disk1 2024-11-21T08:57:25.176322Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk2 2024-11-21T08:57:25.176327Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1002 Path# /dev/disk3 2024-11-21T08:57:25.176332Z node 
51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk1 2024-11-21T08:57:25.176337Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk3 2024-11-21T08:57:25.176341Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1000 Path# /dev/disk1 2024-11-21T08:57:25.176346Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk1 2024-11-21T08:57:25.176351Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk3 2024-11-21T08:57:25.176355Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1000 Path# /dev/disk1 2024-11-21T08:57:25.176360Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1001 Path# /dev/disk2 2024-11-21T08:57:25.176365Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk2 2024-11-21T08:57:25.176369Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1001 Path# /dev/disk3 2024-11-21T08:57:25.176374Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk1 2024-11-21T08:57:25.176379Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1002 Path# /dev/disk2 2024-11-21T08:57:25.176384Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk2 2024-11-21T08:57:25.176388Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 57:1002 Path# /dev/disk3 2024-11-21T08:57:25.176393Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1000 Path# /dev/disk2 2024-11-21T08:57:25.176397Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1002 Path# /dev/disk3 2024-11-21T08:57:25.176402Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1001 Path# /dev/disk1 2024-11-21T08:57:25.176408Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1002 Path# /dev/disk3 2024-11-21T08:57:25.176412Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk1 2024-11-21T08:57:25.176416Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T08:57:25.176419Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1001 Path# /dev/disk3 2024-11-21T08:57:25.176422Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk2 2024-11-21T08:57:25.176425Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1002 Path# /dev/disk3 2024-11-21T08:57:25.176428Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1002 Path# /dev/disk1 2024-11-21T08:57:25.224871Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T08:57:25.241481Z node 51 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2024-11-21T08:57:25.255486Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T08:57:25.277083Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2024-11-21T08:57:25.291877Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T08:57:25.310261Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2024-11-21T08:57:25.324807Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> KqpOlap::PKDescScan >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain |91.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterEqual [GOOD] Test command err: Trying to start YDB, gRPC: 28858, MsgBus: 25995 2024-11-21T08:57:32.582237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654100280110264:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.582401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004860/r3tmp/tmp5Upzkm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28858, node 1 2024-11-21T08:57:32.649771Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:32.653315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.653329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.653330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.653362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25995 2024-11-21T08:57:32.684246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.684276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.685318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25995 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.696709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:32.839028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100280110875:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.839068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.859276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:32.864830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.864867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.864906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.864923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.864941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.864953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.864969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.864987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.865005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.865022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.865037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.865049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100280110951:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:32.865454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.865479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.865503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.865508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.865526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.865531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.865542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.865554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.865564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.865573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.865580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.865589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.865652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.865663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.865681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.865700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:32.865717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.865731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 2024-11-21T08:57:32.865748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.865752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.865763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.865767Z node 1 :TX_COLUMNSHARD WARN: tablet_i ... Id; 2024-11-21T08:57:33.204484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:33.204499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:33.204514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:33.204530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:33.204545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.204560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:33.204576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654102023296768:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:33.204874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:33.204887Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:33.204896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:33.204899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:33.204911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:33.204915Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:33.204921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:33.204926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:33.204936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:33.204940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.204945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.204951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.204986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.204993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.205004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.205010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.205017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.205023Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.205034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.205036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.205046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.205048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:33.278172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654104575078403:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.278199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.278277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654104575078408:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.279131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.280741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654104575078410:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:33.404525Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453333, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.437737Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453438, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.439081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654102023296870:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.439099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654102023296875:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.439101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.439795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.441344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654102023296877:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:33.581183Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453494, txId: 18446744073709551615] shutting down 2024-11-21T08:57:33.610785Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453613, txId: 18446744073709551615] shutting down >> KqpOlap::PredicateDoNotPushdown |91.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |91.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut >> KqpDecimalColumnShard::TestFilterCompare [FAIL] >> KqpOlapAggregations::BlocksRead >> KqpOlap::PKDescScan [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullMix >> KqpOlapAggregations::BlockGenericSimpleAggregation >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] >> KqpOlap::PredicateDoNotPushdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PKDescScan [GOOD] Test command err: Trying to start YDB, gRPC: 4732, MsgBus: 24891 2024-11-21T08:57:34.268897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654107915237528:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:34.269067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004858/r3tmp/tmpY3bzQI/pdisk_1.dat 2024-11-21T08:57:34.316799Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4732, node 1 2024-11-21T08:57:34.324445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:34.324461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:34.324464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:34.324507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24891 TClient is connected to server localhost:24891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:34.367059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:34.370197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:34.370224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:34.371337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:34.379366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:34.388659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.388726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.388773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:34.388795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:34.388816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:34.388837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:34.388858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:34.388885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:34.388910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:34.388932Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.388955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:34.388977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654107915238192:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:34.389418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:34.389431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:34.389440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:34.389443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:34.389453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:34.389459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:34.389464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:34.389468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:34.389477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:34.389480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:34.389484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:34.389507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:34.389565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:34.389573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:34.389582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:34.389588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.389606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:34.389613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:34.389623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:34.389629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.389636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.389642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.392097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654107915238190:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.392124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654107915238190:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.392167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654107915238190:2288];tablet_id=720751862240 ... e, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=145904;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=145904;columns=5; 2024-11-21T08:57:34.519367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654107915238523:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.519387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654107915238500:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.519402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.520093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:34.521537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654107915238529:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No 
estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ( (let $1 '('"timestamp")) (let $2 (Uint64 '4)) (let $3 (StructType '('"timestamp" (DataType 'Timestamp)))) (let $4 '('('"_logical_id" '378) '('"_id" '"91602ffe-e5584135-9b6ddaf3-f538ec3") '('"_wide_channels" $3))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $10 (KqpWideReadOlapTableRanges $9 (Void) $1 '('('"ItemsLimit" $2)) '() (lambda '($11) $11))) (return (FromFlow (WideTopSort $10 $2 '('('0 (Bool 'true)))))) ))) $4)) (let $6 (DqCnMerge (TDqOutput $5 '0) '('('0 '"Asc")))) (let $7 (DqPhyStage '($6) (lambda '($12) (FromFlow (NarrowMap (Take (ToFlow $12) $2) (lambda '($13) (AsStruct '('"timestamp" $13)))))) '('('"_logical_id" '391) '('"_id" '"26ef1871-6e8eec66-5c491ef9-e7392023")))) (let $8 (DqCnResult (TDqOutput $7 '0) $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($5 $7) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $3) '0 '0)) '('('"type" '"scan_query")))) ) ============================== {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Desc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"reverse":true,"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["timestamp (Desc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"reverse":true,"columns":["timestamp"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"limit":"4","type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Reverse":true,"Name":"TableFullScan","ReadLimit":"4","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["timestamp"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.timestamp"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ( (let $1 '('"timestamp")) (let $2 (Uint64 '4)) (let $3 (StructType '('"timestamp" (DataType 'Timestamp)))) (let $4 '('('"_logical_id" '378) '('"_id" '"38197cf7-2f9d3d68-117ed8b1-e43a506b") '('"_wide_channels" $3))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $10 (KqpWideReadOlapTableRanges $9 (Void) $1 '('('"ItemsLimit" $2) '('"Reverse")) '() (lambda '($11) $11))) (return (FromFlow (WideTopSort $10 $2 '('('0 (Bool 'false)))))) ))) $4)) (let $6 (DqCnMerge (TDqOutput $5 '0) '('('0 '"Desc")))) (let $7 (DqPhyStage '($6) (lambda '($12) (FromFlow (NarrowMap (Take (ToFlow $12) $2) (lambda '($13) (AsStruct '('"timestamp" $13)))))) '('('"_logical_id" '391) '('"_id" '"684a8d07-d2e65f74-7b97589a-ccbd74c4")))) (let $8 (DqCnResult (TDqOutput $7 '0) $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($5 $7) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $3) '0 '0)) '('('"type" '"scan_query")))) ) 2024-11-21T08:57:34.677936Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179454572, txId: 18446744073709551615] shutting down >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 1133, MsgBus: 6164 2024-11-21T08:57:22.497269Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654056602598128:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:22.497290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b3/r3tmp/tmpDa9bVB/pdisk_1.dat 2024-11-21T08:57:22.558237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1133, node 1 2024-11-21T08:57:22.571996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:22.572007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:22.572009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:22.572046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6164 TClient is connected to server localhost:6164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:22.626678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:22.626708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:22.628048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:22.645162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.648905Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:22.663882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:22.678088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.678176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.678236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.678267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.678292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:22.678315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.678347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.678373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:22.678398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:22.678423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.678447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:22.678476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654056602598777:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:22.682281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.682314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.682357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.682381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.682405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:22.682429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.682451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.682474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:22.682498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:22.682520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:22.682548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:22.682569Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654056602598789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:22.685962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:22.685991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:22.686032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:22.686046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:22.686062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:22.686084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:22.686097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:22.686111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654056602598788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.374948Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.509642Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.509673Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.571543Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.571565Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.633135Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.633156Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.694811Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.694834Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.756541Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:33.777257Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:33.828585Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.828620Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:33.890574Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.890604Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:33.952399Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:33.952415Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.014047Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.014094Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.075929Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.075951Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.097478Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:34.223785Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2024-11-21T08:57:34.223817Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.316781Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.316805Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.398952Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.398974Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.481075Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.481100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:34.563172Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:34.563193Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjU0ODkxNDMtOGUzNGFmOTktZjRmODg2YTgtOGIxZDk5OQ==. TraceId : 01jd6z1jhg755r32z7s1sgy7p3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:34.614539Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:34.645613Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::BlocksRead [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupByNull [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicateDoNotPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 30019, MsgBus: 32735 2024-11-21T08:57:34.641510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654106089571409:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:34.641688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004856/r3tmp/tmp6FH1fe/pdisk_1.dat 2024-11-21T08:57:34.688674Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30019, node 1 2024-11-21T08:57:34.696305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:34.696322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:34.696323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:34.696353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32735 TClient is connected to server localhost:32735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:34.741761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:34.742905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:34.742924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:34.744026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:34.754627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:34.763663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.763717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.763742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:34.763758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:34.763777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:34.763795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:34.763813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:34.763826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:34.763841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:34.763852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.763867Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:34.763887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654106089572069:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:34.764249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:34.764261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:34.764269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:34.764274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:34.764285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:34.764287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:34.764293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:34.764301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:34.764306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:34.764308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:34.764312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:34.764319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:34.764362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:34.764370Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:34.764380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:34.764384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.764391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:34.764398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:34.764407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:34.764413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.764420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.764423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.766183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654106089572070:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.766197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654106089572070:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.766217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654106089572070:2289];tablet_id=7207518622 ... 
tion=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.770821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.770823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.770953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:34.770960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:34.770965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:34.770968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:34.770978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:34.770983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:34.770987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:34.770989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:34.770994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:34.770998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:34.771001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:34.771003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:34.771020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:34.771026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:34.771035Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:34.771040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.771046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:34.771053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:34.771062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:34.771068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.771075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.771077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.771138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:34.771147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:34.771152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:34.771154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:34.771162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:34.771165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:34.771170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:34.771176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:34.771180Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:34.771183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:34.771186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:34.771188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:34.771201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:34.771207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:34.771216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:34.771221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.771227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:34.771229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:34.771237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:34.771239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.771245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.771247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.811748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5800;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2024-11-21T08:57:34.870881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654106089572394:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.870897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654106089572402:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.870901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:34.871475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:34.872981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654106089572408:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullMix [GOOD] >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlocksRead [GOOD] Test command err: Trying to start YDB, gRPC: 22957, MsgBus: 3237 2024-11-21T08:57:34.878167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654108810929225:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:34.878467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004854/r3tmp/tmpDj993i/pdisk_1.dat 2024-11-21T08:57:34.924046Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22957, node 1 2024-11-21T08:57:34.928832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:34.928841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:34.928843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:34.928867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3237 TClient is connected to server localhost:3237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:34.971791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:34.979628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:34.979647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:34.980720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:34.984081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:34.993044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.993090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.993133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:34.993155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:34.993177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:34.993203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:34.993224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:34.993245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:34.993269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:34.993290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.993314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:34.993337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654108810929862:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:34.993781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:34.993795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:34.993806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:34.993812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:34.993826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:34.993836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:34.993845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:34.993857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:34.993866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:34.993870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:34.993881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:34.993885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:34.993937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:34.993948Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:34.993963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:34.993972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:34.993989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:34.993997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:34.994012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:34.994020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:34.994030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:34.994037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:34.996934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654108810929863:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:34.996954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654108810929863:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:34.996987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654108810929863:2289];tablet_id=7207518622403 ... 
5.004896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:35.004904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.004914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:35.004922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:35.004936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:35.004944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:35.004954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:35.004962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:35.005167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:35.005177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.005192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.005196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.005211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.005219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.005227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.005235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.005242Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.005250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.005256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:35.005264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:35.005292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:35.005302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:35.005317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:35.005325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.005336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:35.005343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:35.005358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:35.005366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:35.005374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:35.005382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2024-11-21T08:57:35.129610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7439654113105897462:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.129687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654113105897440:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.129697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.130294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:35.131717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654113105897469:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:35.261302Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179455188, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DqPhyStage '() (lambda '() (block '( (let $5 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $6 '('"id" '"level" '"resource_id")) (let $7 (KqpBlockReadOlapTableRanges $5 (Void) $6 '() '() (lambda '($9) $9))) (let $8 (lambda '($10 $11 $12 $13) (block '( (let $14 (BlockFunc '"Equals" (BlockType (OptionalType (DataType 'Bool))) $11 (AsScalar (Int32 '"5")))) (return $10 $12 (BlockCoalesce $14 (AsScalar (Bool 'false))) $13) )))) (return (FromFlow (NarrowMap (WideFromBlocks (BlockCompress (WideMap $7 $8) '2)) (lambda '($15 $16) (AsStruct '('"id" $15) '('"resource_id" $16)))))) ))) '('('"_logical_id" '564) '('"_id" '"5376eab7-e14c0545-47c35b1e-8fff45ff")))) (let $2 (DqCnUnionAll (TDqOutput $1 '"0"))) (let $3 (DqPhyStage '($2) (lambda '($17) $17) '('('"_logical_id" '613) '('"_id" '"97d2ceb3-7342e3bc-8bd62ca-a5abedcf")))) (let $4 (DqCnResult (TDqOutput $3 '"0") '('"id" '"resource_id"))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($1 $3) '($4) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> 
KqpOlapAggregations::Aggregation_Avg_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 16529, MsgBus: 20564 2024-11-21T08:57:32.385728Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654100670705399:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.385814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004864/r3tmp/tmpv73VN5/pdisk_1.dat 2024-11-21T08:57:32.458538Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16529, node 1 2024-11-21T08:57:32.472551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.472567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.472569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.472611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:32.486280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.486309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.488109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20564 TClient is connected to server localhost:20564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.521870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:32.531938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.540442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.540525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.540572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.540611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.540636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.540658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.540681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.540707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.540729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.540749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.540775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.540799Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654100670706033:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.541322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.541335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.541346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.541355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.541385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.541398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.541406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.541412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.541419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.541423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.541429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.541433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.541485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.541508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.541529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.541540Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.541559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.541568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.541585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.541588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.541598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.541606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.544612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100670706034:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.544638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100670706034:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.544675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654100670706034:2290];tablet_id=7207518622 ... 
vent=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.553135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.553140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.553141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.553144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.553146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.553163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.553165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.553174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.553175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.553181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.553183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.553191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.553193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.553205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.553207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; 2024-11-21T08:57:32.681737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100670706326:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.681753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100670706334:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.681757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.682335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:32.683729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654100670706340:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:35.378937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452738, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2730) '('"_id" '"e45a42a8-38a5681a-1dfa7f60-60123020") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $29 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $29) '((Nothing $2) 
$29))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2789) '('"_id" '"90e2bdf6-2eef1dfd-9bfafe2b-ffec0188") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $30 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $31 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $32 (KqpWideReadOlapTableRanges $30 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $31 (lambda '($34) (block '( (let $35 '('"_yql_agg_0_sum" '"sum" '"id")) (let $36 '('"_yql_agg_0_cnt" '"count" '"id")) (let $37 '('"_yql_agg_1_sum" '"sum" '"level")) (let $38 '('"_yql_agg_1_cnt" '"count" '"level")) (let $39 '($35 $36 $37 $38)) (return (TKqpOlapAgg $34 $39 '('"level"))) ))))) (let $33 (lambda '($40 $41 $42 $43 $44) (block '( (let $45 (IfPresent $43 (lambda '($46) (Just '((Convert $46 'Double) $42))) (Nothing $12))) (return '((Convert $41 'Double) $40) $45 $44) )))) (return (FromFlow (WideMap $32 $33))) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2))) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '5519) '('"_id" '"d441ace5-fe2f6c4-9343acc9-9e0af7e7") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($47) (block '( (let $48 (lambda '($55 $56 $57 $58) (Nth $56 '0) (Nth $56 '1) $57)) (let $49 (lambda '($59 $60 $61 $62 $63 $64 $65) (block '( (let $66 (IfPresent $61 (lambda '($67) (IfPresent $65 (lambda '($68) (Just '((AggrAdd (Nth $67 '0) (Nth $68 '0)) (AggrAdd (Nth $67 '1) (Nth $68 '1))))) $61)) $65)) (return (AggrAdd (Nth $60 '0) $63) (AggrAdd (Nth $60 '1) $64) $66) )))) (let $50 (lambda '($69 $70 $71 $72) (block '( (let $73 (IfPresent $72 (lambda '($74) (Just (Div (Nth $74 '0) (Nth $74 '1)))) (Nothing $18))) (return (Div $70 $71) $73 $69) )))) (let $51 (WideCombiner (ToFlow $47) '"" (lambda '($52 $53 $54) $54) $48 $49 $50)) (return (FromFlow (WideSort $51 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($75) (FromFlow (NarrowMap (ToFlow $75) (lambda '($76 $77 $78) (AsStruct '('"column1" $76) '('"column2" $77) '('"level" $78)))))) '('('"_logical_id" '5531) '('"_id" '"e74f3818-3037f5e2-9e14a0c8-4aec1565")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] Test command err: Trying to start YDB, gRPC: 28692, MsgBus: 26360 2024-11-21T08:57:25.049535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654071363659224:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.049552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489a/r3tmp/tmp3Fe301/pdisk_1.dat 2024-11-21T08:57:25.107001Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28692, node 1 2024-11-21T08:57:25.121026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.121041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.121042Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.121086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26360 2024-11-21T08:57:25.150592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.150621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.151708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.168463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/cnt` (key Int32 NOT NULL, c Int32, PRIMARY KEY (key)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:25.334679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654071363659831:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.334712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:25.363028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:25.372648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.372692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.372739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.372756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.372767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.372781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.372797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.372814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.372830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.372845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.372860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.372875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654071363659908:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:25.373706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.373721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.373733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.373743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.373757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.373764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.373773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.373778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.373785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.373791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.373797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.373801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.373855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.373879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.373896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.373899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:25.373908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.373912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.373928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.373931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.373940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.373943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:30.049753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654071363659224:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.049788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:57:35.427863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654114313333156:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.427911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654114313333161:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.427910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.428544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:35.429819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654114313333163:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:35.547337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=key: [ 1 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710661}],"starts":[{"inc":{"count_not_include":1},"id":281474976710661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":1},"id":281474976710661}],"starts":[{"inc":{"count_include":1},"id":281474976710661}],"finishes":[{"inc":{"count_include":1},"id":281474976710661}]},"p":{"include":0,"pk":"1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710661}]},"p":{"include":2147483647}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 2069, MsgBus: 32602 2024-11-21T08:57:35.103212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654111573843468:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:35.103353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004853/r3tmp/tmpVfuSi4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2069, node 1 2024-11-21T08:57:35.159016Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:35.159198Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:35.159206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:35.159207Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:35.159238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32602 TClient is connected to server localhost:32602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:35.204768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:35.204790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:35.205857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:35.228257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:35.231531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:35.239837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:35.239896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:35.239942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:35.239963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:35.239989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:35.240012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:35.240030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:35.240051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:35.240072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:35.240098Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.240120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:35.240143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654111573844119:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:35.240522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:35.240535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.240543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.240546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.240556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.240563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.240569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.240575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.240585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.240587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.240591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:35.240598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:35.240645Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:35.240653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:35.240663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:35.240671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.240682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:35.240685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:35.240699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:35.240707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:35.240717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:35.240724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:35.243710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654111573844116:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:35.243735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654111573844116:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:35.243772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654111573844116:2289];tablet_id=720751862240 ... 
_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:35.249630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654111573844125:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.249648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654111573844125:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:35.249665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654111573844125:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:35.250108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:35.250118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.250125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.250127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.250136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.250138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.250143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.250146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.250151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.250153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.250157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:35.250163Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:35.250191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:35.250208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:35.250223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:35.250231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.250242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:35.250248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:35.250257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:35.250263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:35.250270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:35.250272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls`; 2024-11-21T08:57:35.364046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654111573844394:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.364064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.364100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654111573844421:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.364733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:35.366309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654111573844423:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:35.567253Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179455419, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '527) '('"_id" '"f3e3f323-a48b2c35-da2ab753-3ff5d961") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) 
(return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1075) '('"_id" '"40ee2ded-64c07064-ffa56c04-741f2547")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1178) '('"_id" '"7bd1c439-3cb15523-2ca5de6e-1dc2bfde") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapSysView::StatsSysViewFilter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] Test command err: Trying to start YDB, gRPC: 21615, MsgBus: 11212 2024-11-21T08:57:35.153731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654113038953251:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:35.153854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00484f/r3tmp/tmp6PEWSG/pdisk_1.dat 2024-11-21T08:57:35.200129Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21615, node 1 2024-11-21T08:57:35.212833Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:35.212846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:35.212847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:35.212882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11212 TClient is connected to server localhost:11212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:35.254719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:35.254742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:35.255692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:35.255768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:35.267121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:35.276563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:35.276619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:35.276646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:35.276662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:35.276673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:35.276688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:35.276700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:35.276718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:35.276733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:35.276751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.276767Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:35.276787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654113038953887:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:35.278726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:35.278744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:35.278761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:35.278772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:35.278786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:35.278800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:35.278812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:35.278827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:35.278842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:35.278859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.278875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:35.278888Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654113038953888:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:35.279168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:35.279178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.279188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.279193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.279203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.279209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.279216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.279225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.279234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.279240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.279244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
-21T08:57:35.283838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.283843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.283845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.283852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.283858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.283862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.283864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.283868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.283870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.283873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:35.283879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:35.283891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:35.283896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:35.283904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:35.283909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.283915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2024-11-21T08:57:35.283918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:35.283924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:35.283926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:35.283931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:35.283935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; 2024-11-21T08:57:35.423423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654113038954187:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.423443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654113038954182:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.423459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:35.424129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:35.425882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654113038954196:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 (DataType 'Int64)) (let $3 '('"level" 
(OptionalType (DataType 'Int32)))) (let $4 (StructType '('_yql_agg_0 $1) '('_yql_agg_1 $2) $3)) (let $5 '('('"_logical_id" '1162) '('"_id" '"aa813927-3231ffb1-5849ff73-1301d516") '('"_wide_channels" $4))) (let $6 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpBlockReadOlapTableRanges $16 (Void) '('"id" '"level") '() '() (lambda '($18) (block '( (let $19 '('eq '"level" (Int32 '"5"))) (let $20 '('?? $19 (Bool 'false))) (let $21 '('_yql_agg_0 '"count" '"*")) (let $22 '('_yql_agg_1 'sum '"id")) (return (TKqpOlapAgg (KqpOlapFilter $18 $20) '($21 $22) '('"level"))) ))))) (return (FromFlow $17)) ))) $5)) (let $7 (DqCnHashShuffle (TDqOutput $6 '0) '('2))) (let $8 (StructType '('"column1" $1) '('"column2" $2) $3)) (let $9 '('('"_logical_id" '1824) '('"_id" '"17310b04-8389f2d6-87b21c6e-67f991d9") '('"_wide_channels" $8))) (let $10 (DqPhyStage '($7) (lambda '($23) (block '( (let $24 (lambda '($31 $32 $33 $34) $32 $33)) (let $25 (lambda '($35 $36 $37 $38 $39 $40) (AggrAdd $36 $39) (AggrAdd $37 $40))) (let $26 (lambda '($41 $42 $43) $42 $43 $41)) (let $27 (WideCombiner (WideFromBlocks (ToFlow $23)) '"" (lambda '($28 $29 $30) $30) $24 $25 $26)) (return (FromFlow (WideSort $27 '('('2 (Bool 'true)))))) ))) $9)) (let $11 (DqCnMerge (TDqOutput $10 '0) '('('2 '"Asc")))) (let $12 (DqPhyStage '($11) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46 $47) (AsStruct '('"column1" $45) '('"column2" $46) '('"level" $47)))))) '('('"_logical_id" '1836) '('"_id" '"63db56fa-fb8e1869-77a6fe75-8cf0a199")))) (let $13 '($6 $10 $12)) (let $14 '('"level" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $12 '0) $14)) (return (KqpPhysicalQuery '((KqpPhysicalTx $13 '($15) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $8) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterCompare [FAIL] Test command err: Trying to start YDB, gRPC: 15185, MsgBus: 61466 2024-11-21T08:57:32.366119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654100287304365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.366135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004865/r3tmp/tmp9Y2hGd/pdisk_1.dat 2024-11-21T08:57:32.421566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15185, node 1 2024-11-21T08:57:32.437735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.437747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.437749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.437786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61466 2024-11-21T08:57:32.466528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.466553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.467653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:61466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.496466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.500885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:32.677436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654100287304963:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.677461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.698076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:32.704506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.704538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.704567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.704584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.704602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.704617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.704633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.704643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.704656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.704671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.704685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.704699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654100287305039:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
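The KqpDecimalColumnShard::TestFilterCompare failure reported further below in this block is an INTERNAL_ERROR raised from ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn() with "Unsupported content: Decimal", which suggests the OLAP program compiler cannot yet handle Decimal values when compiling a scan over the `dec Decimal(22,9)` column of the column-store table `/Root/Table1` created above. A minimal reproduction sketch, assuming the test compares against that column in a WHERE clause; the exact statement lives in ydb/core/kqp/ut/olap/decimal_ut.cpp and is not visible in this log, and the literal below is illustrative only:

--!syntax_v1
-- hypothetical query shape, not copied from the test
SELECT id, dec
FROM `/Root/Table1`
WHERE dec > Decimal("10.1", 22, 9);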
2024-11-21T08:57:32.705013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.705027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.705040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.705049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.705064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.705073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.705082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.705095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.705107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.705115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.705121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.705130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.705173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.705182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.705194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.705196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:32.705203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.705209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.705220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.705226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.705234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08: ... t_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:33.044638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:33.044649Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:33.044664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:33.044677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.044691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:33.044705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654104044761939:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:33.045061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:33.045073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:33.045083Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:33.045087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:33.045099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:33.045108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:33.045116Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:33.045119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:33.045124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:33.045131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.045135Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.045138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.045174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.045184Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.045197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.045203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.045209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.045212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.045221Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.045227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.045233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.045237Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:33.117431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654104582272491:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.117465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.117475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654104582272496:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:33.118057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:33.119507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654104582272498:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:33.246080Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654104582272566:2385], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn(): requirement false failed, message: Unsupported content: Decimal, code: 1 2024-11-21T08:57:33.246500Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDllZWYyNDUtYTZjZmIyOGItOWFjNzliMTQtZGRiNTBkOWQ=, ActorId: [1:7439654104582272489:2376], ActorState: ExecuteState, TraceId: 01jd6z1s5w9vfq7aewxym296bg, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.cpp:805, TString NKikimr::NKqp::StreamResultToYsonImpl(TIterator &, TVector *, bool, const NYdb::EStatus &, const TString &) [TIterator = NYdb::NTable::TScanQueryPartIterator]: (streamPart.EOS())
: Fatal: Execution, code: 1060
: Fatal: ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp:331 ConvertValueToColumn(): requirement false failed, message: Unsupported content: Decimal, code: 1 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:805: StreamResultToYsonImpl @ 0x241C7DB2 2. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:828: ?? @ 0x241C77D7 3. /-S/ydb/core/kqp/ut/common/columnshard.cpp:118: ReadData @ 0x24247B28 4. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:44: CheckQuery @ 0x125BB4A3 5. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:166: operator() @ 0x125BB4A3 6. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:179: Execute_ @ 0x125BB1DD 7. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: operator() @ 0x125C1936 8. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 9. /-S/ydb/core/kqp/ut/olap/decimal_ut.cpp:26: Execute @ 0x125C12F0 10. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 11. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 12. ??:0: ?? @ 0x7F7B6A851D8F 13. ??:0: ?? @ 0x7F7B6A851E3F 14. ??:0: ?? @ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 27457, MsgBus: 29965 2024-11-21T08:57:32.390739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654097938382219:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.390759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004861/r3tmp/tmpQpfkkr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27457, node 1 2024-11-21T08:57:32.451302Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:32.452751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.452763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.452765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.452799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29965 TClient is connected to server localhost:29965 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:32.491715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.491733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.492887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.518267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.521888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.529117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.529187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.529226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.529248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.529265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.529283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.529296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.529315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.529335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.529355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.529374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.529392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654097938382860:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.529834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.529850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.529860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.529864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.529877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.529881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.529890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.529916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.529929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.529933Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.529940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.529951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.530004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.530016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.530031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.530040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.530050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.530060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.530076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.530085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.530096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.530100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.532939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654097938382859:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.532962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654097938382859:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.532997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654097938382859:2289];tablet_id=7207518622 ... 
:Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.539748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.539752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.539755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.539758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.539761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.539775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.539782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.539790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.539793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.539799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.539807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.539815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.539819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.539824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.539832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
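The AVG query recorded next in this block (SELECT level, AVG(id), AVG(level) ... GROUP BY level) is not pushed down as a single aggregate: per the SsaProgram and AST further below, each AVG(col) becomes a per-shard sum/count pair (_yql_agg_N_sum / _yql_agg_N_cnt via TKqpOlapAgg) and the division happens only in the compute stage (the Div calls in the final combiner lambda), which also keeps NULL values out of both numerator and denominator. A sketch of the equivalent hand-written rewrite, with illustrative aliases that are not part of the test:

--!syntax_v1
-- AVG expressed as SUM/COUNT per group; NULL ids/levels are skipped by both SUM and COUNT,
-- and NULL levels still form their own group, in line with the GroupByNullMix scenario.
SELECT
    level,
    CAST(SUM(id) AS Double)    / COUNT(id)    AS avg_id,
    CAST(SUM(level) AS Double) / COUNT(level) AS avg_level
FROM `/Root/tableWithNulls`
WHERE id >= 5
GROUP BY level
ORDER BY level;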
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2024-11-21T08:57:32.690965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654097938383140:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.690990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654097938383166:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.690996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:32.691598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:32.693353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654097938383169:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:35.509833Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452745, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2730) '('"_id" '"4f23470-3fdd0cec-f19d00c2-f69b5672") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let 
$5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2789) '('"_id" '"b1111b05-b460e8ee-8c8ba5d3-4fb206cb") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"id")) (let $35 '('"_yql_agg_0_cnt" '"count" '"id")) (let $36 '('"_yql_agg_1_sum" '"sum" '"level")) (let $37 '('"_yql_agg_1_cnt" '"count" '"level")) (let $38 '($34 $35 $36 $37)) (return (TKqpOlapAgg $33 $38 '('"level"))) ))))) (let $32 (lambda '($39 $40 $41 $42 $43) (block '( (let $44 (IfPresent $42 (lambda '($45) (Just '((Convert $45 'Double) $41))) (Nothing $12))) (return '((Convert $40 'Double) $39) $44 $43) )))) (return (FromFlow (WideMap $31 $32))) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2))) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '5519) '('"_id" '"422a2e25-aa700f84-b9edece9-e64b4030") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($46) (block '( (let $47 (lambda '($54 $55 $56 $57) (Nth $55 '0) (Nth $55 '1) $56)) (let $48 (lambda '($58 $59 $60 $61 $62 $63 $64) (block '( (let $65 (IfPresent $60 (lambda '($66) (IfPresent $64 (lambda '($67) (Just '((AggrAdd (Nth $66 '0) (Nth $67 '0)) (AggrAdd (Nth $66 '1) (Nth $67 '1))))) $60)) $64)) (return (AggrAdd (Nth $59 '0) $62) (AggrAdd (Nth $59 '1) $63) $65) )))) (let $49 (lambda '($68 $69 $70 $71) (block '( (let $72 (IfPresent $71 (lambda '($73) (Just (Div (Nth $73 '0) (Nth $73 '1)))) (Nothing $18))) (return (Div $69 $70) $72 $68) )))) (let $50 (WideCombiner (ToFlow $46) '"" (lambda '($51 $52 $53) $53) $47 $48 $49)) (return (FromFlow (WideSort $50 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($74) (FromFlow (NarrowMap (ToFlow $74) (lambda '($75 $76 $77) (AsStruct '('"column1" $75) '('"column2" $76) '('"level" $77)))))) '('('"_logical_id" '5531) '('"_id" '"20ecf9c2-10f1d166-79ba3e07-3345f28")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapSparsed::Switching >> KqpOlap::OlapDeleteImmediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewFilter [GOOD] Test command err: Trying to start YDB, gRPC: 30194, MsgBus: 14246 2024-11-21T08:57:20.855646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654046311992497:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.855668Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c5/r3tmp/tmpOHKD0l/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30194, node 1 2024-11-21T08:57:20.911125Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:20.911132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.911135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.911136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.911169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14246 2024-11-21T08:57:20.955281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.955307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.956592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.984433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
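The StatsSysViewFilter scenario in this block exercises filtered reads over the hidden statistics views of the column store being created here; the actual filtered statements are not visible in this excerpt. A heavily hedged sketch of the kind of query involved, where the store path, view name and column names are assumptions based on the usual layout of these olap sys-view tests rather than anything printed in this log:

--!syntax_v1
-- assumed sys-view path and columns; adjust to the store/table actually created by the test
SELECT PathId, Kind, TabletId, SUM(Rows) AS rows_total
FROM `/Root/olapStore/.sys/store_primary_index_stats`
WHERE PathId > 0
GROUP BY PathId, Kind, TabletId
ORDER BY PathId;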
2024-11-21T08:57:20.989139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:20.995095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.006696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.007274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.007306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.007334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.007351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.007370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.007386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.007406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.007426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.007445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.007463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.007481Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654050606960231:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.010729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.010756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.010800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.010822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.010844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.010864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.010883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.010903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.010927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.010957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.010980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.010999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654050606960232:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.013606Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.013630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.013662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.013674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.013686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.013696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.013705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.013715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050606960233:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
lizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:21.017740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:21.017747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:21.017751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:21.017762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:21.017765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:21.017772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:21.017775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:21.017782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:21.017785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:21.017789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:21.017793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:21.017813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:21.017816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:21.017830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:21.017833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.017847Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:21.017850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:21.017863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:21.017866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:21.017874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:21.017877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:21.125385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:25.856051Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654046311992497:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.856084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(BlobRangeSize) as Bytes FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2024-11-21T08:57:31.334517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654093556633870:2577], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.334520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654093556633861:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.334528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.335119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:31.336546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654093556633875:2578], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:32.474865Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179451455, txId: 281474976715662] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 Bytes: 250032 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 Bytes: 249560 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(BlobRangeSize) as Bytes FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2024-11-21T08:57:33.611979Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452538, txId: 281474976715664] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 Bytes: 250032 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 Bytes: 249560 ==================================== QUERY: SELECT * FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind == 'EVICTED' ORDER BY PathId, Kind, TabletId; RESULT: ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind IN ('SPLIT_COMPACTED', 'INACTIVE', 'EVICTED', 'INSERTED') AND Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY PathId, Kind, TabletId; RESULT: 2024-11-21T08:57:33.660640Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453643, txId: 281474976715666] shutting down 2024-11-21T08:57:35.898923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:35.898942Z node 1 :IMPORT WARN: Table profiles were not loaded TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 2024-11-21T08:57:36.034967Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179453721, txId: 281474976715668] shutting down |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapDeleteImmediate [GOOD] >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey >> KqpOlapBlobsSharing::MultipleSplits >> KqpOlapIndexes::IndexesActualization >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream >> KqpOlap::YqlScriptOltpAndOlap >> KqpOlapAggregations::Aggregation_Avg_Null |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey [GOOD] >> KqpDecimalColumnShard::TestAggregation >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> KqpOlap::YqlScriptOltpAndOlap [GOOD] >> BsControllerConfig::DeleteStoragePool [GOOD] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpOlap::PredicatePushdown [GOOD] >> KqpOlapAggregations::Aggregation_Avg_Null [GOOD] >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream [GOOD] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::YqlScriptOltpAndOlap 
[GOOD] Test command err: Trying to start YDB, gRPC: 2722, MsgBus: 13193 2024-11-21T08:57:37.142169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654122903362793:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.142185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483e/r3tmp/tmpmF69Vp/pdisk_1.dat 2024-11-21T08:57:37.197762Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2722, node 1 2024-11-21T08:57:37.212033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:37.212073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:37.212075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:37.212132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13193 2024-11-21T08:57:37.242690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:37.242723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:37.243730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:37.273262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:37.276101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:37.278093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:37.289182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.289246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.289275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.289290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.289302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.289316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.289351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.289367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.289391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.289405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.289422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.289437Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654122903363444:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.291506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.291530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.291560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.291575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.291590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.291600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.291612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.291629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.291644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.291657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.291671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.291684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654122903363442:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.293855Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.293874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.293896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.293909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.293922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.293934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.293947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.293960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654122903363443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
alization_finished; 2024-11-21T08:57:37.297409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:37.297416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:37.297425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:37.297428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:37.297442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.297446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.297452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.297457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.297463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.297467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.297472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:37.297475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:37.297512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:37.297516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:37.297530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:37.297533Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.297542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:37.297545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:37.297558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:37.297561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:37.297569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:37.297572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:37.297677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:37.297688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:37.297696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:37.297700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:37.297712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.297715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.297723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.297726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.297733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.297742Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.297747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:37.297750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:37.297772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:37.297775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:37.297787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:37.297790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.297800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:37.297803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:37.297817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:37.297820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:37.297829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:37.297831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:37.337361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3632;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3632;columns=5; 2024-11-21T08:57:37.360452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.431093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654122903363842:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.431102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654122903363833:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.431116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.431792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:37.433525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654122903363847:2390], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:57:37.614413Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457645, txId: 281474976715664] shutting down >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict >> KqpOlap::SimpleLookupOlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T08:57:16.991014Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.013122Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.039829Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.039957Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.040104Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040110Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.040141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.092821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.092870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.108996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.109070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109086Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.109094Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T08:57:17.119559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.119610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.129943Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.130000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2024-11-21T08:57:17.130020Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.130031Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.130059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.130065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.130072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.130079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.140411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.140487Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.146418Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.146461Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.146499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.150793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T08:57:18.737335Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:18.737513Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:18.737780Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:18.737862Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:18.737974Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.737981Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:18.738020Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:18.738864Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:18.738886Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:18.738914Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} 
Complete tx IncompatibleData# false 2024-11-21T08:57:18.738929Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.738942Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:18.738950Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T08:57:18.749386Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:18.749451Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.759851Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:18.759914Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.759931Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:18.759941Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.759970Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:18.759977Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.759983Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:18.759991Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.770342Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:18.770399Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:18.770595Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:18.770603Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:18.770632Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:18.770730Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] 
recipient: [21:2865:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2865:2115] Leader for TabletID 72057594037932033 is [21:2966:2117] sender: [21:2967:2106] recipient: [21:2865:2115] 2024-11-21T08:57:20.883968Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:20.884161Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:20.884422Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:20.884499Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:20.884630Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:20.884636Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:20.884679Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:20.885518Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:20.885547Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:20.885572Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:20.885588Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:20.885602Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx ... 
pp:355} Create new pdisk PDiskId# 87:1002 Path# /dev/disk1 2024-11-21T08:57:28.596666Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1000 Path# /dev/disk1 2024-11-21T08:57:28.596670Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1000 Path# /dev/disk3 2024-11-21T08:57:28.596674Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2024-11-21T08:57:28.596678Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1000 Path# /dev/disk1 2024-11-21T08:57:28.596683Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2024-11-21T08:57:28.596687Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1000 Path# /dev/disk1 2024-11-21T08:57:28.596692Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2024-11-21T08:57:28.596696Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1000 Path# /dev/disk1 2024-11-21T08:57:28.596701Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk2 2024-11-21T08:57:28.596705Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1000 Path# /dev/disk3 2024-11-21T08:57:28.596710Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2024-11-21T08:57:28.596714Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk1 2024-11-21T08:57:28.596720Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1000 Path# /dev/disk3 2024-11-21T08:57:28.596725Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2024-11-21T08:57:28.596730Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk1 2024-11-21T08:57:28.596735Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk2 2024-11-21T08:57:28.596741Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1001 Path# /dev/disk2 2024-11-21T08:57:28.596745Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1000 Path# /dev/disk3 2024-11-21T08:57:28.596750Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2024-11-21T08:57:28.596756Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1001 Path# /dev/disk1 2024-11-21T08:57:28.596762Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T08:57:28.596767Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1001 Path# /dev/disk3 2024-11-21T08:57:28.596772Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1001 Path# /dev/disk2 2024-11-21T08:57:28.596777Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 114:1002 
Path# /dev/disk3 2024-11-21T08:57:28.596782Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk1 2024-11-21T08:57:28.596788Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk2 2024-11-21T08:57:28.596793Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1001 Path# /dev/disk2 2024-11-21T08:57:28.596797Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1000 Path# /dev/disk3 2024-11-21T08:57:28.596802Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1002 Path# /dev/disk3 2024-11-21T08:57:28.596807Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1001 Path# /dev/disk3 2024-11-21T08:57:28.596812Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1001 Path# /dev/disk2 2024-11-21T08:57:28.596816Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1001 Path# /dev/disk2 2024-11-21T08:57:28.596821Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2024-11-21T08:57:28.596826Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1002 Path# /dev/disk2 2024-11-21T08:57:28.596830Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1000 Path# /dev/disk3 2024-11-21T08:57:28.596836Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1002 Path# /dev/disk3 2024-11-21T08:57:28.596840Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1001 Path# /dev/disk2 2024-11-21T08:57:28.596845Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk2 2024-11-21T08:57:28.596849Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1002 Path# /dev/disk2 2024-11-21T08:57:28.596854Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1000 Path# /dev/disk3 2024-11-21T08:57:28.596860Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1001 Path# /dev/disk3 2024-11-21T08:57:28.596866Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1001 Path# /dev/disk2 2024-11-21T08:57:28.596871Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1001 Path# /dev/disk2 2024-11-21T08:57:28.596876Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 94:1002 Path# /dev/disk2 2024-11-21T08:57:28.596880Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1000 Path# /dev/disk3 2024-11-21T08:57:28.596885Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1001 Path# /dev/disk3 2024-11-21T08:57:28.596890Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1001 Path# /dev/disk2 2024-11-21T08:57:28.596896Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1002 Path# /dev/disk2 2024-11-21T08:57:28.596901Z node 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1000 Path# /dev/disk3 2024-11-21T08:57:28.596906Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1001 Path# /dev/disk2 2024-11-21T08:57:28.596911Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1000 Path# /dev/disk3 2024-11-21T08:57:28.596916Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1001 Path# /dev/disk1 2024-11-21T08:57:28.596922Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2024-11-21T08:57:28.596927Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk1 2024-11-21T08:57:28.596932Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1002 Path# /dev/disk3 2024-11-21T08:57:28.596937Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1002 Path# /dev/disk3 2024-11-21T08:57:28.596944Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1000 Path# /dev/disk2 2024-11-21T08:57:28.596949Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2024-11-21T08:57:28.596954Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk1 2024-11-21T08:57:28.596959Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1002 Path# /dev/disk2 2024-11-21T08:57:28.596964Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1001 Path# /dev/disk3 2024-11-21T08:57:28.596971Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T08:57:28.596976Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2024-11-21T08:57:28.596982Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk1 2024-11-21T08:57:28.596987Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2024-11-21T08:57:28.596992Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk1 2024-11-21T08:57:28.596996Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 93:1002 Path# /dev/disk3 2024-11-21T08:57:28.597002Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk2 2024-11-21T08:57:28.597008Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T08:57:28.597013Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2024-11-21T08:57:28.597018Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk1 2024-11-21T08:57:28.597023Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2024-11-21T08:57:28.597028Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk1 2024-11-21T08:57:28.597033Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1002 Path# /dev/disk3 2024-11-21T08:57:28.597039Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T08:57:28.608597Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T08:57:28.628977Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2024-11-21T08:57:28.646002Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown [GOOD] Test command err: Trying to start YDB, gRPC: 20186, MsgBus: 27091 2024-11-21T08:57:29.051260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654086242819768:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:29.051464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004874/r3tmp/tmpCgLEIk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20186, node 1 2024-11-21T08:57:29.108002Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:29.108295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:29.108303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:29.108308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:29.108339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27091 TClient is connected to server localhost:27091 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:29.152226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:29.152252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:29.153287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:29.153628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:29.163225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:29.171863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:29.171916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:29.171950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:29.171974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:29.171989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:29.172004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:29.172021Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:29.172036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:29.172052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:29.172067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:29.172081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:29.172098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654086242820425:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:29.172507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:29.172521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:29.172533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:29.172542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:29.172558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:29.172567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:29.172577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:29.172586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:29.172596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:29.172605Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:29.172612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:29.172621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:29.172673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:29.172683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:29.172700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:29.172708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:29.172720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:29.172728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:29.172745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:29.172753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:29.172764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:29.172772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:29.175716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654086242820426:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:29.175739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654086242820426:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:29.175772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654086242820426:2289];tablet_id=7207518622 ... 
T08:57:36.245225Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456231, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` 2024-11-21T08:57:36.314361Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456280, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.351829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456350, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. ORDER BY `timestamp` 2024-11-21T08:57:36.406636Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456385, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.433799Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456441, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` 2024-11-21T08:57:36.487390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456469, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.519235Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456525, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` 2024-11-21T08:57:36.573390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456560, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.610611Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456609, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. != 1.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. 
!= 1.f ORDER BY `timestamp` 2024-11-21T08:57:36.658335Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456651, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.684749Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456700, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` 2024-11-21T08:57:36.748162Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456721, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.780352Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456784, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` 2024-11-21T08:57:36.846524Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456819, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` 2024-11-21T08:57:36.889809Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456882, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` 2024-11-21T08:57:36.952961Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179456924, txId: 18446744073709551615] shutting down 2024-11-21T08:57:36.983752Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` 2024-11-21T08:57:37.034508Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457022, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.065192Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; 
our snapshot: [step: 1732179457071, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` 2024-11-21T08:57:37.121333Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457106, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.159060Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457155, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` 2024-11-21T08:57:37.227956Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457197, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.269337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457260, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2024-11-21T08:57:37.362784Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457302, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.415872Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457393, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2024-11-21T08:57:37.496779Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457449, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.535670Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457533, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA 
Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` 2024-11-21T08:57:37.602508Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457575, txId: 18446744073709551615] shutting down 2024-11-21T08:57:37.640050Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457638, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` 2024-11-21T08:57:37.711840Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457680, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` 2024-11-21T08:57:37.749436Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457743, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_Null [GOOD] Test command err: Trying to start YDB, gRPC: 2916, MsgBus: 9089 2024-11-21T08:57:37.272113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654120108932175:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.272129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483a/r3tmp/tmpZcpFpE/pdisk_1.dat 2024-11-21T08:57:37.332440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2916, node 1 2024-11-21T08:57:37.348705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:37.348720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:37.348722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:37.348763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9089 2024-11-21T08:57:37.373484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:37.373522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:37.374602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9089 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:37.410498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.412886Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:37.419309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:37.430130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.430194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.430227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.430245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.430261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.430276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.430290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.430305Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.430323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.430341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.430355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.430372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120108932813:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.433289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.433314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.433348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.433371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.433396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.433419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.433439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.433461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.433487Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.433518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.433540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.433562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120108932814:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.436795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.436814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.436839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.436856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.436872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.436885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.436898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.436911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654120108932819:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... 
24037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.440107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.440114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.440122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.440128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.440132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.440137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:37.440142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:37.440163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:37.440170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:37.440184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:37.440192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.440201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:37.440229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:37.440245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:37.440253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:37.440262Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:37.440270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2024-11-21T08:57:37.560043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654120108933109:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.560065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654120108933120:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.560070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.560631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:37.561787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654120108933123:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:37.864567Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457617, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AVG(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Double)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1600) '('"_id" '"f688b876-f3ff1fb7-28890999-ed878229") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $30 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $30) '((Nothing $2) $30))))))))))) ))) $4)) (let $6 
(DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Double)) (let $12 (OptionalType (TupleType $11 (DataType 'Uint64)))) (let $13 '('('"_logical_id" '1658) '('"_id" '"7126520d-95db048a-3ce5693f-d2c07dc3") '('"_wide_channels" (StructType '('_yql_agg_0 $12))))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $31 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $32 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $33 (KqpWideReadOlapTableRanges $31 %kqp%tx_result_binding_0_0 '('"level") '() $32 (lambda '($34) (block '( (let $35 '('"_yql_agg_0_sum" '"sum" '"level")) (let $36 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $34 '($35 $36) '())) ))))) (return (FromFlow (WideMap $33 (lambda '($37 $38) (IfPresent $38 (lambda '($39) (Just '((Convert $39 'Double) $37))) (Nothing $12)))))) ))) $13)) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (OptionalType $11)) (let $17 (DqPhyStage '($15) (lambda '($40) (block '( (let $41 (Bool 'false)) (let $42 (WideCondense1 (ToFlow $40) (lambda '($44) $44) (lambda '($45 $46) $41) (lambda '($47 $48) (IfPresent $47 (lambda '($49) (IfPresent $48 (lambda '($50) (Just '((AggrAdd (Nth $49 '0) (Nth $50 '0)) (AggrAdd (Nth $49 '1) (Nth $50 '1))))) $47)) $48)))) (let $43 (Condense (NarrowMap (Take $42 (Uint64 '1)) (lambda '($51) (block '( (let $52 (IfPresent $51 (lambda '($53) (Just (Div (Nth $53 '0) (Nth $53 '1)))) (Nothing $16))) (return (AsStruct '('Avg0 $52))) )))) (Nothing (OptionalType (StructType '('Avg0 $16)))) (lambda '($54 $55) $41) (lambda '($56 $57) (Just $56)))) (return (FromFlow (Map $43 (lambda '($58) (AsList (AsStruct '('"column0" (Member $58 'Avg0)))))))) ))) '('('"_logical_id" '3336) '('"_id" '"e399a2ea-521e67b8-5b6f285d-6db8d3a6")))) (let $18 (DqCnValue (TDqOutput $17 '0))) (let $19 (KqpTxResultBinding $10 '0 '0)) (let $20 '('('"type" '"scan"))) (let $21 (KqpPhysicalTx '($14 $17) '($18) '('($8 $19)) $20)) (let $22 '"%kqp%tx_result_binding_1_0") (let $23 (ListType (StructType '('"column0" $16)))) (let $24 '('('"_logical_id" '3432) '('"_id" '"79f5c30d-a63e8060-718266d4-f3ae8752") $3)) (let $25 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $24)) (let $26 (DqCnResult (TDqOutput $25 '0) '('"column0"))) (let $27 (KqpTxResultBinding $23 '1 '0)) (let $28 (KqpPhysicalTx '($25) '($26) '('($22 $27)) $20)) (let $29 '($7 $21 $28)) (return (KqpPhysicalQuery $29 '((KqpTxResultBinding $23 '"2" '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Avg [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns+isStream [GOOD] Test command err: Trying to start YDB, gRPC: 18330, MsgBus: 19199 2024-11-21T08:57:37.110476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654119076851776:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.110622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00483f/r3tmp/tmp8MuzLs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18330, node 1 2024-11-21T08:57:37.166720Z node 1 
:IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:37.170929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:37.170942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:37.170944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:37.170980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19199 TClient is connected to server localhost:19199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:37.211505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:37.211552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:37.212666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:37.244770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.258766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.320890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.337823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.347549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.437737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654119076853319:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.437775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.460034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.466768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.521299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.576246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.583160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.589824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.606406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654119076853838:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.606432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.606480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654119076853843:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:37.607388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:37.610356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654119076853845:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } CREATE TABLE `/Root/ColumnTableTest` (time Timestamp NOT NULL, class Utf8 NOT NULL, uniq Utf8 NOT NULL, PRIMARY KEY (time, class, uniq)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:37.758301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.764069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.764106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.764134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.764146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.764158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.764181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.764220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.764246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.764259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.764273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.764291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.764310Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7439654119076854193:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.764711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:37.764728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:37.764738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:37.764749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:37.764762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.764771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.764778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.764782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.764804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.764807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.764813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:37.764822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:37.764872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:37.764882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:37.764906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:37.764914Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.764923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:37.764932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:37.764944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:37.764947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:37.764956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:37.764965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow13TimestampTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=312;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=312;columns=3; 2024-11-21T08:57:37.841446Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654119076854358:2528] TxId: 281474976715672. Ctx: { TraceId: 01jd6z1xs22s6pyjkstj51afz3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRjZjViYjMtZjE0Njk5NzctNWJjMWUzMDAtMzA5ZjFhODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:37.857187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037919;tx_state=complete;fline=interaction.h:353;batch=time: [ 2024-11-21 08:57:37.815477 ] class: [ "test" ] uniq: [ "test" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715672}],"starts":[{"inc":{"count_not_include":1},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T08:57:37.876177Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654119076854872:2679] TxId: 281474976715675. Ctx: { TraceId: 01jd6z1xt6cfgwezfq2ftt5fr1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThhN2JiYmQtMzRkN2M2MjgtMTUwNWI1OTItOGJkZTg5Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T08:57:37.887911Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457911, txId: 18446744073709551615] shutting down >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> KqpOlapAggregations::Aggregation_MinL [GOOD] >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg [GOOD] Test command err: Trying to start YDB, gRPC: 26741, MsgBus: 6991 2024-11-21T08:57:25.029892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654068028608693:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.030031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489b/r3tmp/tmpmtP33t/pdisk_1.dat 2024-11-21T08:57:25.082937Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26741, node 1 2024-11-21T08:57:25.095098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.095112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.095114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.095163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6991 TClient is connected to server localhost:6991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:25.131299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.131329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.132406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.160937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:25.171470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.183572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.183652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.183689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.183713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.183734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.183759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.183780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.183801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.183824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.183851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.183876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.183903Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654068028609348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.184490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.184505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.184517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.184521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.184536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.184544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.184553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.184559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.184574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.184583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.184589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:25.184597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:25.184655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:25.184665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:25.184680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:25.184687Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.184697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:25.184705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:25.184720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:25.184728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:25.184738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:25.184746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:25.187977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654068028609349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.188628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654068028609349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.188687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654068028609349:2289];tablet_id=7207518622403 ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:36.539217Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:36.663313Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:36.663339Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:36.745351Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:36.745376Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:36.827253Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:36.827273Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:36.919569Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:36.919590Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:36.981196Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:37.001841Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:37.063187Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.063212Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.145128Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. 
TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.145148Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.227247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.227271Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.309461Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.309488Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.403052Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.403075Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.423623Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:37.559688Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.559711Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. 
Database : . }. wakeup with tag 2 2024-11-21T08:57:37.662605Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.662622Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.744741Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.744768Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.826630Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.826662Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:37.918797Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:37.918818Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmI3YzUzZDctOTdkM2VmNi1hYjk1NDhiNS1hMzYwODJiMw==. TraceId : 01jd6z1mz6d2g9q25dxn3dx35t. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:37.969990Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:37.990651Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapSparsed::AccessorActualization >> KqpOlap::SimpleLookupOlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MinL [GOOD] Test command err: Trying to start YDB, gRPC: 5257, MsgBus: 29609 2024-11-21T08:57:25.131683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654067673054092:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.131698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004895/r3tmp/tmpSYMx3I/pdisk_1.dat 2024-11-21T08:57:25.189010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5257, node 1 2024-11-21T08:57:25.196543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.196557Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.196559Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.196588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29609 TClient is connected to server localhost:29609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:25.234404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.234432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.234958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.264427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:25.273278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:25.284102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.305219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.305294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.305340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.305360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.305377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.305400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.305421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.305436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.305455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.305475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.305510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.305527Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654067673054747:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.313805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.313841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.313895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.313919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.313935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.313952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.313968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.313990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.314008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.314025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.314043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.314058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654067673054748:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.321079Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.321116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.321194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.321223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.321246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.321263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.321283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.321306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654067673054749:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:36.958880Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.114047Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.114077Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.196508Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. 
Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.196558Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.281124Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.281164Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.363238Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.363265Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.455992Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:37.466458Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:37.517665Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.517692Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.599670Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 65538 2024-11-21T08:57:37.599714Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.681646Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.681668Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.763784Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.763816Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.845796Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:37.845822Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:37.887955Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:38.003374Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.003396Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:38.106221Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.106249Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.188797Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.188826Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.270940Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.270960Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.352950Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.352984Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z1mt1fdbaxv4tv3myrgvn. SessionId : ydb://session/3?node_id=2&id=OWY5MWQwMmItMTEyMTg2NzItYWU1ODY2ZTUtMTU3MDdlNWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:38.425100Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:38.445917Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL >> KqpOlapWrite::WriteDeleteCleanGC ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] Test command err: Trying to start YDB, gRPC: 28037, MsgBus: 27395 2024-11-21T08:57:38.361428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654124238598511:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:38.361466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004839/r3tmp/tmpHSFbEQ/pdisk_1.dat 2024-11-21T08:57:38.418671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28037, node 1 2024-11-21T08:57:38.427222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:38.427236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:38.427238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:38.427279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27395 2024-11-21T08:57:38.462538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:38.462564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:38.463616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:38.492222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:38.497735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:38.507771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:38.520722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.520789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.520845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.520867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.520891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.520916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.520935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.520956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:38.520977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:38.521000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.521021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:38.521040Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654124238599136:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:38.524416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.524440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.524482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.524503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.524523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.524543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.524564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.524583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:38.524598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:38.524614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.524628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:38.524642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654124238599157:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:38.525070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.525077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.525087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.525091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.525104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.525108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.525115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.525119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.525125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.525128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... 
tion=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.531779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.531783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.531905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.531911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.531918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.531921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.531934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.531937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.531944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.531947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.531953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.531956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:38.531960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:38.531963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:38.531986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:38.531994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:38.532018Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:38.532021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.532030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:38.532033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:38.532044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:38.532047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.532055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.532058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.533552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.533565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.533573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.533577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.533591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.533595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.533603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.533608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.533617Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.533621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:38.533626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:38.533635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:38.533665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:38.533675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:38.533691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:38.533696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.533707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:38.533712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:38.533726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:38.533734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.533744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.533748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.573597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5800;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2024-11-21T08:57:38.701969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654124238599464:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.702001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.702069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654124238599491:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.702746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:38.704330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654124238599493:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleLookupOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26733, MsgBus: 30724 2024-11-21T08:57:38.465947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654125749748884:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:38.466051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004836/r3tmp/tmpye3hbD/pdisk_1.dat 2024-11-21T08:57:38.533636Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26733, node 1 2024-11-21T08:57:38.544124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:38.544136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:38.544138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:38.544189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30724 2024-11-21T08:57:38.568595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:38.568627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:38.571228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:38.604193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:38.606532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:38.612162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:38.624699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.624771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.624827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.624852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.624877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.624898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.624921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.624944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:38.624975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:38.625001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.625027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:38.625049Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654125749749326:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:38.629410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.629442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.629490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.629527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.629551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.629573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.629592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.629611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:38.629631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:38.629653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.629673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:38.629695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654125749749325:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:38.633247Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.633278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.633317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.633340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.633361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.633381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.633397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.633420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654125749749329:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.638260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.638334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.638338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.638346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.638356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.638371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.638374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.638381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.638385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.638392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.638396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:38.638402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:38.638405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:38.638429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:38.638433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:38.638447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:38.638451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.638460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:38.638463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:38.638478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:38.638481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.638490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.638493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.638543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.638550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.638557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.638561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.638576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.638579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.638593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.638596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.638603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.638606Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:38.638611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:38.638614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:38.638636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:38.638640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:38.638654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:38.638658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.638667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:38.638671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:38.638686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:38.638689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.638699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.638701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:38.672474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:38.754028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654125749749622:2377], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.754047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654125749749649:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.754053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.754697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:38.756054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654125749749651:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:38.916228Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179458807, txId: 18446744073709551615] shutting down >> KqpDecimalColumnShard::TestJoinById >> KqpOlapBlobsSharing::UpsertWhileSplitTest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream >> KqpOlap::OlapUpsertImmediate [GOOD] >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply >> test.py::test[pg-range_function_multi-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Plan] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Results] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsertImmediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestAggregation Test command err: Trying to start YDB, gRPC: 25399, MsgBus: 19509 2024-11-21T08:57:36.738989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654114473843244:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:36.739222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004846/r3tmp/tmpZ9efpp/pdisk_1.dat 2024-11-21T08:57:36.792577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25399, node 1 2024-11-21T08:57:36.797059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:36.797069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:36.797071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:36.797092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19509 TClient is connected to server localhost:19509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:36.839071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:36.839093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:36.840348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:36.873257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (timestamp Timestamp64 NOT NULL, interval Interval64, PRIMARY KEY (timestamp)) PARTITION BY HASH(timestamp) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:36.993251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654114473843842:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:36.993294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:36.995955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.000966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.001001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.001027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.001041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.001056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.001069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.001083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.001100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.001126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.001141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.001156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.001170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114473843918:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:37.001665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:37.001680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:37.001692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:37.001701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:37.001717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.001721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.001730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.001744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.001756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.001764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.001771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:37.001780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:37.001828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:37.001844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:37.001865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:37.001873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:37.001885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:37.001893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:37.001909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:37.001917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:37.001928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:37.001936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=n ... IONS_COUNT =1); 2024-11-21T08:57:38.242441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654123873582479:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.242485Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.246076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:38.252820Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:38.252849Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:38.252877Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:38.252892Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:38.252905Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:38.252924Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:38.252941Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:38.252957Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:38.252974Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:38.252992Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:38.253015Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:38.253038Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439654123873582525:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:38.253464Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:38.253480Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:38.253489Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:38.253502Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:38.253521Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:38.253528Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:38.253534Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:38.253539Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:38.253546Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:38.253553Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:38.253558Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:38.253565Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:38.253612Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:38.253623Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:38.253633Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:38.253640Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:38.253647Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:38.253654Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:38.253664Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:38.253668Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:38.253675Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:38.253682Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:38.323935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654124672977040:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.323980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654124672977045:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.323985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.324879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:38.326622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654124672977047:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpOlapAggregations::Aggregation >> KqpOlapAggregations::Aggregation_ResultCountExpr [GOOD] >> KqpOlapIndexes::IndexesInLocalMetadata >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountExpr [GOOD] Test command err: Trying to start YDB, gRPC: 19631, MsgBus: 24695 2024-11-21T08:57:23.382870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654059490085843:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.382988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048aa/r3tmp/tmpDgA4og/pdisk_1.dat 2024-11-21T08:57:23.431819Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19631, node 1 2024-11-21T08:57:23.440746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.440771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.440773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.440813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24695 2024-11-21T08:57:23.484423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.484457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.485528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.498822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:23.508384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.518531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.518592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.518628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.518644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.518659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.518678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.518700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.518718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.518740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.518766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.518782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.518800Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654059490086500:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.521759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.521783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.521820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.521843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.521858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.521871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.521884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.521898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.521919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.521935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.521950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.521964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654059490086510:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.526931Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.526957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.527014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.527038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.527063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.527078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.527092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.527110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654059490086501:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.527126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396540594900865 ... DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.379482Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.577993Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.578021Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.671988Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.672019Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.754793Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.754823Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.837325Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.837358Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:38.961435Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:38.971880Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:38.992387Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:38.992413Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.074752Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.074778Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.157023Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.157052Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.239198Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.239220Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.321274Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.321303Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.394214Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:39.446640Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.446670Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. 
TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.593548Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.593570Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.686013Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.686043Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.768141Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.768163Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:39.849926Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:39.849952Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1888:3005], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGZkZGE4OTctNzBmZTE4ZDEtZmQ2YzE3YmYtY2VhZDIzNTA=. TraceId : 01jd6z1mha2r1yjp0z2pz91pw0. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:39.952458Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:39.973095Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] Test command err: Trying to start YDB, gRPC: 1111, MsgBus: 17728 2024-11-21T08:57:39.572988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654129570078706:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.573169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00482c/r3tmp/tmpr4LRih/pdisk_1.dat 2024-11-21T08:57:39.623176Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1111, node 1 2024-11-21T08:57:39.633400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.633416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.633417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.633448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17728 TClient is connected to server localhost:17728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:39.673919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.673948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:39.675065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:39.701474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:39.707064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.766606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.780772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.788402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.876130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654129570080248:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.876151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.898146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.903006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.914117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.920921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.975145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.983987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.991867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654129570080763:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.991890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654129570080768:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.991894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.992405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:39.997027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654129570080770:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } CREATE TABLE `/Root/ColumnTableTest` (time Timestamp NOT NULL, class Utf8 NOT NULL, uniq Utf8 NOT NULL, PRIMARY KEY (time, class, uniq)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:40.198979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:40.208728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.208777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.208830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.208857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.208881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.208904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.208930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.208962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.208988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.209012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.209035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.209060Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7439654133865048414:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.210108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:40.210123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:40.210136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:40.210141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:40.210159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:40.210165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:40.210175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:40.210182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:40.210194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:40.210199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.210206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:40.210211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:40.210280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:40.210290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:40.210317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:40.210323Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.210342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:40.210350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:40.210377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:40.210396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:40.210415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:40.210419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow13TimestampTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=312;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=312;columns=3; 2024-11-21T08:57:40.291040Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654133865048583:2528] TxId: 281474976715672. Ctx: { TraceId: 01jd6z204c7tqxjt9cwcvm165b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNjOTJmOWQtNzllZGMzODktOTcwMzM2YzUtNzI3NmQ5ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:40.332471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037919;tx_state=complete;fline=interaction.h:353;batch=time: [ 2024-11-21 08:57:40.224758 ] class: [ "test" ] uniq: [ "test" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715672}],"starts":[{"inc":{"count_not_include":1},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T08:57:40.358135Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654133865049099:2679] TxId: 281474976715675. Ctx: { TraceId: 01jd6z207kcxba409q0bmqbkfr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZlOGE1ZmQtYTI4NDM1OTMtMjUyNmNlOTYtOTUzYjZmOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T08:57:40.373686Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460389, txId: 18446744073709551615] shutting down >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> KqpOlapSysView::StatsSysViewRanges >> KqpOlap::OlapLayout >> KqpDecimalColumnShard::TestJoinById [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2024-11-21T08:55:26.209573Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1732179326209567 2024-11-21T08:55:26.314142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653558135222160:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:26.314361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:26.316548Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439653556526946910:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:26.316714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:55:26.334896Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:55:26.338300Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032db/r3tmp/tmp1Wvi8u/pdisk_1.dat 2024-11-21T08:55:26.363276Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13585, node 1 2024-11-21T08:55:26.373981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0032db/r3tmp/yandexb9h6zA.tmp 2024-11-21T08:55:26.373994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0032db/r3tmp/yandexb9h6zA.tmp 2024-11-21T08:55:26.374052Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0032db/r3tmp/yandexb9h6zA.tmp 2024-11-21T08:55:26.374096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:55:26.378946Z INFO: TTestServer started on Port 29625 GrpcPort 13585 TClient is connected to server localhost:29625 PQClient connected to localhost:13585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:55:26.414678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:26.414727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:26.416297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:55:26.441966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:26.442000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:26.442851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:26.442992Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:55:26.443230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T08:55:26.555845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653558135223053:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:26.555871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:26.555924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653558135223065:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:26.556484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T08:55:26.558402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653558135223098:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:26.558494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:55:26.559190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653558135223067:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T08:55:26.576092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T08:55:26.582574Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439653556526947227:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:26.582645Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDlmN2YwODUtNTMyNjY2Mi0zOGUwMDcxYi05MmE4ZTQy, ActorId: [2:7439653556526947184:2277], ActorState: ExecuteState, TraceId: 01jd6yxxkjew798x1qz1nbyrya, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:26.583041Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:26.639211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:55:26.658121Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439653558135223349:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:55:26.658196Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWRmNzdlY2QtZDY3YmNhZjYtZmQ1MzkxZTQtNTE2OTVlYTk=, ActorId: [1:7439653558135223050:2299], ActorState: ExecuteState, TraceId: 01jd6yxxjtb7qvqfr4vbwgxmbh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:55:26.658386Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:55:26.705112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13585", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T08:55:26.744177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd6yxxr1anx1zpt27gpmvmew, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1YzQxYTAtMTBlOTJiNC1jZmEzMWZlMy0xZWJkMjE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439653558135223542:2930] 2024-11-21T08:55:31.314334Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653558135222160:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:31.314382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:31.316867Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439653556526946910:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:31.316896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:55:31.820565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:13585 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T08:55:31.832104Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC ... 
c_id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:57:20.092386Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439654045759756489:3693] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2024-11-21T08:57:20.092395Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T08:57:20.092949Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:57:20.092971Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439654045759756518:3693], now have 1 active actors on pipe 2024-11-21T08:57:20.093016Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T08:57:20.093110Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:57:20.093121Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:57:20.093148Z node 4 :PERSQUEUE INFO: new Cookie src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2024-11-21T08:57:20.093179Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:57:20.093193Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:57:20.093356Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:57:20.093363Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:57:20.093381Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:57:20.093460Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 2024-11-21T08:57:20.093765Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:57:20.093788Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732179440093 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:57:20.093830Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session established. 
Init response: last_seq_no: 2 session_id: "src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2024-11-21T08:57:21.096311Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439654045759756489:3693] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2024-11-21T08:57:21.104030Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439654045759756489:3693] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2024-11-21T08:57:21.104047Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439654045759756489:3693] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2024-11-21T08:57:23.970249Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:57:28.970473Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:57:33.970631Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:57:38.962543Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T08:57:38.962585Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2024-11-21T08:57:38.962773Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T08:57:38.962960Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T08:57:38.963227Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2024-11-21T08:57:38.963275Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 2024-11-21T08:57:38.970845Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T08:57:40.162545Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent >>> Ready to answer: ok 2024-11-21T08:57:40.164723Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T08:57:40.164745Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T08:57:40.171276Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:57:40.171559Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:57:40.172085Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:57:40.171832Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:57:40.171859Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:57:40.171906Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:57:40.172260Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T08:57:40.172266Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T08:57:40.172288Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T08:57:40.172372Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2024-11-21T08:57:40.172408Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2024-11-21T08:57:40.172471Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1732179460172 2024-11-21T08:57:40.172497Z node 4 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T08:57:40.182974Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T08:57:40.183540Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:57:40.183610Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T08:57:40.183618Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2024-11-21T08:57:40.183621Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: acknoledged message 1 2024-11-21T08:57:40.180370Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 2024-11-21T08:57:40.180415Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:57:40.180436Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T08:57:40.180528Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 0 CachedBlobs 0 2024-11-21T08:57:40.180543Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:57:40.184774Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2024-11-21T08:57:40.184813Z :ERROR: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T08:57:40.184816Z :ERROR: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2024-11-21T08:57:40.184819Z :INFO: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session will now close 2024-11-21T08:57:40.184836Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T08:57:40.190260Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 grpc read done: success: 0 data: 2024-11-21T08:57:40.190286Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 grpc read failed 2024-11-21T08:57:40.190296Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 grpc closed 2024-11-21T08:57:40.190304Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0 is DEAD 2024-11-21T08:57:40.190490Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:57:40.192811Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:57:40.192855Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439654045759756518:3693] destroyed 2024-11-21T08:57:40.192871Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:57:40.204992Z :DEBUG: [/Root] SessionId [src_id|44bd38a8-5e3bc7e-aa83d640-b2e9fdbb_0] MessageGroupId [src_id] Write session: destroy >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestJoinById [GOOD] Test command err: Trying to start YDB, gRPC: 8077, MsgBus: 23111 2024-11-21T08:57:39.614505Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654129506157384:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.614824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00482a/r3tmp/tmpdO02ph/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8077, node 1 2024-11-21T08:57:39.668978Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:39.672388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.672398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.672399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.672425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23111 TClient is connected to server localhost:23111 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:39.715815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.715842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:39.716913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:39.744430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:39.870548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654129506157989:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.870578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:39.888560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:39.894174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.894225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.894259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.894281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.894297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.894314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.894330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.894348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.894370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.894394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.894407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.894421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654129506158065:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:39.894819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:39.894835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:39.894847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:39.894852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:39.894872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:39.894882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:39.894891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:39.894900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:39.894908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:39.894918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:39.894926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:39.894934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:39.894987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:39.894997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:39.895012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:39.895021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:39.895031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:39.895045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:39.895068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:39.895081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:39.895092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:39.895096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBA ... ormalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.483024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.483046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.483067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.483087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.483108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.483131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.483153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.483176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.483197Z node 
2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439654134747576868:2328];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.483620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:40.483637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:40.483649Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:40.483653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:40.483667Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:40.483677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:40.483685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:40.483699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:40.483711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:40.483719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.483725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:40.483732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:40.483774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:40.483784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:40.483800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:40.483808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.483820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:40.483828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:40.483848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:40.483856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:40.483870Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:40.483878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=352;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=3; 2024-11-21T08:57:40.545092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654133801125602:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.545132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.545131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654133801125607:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.545787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:40.547049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654133801125609:2395], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:40.722897Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460599, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.724499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654134747576921:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.724527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.724536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654134747576926:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.725097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:40.726372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654134747576928:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:40.850813Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460781, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2024-11-21T08:56:29.311798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:29.311823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:29.311828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:29.311833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:29.311844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:29.311848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:29.311857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:29.311941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:29.315872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:29.315893Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:29.317621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:29.317714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:29.317733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:29.319090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:29.319154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:29.319233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.319283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:29.319836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:29.320083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:29.320090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:29.320106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:29.320111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:29.320115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:29.320148Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.362292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:29.362366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.362416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:29.362454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:29.362461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.363341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.363364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:29.363401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.363409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:29.363412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:29.363415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:29.363763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.363769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:29.363772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:29.364025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.364031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.364035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:29.364041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:29.364516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:29.364864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:29.364905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:29.365095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.365100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:29.365103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.853439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:29.853526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 653 RawX2: 4294969527 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:29.853540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:29.853646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:29.853658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:29.853702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:29.853718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:29.854439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:29.854455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:29.854499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:29.854505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:660:2236], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:29.854576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:29.854585Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:29.854598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:29.854603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:29.854619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:29.854625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:29.854631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:29.854635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:29.854647Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:29.854654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:29.854657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:29.855179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:29.855202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:29.855207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:29.855213Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:29.855218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:29.855235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:29.855240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:517:2138] 2024-11-21T ... 409721Z node 165 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:39.409764Z node 165 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:39.409771Z node 166 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:39.409815Z node 166 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:39.409822Z node 167 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:39.409863Z node 167 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:39.409869Z node 168 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T08:57:39.409914Z node 168 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:57:39.409933Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.409964Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.409985Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.409991Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.409998Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410011Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410016Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410022Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410032Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410042Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410048Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 
Memory: 1 Network: 1 2024-11-21T08:57:39.410059Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410066Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410073Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410083Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410090Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410099Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410112Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410122Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410128Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410139Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410145Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T08:57:39.410152Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T08:57:39.410163Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:57:39.410264Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410274Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410279Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410299Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[170:962:2101] 2024-11-21T08:57:39.410367Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410374Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410377Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410385Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[171:964:2101] 2024-11-21T08:57:39.410450Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410456Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410460Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410468Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[164:966:2101] 2024-11-21T08:57:39.410529Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410536Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410539Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410546Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[165:968:2101] 2024-11-21T08:57:39.410610Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410617Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410620Z node 166 :LOCAL 
DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410627Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[166:970:2101] 2024-11-21T08:57:39.410686Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410692Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410696Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410705Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[167:972:2101] 2024-11-21T08:57:39.410768Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410775Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410778Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410787Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[168:974:2101] 2024-11-21T08:57:39.410854Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T08:57:39.410861Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:57:39.410864Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:57:39.410874Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[169:976:2101] 2024-11-21T08:57:39.414906Z node 164 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.414935Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [164:928:2098] 2024-11-21T08:57:39.414973Z node 165 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.414981Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [165:929:2098] 2024-11-21T08:57:39.415006Z node 166 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415013Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [166:930:2098] 2024-11-21T08:57:39.415033Z node 167 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415040Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [167:931:2098] 2024-11-21T08:57:39.415061Z node 168 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415068Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [168:932:2098] 2024-11-21T08:57:39.415087Z node 169 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415093Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [169:933:2098] 2024-11-21T08:57:39.415112Z node 170 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415119Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [170:934:2098] 2024-11-21T08:57:39.415135Z node 171 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T08:57:39.415143Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [171:935:2098] 2024-11-21T08:57:39.415997Z node 164 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[164:966:2101] 2024-11-21T08:57:39.416067Z node 165 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected 
{TabletId=72057594046578946 Status=OK ClientId=[165:968:2101] 2024-11-21T08:57:39.416117Z node 171 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[171:964:2101] 2024-11-21T08:57:39.416268Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416280Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416284Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416296Z node 166 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[166:970:2101] 2024-11-21T08:57:39.416307Z node 167 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[167:972:2101] 2024-11-21T08:57:39.416346Z node 168 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[168:974:2101] 2024-11-21T08:57:39.416421Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416426Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416429Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416445Z node 169 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[169:976:2101] 2024-11-21T08:57:39.416452Z node 170 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[170:962:2101] 2024-11-21T08:57:39.416483Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416491Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416494Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416560Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416565Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416568Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416607Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416612Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416615Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416648Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416652Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416655Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416697Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416701Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416704Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T08:57:39.416743Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T08:57:39.416748Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T08:57:39.416751Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] Test command err: Trying to start YDB, gRPC: 30867, MsgBus: 23896 2024-11-21T08:57:28.024086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654080218395235:2053];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T08:57:28.024101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487e/r3tmp/tmphTSMrM/pdisk_1.dat 2024-11-21T08:57:28.071811Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30867, node 1 2024-11-21T08:57:28.080316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.080329Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.080330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.080368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23896 2024-11-21T08:57:28.125746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:28.125773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:28.126960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:28.138393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
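The per-test verdict markers in the block above (e.g. ">> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD]") are easy to lose in the surrounding bootstrap noise. Below is a minimal sketch for collecting those verdicts from a saved copy of this log; it is illustrative only, assumes the log is available as a plain-text file, and treats any bracketed upper-case word after a ">> Suite::Test" marker as a verdict, which is an inference from the entries above rather than a documented format.

```python
#!/usr/bin/env python3
"""Minimal sketch: collect ">> Suite::Test [VERDICT]" markers from a saved
ya log. Assumes a plain-text log file and bracketed upper-case verdicts;
both are inferences from the surrounding output, not a documented format."""
import re
import sys
from collections import Counter

# ">> KqpOlap::OlapLayout [GOOD]" style markers; several may share one physical line.
VERDICT_RE = re.compile(r">>\s+(?P<name>[\w:./-]+)\s+\[(?P<verdict>[A-Z]+)\]")

def collect_verdicts(path: str) -> Counter:
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as log:
        for line in log:
            for match in VERDICT_RE.finditer(line):
                counts[match.group("verdict")] += 1
                if match.group("verdict") != "GOOD":
                    # Surface anything that is not a plain pass.
                    print(f"{match.group('verdict'):>8}  {match.group('name')}")
    return counts

if __name__ == "__main__":
    summary = collect_verdicts(sys.argv[1] if len(sys.argv) > 1 else "ya_log.txt")
    for verdict, count in summary.most_common():
        print(f"{verdict}: {count}")
```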
2024-11-21T08:57:28.149621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:28.164517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.164599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.164645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.164667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.164684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.164698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.164717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.164737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.164758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.164775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.164796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.164813Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654080218395884:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.165478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.165516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.165530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.165535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.165553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.165557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.165566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.165572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.165582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.165592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.165599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.165603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.165666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.165678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.165695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.165704Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.165714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.165723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.165740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.165750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.165761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.165769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:28.169039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654080218395894:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.169066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654080218395894:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.169104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654080218395894:2289];tablet_id=7207518622 ... DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:39.597967Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:39.743075Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:39.743100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:39.825130Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:39.825161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:39.907309Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:39.907342Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:39.989532Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:39.989595Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.061994Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:40.082925Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:40.146805Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.146842Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.234415Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . 
TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.234453Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.316854Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.316891Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.398978Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.399003Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.481072Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.481092Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.513096Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:40.640128Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.640151Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:40.743080Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.743102Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.825291Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.825316Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.907457Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.907482Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:40.991451Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:40.991476Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWEyYzdlYjgtZDcyOWY2ZmYtOWQ4YWMwZDUtYmM4NDZkMDM=. CustomerSuppliedId : . TraceId : 01jd6z1qj3dzrhkf8fxsq8f67j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:41.053249Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:41.074119Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpDatetime64ColumnShard::UseTime64Columns >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy >> KqpOlap::OlapLayout [GOOD] >> KqpOlap::OlapDeleteImmediatePK [GOOD] >> KqpDatetime64ColumnShard::UseTime64Columns [GOOD] >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapLayout [GOOD] Test command err: Trying to start YDB, gRPC: 6007, MsgBus: 8765 2024-11-21T08:57:41.276426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654135836508809:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:41.276649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004822/r3tmp/tmpCseT3p/pdisk_1.dat 2024-11-21T08:57:41.315660Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6007, node 1 2024-11-21T08:57:41.329882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:41.329897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:41.329899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:41.329943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8765 TClient is connected to server localhost:8765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
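Each column shard in the test output above registers the same chain of normalizers (CLASS_NAME=Granules, Chunks, TablesCleaner, CleanGranuleId, and so on up to RestoreV2Chunks) as a run of TX_COLUMNSHARD WARN entries, which makes per-tablet differences hard to spot by eye. The following sketch groups those registrations by tablet_id when reading a saved copy of this log; it is illustrative only and relies solely on the field names visible in the entries above.

```python
#!/usr/bin/env python3
"""Minimal sketch: group TX_COLUMNSHARD normalizer-registration entries by
tablet. Relies only on field names visible in the log (tablet_id=...,
event=normalizer_register, description=CLASS_NAME=...); reading the log
from a plain-text file is an assumption."""
import re
import sys
from collections import defaultdict

# One registration looks like:
#   tablet_id=72075186224037888;...;event=normalizer_register;description=CLASS_NAME=Granules;
ENTRY_RE = re.compile(
    r"tablet_id=(?P<tablet>\d+);[^ ]*?"
    r"event=normalizer_register;description=CLASS_NAME=(?P<name>\w+)"
)

def normalizers_by_tablet(path: str) -> dict:
    registered = defaultdict(list)
    with open(path, encoding="utf-8", errors="replace") as log:
        for line in log:
            for match in ENTRY_RE.finditer(line):
                registered[match.group("tablet")].append(match.group("name"))
    return registered

if __name__ == "__main__":
    for tablet, names in sorted(normalizers_by_tablet(sys.argv[1]).items()):
        print(f"tablet {tablet}: {len(names)} normalizers ({', '.join(names)})")
```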
2024-11-21T08:57:41.399351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:41.399382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:41.400123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.400425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:57:41.408575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:41.430362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:41.430402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:41.430433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:41.430448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:41.430461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:41.430479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:41.430493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:41.430507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:41.430522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:41.430535Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:41.430556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:41.430573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[1:7439654135836509590:2289];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:41.432707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:41.432726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:41.432747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:41.432757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:41.432766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:41.432779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:41.432788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:41.432802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:41.432815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:41.432834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:41.432851Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:41.432864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654135836509613:2298];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:41.434814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:41.434832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:41.434851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:41.434862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:41.434878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:41.434891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:41.434905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:41.434922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:2297];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:41.434936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439654135836509609:22 ... 94], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.802061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: Cannot create table with 17 column shards, only 16 are available, code: 2003 } } } 2024-11-21T08:57:41.807701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511625:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.807722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.811020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.817021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511673:2609], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.817041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.820478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.827415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511716:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.827435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.829074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.842172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511770:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.842193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.844086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.855595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511824:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.855620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.857510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.869433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511868:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.869457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.871168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.883860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511907:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.883889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.885590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.897751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836511946:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.897781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.900766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.909663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976715682 at schemeshard 72057594046644480 2024-11-21T08:57:41.912248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512003:2670], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.912279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.915018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.922983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976715683 at schemeshard 72057594046644480 2024-11-21T08:57:41.925226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512045:2678], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.925249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: cannot find appropriate group for 9 shards, code: 2003 } } } 2024-11-21T08:57:41.930239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512063:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.930258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.932107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.940622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512130:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.940650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.942591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.954956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512169:2703], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.954977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.957030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T08:57:41.963315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135836512213:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.963336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.966185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts Test command err: Trying to start YDB, gRPC: 7933, MsgBus: 63908 2024-11-21T08:57:35.410652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654114103329495:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:35.410672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00484e/r3tmp/tmpdswdGj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7933, node 1 2024-11-21T08:57:35.468789Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:35.470463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:35.470475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:35.470477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:35.470507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63908 TClient is connected to server localhost:63908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:35.512177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:35.512203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:35.513324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:35.516648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:35.526662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:35.531808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:35.534876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:35.534926Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:57:35.535594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:35.535662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:35.535715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:35.535738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:35.535759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:35.535779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:35.535802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:35.535824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:35.535846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:35.535867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:35.535887Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:35.535912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654114103330134:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:35.536408Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T08:57:35.536433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:57:35.536437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:57:35.536455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:35.536480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:35.536492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:35.536499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:57:35.536505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:57:35.536517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:35.536525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:35.536527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:57:35.536538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:35.536546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:35.536548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:35.536550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:57:35.536555Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:57:35.536565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:35.536571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:35.536576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:57:35.536582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:35.536584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:35.536585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:57:35.536589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:35.536595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Res ... napshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:57:40.818938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:40.818993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:57:40.819015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:57:40.819040Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:57:40.819043Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:40.819061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:40.819062Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:40.819088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:40.819100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:40.819144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:40.819211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:40.819235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:40.819254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:40.819264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:40.819276Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819279Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:40.819313Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:40.819369Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:40.819372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:40.819434Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179460000 at tablet 72075186224037889 2024-11-21T08:57:40.819450Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:40.819458Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:40.819466Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:40.819468Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179460000 at tablet 72075186224037888 2024-11-21T08:57:40.819474Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:40.819479Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:40.819482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:40.819486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:40.819489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819491Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:40.819496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654131283199952:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:40.819499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:40.819503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:40.819512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:40.819512Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654131283199979:2511];tablet_id=72075186224037889;parent=[1:7439654131283199950:2507];fline=manager.h:99;event=ask_data;request=request_id=39;3={portions_count=1};; 2024-11-21T08:57:40.819521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654131283199950:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:40.819909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654131283199952:2508];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:40.819915Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654131283199950:2507];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:229: Execute_ @ 0x124D2A1B 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FDBD8FFBD8F 11. ??:0: ?? @ 0x7FDBD8FFBE3F 12. ??:0: ?? 
@ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountT_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 12310, MsgBus: 5805 2024-11-21T08:57:28.779392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654082173994749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:28.779581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004878/r3tmp/tmpAgt4cq/pdisk_1.dat 2024-11-21T08:57:28.817468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12310, node 1 2024-11-21T08:57:28.826276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.826290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.826306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.826339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5805 TClient is connected to server localhost:5805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:28.880594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:28.880628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:28.881767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:28.900893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:28.910791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:28.922384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.922469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.922525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.922556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.922576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.922600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.922619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.922637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.922663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.922687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.922708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.922738Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654082173995404:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.926236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.926270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.926312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.926335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.926356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.926371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.926399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.926421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.926445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.926465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.926484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.926505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654082173995407:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.927018Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.927037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.927051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.927059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.927073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.927083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.927092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.927106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.927119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.927128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.927136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890; ... DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.317396Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.493639Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.493659Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.585879Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.585902Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.668061Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.668085Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.750211Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.750227Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.842799Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:40.863532Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:40.904663Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.904689Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:40.986918Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:40.986945Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.069172Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.069203Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.151139Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.151161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.233163Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.233184Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.285550Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:41.390645Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.390665Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . 
DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.493601Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.493622Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.575743Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.575767Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.647696Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.647744Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:41.719535Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:41.719559Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z1raw8q1pn5s2acrwzv1g. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzZiNDdjNTYtZGNjNTY3ZmMtZWNkNDhjYzctNjAwNDc1YmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:41.801877Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:41.822748Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapDeleteImmediatePK [GOOD] >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait >> KqpOlapAggregations::Aggregation [GOOD] >> KqpOlap::MetadataMemoryManager >> KqpOlap::CompactionPlanner >> KqpOlapSysView::StatsSysView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation [GOOD] Test command err: Trying to start YDB, gRPC: 9765, MsgBus: 6936 2024-11-21T08:57:40.312153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654135094907571:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004826/r3tmp/tmpGUNSju/pdisk_1.dat 2024-11-21T08:57:40.339447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:57:40.364658Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9765, node 1 2024-11-21T08:57:40.374033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:40.374043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:40.374045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:40.374087Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6936 TClient is connected to server localhost:6936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:40.439084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:40.439711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:40.439730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T08:57:40.440840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:40.448270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:40.457661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.457703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.457727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.457744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.457766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.457790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.457808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.457826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.457842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.457857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.457872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.457894Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654135094908051:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.459818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.459831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.459846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.459855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.459864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.459872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.459880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.459889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.459898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.459913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.459935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.459948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135094908052:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.460228Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:40.460239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:40.460248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:40.460251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:40.460266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:40.460272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:40.460279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:40.460286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:40.460291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:40.460293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.460297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;pr ... 
5Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.464740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:40.464748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:40.464767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:40.464776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:40.464788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:40.464795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.464805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:40.464812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:40.464825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:40.464833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:40.464842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:40.464850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:40.464894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:40.464904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:40.464908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:40.464915Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:40.464922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:40.464928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:40.464932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:40.464934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:40.464938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:40.464940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.464943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:40.464949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:40.464963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:40.464968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:40.464976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:40.464982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.464987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:40.464989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:40.464997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T08:57:40.465003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:40.465008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:40.465010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:40.505773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:40.556593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135094908330:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.556610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135094908356:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.556615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:40.557188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:40.558547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654135094908359:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:40.772591Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460613, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:41.009435Z node 1 :KQP_YQL WARN: TraceId: 01jd6z20w3b5t8e6x0b1w1es2j, SessionId: CompileActor 2024-11-21 08:57:41.008 WARN ydb-core-kqp-ut-olap(pid=759463, tid=0x00007FDFD2F3E640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. 
Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2024-11-21T08:57:42.485455Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.583298Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462000, txId: 18446744073709551615] shutting down >> KqpOlapSysView::StatsSysViewTable [GOOD] >> KqpOlapAggregations::Json_GetValue_ToInt >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] >> KqpOlapAggregations::Json_Query >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysView [GOOD] Test command err: Trying to start YDB, gRPC: 5271, MsgBus: 24959 2024-11-21T08:57:30.194643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654088600694496:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.194780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00486e/r3tmp/tmpQtgjUB/pdisk_1.dat 2024-11-21T08:57:30.246996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5271, node 1 2024-11-21T08:57:30.252665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.252678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.252680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.252720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24959 TClient is connected to server localhost:24959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:30.295981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.296008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.297061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.323970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.336006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.346445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.346485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.346511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.346523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.346539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.346555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.346575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.346593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.346612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.346628Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.346643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.346657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654088600695154:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.348873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.348888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.348903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.348914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.348929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.348939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.348952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.348966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.348981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.348994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.349008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.349020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654088600695155:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.350925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.350936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.350951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.350959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.350971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.350980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.350992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.351003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.351015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654088600695158 ... 
LBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:35.195208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654088600694496:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:35.195248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT PathId, Kind, TabletId, Sum(Rows) as Rows FROM `/Root/olapStore/.sys/store_primary_index_portion_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY TabletId, Kind, PathId RESULT: 2024-11-21T08:57:41.620352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135845337819:3755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.620353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654135845337810:3752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.620374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:41.621114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:41.622761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654135845337824:3756], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:42.882049Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461747, txId: 281474976715662] shutting down TabletId: 72075186224037888 Rows: 33295 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Rows: 33320 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Rows: 33385 Kind: INSERTED PathId: 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewTable [GOOD] Test command err: Trying to start YDB, gRPC: 28250, MsgBus: 13870 2024-11-21T08:57:33.428408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654103028443103:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:33.428565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004859/r3tmp/tmpo6BJOj/pdisk_1.dat 2024-11-21T08:57:33.479115Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28250, node 1 2024-11-21T08:57:33.490379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:33.490392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:33.490394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:33.490429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13870 TClient is connected to server localhost:13870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:33.530064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:33.530098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:33.531252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:33.559164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:33.565673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:33.577203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:33.577274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:33.577325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:33.577350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:33.577373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:33.577399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:33.577421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:33.577443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:33.577470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:33.577505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.577527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:33.577547Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654103028443766:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:33.578090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:33.578112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:33.578124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:33.578129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:33.578144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:33.578152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:33.578158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:33.578165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:33.578175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:33.578177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:33.578184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:33.578186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.578232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.578245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.578260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.578269Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.578280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.578289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.578304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.578313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.578324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.578332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:33.581319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654103028443765:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:33.581340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654103028443765:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:33.581370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654103028443765:2288];tablet_id=7207518622 ... 
;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:33.591084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:33.591093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:33.591107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:33.591115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:33.591126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:33.591134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:33.591148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:33.591157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:33.591171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:33.591179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:33.623329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 Status: 48 TxId: 281474976715660 Issues { message: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/olap/operations/create_store.cpp:361" severity: 1 } SchemeShardStatus: 4 SchemeShardReason: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/olap/operations/create_store.cpp:361" SchemeShardTabletId: 72057594046644480 PathId: 2 PathCreateTxId: 281474976715658 2024-11-21T08:57:33.630539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T08:57:33.630882Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715661 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:38.428785Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654103028443103:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:38.428826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2024-11-21T08:57:38.910932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654124503281092:2673], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.910935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654124503281083:2670], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.910950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.911539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:57:38.912852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654124503281097:2674], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:57:41.016741Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179459026, txId: 281474976715664] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_2/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2024-11-21T08:57:43.059933Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461071, txId: 281474976715666] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE PathId > UInt64("3") ORDER BY PathId, Kind, TabletId RESULT: 2024-11-21T08:57:43.104584Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463090, txId: 281474976715668] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDatetime64ColumnShard::UseDatetime64AsPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 17068, MsgBus: 26225 2024-11-21T08:57:41.930473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654136218773098:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:41.930632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004821/r3tmp/tmp4baFPU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17068, node 1 2024-11-21T08:57:41.986604Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:41.991625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:41.991638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:41.991640Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:41.991679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26225 TClient is connected to server localhost:26225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:42.031220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:42.031256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:42.032392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:42.062341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, interval Interval64, timestamp Timestamp64, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:42.221396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654140513741002:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:42.221437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:42.241444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:42.247735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:42.247781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:42.247803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:42.247820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:42.247835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:42.247849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:42.247863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:42.247878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:42.247903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:42.247920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.247937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:42.247959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654140513741079:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:42.248372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:42.248387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:42.248398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:42.248404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:42.248429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:42.248438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:42.248448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:42.248464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:42.248478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:42.248486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:42.248492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:42.248500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:42.248558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:42.248567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:42.248583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:42.248591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:42.248602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:42.248610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:42.248626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:42.248635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:42.248645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:42.248652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=norma ... default not found or you don't have access permissions } 2024-11-21T08:57:42.946640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:42.952271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:42.952292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:42.952329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:42.952348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:42.952364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:42.952381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:42.952398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:42.952418Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:42.952436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:42.952453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.952471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:42.952487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654142562238537:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:42.952841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:42.952851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:42.952860Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:42.952867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:42.952877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:42.952884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:42.952891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:42.952900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:42.952910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:42.952916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:42.952920Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:42.952926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:42.952969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:42.952979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:42.952990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:42.952997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.953005Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:42.953012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:42.953023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:42.953029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:42.953036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:42.953042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=280;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=280;columns=2; 2024-11-21T08:57:43.014329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654146857205926:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.014355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654146857205931:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.014359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.014893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.016117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654146857205933:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:43.151978Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463070, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.177835Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463182, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.213640Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463217, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.246806Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463245, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.277027Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463280, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.303188Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463315, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 31198, MsgBus: 9014 2024-11-21T08:57:42.199847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654142777152089:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:42.200020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004820/r3tmp/tmpZT4Kvv/pdisk_1.dat 2024-11-21T08:57:42.261604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31198, node 1 2024-11-21T08:57:42.272156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:42.272166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:42.272167Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:42.272220Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9014 2024-11-21T08:57:42.301903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:42.301935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:42.302992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:42.316087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:42.327837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:42.335714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:42.335780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:42.335824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:42.335848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:42.335868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:42.335887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:42.335907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:42.335930Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:42.335950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:42.335974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.335994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:42.336014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654142777152735:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:42.336476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:42.336490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:42.336500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:42.336509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:42.336524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:42.336531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:42.336539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:42.336551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:42.336563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:42.336571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:42.336576Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:42.336583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:42.336631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:42.336640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:42.336659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:42.336667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.336682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:42.336689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:42.336704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:42.336712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:42.336726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:42.336734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:42.339616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654142777152734:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:42.339646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654142777152734:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:42.339683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654142777152734:2288];tablet_id=7207518622403 ... 
ateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:42.347792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:42.347803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:42.347809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:42.347811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:42.347821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:42.347823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:42.347828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:42.347832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:42.347837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:42.347839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:42.347843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:42.347845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:42.347861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:42.347869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:42.347880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:42.347886Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:42.347893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:42.347899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:42.347908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:42.347915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:42.347921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:42.347927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:42.487669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654142777153031:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:42.487685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654142777153038:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:42.487689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:42.488256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:42.489505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654142777153045:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:43.190673Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462545, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '972) '('"_id" '"a4ff1fb6-c9427677-812053ed-36676537") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1031) '('"_id" 
'"6bf86688-26790c67-fbc91ada-d2aebbd") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1598) '('"_id" '"4c184d68-34b26d59-3c5864e2-35478fba") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1610) '('"_id" '"df77ca3f-b5229ac5-d7154aa5-5045345c")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::MultiInsertWithSinks >> KqpOlap::MetadataMemoryManager [GOOD] >> KqpOlapSparsed::AccessorActualization [GOOD] |91.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |91.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::AccessorActualization [GOOD] Test command err: Trying to start YDB, gRPC: 13605, MsgBus: 20056 2024-11-21T08:57:39.136047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654127984634169:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.136300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004835/r3tmp/tmpdMMhES/pdisk_1.dat 2024-11-21T08:57:39.177592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13605, node 1 2024-11-21T08:57:39.189421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.189435Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.189437Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.189477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:20056 TClient is connected to server localhost:20056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:39.237247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.237292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:39.238358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:39.259722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.264917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:39.273154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.273207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.273237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.273253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.273267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.273283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2024-11-21T08:57:39.273299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.273311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.273325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.273341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.273356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.273369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.275489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.275507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.275530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.275547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.275559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.275572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.275586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.275600Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.275614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.275627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.275640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.275653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.277590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.277604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.277628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.277642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.277651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.277660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.277677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.277697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654127984634825:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.277710Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:74396541279846348 ... n=2;before_size=0;after_size=133992;before_rows=0;after_rows=3269; 2024-11-21T08:57:43.288623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=133992;portion_bytes=133992;portion_raw_bytes=3855823; 2024-11-21T08:57:43.288630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;fline=manager.cpp:14;event=unlock;process_id=CS::TTL::ab77e36a-a7e611ef-9ab7cee2-99eea8f7; 2024-11-21T08:57:43.288637Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:43.288641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.288644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T08:57:43.288647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179163000;tx_id=18446744073709551615;;current_snapshot_ts=1732179463000; 2024-11-21T08:57:43.288648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:43.288650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.288651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.288655Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:43.288660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:43.288669Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037889 Save Batch GenStep: 1:2 Blob count: 1 2024-11-21T08:57:43.288677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.288680Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=3;external_task_id=ab77e36a-a7e611ef-9ab7cee2-99eea8f7;mem=3976695;cpu=0; 2024-11-21T08:57:43.288698Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654127984634878:2297];tablet_id=72075186224037889;parent=[1:7439654127984634824:2289];fline=manager.h:99;event=ask_data;request=request_id=35;3={portions_count=1};; 2024-11-21T08:57:43.512751Z 
node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654145164504520:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.512776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654145164504525:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.512777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.513379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.514715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654145164504527:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:57:43.653429Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715665 scanId: 1 version: {1732179463567:max} readable: {1732179463686:max} at tablet 72075186224037888 2024-11-21T08:57:43.653482Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715665 scanId: 1 at tablet 72075186224037888 2024-11-21T08:57:43.653592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T08:57:43.657469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127984634822:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T08:57:43.658038Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715665 scanId: 1 version: {1732179463567:max} readable: {1732179463686:max} at tablet 72075186224037890 2024-11-21T08:57:43.658074Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715665 scanId: 1 at tablet 72075186224037890 2024-11-21T08:57:43.658168Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127984634857:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T08:57:43.658267Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127984634857:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T08:57:43.658397Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715665 scanId: 1 version: {1732179463567:max} readable: {1732179463686:max} at tablet 72075186224037889 2024-11-21T08:57:43.658432Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715665 scanId: 1 at tablet 72075186224037889 2024-11-21T08:57:43.658474Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T08:57:43.658521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127984634824:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179463567:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T08:57:43.659052Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654127984634871:2294];tablet_id=72075186224037888;parent=[1:7439654127984634822:2288];fline=manager.h:99;event=ask_data;request=request_id=36;3={portions_count=1};; 2024-11-21T08:57:43.659102Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654127984634892:2303];tablet_id=72075186224037890;parent=[1:7439654127984634857:2291];fline=manager.h:99;event=ask_data;request=request_id=37;3={portions_count=1};; 2024-11-21T08:57:43.659119Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654127984634878:2297];tablet_id=72075186224037889;parent=[1:7439654127984634824:2289];fline=manager.h:99;event=ask_data;request=request_id=38;3={portions_count=1};; 2024-11-21T08:57:43.659318Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:57:43.659328Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:57:43.659333Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:57:43.659420Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:57:43.659457Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:57:43.659471Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:57:43.660630Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T08:57:43.660673Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2024-11-21T08:57:43.660683Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2024-11-21T08:57:43.711014Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463567, txId: 18446744073709551615] shutting down [[10000u]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> 
KqpOlap::MetadataMemoryManager [GOOD] Test command err: Trying to start YDB, gRPC: 27110, MsgBus: 26537 2024-11-21T08:57:42.985306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654142759755721:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:42.985540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004815/r3tmp/tmpAYBX05/pdisk_1.dat 2024-11-21T08:57:43.038013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27110, node 1 2024-11-21T08:57:43.048379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:43.048390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:43.048392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:43.048427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26537 TClient is connected to server localhost:26537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:43.086695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:43.086722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:43.087806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:43.119346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:43.122662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.132264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.132315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.132343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.132362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.132377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.132391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.132406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.132423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.132439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.132454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.132470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.132485Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654147054723675:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.135350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.135376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.135414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.135435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.135454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.135473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.135492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.135511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.135530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.135549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.135568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.135585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147054723673:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.138420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.138456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.138495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.138519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.138542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.138562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.138583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.138604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147054723674:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.138627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:74396541470547236 ... 
nks;id=Chunks; 2024-11-21T08:57:43.143334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.143342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.143359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.143367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.143379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.143388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.143394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.143402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.143407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.143414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.143437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.143445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.143457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.143465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.143475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.143483Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.143496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.143504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.143512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.143520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.143642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.143652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.143662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.143670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.143688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.143696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.143704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.143711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.143719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.143727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.143732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.143740Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.143768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.143777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.143792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.143800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.143815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.143823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.143837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.143845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.143860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.143867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.180076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T08:57:43.327910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147054724018:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.327916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147054724027:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.327925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.328437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.329611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654147054724032:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:43.511895Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463385, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.515060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:43.598467Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463567, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 >> KqpOlapAggregations::Json_Query [GOOD] >> KqpOlap::MultiInsertWithSinks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] Test command err: Trying to start YDB, gRPC: 7887, MsgBus: 14711 2024-11-21T08:57:39.323423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654127588796867:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.323573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004832/r3tmp/tmpFzBkqw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7887, node 1 2024-11-21T08:57:39.380133Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:39.380350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.380363Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.380365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.380406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14711 TClient is connected to server localhost:14711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:39.424472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.424501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:39.425654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:39.450112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.454144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:39.464658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.464725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.464762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.464792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.464808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.464823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.464843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.464862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.464878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.464895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.464909Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.464928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.468043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.468071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.468110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.468130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.468151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.468174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.468195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.468236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.468258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.468278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.468296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.468311Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.468725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:39.468738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:39.468748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:39.468752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:39.468764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:39.468773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:39.468780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:39.468788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:39.468800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:39.468808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:39.468814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889 ... 
NotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:43.902889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.902891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:43.902892Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; 2024-11-21T08:57:43.902897Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179463000 at tablet 72075186224037890 2024-11-21T08:57:43.902900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:43.902900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:43.902902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.902902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.902903Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.902905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:43.902905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:43.902909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:43.902909Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:43.902911Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.902911Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037889;self_id=[1:7439654127588797527:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;storage_id=__DEFAULT;tablet_id=72075186224037889;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=4;reason=empty; 2024-11-21T08:57:43.902918Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179463000 at tablet 72075186224037891 2024-11-21T08:57:43.902920Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654127588797528:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:43.902921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.902923Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:43.902923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439654127588797528:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T08:57:43.902926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:43.902929Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7439654127588797545:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; 2024-11-21T08:57:43.903245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:57:43.903258Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:43.903261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:43.903272Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.903278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:43.903286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:43.903287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.903289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.903296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:43.903303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:43.903316Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;storage_id=__DEFAULT;tablet_id=72075186224037888;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=0;reason=empty; 2024-11-21T08:57:43.903322Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179463000 at tablet 72075186224037888 2024-11-21T08:57:43.903324Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:43.903325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:43.903327Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:57:43.903329Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:43.903341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.903346Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:43.903347Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:43.903350Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:43.903352Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037888;self_id=[1:7439654127588797526:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;storage_id=__DEFAULT;tablet_id=72075186224037888;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=0;reason=empty; >> KqpOlapAggregations::Json_Exists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 4574, MsgBus: 1537 2024-11-21T08:57:43.507854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654148546772106:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:43.507873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004810/r3tmp/tmpv1BRiM/pdisk_1.dat 2024-11-21T08:57:43.548390Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4574, node 1 2024-11-21T08:57:43.561635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:43.561651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:43.561654Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:43.561694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1537 TClient is connected to server localhost:1537 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:43.608403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:43.608429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:43.609508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:43.633864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:43.635257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:43.646512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.654773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.654831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.654861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.654878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.654892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.654906Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.654922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.654941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.654958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.654972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.654987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.655002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654148546772741:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.655313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.655325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.655333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.655335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.655344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.655346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.655351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.655355Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.655360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.655363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.655368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.655371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.655414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.655421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.655430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.655434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.655441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.655443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.655456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.655460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.655466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.655469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.657133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654148546772742:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.657149Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654148546772742:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NA ... ess=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.662861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.662884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.662892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.662904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.662911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.662921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.662929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.662941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.662948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.662956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.662963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; 2024-11-21T08:57:43.745335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654148546773046:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.745414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654148546773035:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.745426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.745952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.747490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654148546773049:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:43.903767Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463798, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType 
(DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1292) '('"_id" '"27409e22-912db7ac-d08866e9-c42cb2bd") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '1)) (let $19 (Just $18)) (let $20 '($19 $18)) (let $21 (If (== $18 (Int32 '2147483647)) $20 '((+ $19 $18) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($20 $21)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $6)) (let $11 (DqPhyStage '() (lambda '() (block '( (let $22 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $23 '('"id" '"jsondoc" '"jsonval")) (let $24 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $25 (OptionalType (DataType 'JsonDocument))) (let $26 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) (OptionalType (DataType 'Double)))))) (let $27 (ResourceType '"JsonNode")) (let $28 (OptionalType $27)) (let $29 '((ResourceType '"JsonPath"))) (let $30 (DataType 'Utf8)) (let $31 (DictType $30 $27)) (let $32 '($31)) (let $33 (CallableType '() $26 '($28) $29 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueNumber" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($55) (block '( (let $56 '((DataType 'Json) '"" '1)) (let $57 (CallableType '() '($27) $56)) (let $58 (Udf '"Json2.Parse" (Void) (VoidType) '"" $57 (VoidType) '"" '())) (return (Just (Apply $58 $55))) )))) (let $37 (Nothing $28)) (let $38 (CallableType '() $29 '($30))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.obj.obj_col2_int"))) (let $41 (Dict $31)) (let $42 (lambda '($59) (block '( (let $60 (Nothing $7)) (return $60) )))) (let $43 (lambda '($61) (If (Exists $61) (SafeCast $61 $7) (Nothing $7)))) (let $44 (KqpWideReadOlapTableRanges $22 %kqp%tx_result_binding_0_0 $23 '() $24 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $10 '('"jsondoc" $25) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (let $54 (Nothing $7)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (Int32 '"16"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($62 $63 $64) (block '( (let $65 (IfPresent $64 $36 $37)) (let $66 (Apply $35 $65 $40 $41)) (let $67 (Visit $66 '0 $42 '1 $43)) (let $68 (CallableType '() $26 '($25) $29 $32)) (let $69 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $68 (VoidType) '"" $34)) (let $70 (Apply $69 $63 $40 $41)) (let $71 (Visit $70 '0 $42 '1 $43)) (return (AsStruct '('"column1" $67) '('"column2" $71) '('"id" $62))) )))))) ))) '('('"_logical_id" '1363) '('"_id" '"538067c7-33f8a584-51c88189-b8ae2c6c")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($72) $72) '('('"_logical_id" '2431) '('"_id" '"1b61878b-959133d4-5c429fbb-ad33d845")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $9 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $7) '('"column2" $7) $10)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_SumL_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 17183, MsgBus: 21194 2024-11-21T08:57:26.758627Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654074898616275:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.758780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488a/r3tmp/tmpDPIm6A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17183, node 1 2024-11-21T08:57:26.811800Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:26.812184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:26.812192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:26.812193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:26.812231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21194 TClient is connected to server localhost:21194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:26.858535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:26.859495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:26.859527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T08:57:26.860692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:26.868313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:26.878161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.878225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.878269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.878297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.878319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.878339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.878362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.878383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.878404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.878426Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.878448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.878474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654074898616925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.881256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.881279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.881300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.881310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.881320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.881334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.881349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.881363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.881377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:26.881396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:26.881415Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:26.881430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654074898616935:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:26.884136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:26.884157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:26.884185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:26.884200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:26.884239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:26.884259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:26.884277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:26.884295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654074898616926:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:26.884315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396540748986169 ... 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:42.455763Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:42.623394Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:42.623422Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:42.695367Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:42.695397Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:42.757139Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:42.757161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:42.818760Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:42.818789Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:42.901104Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:42.921882Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:42.952695Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2024-11-21T08:57:42.952725Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.014464Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.014507Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.076158Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.076190Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.138117Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.138150Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.199765Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.199791Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:43.241803Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:43.283642Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.283660Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.408373Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.408398Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.479952Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.479976Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.541447Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:43.541473Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.603014Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2024-11-21T08:57:43.603038Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1787:3048], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjliYzc3NTYtYWEwYzNlMzMtMTgxMTkwMmMtNmQzZGUwMzM=. TraceId : 01jd6z1pzr07hmsp81mm9wxapb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:43.674896Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:43.695697Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Query [GOOD] Test command err: Trying to start YDB, gRPC: 2264, MsgBus: 13952 2024-11-21T08:57:43.608552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654147944525108:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:43.608688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00480d/r3tmp/tmpSkZnXK/pdisk_1.dat 2024-11-21T08:57:43.647334Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2264, node 1 2024-11-21T08:57:43.657976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:43.657986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:43.657987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:43.658014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13952 TClient is connected to server localhost:13952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:43.709871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:43.709899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:43.710975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:43.731779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:43.742822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.751130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.751194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.751224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.751244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.751259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.751273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.751288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.751304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.751324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.751342Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.751359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.751374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147944525753:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.751712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.751724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.751732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.751735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.751745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.751752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.751758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.751768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.751777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.751783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.751788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.751791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.751838Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.751846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.751856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.751863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.751870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.751876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.751887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.751893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.751900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.751906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.754133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654147944525756:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.754154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654147944525756:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.754176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654147944525756:2289];tablet_id=720751862240 ... 
s=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.761368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.761377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.761383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.761391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.761395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.761398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.761401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.761404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.761425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.761434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.761445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.761451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.761459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.761466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.761476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.761483Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.761491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.761510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_QUERY(jsonval, "$.col1" WITH UNCONDITIONAL WRAPPER), JSON_QUERY(jsondoc, "$.col1" WITH UNCONDITIONAL WRAPPER) FROM `/Root/tableWithNulls` WHERE level = 1; 2024-11-21T08:57:43.878638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147944526060:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.878662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147944526049:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.878728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.879321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.881224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654147944526063:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:44.026924Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_QUERY(jsonval, "$.col1" WITH UNCONDITIONAL WRAPPER), JSON_QUERY(jsondoc, "$.col1" WITH UNCONDITIONAL WRAPPER) FROM `/Root/tableWithNulls` WHERE level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":1},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":1},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'JsonDocument)) (let $2 (OptionalType $1)) (let $3 (DqPhyStage '() (lambda '() (block '( (let $8 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $9 '('"id" '"jsondoc" '"jsonval" '"level")) (let $10 (Bool 'false)) (let $11 (KqpWideReadOlapTableRanges $8 (Void) $9 '() '() (lambda '($12) (block '( (let $13 '('eq '"level" (Int32 '1))) (let $14 '('?? 
$13 $10)) (let $15 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $12 $14) $15)) ))))) (return (FromFlow (NarrowMap $11 (lambda '($16 $17 $18) (block '( (let $19 (ResourceType '"JsonNode")) (let $20 (OptionalType $19)) (let $21 '($20)) (let $22 '((ResourceType '"JsonPath"))) (let $23 (DataType 'Utf8)) (let $24 (DictType $23 $19)) (let $25 '($24)) (let $26 '((DataType 'Bool))) (let $27 (CallableType '() $21 $21 $22 $25 $26 $21 $26 $21)) (let $28 (Udf '"Json2.SqlQueryWrap" (Void) (VoidType) '"" $27 (VoidType) '"" '())) (let $29 (Nothing $20)) (let $30 (IfPresent $18 (lambda '($43) (block '( (let $44 '((DataType 'Json) '"" '1)) (let $45 (CallableType '() '($19) $44)) (let $46 (Udf '"Json2.Parse" (Void) (VoidType) '"" $45 (VoidType) '"" '())) (return (Just (Apply $46 $43))) ))) $29)) (let $31 (CallableType '() $22 '($23))) (let $32 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $31 (VoidType) '"" '())) (let $33 (Apply $32 (Utf8 '"$.col1"))) (let $34 (Dict $24)) (let $35 (Apply $28 $30 $33 $34 $10 $29 $10 $29)) (let $36 (lambda '($47) (block '( (let $48 '($19 '"" '1)) (let $49 (CallableType '() '($1) $48)) (let $50 (Udf '"Json2.SerializeToJsonDocument" (Void) (VoidType) '"" $49 (VoidType) '"" '())) (return (Just (Apply $50 $47))) )))) (let $37 (Nothing $2)) (let $38 (IfPresent $35 $36 $37)) (let $39 (CallableType '() $21 '($2) $22 $25 $26 $21 $26 $21)) (let $40 (Udf '"Json2.JsonDocumentSqlQueryWrap" (Void) (VoidType) '"" $39 (VoidType) '"" '())) (let $41 (Apply $40 $17 $33 $34 $10 $29 $10 $29)) (let $42 (IfPresent $41 $36 $37)) (return (AsStruct '('"column1" $38) '('"column2" $42) '('"id" $16))) )))))) ))) '('('"_logical_id" '745) '('"_id" '"6d335676-a2228dbd-bc3b16ee-82d392bc")))) (let $4 (DqCnUnionAll (TDqOutput $3 '"0"))) (let $5 (DqPhyStage '($4) (lambda '($51) $51) '('('"_logical_id" '1423) '('"_id" '"4c4d9ede-bc6182c2-4876938e-e5b29fd0")))) (let $6 '('"id" '"column1" '"column2")) (let $7 (DqCnResult (TDqOutput $5 '"0") $6)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3 $5) '($7) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $2) '('"column2" $2) '('"id" (DataType 'Int32)))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] >> KqpOlap::PredicatePushdownCastErrors >> KqpOlapAggregations::Aggregation_Sum_GroupBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::MultiInsertWithSinks [GOOD] Test command err: Trying to start YDB, gRPC: 28572, MsgBus: 23791 2024-11-21T08:57:43.713576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654147070799714:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:43.713636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004809/r3tmp/tmpt0KTPn/pdisk_1.dat 2024-11-21T08:57:43.756450Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28572, node 1 2024-11-21T08:57:43.769830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:43.769842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:43.769844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T08:57:43.769871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23791 TClient is connected to server localhost:23791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:43.814469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:43.814505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:43.815590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:43.839524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:43.847576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.856846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.856908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.856950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.856971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.856990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.857008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.857027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.857049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.857069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.857089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.857111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.857133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654147070800365:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.857577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.857590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.857601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.857605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.857623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.857632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.857642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.857653Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.857666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.857673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.857680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.857688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.857743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.857753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.857771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.857778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.857789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.857797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.857813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.857821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.857832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.857839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.860197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147070800366:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.860225Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147070800366:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.860252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654147070800366:2289];tablet_id=7207518622 ... TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.866760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.866770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.866785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.866793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.866804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.866808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.866827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.866836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.866852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.866859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.866945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.866955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.866963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.866966Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.866978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.866986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.866993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.867000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.867008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.867016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.867021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.867029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.867052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.867061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.867076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.867084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.867093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.867101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.867114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T08:57:43.867122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.867132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.867139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.905036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:43.972506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147070800666:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.972525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654147070800658:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.972568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.973160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.974491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654147070800672:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:44.069087Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654151365768050:2371] TxId: 281474976715662. Ctx: { TraceId: 01jd6z23q93mqvqknc8qqwmyev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkYjg5ZDItZjZkNWMyN2EtMWIxYTk3YTMtN2IzNzRiNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:44.075161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:00.000000 ] uid: [ "a" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715662}],"finishes":[{"inc":{"count_include":1},"id":281474976715662}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000000;a;"}}]}; 2024-11-21T08:57:44.076601Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654151365768083:2371] TxId: 281474976715663. Ctx: { TraceId: 01jd6z23q93mqvqknc8qqwmyev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkYjg5ZDItZjZkNWMyN2EtMWIxYTk3YTMtN2IzNzRiNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:44.079144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000 ] uid: [ "b" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715662}],"finishes":[{"inc":{"count_include":1},"id":281474976715662}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000000;b;"}}]}; 2024-11-21T08:57:44.079356Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654151365768101:2371] TxId: 281474976715664. Ctx: { TraceId: 01jd6z23q93mqvqknc8qqwmyev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhkYjg5ZDItZjZkNWMyN2EtMWIxYTk3YTMtN2IzNzRiNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:44.081145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:02.000000 ] uid: [ "c" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715662}],"finishes":[{"inc":{"count_include":1},"id":281474976715662}]},"p":{"include":0,"pk":"1970-01-01 00:00:02.000000;c;"}}]}; 2024-11-21T08:57:44.109079Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654151365768166:2417] TxId: 281474976715666. Ctx: { TraceId: 01jd6z23ws0snbnsk3w34zphmc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM3Nzk2NGMtNTZmMDBjNGEtZmE5MzZlODMtNTNlZmIyZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root >> KqpOlapBlobsSharing::MultipleSplitsThenMerges >> KqpOlapBlobsSharing::ChangeSchemaAndSplit >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 5900, MsgBus: 13114 2024-11-21T08:57:23.274221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654062076724995:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:23.274404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ac/r3tmp/tmpGjImAY/pdisk_1.dat 2024-11-21T08:57:23.320348Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5900, node 1 2024-11-21T08:57:23.334195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:23.334205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:23.334206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:23.334232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13114 TClient is connected to server localhost:13114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:23.375016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:23.375048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:23.376138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:23.402019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:23.412781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:23.421571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.421629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.421671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.421692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.421709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.421722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.421735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.421760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.421791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.421818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.421838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.421858Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654062076725645:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.425194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:23.425228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:23.425270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:23.425301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:23.425330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:23.425356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:23.425376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:23.425392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:23.425411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:23.425436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:23.425457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:23.425476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654062076725646:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:23.425969Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:23.425984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:23.425994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:23.425998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:23.426018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:23.426026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:23.426034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:23.426044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:23.426052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:23.426061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:23.426067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891 ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:42.687047Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:42.811000Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:42.811021Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:42.893097Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:42.893117Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:42.975212Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:42.975232Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.067536Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.067559Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.129231Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:43.150038Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:43.211555Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.211581Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.293543Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. 
TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.293568Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.375763Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.375786Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.457661Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.457681Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.550710Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.550728Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.571211Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:43.717454Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.717507Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.810160Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.810181Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.892329Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.892349Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:43.974536Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:43.974560Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:57:44.066999Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:57:44.067024Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTRhZDJhYmQtNDVmNmJmMDUtZDNkZWNjYzEtODlmOGEzMWI=. TraceId : 01jd6z1me658xq4zjcs6e49m3a. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:57:44.118318Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:44.139094Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::Json_Exists [GOOD] >> KqpOlap::NormalizeAbsentColumn >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 >> KqpOlapIndexes::IndexesActualization [GOOD] >> KqpOlapAggregations::Aggregation_Count_Null ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 18732, MsgBus: 16844 2024-11-21T08:57:44.543257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654151686872641:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.543417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004803/r3tmp/tmpv9i4s6/pdisk_1.dat 2024-11-21T08:57:44.595824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18732, node 1 2024-11-21T08:57:44.604958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:44.604975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:44.604977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:44.605019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16844 TClient is connected to server localhost:16844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:44.644662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.644686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.645772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:44.674855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:44.678589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:44.686155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.686229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.686267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.686289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.686309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.686334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.686355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.686376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.686402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.686422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.686442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.686463Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654151686873288:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.686844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:44.686857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:44.686867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:44.686872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:44.686888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:44.686897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:44.686906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:44.686918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:44.686927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:44.686936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:44.686942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:44.686949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:44.687003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:44.687014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:44.687030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:44.687037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.687048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:44.687056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:44.687072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:44.687080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:44.687090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:44.687097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:44.689082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654151686873283:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.689098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654151686873283:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.689123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654151686873283:2288];tablet_id=7207518622 ... 
rmalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:44.694662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:44.694671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:44.694673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:44.694677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:44.694679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:44.694692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:44.694698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:44.694706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:44.694711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.694717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:44.694719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:44.694727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:44.694733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:44.694738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:44.694740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; 2024-11-21T08:57:44.806931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654151686873577:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.806950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.807010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654151686873590:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.807648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:44.809296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654151686873592:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:44.946617Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464862, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 '('"id" $1)) (let $3 (OptionalType (DataType 'Bool))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $10 '('"id" '"jsondoc" '"jsonval" '"level")) (let $11 (OptionalType (DataType 'JsonDocument))) (let $12 '($3)) (let $13 (ResourceType '"JsonNode")) (let $14 (OptionalType $13)) (let $15 '((ResourceType '"JsonPath"))) (let $16 (DataType 'Utf8)) (let $17 (DictType $16 $13)) (let $18 '($17)) (let $19 (CallableType '() $12 '($14) $15 $18 $12)) (let $20 '('('"strict"))) (let $21 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $19 (VoidType) '"" $20)) (let $22 (lambda '($41) (block '( (let $42 '((DataType 'Json) '"" '1)) (let $43 (CallableType '() '($13) $42)) (let $44 (Udf '"Json2.Parse" (Void) (VoidType) '"" $43 (VoidType) '"" '())) (return (Just (Apply $44 $41))) )))) (let $23 (Nothing $14)) (let $24 (CallableType '() $15 '($16))) (let $25 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $24 (VoidType) '"" '())) (let $26 (Apply $25 (Utf8 '"$.col1"))) (let $27 (Dict $17)) (let $28 (Bool 'false)) (let $29 (Just $28)) (let $30 (KqpWideReadOlapTableRanges $9 (Void) $10 '() '() (lambda '($31) (block '( (let $32 (DataType 'Json)) (let $33 (StructType $2 '('"jsondoc" $11) '('"jsonval" (OptionalType $32)) '('"level" (OptionalType $1)))) (let $34 (KqpOlapApply $33 '('"jsonval") (lambda '($39) (block '( (let $40 (IfPresent $39 $22 $23)) (return (Apply $21 $40 $26 $27 $29)) ))))) (let $35 '('?? $34 $28)) (let $36 '('eq '"level" (Int32 '1))) (let $37 '('?? 
$36 $28)) (let $38 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $31 (KqpOlapAnd $35 $37)) $38)) ))))) (return (FromFlow (NarrowMap $30 (lambda '($45 $46 $47) (block '( (let $48 (IfPresent $47 $22 $23)) (let $49 (Apply $21 $48 $26 $27 $29)) (let $50 (CallableType '() $12 '($11) $15 $18 $12)) (let $51 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $50 (VoidType) '"" $20)) (let $52 (Apply $51 $46 $26 $27 $29)) (return (AsStruct '('"column1" $49) '('"column2" $52) '('"id" $45))) )))))) ))) '('('"_logical_id" '893) '('"_id" '"63f659a4-dc5696f1-8ea68c62-9d2aec1d")))) (let $5 (DqCnUnionAll (TDqOutput $4 '"0"))) (let $6 (DqPhyStage '($5) (lambda '($53) $53) '('('"_logical_id" '1415) '('"_id" '"b11ccbfa-4002c088-26acfd79-7129d7a2")))) (let $7 '('"id" '"column1" '"column2")) (let $8 (DqCnResult (TDqOutput $6 '"0") $7)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $6) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $3) '('"column2" $3) $2)) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlap::BlockChannelForce >> KqpOlapAggregations::NoErrorOnLegacyPragma >> KqpOlapAggregations::Aggregation_Sum_GroupBy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesActualization [GOOD] Test command err: Trying to start YDB, gRPC: 29263, MsgBus: 32217 2024-11-21T08:57:37.091731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654120334373526:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.092046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004843/r3tmp/tmp1XOrdn/pdisk_1.dat 2024-11-21T08:57:37.136477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29263, node 1 2024-11-21T08:57:37.145053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:37.145070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:37.145072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:37.145106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32217 TClient is connected to server localhost:32217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:37.189441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.193064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:37.193088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:37.194314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:37.199086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:37.210143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.210211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.210259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.210282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.210298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.210321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.210337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.210358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.210386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.210404Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.210419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.210430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.213261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.213292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.213337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.213366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.213388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.213409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.213429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.213446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.213465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.213487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.213520Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.213543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.215841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.215863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.215886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.215905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.215920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.215938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.215952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.215967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654120334374177:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.215983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74396541203343741 ... e 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:57:45.277945Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:45.277955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:45.277968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:45.277987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:57:45.277998Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179165233;tx_id=18446744073709551615;;current_snapshot_ts=1732179457323; 2024-11-21T08:57:45.278003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:45.278008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:45.278028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:45.278095Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179465233 at tablet 72075186224037890 2024-11-21T08:57:45.278103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:45.278106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:45.278110Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:57:45.278112Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179165233;tx_id=18446744073709551615;;current_snapshot_ts=1732179457323; 2024-11-21T08:57:45.278114Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:45.278116Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278118Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:45.278125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654120334374178:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:45.278133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:45.278142Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:57:45.278149Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:57:45.278157Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:45.278162Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:45.278171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:57:45.278179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179165233;tx_id=18446744073709551615;;current_snapshot_ts=1732179457323; 2024-11-21T08:57:45.278181Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:45.278183Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:45.278195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:45.278233Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179465233 at tablet 72075186224037888 2024-11-21T08:57:45.278240Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:45.278242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:45.278244Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:57:45.278246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179165233;tx_id=18446744073709551615;;current_snapshot_ts=1732179457323; 2024-11-21T08:57:45.278248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:45.278249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278251Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:45.278253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:45.278256Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654120334374179:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:45.278431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654120334374178:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:45.300585Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465135, txId: 18446744073709551615] shutting down [[0u]] 5 / 1 >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL [GOOD] >> KqpOlap::PredicatePushdownParameterTypesValidation >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 9554, MsgBus: 65084 2024-11-21T08:57:44.799818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654149721796335:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.800001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047f9/r3tmp/tmpNSWo70/pdisk_1.dat 2024-11-21T08:57:44.855588Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9554, node 1 2024-11-21T08:57:44.862240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:44.862254Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2024-11-21T08:57:44.862256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:44.862288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65084 TClient is connected to server localhost:65084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:44.900783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.900806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.901826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:44.902960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:44.915478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:44.925030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.925110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.925177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.925199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.925217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.925231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.925247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.925261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.925294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.925310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.925325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.925350Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654149721796976:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.925893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:44.925910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:44.925921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:44.925925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:44.925943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:44.925956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:44.925966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:44.925976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:44.925987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:44.925990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:44.925996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:44.926001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:44.926055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:44.926066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:44.926081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:44.926085Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.926096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:44.926104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:44.926120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:44.926129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:44.926139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:44.926147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:44.928668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654149721796986:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.928685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654149721796986:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.928708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654149721796986:2291];tablet_id=720751862240 ... 
ateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:44.936711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:44.936724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:44.936731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:44.936733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:44.936743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:44.936746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:44.936751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:44.936754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:44.936758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:44.936765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:44.936770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:44.936773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:44.936799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:44.936806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:44.936816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:44.936822Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.936830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:44.936837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:44.936847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:44.936853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:44.936860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:44.936866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2024-11-21T08:57:45.091320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654154016764577:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.091342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654154016764551:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.091363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.091992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:45.093771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654154016764580:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:45.832696Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465149, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '972) '('"_id" '"8e246236-8890885e-9273a893-2a4c5070") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"5")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1031) '('"_id" 
'"320d9778-d30febda-d9506249-d79f99f") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1598) '('"_id" '"f7d56687-c0fddf0d-7ac03a00-98815195") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1610) '('"_id" '"cc4cba61-ed3f2df2-db8bf276-d37157fd")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountL_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 9695, MsgBus: 1858 2024-11-21T08:57:32.270423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654101351675959:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.270527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004869/r3tmp/tmpS9LdEb/pdisk_1.dat 2024-11-21T08:57:32.312407Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9695, node 1 2024-11-21T08:57:32.320407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.320420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.320421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.320454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1858 TClient is connected to server localhost:1858 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.371589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.371616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.372879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:32.393906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.403594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.413595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.413672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.413718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.413744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.413768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.413794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.413820Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.413850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.413879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.413905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.413933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.413963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101351676619:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.414423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.414441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.414452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.414457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.414479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.414491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.414502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.414511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.414521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.414531Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.414539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.414544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.414601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.414615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.414632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.414642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.414653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.414664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.414681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.414692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.414703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.414713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.418173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101351676620:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.418211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101351676620:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.418246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101351676620:2289];tablet_id=720751862240378 ... 
DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.051137Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.227759Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.227781Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.309750Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.309770Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.391688Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.391713Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.473686Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.473717Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.566276Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:44.587058Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:44.628196Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.628241Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.710342Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.710364Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.792395Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.792419Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.874608Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.874633Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:44.956844Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:44.956877Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.009021Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:45.104057Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:45.104088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.227844Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:45.227876Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.310295Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:45.310327Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.393025Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:45.393053Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . 
TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.475526Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:45.475553Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDRlOWJhNzUtNzk1N2EwZjEtNzg0N2MyMS1mMGI5NmRkYg==. CustomerSuppliedId : . TraceId : 01jd6z1vvtexkr80v3azg65d83. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:45.557943Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:45.578831Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapAggregations::NoErrorOnLegacyPragma [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] Test command err: Trying to start YDB, gRPC: 26009, MsgBus: 62563 2024-11-21T08:57:45.680269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654153460252103:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.680497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ed/r3tmp/tmpy5zsm7/pdisk_1.dat 2024-11-21T08:57:45.732931Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26009, node 1 2024-11-21T08:57:45.747523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.747536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.747538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.747572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62563 TClient is connected to server localhost:62563 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:57:45.782340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.782362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.783547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.809918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.814165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.823682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.823741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.823793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.823816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.823843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.823869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.823891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.823914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.823938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.823962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.823984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.824010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153460252755:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.827672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.827697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.827735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.827758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.827780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.827801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.827826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.827849Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.827871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.827908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.827933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.827955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654153460252744:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.828443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.828457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.828468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.828473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.828486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.828489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.828497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.828505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.828511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.828517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.828528Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... 11-21T08:57:45.836768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.836776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.836780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.836794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.836797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.836804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.836813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.836821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.836829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.836834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:45.836842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:45.836872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:45.836880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:45.836895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:45.836903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.836912Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.836919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.836947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.836955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.836965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.836973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2024-11-21T08:57:45.965577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654153460253023:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.965611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654153460253048:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.965617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.966236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:45.967727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654153460253052:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:46.146750Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466017, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '704) '('"_id" '"d65a741c-4093eb79-9a9b5d37-ed8bb823") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) 
(let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Uint64)) (let $12 '('('"_logical_id" '762) '('"_id" '"bfaffd63-b282a69f-8e3eb44e-1266fa27") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Count0 $42)))) (Nothing (OptionalType (StructType '('Count0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Coalesce (Member $47 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1317) '('"_id" '"ad047232-b6f27fb0-e94cc5fe-84187758")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1420) '('"_id" '"bca2a923-e8dbc0d4-1f04c131-738a5dd5") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::NoErrorOnLegacyPragma [GOOD] Test command err: Trying to start YDB, gRPC: 22436, MsgBus: 62995 2024-11-21T08:57:45.725025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654154422622430:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.725044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047e9/r3tmp/tmpkQf4c4/pdisk_1.dat 2024-11-21T08:57:45.779709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22436, node 1 2024-11-21T08:57:45.786642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.786655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.786657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.786688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62995 2024-11-21T08:57:45.826226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.826252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.827279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.856294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.859136Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:45.862383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.873253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.873345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.873392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.873419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.873446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.873467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.873489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.873524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.873556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.873577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.873594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.873611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154422623071:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.874114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.874134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.874146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.874150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.874165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.874179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.874189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.874204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.874219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.874222Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.874227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:45.874231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:45.874287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:45.874300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:45.874316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:45.874321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.874339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.874343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.874366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.874377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.874389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.874398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:45.877296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654154422623072:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.877320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654154422623072:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... 
=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.885278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.885288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.885291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.885297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.885299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:45.885476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.885484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.885490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.885492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.885513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.885519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.885524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.885526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.885533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.885541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.885546Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:45.885552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:45.885579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:45.885589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:45.885603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:45.885609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.885616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.885622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.885633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.885639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.885645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.885651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA Kikimr.KqpPushOlapProcess = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2024-11-21T08:57:45.986289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654154422623365:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.986305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654154422623376:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.986310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:45.986857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:45.988114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654154422623379:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:46.126263Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466038, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA Kikimr.KqpPushOlapProcess = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? 
$10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '559) '('"_id" '"71b04565-ce6f024c-4c062aa3-6ae5fca2")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '738) '('"_id" '"d675cab7-d64c3cb8-1ca5d874-2b15a843")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlap::BulkUpsertUpdate >> test.py::test[pg-range_function_multi-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Analyze] >> KqpOlap::BlockChannelForce [GOOD] >> KqpOlap::PredicatePushdownPartial ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelForce [GOOD] Test command err: Trying to start YDB, gRPC: 27044, MsgBus: 9145 2024-11-21T08:57:45.772275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654153521023224:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.772289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047e7/r3tmp/tmpX4tEN7/pdisk_1.dat 2024-11-21T08:57:45.830651Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27044, node 1 2024-11-21T08:57:45.840231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.840243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.840245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.840280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9145 2024-11-21T08:57:45.872200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.872240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9145 2024-11-21T08:57:45.873310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.881695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.894773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.957077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.973783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.984440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:46.059271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654157815992051:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.059293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.092042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.147431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.158083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.165176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.172436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.187156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.205080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654157815992567:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.205110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.205212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654157815992572:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.206115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:46.214074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654157815992574:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T08:57:46.393773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:46.403086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.403106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.403128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.403152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.403160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.403174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.403180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.403191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.403197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.403203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.403208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.403215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.403225Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.403229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:46.403241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654157815992937:2461];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.403243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:46.403257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654157815992936:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11- ... ma::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:46.406999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:46.407006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:46.407007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:46.407009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:46.407011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:46.407025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:46.407034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:46.407082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:46.407086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:46.407090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:46.407094Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:46.407096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:46.407098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:46.407099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:46.407102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:46.407106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:46.407108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:46.407110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:46.407112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:46.407113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:46.407116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:46.407117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:46.407120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:46.407120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:46.407122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:46.407124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:46.407126Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:46.407127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:46.407128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:46.407130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:46.407133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:46.407143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:46.407145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:46.407149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:46.407152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:46.407153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:46.407155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.407160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:46.407161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:46.407163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.407163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:46.407169Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:46.407172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:46.407171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:46.407178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:46.407180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:46.407182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:46.407183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:46.407185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:46.407188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:46.407190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:46.464911Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654157815993166:2535] TxId: 281474976710672. Ctx: { TraceId: 01jd6z266s6yjhf9ff70y19c6x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFhODY1NmYtODRjMmVmOGMtN2ViMmMwOTItYjY2OGNhM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T08:57:46.558365Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439654157815993275:2555] TxId: 281474976710674. Ctx: { TraceId: 01jd6z268r42yb4988p3fyajhg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNhOTk2ODItNzVmOGM3NTgtY2RlNWI3OTItMWQ4ZDRjOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root |91.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> KqpDecimalColumnShard::TestOrderByDecimal >> KqpOlapTiering::Eviction >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Debug] >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::UpsertWhileSplitTest Test command err: Trying to start YDB, gRPC: 23420, MsgBus: 8230 2024-11-21T08:57:39.603862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654127506592968:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.604129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00482f/r3tmp/tmp2VZES6/pdisk_1.dat 2024-11-21T08:57:39.652501Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23420, node 1 2024-11-21T08:57:39.665398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.665410Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.665412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.665450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8230 TClient is connected to server localhost:8230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:39.705301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.705324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:39.706424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:39.727722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:39.798403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:40.460653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.460711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.460742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.460767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.460789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.460817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.460839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.460862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.460889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.460916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.460941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.460966Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654131801563224:2297];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.461019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.461038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.461063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.461084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.461105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.461126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.461147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.461168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.461188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.461209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.461230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.461246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654131801562982:2296];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.463784Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.463809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.463841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.463864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.463884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.463904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.463926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.463949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385:2298];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.463971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654131801563385: ... PoolFetcherActor] ActorId: [1:7439654148981469497:6933], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.217510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;task_id=ac02a040-a7e611ef-9f1fb1b2-9dc4f85e;fline=with_appended.cpp:80;portions=1,;task_id=ac02a040-a7e611ef-9f1fb1b2-9dc4f85e; 2024-11-21T08:57:44.217630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;task_id=ac0288b2-a7e611ef-a8502c39-9f764901;fline=with_appended.cpp:80;portions=1,;task_id=ac0288b2-a7e611ef-a8502c39-9f764901; 2024-11-21T08:57:44.217934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;task_id=ac02afd6-a7e611ef-a498dd6f-2dc2414a;fline=with_appended.cpp:80;portions=1,;task_id=ac02afd6-a7e611ef-a498dd6f-2dc2414a; 2024-11-21T08:57:44.218059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=ac02b92c-a7e611ef-a9caacc1-e385a17f;fline=with_appended.cpp:80;portions=1,;task_id=ac02b92c-a7e611ef-a9caacc1-e385a17f; 2024-11-21T08:57:44.218179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;task_id=ac029a3c-a7e611ef-996bc22e-adb605fd;fline=with_appended.cpp:80;portions=1,;task_id=ac029a3c-a7e611ef-996bc22e-adb605fd; 2024-11-21T08:57:44.218200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;task_id=ac02983e-a7e611ef-9b9f14a6-45ffdd31;fline=with_appended.cpp:80;portions=1,;task_id=ac02983e-a7e611ef-9b9f14a6-45ffdd31; 2024-11-21T08:57:44.218305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.218334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;task_id=ac02ad9c-a7e611ef-a23258c8-13a54a4c;fline=with_appended.cpp:80;portions=1,;task_id=ac02ad9c-a7e611ef-a23258c8-13a54a4c; 2024-11-21T08:57:44.218344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=ac02a7e8-a7e611ef-8e7a14b5-53f5669;fline=with_appended.cpp:80;portions=1,;task_id=ac02a7e8-a7e611ef-8e7a14b5-53f5669; 2024-11-21T08:57:44.218470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;task_id=ac02b04e-a7e611ef-a0483b00-d269d4b;fline=with_appended.cpp:80;portions=1,;task_id=ac02b04e-a7e611ef-a0483b00-d269d4b; 2024-11-21T08:57:44.218529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;task_id=ac029e1a-a7e611ef-94758ab8-67dbad33;fline=with_appended.cpp:80;portions=1,;task_id=ac029e1a-a7e611ef-94758ab8-67dbad33; 2024-11-21T08:57:44.218661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;task_id=ac02bc7e-a7e611ef-afcae91d-4bf70235;fline=with_appended.cpp:80;portions=1,;task_id=ac02bc7e-a7e611ef-afcae91d-4bf70235; 2024-11-21T08:57:44.218679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;task_id=ac02b6ca-a7e611ef-86c17aeb-8156da45;fline=with_appended.cpp:80;portions=1,;task_id=ac02b6ca-a7e611ef-86c17aeb-8156da45; 2024-11-21T08:57:44.218808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;task_id=ac02b440-a7e611ef-8614342e-9d3c4a73;fline=with_appended.cpp:80;portions=1,;task_id=ac02b440-a7e611ef-8614342e-9d3c4a73; 2024-11-21T08:57:44.218895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;task_id=ac029410-a7e611ef-9ad1378e-37fcf216;fline=with_appended.cpp:80;portions=1,;task_id=ac029410-a7e611ef-9ad1378e-37fcf216; 2024-11-21T08:57:44.219125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;task_id=ac028fa6-a7e611ef-b78d6407-ef8c376f;fline=with_appended.cpp:80;portions=1,;task_id=ac028fa6-a7e611ef-b78d6407-ef8c376f; 2024-11-21T08:57:44.219143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;task_id=ac029672-a7e611ef-81e05fe0-8fcbfb43;fline=with_appended.cpp:80;portions=1,;task_id=ac029672-a7e611ef-81e05fe0-8fcbfb43; 2024-11-21T08:57:44.219253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;task_id=ac029140-a7e611ef-aa836d9a-6d41259b;fline=with_appended.cpp:80;portions=1,;task_id=ac029140-a7e611ef-aa836d9a-6d41259b; 2024-11-21T08:57:44.219447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=ac0288b2-a7e611ef-9e21578e-a9eca390;fline=with_appended.cpp:80;portions=1,;task_id=ac0288b2-a7e611ef-9e21578e-a9eca390; 2024-11-21T08:57:44.219502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=ac02b1f2-a7e611ef-8d2e9a5b-a1a6bd48;fline=with_appended.cpp:80;portions=1,;task_id=ac02b1f2-a7e611ef-8d2e9a5b-a1a6bd48; 2024-11-21T08:57:44.219643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;task_id=ac02b80a-a7e611ef-93d4b8b8-b2a551b;fline=with_appended.cpp:80;portions=1,;task_id=ac02b80a-a7e611ef-93d4b8b8-b2a551b; 2024-11-21T08:57:44.219811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;task_id=ac02b882-a7e611ef-b8393c8e-a93d2b8c;fline=with_appended.cpp:80;portions=1,;task_id=ac02b882-a7e611ef-b8393c8e-a93d2b8c; 2024-11-21T08:57:44.219823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;task_id=ac02bc56-a7e611ef-92aff299-4d63ab43;fline=with_appended.cpp:80;portions=1,;task_id=ac02bc56-a7e611ef-92aff299-4d63ab43; 2024-11-21T08:57:44.220271Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;task_id=ac02b17a-a7e611ef-a49c45b9-2fbf722b;fline=with_appended.cpp:80;portions=1,;task_id=ac02b17a-a7e611ef-a49c45b9-2fbf722b; 2024-11-21T08:57:44.220412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=ac02ba6c-a7e611ef-98993903-274c75ac;fline=with_appended.cpp:80;portions=1,;task_id=ac02ba6c-a7e611ef-98993903-274c75ac; 2024-11-21T08:57:44.220501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ac02b756-a7e611ef-871be790-7f01c9ae;fline=with_appended.cpp:80;portions=1,;task_id=ac02b756-a7e611ef-871be790-7f01c9ae; 2024-11-21T08:57:44.220588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;task_id=ac02adce-a7e611ef-b158f428-6547cdc0;fline=with_appended.cpp:80;portions=1,;task_id=ac02adce-a7e611ef-b158f428-6547cdc0; 2024-11-21T08:57:44.220744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=ac02aa40-a7e611ef-8daf340e-63f9e796;fline=with_appended.cpp:80;portions=1,;task_id=ac02aa40-a7e611ef-8daf340e-63f9e796; 2024-11-21T08:57:44.220873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;task_id=ac02bb16-a7e611ef-81a3dc91-9be66fc1;fline=with_appended.cpp:80;portions=1,;task_id=ac02bb16-a7e611ef-81a3dc91-9be66fc1; 2024-11-21T08:57:44.220939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ac02b486-a7e611ef-94e2f353-1b1b0be8;fline=with_appended.cpp:80;portions=1,;task_id=ac02b486-a7e611ef-94e2f353-1b1b0be8; 2024-11-21T08:57:44.220986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654148981469607:6970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.221091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=ac02b346-a7e611ef-86a9a639-5b0ba998;fline=with_appended.cpp:80;portions=1,;task_id=ac02b346-a7e611ef-86a9a639-5b0ba998; 2024-11-21T08:57:44.221462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;task_id=ac02b59e-a7e611ef-96ce5b76-e55a4272;fline=with_appended.cpp:80;portions=1,;task_id=ac02b59e-a7e611ef-96ce5b76-e55a4272; 2024-11-21T08:57:44.222055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ac02bd96-a7e611ef-af8f1b4a-3159a2dc;fline=with_appended.cpp:80;portions=1,;task_id=ac02bd96-a7e611ef-af8f1b4a-3159a2dc; 2024-11-21T08:57:44.222609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:44.224746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654148981469680:6994], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:44.473895Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464274, txId: 18446744073709551615] shutting down [[10000u]] 2024-11-21T08:57:44.480153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:44.481012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:44.498527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:44.501758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 [[10000u]] 2024-11-21T08:57:44.592346Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464554, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.604023Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654127506592968:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.604078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; VERIFY failed (2024-11-21T08:57:44.619374Z): verification=found;fline=hash_intervals.h:133; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/tx/sharding/hash_intervals.h:133: MakeShardingWrite @ 0x118B168D 4. /-S/ydb/core/tx/sharding/hash_intervals.cpp:184: MakeSharding @ 0x118B0938 5. /-S/ydb/core/tx/sharding/sharding.cpp:245: SplitByShardsToArrowBatches @ 0x161E1F16 6. /-S/ydb/core/tx/sharding/sharding.cpp:257: SplitByShards @ 0x161E24F5 2024-11-21T08:57:45.623020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 7. /-S/ydb/core/tx/data_events/columnshard_splitter.cpp:64: SplitImpl @ 0x1D204FDD 8. /-S/ydb/core/tx/data_events/columnshard_splitter.cpp:57: DoSplitData @ 0x1D20477F 9. /-S/ydb/core/tx/data_events/shards_splitter.h:73: SplitData @ 0x1D1FA067 10. /-S/ydb/core/tx/tx_proxy/rpc_long_tx.cpp:92: ProceedWithSchema @ 0x1D1F8FF2 2024-11-21T08:57:45.634244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 11. 
/-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x136DBA88 12. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() @ 0x136DF626 13. /-S/ydb/library/actors/core/executor_thread.cpp:479: ProcessExecutorPool @ 0x136DF0D2 14. /-S/ydb/library/actors/core/executor_thread.cpp:510: ThreadProc @ 0x136DFE4E 15. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x126E687C 16. ??:0: ?? @ 0x7FEA65FA9AC2 17. ??:0: ?? @ 0x7FEA6603B84F >> KqpOlapAggregations::Aggregation_Some ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] Test command err: Trying to start YDB, gRPC: 3863, MsgBus: 63803 2024-11-21T08:57:46.179524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654160355676646:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:46.179701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047e5/r3tmp/tmpx14oAB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3863, node 1 2024-11-21T08:57:46.247232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:46.247250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:46.247252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:46.247288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:46.247446Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63803 TClient is connected to server localhost:63803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:46.280748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:46.280776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:46.281900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:46.313977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:46.318027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:46.325335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.325405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.325448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.325477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.325513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.325543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.325566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.325590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:46.325618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:46.325638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.325655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:46.325681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654160355677278:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:46.326114Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:46.326129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:46.326138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:46.326142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:46.326156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:46.326167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:46.326179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:46.326185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:46.326192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:46.326196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:46.326202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:46.326205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:46.326257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:46.326268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:46.326283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:46.326294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.326304Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:46.326308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:46.326323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:46.326332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:46.326343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:46.326346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:46.375735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732179466360 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179466416 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 720... (TRUNCATED) TClient::Ls request: /Root/olapStore TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732179466416 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732179466430 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 Schemesha... 
(TRUNCATED) TClient::Ls request: /Root/olapStore/OlapParametersTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732179466430 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 I... (TRUNCATED) 2024-11-21T08:57:46.458041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654160355677495:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.458072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654160355677506:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.458080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:46.458818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:46.460686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654160355677509:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:46.603186Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466514, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.653468Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466633, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.706818Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466689, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.771187Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466738, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.829805Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466808, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.880120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466864, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.935540Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466920, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.999621Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.067902Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467039, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.123286Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467095, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.184298Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467158, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.248585Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467221, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.308700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467284, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.375865Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467347, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.426507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467403, txId: 18446744073709551615] shutting down >> KqpOlapIndexes::IndexesInBS [GOOD] >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] |91.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> KqpOlap::PredicatePushdownPartial [GOOD] >> KqpOlapAggregations::Aggregation_Some [GOOD] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupByNullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> 
KqpOlapIndexes::IndexesInBS [GOOD] Test command err: Trying to start YDB, gRPC: 17241, MsgBus: 5621 2024-11-21T08:57:21.182860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654052782893739:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:21.182974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048bc/r3tmp/tmpMbQD7N/pdisk_1.dat 2024-11-21T08:57:21.245206Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17241, node 1 2024-11-21T08:57:21.259946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:21.259961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:21.259963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:21.259998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5621 2024-11-21T08:57:21.281536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.281568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.282619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:21.311385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:21.316496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:57:21.320318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.342219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.342291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.342335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.342354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.342373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.342390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.342412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.342430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.342465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.342484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.342502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.342519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654052782894239:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.357096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.357130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.357215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.357234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.357250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.357265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.357277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.357292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.357307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.357321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.357343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.357361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654052782894237:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.360768Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.360793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.360836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.360851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.360869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.360886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.360900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.360916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654052782894240:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
8:57:39.896504Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179459724, txId: 18446744073709551615] shutting down 2024-11-21T08:57:39.999033Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179459843, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.149421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.331042Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460081, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.439704Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460249, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.537002Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460424, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.646794Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460522, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.750307Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460620, txId: 18446744073709551615] shutting down 2024-11-21T08:57:40.864799Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460732, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.016456Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179460830, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.166782Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.296431Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461117, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.436831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461250, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.561752Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461390, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.684490Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461537, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.845167Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461649, txId: 18446744073709551615] shutting down 2024-11-21T08:57:41.954533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179461796, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.068614Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.195665Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462041, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.307550Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1732179462160, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.408109Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462286, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.555681Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462391, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.679903Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462510, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.802019Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462636, txId: 18446744073709551615] shutting down 2024-11-21T08:57:42.945987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179462769, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.059942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.211446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463028, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.333214Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463161, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.436818Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463294, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.542841Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463413, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.656936Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463518, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.769025Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463623, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.879331Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463749, txId: 18446744073709551615] shutting down 2024-11-21T08:57:43.994508Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463854, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.106993Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.217089Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464078, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.332165Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464190, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.448279Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464302, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.560524Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464414, txId: 18446744073709551615] shutting down 
2024-11-21T08:57:44.684070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464533, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.796146Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464652, txId: 18446744073709551615] shutting down 2024-11-21T08:57:44.925892Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464764, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.035865Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.137150Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465009, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.247273Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465121, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.384859Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465219, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.486553Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465331, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.624656Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465464, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.732157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465583, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.860531Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465709, txId: 18446744073709551615] shutting down 2024-11-21T08:57:45.976305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465821, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.101328Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.237829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466059, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.368468Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466199, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.491600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466332, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.606576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466465, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.729990Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466577, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.855011Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179466696, txId: 18446744073709551615] shutting down 2024-11-21T08:57:46.973616Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding 
snapshot; our snapshot: [step: 1732179466822, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.121226Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.259186Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467067, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.393735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467214, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.513249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467361, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.638943Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467480, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.758807Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467606, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.890271Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467725, txId: 18446744073709551615] shutting down >> KqpOlapBlobsSharing::MultipleMerge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownPartial [GOOD] Test command err: Trying to start YDB, gRPC: 30078, MsgBus: 64555 2024-11-21T08:57:47.209240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654165679427719:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:47.209299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047e0/r3tmp/tmpOpQVFo/pdisk_1.dat 2024-11-21T08:57:47.276848Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30078, node 1 2024-11-21T08:57:47.289664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:47.289679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:47.289681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:47.289722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64555 2024-11-21T08:57:47.307625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:47.307674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:47.308735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64555 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:47.332270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:47.344734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:47.357099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.357155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.357205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.357227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.357245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.357262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.357280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.357296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.357311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.357336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.357350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.357360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654165679428216:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.357724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.357740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.357750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.357762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.357780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.357788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.357797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.357805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.357812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.357820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.357826Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.357833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.357880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.357890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.357904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.357911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.357922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.357929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.357944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.357951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.357960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.357967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:47.360716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654165679428217:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.360737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654165679428217:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.360768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654165679428217:2290];tablet_id=7207518622 ... 
fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.369247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.369253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.369259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.369263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.369270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.369292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.369298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.369307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.369313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.369319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.369321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.369331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.369337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.369343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.369349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:47.404098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5800;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp` 2024-11-21T08:57:47.466283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654165679428550:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.466301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654165679428540:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.466353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.466880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:47.468011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654165679428554:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:47.595363Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467522, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp` 2024-11-21T08:57:47.627924Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467627, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T08:57:47.692030Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467662, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.723144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467732, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` 2024-11-21T08:57:47.780995Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467760, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` 2024-11-21T08:57:47.816760Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467816, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` 2024-11-21T08:57:47.873968Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467851, txId: 18446744073709551615] shutting down 2024-11-21T08:57:47.906569Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467914, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE 
NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T08:57:47.972288Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:48.005105Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468005, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T08:57:48.071500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468047, txId: 18446744073709551615] shutting down 2024-11-21T08:57:48.103604Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468103, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` 2024-11-21T08:57:48.162561Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468145, txId: 18446744073709551615] shutting down 2024-11-21T08:57:48.200092Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468194, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` 2024-11-21T08:57:48.256962Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468236, txId: 18446744073709551615] shutting down 2024-11-21T08:57:48.291344Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468292, txId: 18446744073709551615] shutting down >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-ForceBlocks] >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some [GOOD] Test command err: Trying to start YDB, gRPC: 28332, MsgBus: 3993 2024-11-21T08:57:47.845868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654164151327789:2053];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T08:57:47.845883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047d5/r3tmp/tmpcv3IwC/pdisk_1.dat 2024-11-21T08:57:47.890462Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28332, node 1 2024-11-21T08:57:47.894571Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:47.894582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:47.894583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:47.894613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3993 TClient is connected to server localhost:3993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:47.946733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:47.946753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:47.947849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:47.967066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:47.978146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:47.985962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.986032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.986081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.986116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.986138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.986159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.986178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.986197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.986212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.986229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.986246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.986260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654164151328433:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.988165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.988180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.988194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.988204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.988229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.988238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.988249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.988262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.988271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.988284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.988296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.988308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164151328434:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.988578Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.988587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.988595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.988602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.988611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.988617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.988623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.988631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.988639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.988642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.988649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... 
act.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.993156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.993160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.993163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.993170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.993172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.993176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.993178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.993182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.993184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.993187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.993193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.993206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.993211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.993219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.993225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.993230Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.993236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.993243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.993249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.993254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.993256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id=1 2024-11-21T08:57:48.085567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654168446296036:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.085586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654168446296025:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.085602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.086115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:48.087315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654168446296040:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:48.268488Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468138, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id=1 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 '('"_partition_mode" '"single")) (let $2 '('('"_logical_id" '1052) '('"_id" '"1f44f410-c1e8ad62-3e11d188-2a8024cb") $1)) (let $3 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '1)) (let $28 (Just $27)) (let $29 '($28 $27)) (let $30 (If (== $27 (Int32 '2147483647)) $29 '((+ $28 $27) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($29 $30)))))))))) ))) $2)) (let $4 (DqCnValue (TDqOutput $3 '0))) (let $5 (KqpPhysicalTx '($3) '($4) '() '('('"type" '"compute")))) (let $6 '"%kqp%tx_result_binding_0_0") 
(let $7 (DataType 'Int32)) (let $8 (OptionalType $7)) (let $9 (TupleType $8 $7)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '1110) '('"_id" '"387d565f-e483fec9-61944107-5e2a3924") '('"_wide_channels" (StructType '('_yql_agg_0 $8))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $31 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $32 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $33 (KqpWideReadOlapTableRanges $31 %kqp%tx_result_binding_0_0 '('"level") '() $32 (lambda '($34) (TKqpOlapAgg $34 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $33)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($35) (block '( (let $36 (Bool 'false)) (let $37 (WideCondense1 (ToFlow $35) (lambda '($39) $39) (lambda '($40 $41) $36) (lambda '($42 $43) (Coalesce $43 $42)))) (let $38 (Condense (NarrowMap (Take $37 (Uint64 '1)) (lambda '($44) (AsStruct '('Some0 $44)))) (Nothing (OptionalType (StructType '('Some0 $8)))) (lambda '($45 $46) $36) (lambda '($47 $48) (Just $47)))) (return (FromFlow (Map $38 (lambda '($49) (AsList (AsStruct '('"column0" (Member $49 'Some0)))))))) ))) '('('"_logical_id" '1644) '('"_id" '"b349bb69-c84236f9-35e400eb-cd5942b8")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($6 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $8)))) (let $21 '('('"_logical_id" '1740) '('"_id" '"88252122-4c0a8094-db0c36c8-942c81d0") $1)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($5 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) >> KqpOlapSysView::StatsSysViewColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestOrderByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 26473, MsgBus: 8244 2024-11-21T08:57:47.571644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654162108657659:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:47.571662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047dd/r3tmp/tmpyrXWxh/pdisk_1.dat 2024-11-21T08:57:47.625935Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26473, node 1 2024-11-21T08:57:47.634256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:47.634268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:47.634270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:47.634309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8244 TClient is connected to server localhost:8244 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:47.673059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:47.673087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:47.674236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:47.703136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:47.824933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654162108658269:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.824964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.844114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:47.851103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.851129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.851158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.851175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.851190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.851204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.851218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.851233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.851253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.851268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.851283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.851302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162108658345:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:47.851709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.851723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.851735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.851740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.851755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.851765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.851779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.851789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.851802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.851810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.851816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.851824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.851877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.851887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.851908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.851923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:47.851937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.851945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.851965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.851973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.851984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.851991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBAC ... hema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:48.183884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:48.183899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:48.183912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:48.183930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:48.183940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:48.183951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:48.183965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:48.183978Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[2:7439654167349790757:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:48.184299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:48.184313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:48.184325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:48.184330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:48.184350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:48.184362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:48.184371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:48.184384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:48.184398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:48.184406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:48.184412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:48.184416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:48.184457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:48.184468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:48.184483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:48.184493Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:48.184505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:48.184514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:48.184530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:48.184538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:48.184549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:48.184564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:48.256540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654166403625797:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.256561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.256562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654166403625802:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.257135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:48.258314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654166403625804:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:48.363561Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468313, txId: 18446744073709551615] shutting down 2024-11-21T08:57:48.365146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654167349790858:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.365171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.365231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654167349790863:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.366021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:48.367560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654167349790865:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:48.481368Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468418, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_Count_GroupByNull [GOOD] >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] >> KqpOlapAggregations::CountAllPushdown-UseLlvm >> KqpOlap::SimpleCount >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Results] >> KqpOlapSysView::StatsSysViewAggregation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesDictActualization [GOOD] Test command err: Trying to start YDB, gRPC: 19826, MsgBus: 13744 2024-11-21T08:57:20.988474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654046486396494:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.988566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048be/r3tmp/tmpF6rIyQ/pdisk_1.dat 2024-11-21T08:57:21.048347Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19826, node 1 2024-11-21T08:57:21.119051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:21.119083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:21.120549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:21.137210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:21.137224Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:21.137226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:21.137263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13744 TClient is connected to server localhost:13744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:21.185427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.197292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:21.212049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.212098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.212135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.212153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.212163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.212174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.212189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.212218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.212242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.212256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.212268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.212287Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654050781364286:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.215088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.215111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.215145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.215174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.215189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.215205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.215219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.215236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.215253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.215274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.215290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.215304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654050781364288:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.217827Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.217850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.217890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.217911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.217928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.217944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.217956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.217971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654050781364287:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.217991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396540507813642 ... K_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:25.988113Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654046486396494:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.988157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2024-11-21T08:57:31.891609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654093731037755:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.891609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654093731037747:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.891622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:31.892301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:31.893741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654093731037761:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:32.068249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179452042, txId: 281474976715662] shutting down Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:2824656:0] EntityType: COL BlobRangeSize: 1751872 PathId: 3 Wait changes: 5244720/44800000 2024-11-21T08:57:36.045803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:36.045819Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2024-11-21T08:57:37.103683Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179457091, txId: 281474976715664] shutting down Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:2824656:0] EntityType: COL BlobRangeSize: 1751872 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 5244720/44800000 2024-11-21T08:57:37.107507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:37.115114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000008s 2024-11-21T08:57:37.262438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=a7dc8184-a7e611ef-a0509bed-7f79ca2b;fline=with_appended.cpp:80;portions=2,;task_id=a7dc8184-a7e611ef-a0509bed-7f79ca2b; 2024-11-21T08:57:37.262666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=a7dc817a-a7e611ef-81d1f08c-795e18cb;fline=with_appended.cpp:80;portions=2,;task_id=a7dc817a-a7e611ef-81d1f08c-795e18cb; 2024-11-21T08:57:37.281370Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;task_id=a7dc9282-a7e611ef-ae41e865-e1afdcc3;fline=with_appended.cpp:80;portions=2,;task_id=a7dc9282-a7e611ef-ae41e865-e1afdcc3; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 1070008 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1214208:0] EntityType: COL BlobRangeSize: 144200 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 1072784 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1217248:0] EntityType: COL BlobRangeSize: 144464 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 1067792 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1211576:0] EntityType: COL BlobRangeSize: 143784 PathId: 3 Wait changes: 432448/44800000 2024-11-21T08:57:38.172298Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179458155, txId: 281474976715668] shutting down ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 1070008 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1214208:0] EntityType: COL BlobRangeSize: 144200 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 1067792 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1211576:0] EntityType: COL BlobRangeSize: 143784 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 1072784 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1217248:0] EntityType: COL BlobRangeSize: 144464 PathId: 3 432448/44800000 2024-11-21T08:57:43.221652Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179463202, txId: 281474976715670] shutting down 2024-11-21T08:57:43.225913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:43.234848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715673:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000007s ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: 2024-11-21T08:57:44.298181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ac091844-a7e611ef-8d25d947-1f7b73e0;fline=with_appended.cpp:80;portions=3,;task_id=ac091844-a7e611ef-8d25d947-1f7b73e0; 2024-11-21T08:57:44.298405Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;task_id=ac0929e2-a7e611ef-b72f3f0a-93da37f5;fline=with_appended.cpp:80;portions=3,;task_id=ac0929e2-a7e611ef-b72f3f0a-93da37f5; 2024-11-21T08:57:44.307194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ac092ad2-a7e611ef-989e8132-5f5bb47d;fline=with_appended.cpp:80;portions=3,;task_id=ac092ad2-a7e611ef-989e8132-5f5bb47d; Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:2824648:0] EntityType: COL BlobRangeSize: 1751864 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Wait changes: 2024-11-21T08:57:44.313453Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464298, txId: 281474976715674] shutting down 5244712/44800000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('field') RESULT: Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:2817288:0] EntityType: COL BlobRangeSize: 1747280 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:2813360:0] EntityType: COL BlobRangeSize: 1745568 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:2824648:0] EntityType: COL BlobRangeSize: 1751864 PathId: 3 2024-11-21T08:57:49.354512Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179469339, txId: 281474976715676] shutting down 5244712/44800000 |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_GroupByNull [GOOD] >> KqpOlapAggregations::Aggregation_ProjectionOrder >> KqpOlap::SimpleCount [GOOD] >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] >> KqpOlap::NormalizeAbsentColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleCount [GOOD] Test command err: Trying to start YDB, gRPC: 12404, MsgBus: 17979 2024-11-21T08:57:49.778161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654172496523527:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:49.778176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047c7/r3tmp/tmpKeN1DP/pdisk_1.dat 2024-11-21T08:57:49.833899Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12404, node 1 2024-11-21T08:57:49.839978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:49.839989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:49.839990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:49.840028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17979 TClient is connected to server localhost:17979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:49.878782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:49.878817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:49.879892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:49.910010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:49.914426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:49.928649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.928716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.928765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.928794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.928816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.928836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.928858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.928881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.928907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.928932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.928952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.928976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654172496524177:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.932284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.932314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.932353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.932380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.932397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.932414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.932437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.932462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.932485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.932506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.932537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.932557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654172496524181:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.935751Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.935773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.935803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.935819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.935833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.935846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.935859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.935873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654172496524178:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.935887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74396541724965241 ... 
ll_task","f":1732179470198391,"d_finished":0,"c":0,"l":1732179470205545,"d":7154},"events":[{"name":"bootstrap","f":1732179470202821,"d_finished":282,"c":1,"l":1732179470203103,"d":282},{"a":1732179470205513,"name":"ack","f":1732179470203392,"d_finished":10,"c":1,"l":1732179470203402,"d":42},{"a":1732179470205496,"name":"processing","f":1732179470203392,"d_finished":202,"c":10,"l":1732179470205351,"d":251},{"name":"ProduceResults","f":1732179470202993,"d_finished":163,"c":13,"l":1732179470205530,"d":163},{"a":1732179470205530,"name":"Finish","f":1732179470205530,"d_finished":0,"c":0,"l":1732179470205545,"d":15},{"name":"task_result","f":1732179470203523,"d_finished":180,"c":9,"l":1732179470205351,"d":180}],"id":"72075186224037890::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T08:57:50.205628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492042:2469];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:50.198277Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4352;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4352;selected_rows=0; 2024-11-21T08:57:50.205630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492042:2469];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:50.205634Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654176791492042:2469];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:50.205637Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654176791492042:2469];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T08:57:50.205688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:57:50.205692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T08:57:50.205695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T08:57:50.205696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654176791492043:2470] finished for tablet 72075186224037889 2024-11-21T08:57:50.205699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654176791492043:2470] send ScanData to [1:7439654176791491983:2462] txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:57:50.205718Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439654176791492043:2470] and sent to [1:7439654176791491983:2462] packs: 0 txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.006}],"full":{"a":1732179470198761,"name":"_full_task","f":1732179470198761,"d_finished":0,"c":0,"l":1732179470205700,"d":6939},"events":[{"name":"bootstrap","f":1732179470203107,"d_finished":166,"c":1,"l":1732179470203273,"d":166},{"a":1732179470205686,"name":"ack","f":1732179470203403,"d_finished":17,"c":1,"l":1732179470203420,"d":31},{"a":1732179470205685,"name":"processing","f":1732179470203402,"d_finished":203,"c":10,"l":1732179470205391,"d":218},{"name":"ProduceResults","f":1732179470203223,"d_finished":167,"c":13,"l":1732179470205695,"d":167},{"a":1732179470205695,"name":"Finish","f":1732179470205695,"d_finished":0,"c":0,"l":1732179470205700,"d":5},{"name":"task_result","f":1732179470203550,"d_finished":173,"c":9,"l":1732179470205390,"d":173}],"id":"72075186224037889::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T08:57:50.205721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:50.198686Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=65368;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=65368;selected_rows=0; 2024-11-21T08:57:50.205722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:50.205725Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:50.205732Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:7439654176791492043:2470];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T08:57:50.205937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T08:57:50.205943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T08:57:50.205947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T08:57:50.205948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654176791492041:2468] finished for tablet 72075186224037888 2024-11-21T08:57:50.205951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439654176791492041:2468] send ScanData to [1:7439654176791491983:2462] txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037888 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T08:57:50.205974Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439654176791492041:2468] and sent to [1:7439654176791491983:2462] packs: 0 txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037888 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0.005},{"events":["f_ack","f_processing","f_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.008}],"full":{"a":1732179470197172,"name":"_full_task","f":1732179470197172,"d_finished":0,"c":0,"l":1732179470205953,"d":8781},"events":[{"name":"bootstrap","f":1732179470202312,"d_finished":503,"c":1,"l":1732179470202815,"d":503},{"a":1732179470205935,"name":"ack","f":1732179470203370,"d_finished":20,"c":1,"l":1732179470203390,"d":38},{"a":1732179470205933,"name":"processing","f":1732179470203368,"d_finished":286,"c":10,"l":1732179470205316,"d":306},{"name":"ProduceResults","f":1732179470202701,"d_finished":219,"c":13,"l":1732179470205947,"d":219},{"a":1732179470205947,"name":"Finish","f":1732179470205947,"d_finished":0,"c":0,"l":1732179470205953,"d":6},{"name":"task_result","f":1732179470203486,"d_finished":248,"c":9,"l":1732179470205315,"d":248}],"id":"72075186224037888::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T08:57:50.205978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T08:57:50.197002Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4552;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4552;selected_rows=0; 2024-11-21T08:57:50.205979Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T08:57:50.205984Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T08:57:50.205986Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439654176791492041:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T08:57:50.270678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179470098, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 30087, MsgBus: 19770 2024-11-21T08:57:49.668460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654170260933056:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:49.668628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/0047cb/r3tmp/tmpqij2Px/pdisk_1.dat 2024-11-21T08:57:49.719019Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30087, node 1 2024-11-21T08:57:49.730138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:49.730155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:49.730157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:49.730198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19770 TClient is connected to server localhost:19770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:49.770555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:49.770594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:49.771680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:49.800604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:49.805387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:49.814822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.814877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.814911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.814933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.814954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.814969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.814983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.815003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.815018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.815035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.815048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.815064Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654170260933716:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.817307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.817323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.817343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.817353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.817363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.817373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.817386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.817399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.817409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.817419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.817432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.817443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654170260933719:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.819339Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.819355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.819370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.819379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.819392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.819403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.819415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.819428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654170260933720:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.819441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396541702609337 ... 
hunks; 2024-11-21T08:57:49.822539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:49.822541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:49.822548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:49.822550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:49.822554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:49.822556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:49.822560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:49.822562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:49.822565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:49.822567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:49.822580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:49.822583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:49.822590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:49.822596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.822605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:49.822614Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:49.822625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:49.822632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:49.822637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:49.822639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:49.862474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:50.067657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654174555901454:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:50.067657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654174555901462:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:50.067675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:50.068390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:50.070034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654174555901468:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:50.232578Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179470126, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"cb891124-a4e760d9-e2f37197-f183c8c7") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"82941c1b-f0a43535-b985b1e5-477b214e")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"34ecabb5-1d2548d3-50abfa40-584d2466") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Analyze] >> KqpOlapTiering::TieringRuleValidation >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 14596, MsgBus: 8435 2024-11-21T08:57:47.728697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654164396676473:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:47.728821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047d9/r3tmp/tmpNppGfD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14596, node 1 2024-11-21T08:57:47.783004Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:47.786710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:47.786722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:47.786724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:47.786751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8435 TClient is connected to server localhost:8435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:47.830088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:47.830113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:47.831185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:47.857741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:47.862210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:47.868942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.869014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.869066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.869089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.869112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.869133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.869154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.869174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.869203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.869227Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.869246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.869267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654164396677126:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.869714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.869727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.869739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.869748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.869762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.869771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.869779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.869795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.869803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.869806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.869811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.869818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.869874Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.869883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.869897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.869901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.869911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.869919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.869933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.869940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.869949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.869956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:47.873075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654164396677129:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.873101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654164396677129:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.873136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654164396677129:2290];tablet_id=7207518622403 ... 
TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.879953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.879955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.879962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.879965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.879969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.879971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.879974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.879976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.879998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.880004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.880014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.880020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.880026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.880032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.880040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.880046Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.880052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.880054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2024-11-21T08:57:47.972898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654164396677423:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.972913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654164396677418:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.972923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.973395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:47.974540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654164396677432:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:50.372277Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179468026, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"bfab3416-360d33bb-d4d74383-8da7b78e") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"5")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType 
(ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"5297746c-7ffe8c07-f488d37c-742b4812") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"63323301-515df30a-a1283ea3-b4e21322") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"6bbf8c22-4d834ad2-f6a2c70a-8ecbf9ee")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::NormalizeAbsentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 25586, MsgBus: 22441 2024-11-21T08:57:45.406164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654156019771563:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.406212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047f2/r3tmp/tmpGdfood/pdisk_1.dat 2024-11-21T08:57:45.450956Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25586, node 1 2024-11-21T08:57:45.462671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.462686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.462687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
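The JSON plan and AST above show how AVG(level) is pushed into the olap scan as an SSA GroupBy with two aggregate functions (a sum and a count over the same argument column) that the upper stage then divides. Below is a minimal Python sketch, using a fragment trimmed from the plan printed above, that walks a plan tree and lists the pushed-down aggregates and group-by keys; the fragment and variable names are illustrative, not part of any test fixture.

import json

# Trimmed copy of the SsaProgram structure from the plan above (illustrative).
plan_json = """
{"Plan": {"Plans": [{"Operators": [{"SsaProgram": {"Version": 5, "Command": [
 {"GroupBy": {"Aggregates": [
   {"Function": {"Id": 5, "Arguments": [{"Id": 3}]}, "Column": {"Id": 7}},
   {"Function": {"Id": 2, "Arguments": [{"Id": 3}]}, "Column": {"Id": 8}}],
  "KeyColumns": [{"Id": 1}]}},
 {"Projection": {"Columns": [{"Id": 8}, {"Id": 7}, {"Id": 1}]}}]}}]}]}}
"""

def find_ssa_programs(node):
    """Recursively yield every SsaProgram dict embedded in a plan tree."""
    if isinstance(node, dict):
        if "SsaProgram" in node:
            yield node["SsaProgram"]
        for value in node.values():
            yield from find_ssa_programs(value)
    elif isinstance(node, list):
        for item in node:
            yield from find_ssa_programs(item)

plan = json.loads(plan_json)
for ssa in find_ssa_programs(plan):
    for command in ssa.get("Command", []):
        if "GroupBy" in command:
            group_by = command["GroupBy"]
            # Two aggregate functions back the single AVG(level): a sum and a
            # count over the same column; the AST above divides them (Div/Nth).
            for agg in group_by["Aggregates"]:
                print("aggregate fn id", agg["Function"]["Id"],
                      "over column", agg["Function"]["Arguments"][0]["Id"],
                      "-> result column", agg["Column"]["Id"])
            print("group-by keys:", [k["Id"] for k in group_by["KeyColumns"]])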
2024-11-21T08:57:45.462715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22441 TClient is connected to server localhost:22441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.507576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.507605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.508746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:45.535062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.541240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.551288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.551342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.551383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.551407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.551433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.551457Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.551476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.551497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.551519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.551540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.551562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.551583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.552014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.552027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.552040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.552048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.552063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.552071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.552080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.552090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.552103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.552111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.552122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:45.552130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:45.552181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:45.552191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:45.552222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:45.552233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.552248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.552256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.552271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.552280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.552294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.552299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:45.555126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.555149Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.555180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];tablet_id=7207518622 ... overload_size=0; 2024-11-21T08:57:50.567256Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179170000;tx_id=18446744073709551615;;current_snapshot_ts=1732179466000; 2024-11-21T08:57:50.567257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T08:57:50.567258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:50.567260Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179170000;tx_id=18446744073709551615;;current_snapshot_ts=1732179466000; 2024-11-21T08:57:50.567260Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:50.567263Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:50.567266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567268Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:50.567271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654156019772220:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:50.567273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654156019772221:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:50.567282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654156019772222:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:50.567572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654156019772254:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:50.567593Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T08:57:50.567601Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T08:57:50.567616Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:50.567625Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:50.567634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T08:57:50.567639Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179470000 at tablet 72075186224037891 2024-11-21T08:57:50.567643Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:50.567645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439654156019772254:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T08:57:50.567696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:57:50.567707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:57:50.567719Z 
node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:57:50.567724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:57:50.567733Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:50.567750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T08:57:50.567760Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179170000;tx_id=18446744073709551615;;current_snapshot_ts=1732179466000; 2024-11-21T08:57:50.567764Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:50.567768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:50.567787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:57:50.567808Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179470000 at tablet 72075186224037889 2024-11-21T08:57:50.567815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:57:50.567819Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:57:50.567822Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T08:57:50.567825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179170000;tx_id=18446744073709551615;;current_snapshot_ts=1732179466000; 2024-11-21T08:57:50.567827Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:57:50.567829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567831Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:57:50.567834Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:57:50.567838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654156019772222:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; >> test.py::test[pg-select_agg_one_row-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Debug] >> KqpOlapAggregations::CountAllNoPushdown >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpOlapWrite::DefaultValues >> TRtmrTestReboots::CreateRtmrVolumeWithReboots [GOOD] >> KqpOlapAggregations::CountAllNoPushdown [GOOD] >> KqpOlapTiering::TieringRuleValidation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllNoPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26355, MsgBus: 3123 2024-11-21T08:57:51.985451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654179632635692:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:51.985649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047bf/r3tmp/tmpRHfpQv/pdisk_1.dat 2024-11-21T08:57:52.030241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26355, node 1 2024-11-21T08:57:52.038618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:52.038641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2024-11-21T08:57:52.038643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:52.038693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3123 TClient is connected to server localhost:3123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:52.079401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:52.082273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:52.087174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:52.087202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:52.088392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:52.097216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.097278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.097322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:52.097346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:52.097372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:52.097394Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:52.097415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:52.097438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:52.097468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:52.097496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.097532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:52.097555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183927603648:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:52.098054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:52.098070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:52.098083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:52.098093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:52.098109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:52.098119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:52.098128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:52.098140Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:52.098154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:52.098163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:52.098174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:52.098184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:52.098239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:52.098250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:52.098271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:52.098276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.098293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:52.098297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:52.098313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:52.098317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:52.098329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:52.098338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:52.101788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654183927603649:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.101811Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654183927603649:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.101848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654183927603649:2289];tablet_id=7207518622403 ... e=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:52.106885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:52.106893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:52.109944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.109968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.109996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:52.110019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:52.110034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:52.110054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:52.110085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:52.110105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:52.110125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:52.110144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
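The repeated normalizer_register / normalizer_finished warnings come from the column shard normalizer chain and recur for every tablet in each test. A small stdlib-only sketch (the test.log file name is illustrative; point it at a saved copy of this output) that tallies both events per CLASS_NAME, making it easy to confirm that every registered normalizer also reports finished:

import re
from collections import Counter

LOG_PATH = "test.log"  # illustrative; a saved copy of this run's output

registered = Counter()
finished = Counter()
pattern = re.compile(
    r"event=normalizer_(register|finished);description=CLASS_NAME=([A-Za-z0-9_]+)")

with open(LOG_PATH, encoding="utf-8", errors="replace") as log:
    for line in log:
        for event, class_name in pattern.findall(line):
            # Count register and finished events separately per normalizer.
            (registered if event == "register" else finished)[class_name] += 1

for class_name in sorted(registered):
    print(f"{class_name}: registered={registered[class_name]} "
          f"finished={finished[class_name]}")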
2024-11-21T08:57:52.110165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:52.110183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183927603682:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:52.111609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:52.111621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:52.111630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:52.111634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:52.111646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:52.111650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:52.111659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:52.111664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:52.111676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:52.111687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:52.111694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:52.111702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:52.111727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:52.111737Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:52.111753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:52.111761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.111772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:52.111780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:52.111795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:52.111802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:52.111812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:52.111819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:52.139312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:57:52.332098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183927604089:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.332098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183927604100:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.332120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.332834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:52.334563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654183927604103:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:52.514287Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179472387, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy >> KqpOlapWrite::DefaultValues [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::TieringRuleValidation [GOOD] Test command err: Trying to start YDB, gRPC: 10358, MsgBus: 28803 2024-11-21T08:57:51.015729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654179172096518:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:51.015857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047c0/r3tmp/tmpA9SLko/pdisk_1.dat 2024-11-21T08:57:51.063365Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10358, node 1 2024-11-21T08:57:51.074533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:51.074545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:51.074547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:51.074582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28803 TClient is connected to server localhost:28803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:51.116882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:51.116907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:51.118038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:51.118989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
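The interleaved ">> TestName [GOOD]" markers are the per-test verdicts of this run. A stdlib-only sketch (the test.log path is illustrative, and the verdict marker format is taken from the lines visible above) that tallies verdicts and lists any test whose verdict is not GOOD:

import re
from collections import Counter

LOG_PATH = "test.log"  # illustrative; a saved copy of this run's output

# Matches ">> <test name> [VERDICT]", e.g. ">> KqpOlapWrite::DefaultValues [GOOD]".
verdict_re = re.compile(r">> ([\w:.\[\]-]+?) \[(\w+)\]")

verdicts = Counter()
not_good = []
with open(LOG_PATH, encoding="utf-8", errors="replace") as log:
    for line in log:
        for name, verdict in verdict_re.findall(line):
            verdicts[verdict] += 1
            if verdict != "GOOD":
                not_good.append((name, verdict))

print("verdict counts:", dict(verdicts))
for name, verdict in not_good:
    print("non-GOOD verdict:", name, verdict)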
2024-11-21T08:57:51.265316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:51.275742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:51.275784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:51.275808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:51.275826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:51.275849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:51.275865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:51.275882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:51.275906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:51.275928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:51.275952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.275964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:51.275976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654179172097208:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:51.275992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:51.276028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:51.276080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:51.276100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:51.276127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:51.276151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:51.276174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:51.276199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:51.276238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:51.276262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.276284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:51.276305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654179172097209:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:51.276352Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:51.276369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:51.276382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:51.276393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:51.276411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:51.276422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:51.276433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:51.276446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:51.276456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:51.276467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:51.276475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
21T08:57:51.280275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:51.280295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:51.280314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:51.280333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:51.280354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:51.280373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.280392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:51.280413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654179172097243:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:51.280852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:51.280869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:51.280879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:51.280887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:51.280901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:51.280909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:51.280917Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:51.280926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:51.280933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:51.280937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:51.280942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:51.280945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:51.280977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:51.280981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:51.280997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:51.281007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.281019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:51.281029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:51.281043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:51.281052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:51.281063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:51.281072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:51.323294Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:51.335600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654179172097469:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:51.335631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.017551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2024-11-21T08:57:52.074981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183467064895:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.074982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183467064890:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.074997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.075468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2024-11-21T08:57:52.079038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654183467064897:2391], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T08:57:52.200415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.241803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2024-11-21T08:57:52.289268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.346040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.394667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.531074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.588668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.646339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710696:0, at schemeshard: 72057594046644480 2024-11-21T08:57:52.694058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710699:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::DefaultValues [GOOD] Test command err: Trying to start YDB, gRPC: 15797, MsgBus: 13550 2024-11-21T08:57:52.255282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654183377008297:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:52.255347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047be/r3tmp/tmp4x6r98/pdisk_1.dat 2024-11-21T08:57:52.297608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15797, node 1 2024-11-21T08:57:52.309139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T08:57:52.309152Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:52.309154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:52.309192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13550 TClient is connected to server localhost:13550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:52.355931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:52.355966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:52.357035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:52.377817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:52.388484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:52.397642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.397714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.397767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:52.397795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:52.397820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:52.397842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:52.397864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:52.397887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:52.397904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:52.397921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.397938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:52.397955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654183377008749:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:52.400137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.400156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.400173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:52.400184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:52.400196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:52.400242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:52.400269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:52.400291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:52.400308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:52.400324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.400342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:52.400360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654183377008745:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:52.402502Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:52.402522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:52.402566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:52.402586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:52.402604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:52.402617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:52.402629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:52.402642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654183377008746:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:52.402656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74396541833770087 ... 
description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:52.406341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:52.406347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:52.406352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:52.406357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:52.406361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:52.406367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:52.406371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:52.406375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:52.406388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:52.406395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:52.406403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:52.406410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.406416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:52.406419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:52.406426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:52.406428Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:52.406434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:52.406436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:52.406508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:52.406517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:52.406522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:52.406525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:52.406533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:52.406536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:52.406540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:52.406543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:52.406547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:52.406554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:52.406558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:52.406565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:52.406581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:52.406585Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:52.406593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:52.406596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:52.406602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:52.406605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:52.406612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:52.406615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:52.406621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:52.406623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:52.446316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:52.515291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183377009038:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.515317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.535144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6400144;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6400144;columns=1; ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` WHERE field = 'abcde' RESULT: 2024-11-21T08:57:52.685464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183377009124:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.685532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.685602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654183377009129:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:52.686448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:52.688090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654183377009131:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:57:52.858059Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179472744, txId: 18446744073709551615] shutting down count: 800000 >> test.py::test[pg-select_agg_one_row-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-ForceBlocks] >> KqpOlapAggregations::Aggregation_MaxL >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] >> KqpOlapCompression::DisabledAlterCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 31634, MsgBus: 28366 2024-11-21T08:57:40.604068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654132396596054:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:40.604249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004824/r3tmp/tmpNqHbRJ/pdisk_1.dat 2024-11-21T08:57:40.660573Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31634, node 1 2024-11-21T08:57:40.671301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:40.671318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:40.671320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:40.671358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28366 TClient is connected to server localhost:28366 2024-11-21T08:57:40.705121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:40.705151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:40.706213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:40.715607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:40.726767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:40.735991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.736046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.736074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.736091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.736102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.736116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.736130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.736144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.736163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.736178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.736193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.736223Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654132396596702:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.736582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:40.736592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:40.736599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:40.736601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:40.736617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:40.736625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:40.736630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:40.736634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:40.736639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:40.736643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:40.736646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:40.736648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:40.736686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:40.736694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:40.736704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:57:40.736710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.736717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:40.736719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:40.736729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:40.736735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:40.736742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:40.736743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:40.738522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654132396596703:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.738537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654132396596703:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.738555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654132396596703:2289];tablet_id=7207518622 ... TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.152983Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.320147Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.320172Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:52.402162Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.402185Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.484018Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.484037Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.565989Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.566015Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.658732Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:52.680353Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:52.721663Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.721684Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.804004Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. 
SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.804029Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.886267Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.886295Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:52.968406Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:52.968431Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.050691Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.050722Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.103403Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:53.199327Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.199350Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. 
CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.322731Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.322749Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.404823Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.404853Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.486610Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.486631Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:53.569093Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:53.569122Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z243x0hy9gzfyrqcmp2k8. SessionId : ydb://session/3?node_id=2&id=YTI2ODE4YzgtZGIyMjlmMzEtOGNkOGZjZTEtYTJmMWNmMmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:53.651115Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:53.671798Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue_ToString ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 22761, MsgBus: 1667 2024-11-21T08:57:53.140614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654187406687169:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:53.140782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047bd/r3tmp/tmpvLtEBj/pdisk_1.dat 2024-11-21T08:57:53.191570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22761, node 1 2024-11-21T08:57:53.200705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:53.200719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:53.200720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:53.200755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1667 TClient is connected to server localhost:1667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:53.242587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:53.242620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:53.243804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:53.271249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:53.304719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:53.312780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:53.312835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:53.312862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:53.312878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:53.312892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:53.312905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:53.312923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:53.312941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:53.312960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:53.312976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:53.312990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:53.313005Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654187406687835:2291];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:53.313294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:53.313306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:53.313313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:53.313320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:53.313332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:53.313338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:53.313343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:53.313354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:53.313358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:53.313361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:53.313364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:53.313366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:53.313404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:53.313411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:53.313419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:53.313426Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:53.313432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:53.313438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:53.313447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:53.313452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:53.313458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:53.313463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:53.315075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654187406687826:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:53.315090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654187406687826:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:53.315106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654187406687826:2290];tablet_id=7207518622403 ... 
891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:53.319934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:53.319944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:53.319949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:53.319951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:53.319963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:53.319969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:53.319973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:53.319980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:53.319984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:53.319990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:53.319993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:53.319999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:53.320014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:53.320020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:53.320028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:53.320034Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:53.320039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:53.320045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:53.320053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:53.320058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:53.320064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:53.320069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:53.411388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654187406688122:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:53.411403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654187406688114:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:53.411453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:53.412029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:53.413385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654187406688128:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:54.141358Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179473465, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '836) '('"_id" '"ad7b83be-fb1b2793-75d507d8-dd57139e") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '0)) (let $25 '((Nothing $2) $24)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '895) '('"_id" 
'"7a1711f0-d09c4078-9a80f21e-da5eb6ae") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1318) '('"_id" '"3cdd970f-2308ba6e-faaa97d7-9c3c6356") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1330) '('"_id" '"acb9fa1f-f4fbf0e-743e5149-3ecf4f93")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T08:57:17.213697Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.214501Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.214907Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.214999Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.215142Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.215148Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.215196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.216117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.216147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.216183Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.216199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.216231Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.216244Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T08:57:17.226849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.226892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.237466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.237540Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.237558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.237569Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.237600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.237620Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.237626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.237634Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.247962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.248020Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.248242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.248255Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.248286Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.250480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" 
IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2024-11-21T08:57:17.250679Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T08:57:17.250685Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2024-11-21T08:57:17.250688Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk1 2024-11-21T08:57:17.250691Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk2 2024-11-21T08:57:17.250694Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T08:57:17.250697Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk3 2024-11-21T08:57:17.250700Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2024-11-21T08:57:17.250703Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk1 2024-11-21T08:57:17.250705Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T08:57:17.250708Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2024-11-21T08:57:17.250716Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk1 2024-11-21T08:57:17.250718Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk3 2024-11-21T08:57:17.250723Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2024-11-21T08:57:17.250725Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk1 2024-11-21T08:57:17.250728Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# 
/dev/disk2 2024-11-21T08:57:17.250730Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T08:57:17.250732Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2024-11-21T08:57:17.250734Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T08:57:17.250737Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T08:57:17.250739Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk2 2024-11-21T08:57:17.250741Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2024-11-21T08:57:17.250743Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk1 2024-11-21T08:57:17.250746Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T08:57:17.250748Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T08:57:17.250750Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T08:57:17.250753Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T08:57:17.250755Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T08:57:17.250757Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T08:57:17.250760Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T08:57:17.250762Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk1 2024-11-21T08:57:17.250764Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T08:57:17.250767Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T08:57:17.250769Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T08:57:17.250771Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk2 2024-11-21T08:57:17.250774Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T08:57:17.250776Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T08:57:17.250778Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/ ... 
1T08:57:43.888977Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 165:1002 Path# /dev/disk2 2024-11-21T08:57:43.888980Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 169:1002 Path# /dev/disk1 2024-11-21T08:57:43.888984Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 174:1002 Path# /dev/disk2 2024-11-21T08:57:43.888987Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1002 Path# /dev/disk3 2024-11-21T08:57:43.888990Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1002 Path# /dev/disk3 2024-11-21T08:57:43.888993Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1002 Path# /dev/disk3 2024-11-21T08:57:43.888996Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1000 Path# /dev/disk1 2024-11-21T08:57:43.888999Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1000 Path# /dev/disk1 2024-11-21T08:57:43.889002Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2024-11-21T08:57:43.889005Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1001 Path# /dev/disk2 2024-11-21T08:57:43.889008Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 172:1002 Path# /dev/disk1 2024-11-21T08:57:43.889011Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1001 Path# /dev/disk2 2024-11-21T08:57:43.889016Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1002 Path# /dev/disk1 2024-11-21T08:57:43.889019Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1002 Path# /dev/disk3 2024-11-21T08:57:43.889023Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 192:1002 Path# /dev/disk2 2024-11-21T08:57:43.889026Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1001 Path# /dev/disk3 2024-11-21T08:57:43.889029Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 189:1002 Path# /dev/disk1 2024-11-21T08:57:43.889032Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2024-11-21T08:57:43.889035Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1002 Path# /dev/disk2 2024-11-21T08:57:43.889038Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 175:1002 Path# /dev/disk3 2024-11-21T08:57:43.889041Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2024-11-21T08:57:43.889044Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1001 Path# /dev/disk2 2024-11-21T08:57:43.889048Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1002 Path# /dev/disk3 2024-11-21T08:57:43.939607Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2024-11-21T08:57:43.941220Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1000 Path# /dev/disk2 2024-11-21T08:57:43.941234Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1001 Path# /dev/disk1 2024-11-21T08:57:43.941237Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1000 Path# /dev/disk2 2024-11-21T08:57:43.941241Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1001 Path# /dev/disk1 2024-11-21T08:57:43.941244Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1000 Path# /dev/disk2 2024-11-21T08:57:43.941247Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1001 Path# /dev/disk1 2024-11-21T08:57:43.941250Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1000 Path# /dev/disk2 2024-11-21T08:57:43.941253Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1001 Path# /dev/disk1 2024-11-21T08:57:43.941256Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1000 Path# /dev/disk2 2024-11-21T08:57:43.941259Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1001 Path# /dev/disk1 2024-11-21T08:57:43.941262Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1000 Path# /dev/disk2 2024-11-21T08:57:43.941265Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1001 Path# /dev/disk1 2024-11-21T08:57:43.941268Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1000 Path# /dev/disk2 2024-11-21T08:57:43.941270Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1001 Path# /dev/disk1 2024-11-21T08:57:43.941274Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1000 Path# /dev/disk3 2024-11-21T08:57:43.941277Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2024-11-21T08:57:43.941281Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1002 Path# /dev/disk1 2024-11-21T08:57:43.941284Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1000 Path# /dev/disk3 2024-11-21T08:57:43.941287Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2024-11-21T08:57:43.941290Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1002 Path# /dev/disk1 2024-11-21T08:57:43.941293Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
211:1000 Path# /dev/disk3 2024-11-21T08:57:43.941296Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2024-11-21T08:57:43.941300Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1002 Path# /dev/disk1 2024-11-21T08:57:43.941329Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2024-11-21T08:57:43.941334Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 2024-11-21T08:57:43.941338Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2024-11-21T08:57:43.941345Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2024-11-21T08:57:43.941349Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2024-11-21T08:57:43.941355Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2024-11-21T08:57:43.941361Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> test.py::test[pg-select_agg_one_row-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Results] >> KqpOlapCompression::DisabledAlterCompression [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] >> KqpOlapAggregations::Aggregation_Sum >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::DisabledAlterCompression [GOOD] Test command err: Trying to start YDB, gRPC: 14881, MsgBus: 16148 2024-11-21T08:57:54.523328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654193428109884:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:54.523461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ba/r3tmp/tmpzQQUB7/pdisk_1.dat 2024-11-21T08:57:54.569170Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14881, node 1 2024-11-21T08:57:54.574208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:54.574218Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:54.574219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:54.574242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16148 TClient is connected to server localhost:16148 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:54.621374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:54.624275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:54.624303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:54.625394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected CREATE TABLE `/Root/StandaloneTable` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:54.773937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110496:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.773959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.792235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:54.798455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.798493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.798546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:54.798566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:54.798577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:54.798592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:54.798607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:54.798630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:54.798653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:54.798677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.798698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:54.798723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654193428110573:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:54.799179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:54.799201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:54.799214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:54.799226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:54.799241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:54.799248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:54.799257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:54.799269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:54.799278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:54.799288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:54.799293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:54.799298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:54.799349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:54.799365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:54.799380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:54.799392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:54.799416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:54.799426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:54.799441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:54.799450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:54.799458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:54.799460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:54.850097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110704:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.850125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:54.855171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110715:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.855196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.857651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:57:54.862994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.863027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.863074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:54.863099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:54.863123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:54.863145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:54.863167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:54.863193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:54.863221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:54.863238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.863260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:54.863281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654193428110740:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:54.863773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:54.863794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:54.863804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:54.863808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:54.863827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:54.863835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:54.863844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:54.863853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:54.863865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:54.863873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:54.863877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:54.863884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:54.863918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:54.863929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:54.863948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:54.863956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:54.863966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:54.863974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:54.863993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:54.864002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:54.864016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:54.864024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:54.914156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110779:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.914181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableStoreTest/ColumnTableTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:54.917005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110797:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.917023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.919087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T08:57:54.923610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654193428110838:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.923628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TRtmrTestReboots::CreateRtmrVolumeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:56:56.675159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:56.675177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:56.675181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:56.675185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:56.675191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:56.675195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:56.675204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:56.675277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:56.682370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:56.682387Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:56:56.684016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:56.684081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:56.684106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:56.686083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:56.686141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:56.686224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:56.686381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2024-11-21T08:56:56.687035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:56.687379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:56.687395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:56.687408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:56.687416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:56.687422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:56.687467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:56:56.688657Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:56:56.699627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:56.699685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.699738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:56.699773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:56.699778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:56.700487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:56:56.700500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:56.700504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:56.700756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:56.700960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.700968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:56.700972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:56.701342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:56.701674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:56.701714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:56.701858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:56.701873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:56.701877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:56.701914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:56.701919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:56.701947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:56.701959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:56.702211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:56.702218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:56.702248Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:56.702253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:56.702317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:56.702322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:56.702330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:56.702333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:56.702337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:56.702340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:56.702343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:56.702346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:56.702353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:56.702357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:56.702359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... LocalPathId: 3] 2024-11-21T08:57:52.448657Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:57:52.448693Z node 220 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:52.448698Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [220:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:57:52.448704Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [220:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:57:52.448822Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.448829Z node 220 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:57:52.448841Z node 220 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:57:52.448845Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:57:52.448850Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:57:52.448855Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:57:52.448860Z node 220 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:57:52.448863Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:57:52.448997Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 45 2024-11-21T08:57:52.449009Z node 
220 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T08:57:52.449014Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T08:57:52.449017Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2024-11-21T08:57:52.449162Z node 220 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.449174Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.449179Z node 220 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:57:52.449183Z node 220 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:57:52.449187Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:57:52.449303Z node 220 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.449314Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.449317Z node 220 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:57:52.449321Z node 220 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T08:57:52.449324Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 44 2024-11-21T08:57:52.449333Z node 220 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:57:52.449337Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [220:300:2292] 2024-11-21T08:57:52.450056Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.450248Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:57:52.450265Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:57:52.450269Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [220:301:2293] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T08:57:52.450360Z node 220 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirRtmr/rtmr1" Options 
{ ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:57:52.450406Z node 220 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirRtmr/rtmr1" took 53us result status StatusSuccess 2024-11-21T08:57:52.450509Z node 220 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirRtmr/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 4 PartitionsCount: 42 Partitions { PartitionId: "w\334\2548U\210\224\200\245f\366\022\316\235s\006" BusKey: 0 TabletId: 72075186233409546 } Partitions { PartitionId: "\212\272\207\021C]\3443N\232\026s\363J{\223" BusKey: 1 TabletId: 72075186233409547 } Partitions { PartitionId: "\333F\340@\202\305T\0044\235rL\372\\\302(" BusKey: 2 TabletId: 72075186233409548 } Partitions { PartitionId: "]b\330fm~y(\354-\252Jea\326\345" BusKey: 3 TabletId: 72075186233409549 } Partitions { PartitionId: "\256F!)\2353\316\206?\267,0+Y\212>" BusKey: 4 TabletId: 72075186233409550 } Partitions { PartitionId: "\004\243\0229\337\345!\010K-&\r\347\314\342\204" BusKey: 5 TabletId: 72075186233409551 } Partitions { PartitionId: "\031\307b\0243\273\300\035a\222u\010?\025\346\033" BusKey: 6 TabletId: 72075186233409552 } Partitions { PartitionId: "\361\"w\'C\200\355\326y\364\357\2679q\264\372" BusKey: 7 TabletId: 72075186233409553 } Partitions { PartitionId: "\357\215\037\261\031\341\246\001\317\266\234\351\343\205B\033" BusKey: 8 TabletId: 72075186233409554 } Partitions { PartitionId: "\026\000\326\370\350\\\322n\307\032r\306l\3638v" BusKey: 9 TabletId: 72075186233409555 } Partitions { PartitionId: "2\000\3156\2129v\017;bh&s\230\315\315" BusKey: 10 TabletId: 72075186233409556 } Partitions { PartitionId: "7\033~\307\303\344\343\010\027\020\331\337\226!\336M" BusKey: 11 TabletId: 72075186233409557 } Partitions { PartitionId: "\013\330\240Q\023\304\250:t\213z8\224\232N\272" BusKey: 12 TabletId: 72075186233409558 } Partitions { PartitionId: "\265\343~\267y\306]" BusKey: 23 TabletId: 72075186233409569 } Partitions { PartitionId: "{\007\233\346\201\240\226\243\367\366\r\006\235je\217" BusKey: 24 TabletId: 72075186233409570 } Partitions { PartitionId: "\373\002\033}7\222\335\376\200V+\204\320(&q" BusKey: 25 TabletId: 72075186233409571 } Partitions { PartitionId: "\31375\020S\265\01602Y\270E\206\001S`" BusKey: 26 TabletId: 72075186233409572 } Partitions { PartitionId: "\313\005qo\007\330\030}Bwj\242\271\217z\010" 
BusKey: 27 TabletId: 72075186233409573 } Partitions { PartitionId: "\010zn\\k\217\373\201\0227\266w^\212\031\303" BusKey: 28 TabletId: 72075186233409574 } Partitions { PartitionId: "K|\2265R\224\326G\345\342\245iXI\201\245" BusKey: 29 TabletId: 72075186233409575 } Partitions { PartitionId: "\232y\362\024\225}\023m\320p\241f\206Y\233f" BusKey: 30 TabletId: 72075186233409576 } Partitions { PartitionId: "\347\312c\177Q\016\266\370SJ\242\263\022q\204*" BusKey: 31 TabletId: 72075186233409577 } Partitions { PartitionId: "#\230\361\313\314\302C\206p\205\327\235>\311\234\257" BusKey: 32 TabletId: 72075186233409578 } Partitions { PartitionId: "\325\332\376xDN(\363\034\333\013\223\023\340\253\226" BusKey: 33 TabletId: 72075186233409579 } Partitions { PartitionId: "J\010\253\221n\270\204L\225]\270\322\016\260\215\010" BusKey: 34 TabletId: 72075186233409580 } Partitions { PartitionId: "\361\022\345%\255\262I\367Z\257\214#QKP$" BusKey: 35 TabletId: 72075186233409581 } Partitions { PartitionId: "\244\026b\237\010zN\\\307\37662F\325M\203" BusKey: 36 TabletId: 72075186233409582 } Partitions { PartitionId: "2S33\343V\217\322\234\314\365\355\345d)\307" BusKey: 37 TabletId: 72075186233409583 } Partitions { PartitionId: "\203\2271B\214\350\216\211lN\034\002\270\322\n\257" BusKey: 38 TabletId: 72075186233409584 } Partitions { PartitionId: "i!.\203\236a\005\370\215\356\213\003D&\243\322" BusKey: 39 TabletId: 72075186233409585 } Partitions { PartitionId: "\306\234t{\005c\311\273M\235\355\035E\010\233\343" BusKey: 40 TabletId: 72075186233409586 } Partitions { PartitionId: "\330\353\016*\ns\023y\024y\317\225\024|\343\337" BusKey: 41 TabletId: 72075186233409587 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 11984, MsgBus: 8715 2024-11-21T08:57:54.611423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654194117889796:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:54.611563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047bb/r3tmp/tmpQ59Qp8/pdisk_1.dat 2024-11-21T08:57:54.664174Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11984, node 1 2024-11-21T08:57:54.672535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:54.672548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:54.672550Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:54.672585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8715 TClient is connected to server localhost:8715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:54.712790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:54.712815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:54.713888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:54.741515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:54.745823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:54.754978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.755036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.755077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:54.755103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:54.755123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:54.755144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:54.755164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:54.755185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:54.755206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:54.755226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.755246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:54.755270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654194117890443:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:54.758096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.758167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.758232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:54.758253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:54.758272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:54.758290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:54.758314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:54.758330Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:54.758348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:54.758372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.758394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:54.758421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654194117890442:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:54.758999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:54.759016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:54.759028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:54.759041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:54.759066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:54.759085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:54.759103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:54.759122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:54.759138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:54.759149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:54.759157Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890; ... ;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:54.767894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:54.767934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:54.767942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:54.767963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:54.767971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.767989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:54.767998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:54.768013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:54.768020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:54.768031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:54.768039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; 2024-11-21T08:57:54.860952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654194117890737:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.860974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654194117890748:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.860980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:54.861550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:54.863039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654194117890751:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:55.005999Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179475000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1290) '('"_id" '"bc78e525-29001196-894aef9a-98088270") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $21 (Int32 '"6")) (let $22 (Just $21)) (let $23 (Int32 '1)) (let $24 '($22 $23)) (let $25 (If (== $21 (Int32 '2147483647)) $24 '((+ $22 $23) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($24 $25)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'String)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (OptionalType $10)) (let $14 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id" '"jsondoc" '"jsonval")) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $29 (OptionalType (DataType 'JsonDocument))) (let $30 (DataType 'Json)) (let $31 '((VariantType (TupleType (TupleType (DataType 'Uint8) $10) $12)))) (let $32 '((ResourceType '"JsonPath"))) (let $33 (ResourceType '"JsonNode")) (let $34 (DictType $11 $33)) (let $35 '($34)) (let $36 (CallableType '() $31 '($29) $32 $35)) (let $37 '('('"strict"))) (let $38 (Udf '"Json2.JsonDocumentSqlValueUtf8" (Void) (VoidType) '"" $36 (VoidType) '"" $37)) (let $39 (CallableType '() $32 '($11))) (let $40 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $39 (VoidType) '"" '())) (let $41 (Apply $40 (Utf8 '"$.col1"))) (let $42 (Dict $34)) (let $43 (lambda '($54) (block '( (let $55 (Nothing $13)) (return $55) )))) (let $44 (lambda '($56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (SafeCast $58 $10))) (Nothing $13))) (return (If (Exists $56) $57 (Nothing $13))) )))) (let $45 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 $27 '() $28 (lambda '($46) (block '( (let $47 (StructType $9 '('"jsondoc" $29) '('"jsonval" (OptionalType $30)))) (let $48 (KqpOlapApply $47 '('"jsondoc") (lambda '($51) (block '( (let $52 (Apply $38 $51 $41 $42)) (let $53 (Nothing $13)) (return (Visit $52 '0 $43 '1 $44)) ))))) (let $49 '('eq $48 (String '"val1"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $46 $50)) ))))) (return (FromFlow (NarrowMap $45 (lambda '($59 $60 $61) (block '( (let $62 (OptionalType $33)) (let $63 (CallableType '() $31 '($62) $32 $35)) (let $64 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $63 (VoidType) '"" $37)) (let $65 (IfPresent $61 (lambda '($70) (block '( (let $71 '($30 '"" '1)) (let $72 (CallableType '() '($33) $71)) (let $73 (Udf '"Json2.Parse" (Void) (VoidType) '"" $72 (VoidType) '"" '())) (return (Just (Apply $73 $70))) ))) (Nothing $62))) (let $66 (Apply $64 $65 $41 $42)) (let $67 (Visit $66 '0 (lambda '($74) (Nothing $12)) '1 (lambda '($75) $75))) (let $68 (Apply $38 $60 $41 $42)) (let $69 (Visit $68 '0 $43 '1 $44)) (return (AsStruct '('"column1" $67) '('"column2" $69) '('"id" $59))) )))))) ))) '('('"_logical_id" '1361) '('"_id" '"f3098437-9f31853b-7f7a9a59-fcdbc1af")))) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (DqPhyStage '($15) (lambda '($76) $76) '('('"_logical_id" '2418) '('"_id" '"69052716-49df9191-d2f35e76-b1e2468d")))) (let $17 '('"id" '"column1" '"column2")) (let $18 (DqCnResult (TDqOutput $16 '0) $17)) (let $19 (KqpTxResultBinding $8 '0 '0)) (let $20 (KqpPhysicalTx '($14 $16) '($18) '('($5 $19)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $20) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $13) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::ExtractRanges >> KqpOlapAggregations::Aggregation_Some_GroupBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] Test command err: Trying to start YDB, gRPC: 13509, MsgBus: 31774 2024-11-21T08:57:32.340172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654101290347872:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:32.340379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004866/r3tmp/tmpdF8Z5s/pdisk_1.dat 2024-11-21T08:57:32.396288Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13509, node 1 2024-11-21T08:57:32.408231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:32.408246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:32.408247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:32.408287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31774 2024-11-21T08:57:32.441604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:32.441635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:32.443151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31774 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:32.472167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:32.477298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:32.487921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:32.499723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.499781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.499835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.499850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.499865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.499880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.499910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.499926Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.499941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.499956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.499970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.499994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654101290348521:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.506714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.514048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.514139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.514159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.514174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.514203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.514220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.514237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:32.514259Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:32.515187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.515226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:32.515247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654101290348525:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:32.518045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:32.518066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:32.518091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:32.518107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:32.518128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:32.518148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:32.518166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:32.518187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654101290348528:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
xUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.523513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.523516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.523522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.523524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.523592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:32.523604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:32.523613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:32.523617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:32.523630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:32.523634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:32.523642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:32.523652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:32.523657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:32.523660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:32.523664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:32.523667Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:32.523684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:32.523697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:32.523708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:32.523716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:32.523722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:32.523725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:32.523734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:32.523737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:32.523742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:32.523744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:32.550594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:37.340797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654101290347872:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.340843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; 2024-11-21T08:57:38.919889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=a8890ab2-a7e611ef-833ad2e2-db0a7446;fline=with_appended.cpp:80;portions=3,4,5,;task_id=a8890ab2-a7e611ef-833ad2e2-db0a7446; WAIT_COMPACTION: 3 2024-11-21T08:57:38.960539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=a889c2c2-a7e611ef-ae1d4504-e94b50f1;fline=with_appended.cpp:80;portions=3,4,5,;task_id=a889c2c2-a7e611ef-ae1d4504-e94b50f1; 2024-11-21T08:57:39.324900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=a8ddc7b4-a7e611ef-9c6e4252-17886a2c;fline=with_appended.cpp:80;portions=3,4,5,;task_id=a8ddc7b4-a7e611ef-9c6e4252-17886a2c; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2024-11-21T08:57:43.943300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654148534989327:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.943321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.943319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654148534989335:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.944739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:43.947634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654148534989341:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:44.115862Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464089, txId: 281474976715662] shutting down Wait changes: 0/0 2024-11-21T08:57:47.393354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:47.393374Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2024-11-21T08:57:49.157126Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179469142, txId: 281474976715664] shutting down 0/0 2024-11-21T08:57:49.161532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:49.169613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting actualization: 9/0.000010s 2024-11-21T08:57:49.938696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=af6c23aa-a7e611ef-8d3ac986-1d765b14;fline=with_appended.cpp:80;portions=6,7,8,;task_id=af6c23aa-a7e611ef-8d3ac986-1d765b14; 2024-11-21T08:57:49.939207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=af6c780a-a7e611ef-88477e26-cf037f2e;fline=with_appended.cpp:80;portions=6,7,8,;task_id=af6c780a-a7e611ef-88477e26-cf037f2e; 2024-11-21T08:57:49.944323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=af6c727e-a7e611ef-b5e1b9cb-b19be5ef;fline=with_appended.cpp:80;portions=6,7,8,;task_id=af6c727e-a7e611ef-b5e1b9cb-b19be5ef; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: Wait changes: 0/0 2024-11-21T08:57:50.226201Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179470206, txId: 281474976715668] shutting down ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 0/0 2024-11-21T08:57:55.261294Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179475249, txId: 281474976715670] shutting down >> test.py::test[pg-select_agg_one_row-default.txt-Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Analyze] >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] >> KqpOlapBlobsSharing::SplitEmpty >> KqpOlapStats::DescibeTableStore >> KqpOlap::ExtractRanges [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_NoPushdownOnDisabledEmitAggApply [GOOD] Test command err: Trying to start YDB, gRPC: 11634, MsgBus: 21430 2024-11-21T08:57:40.019201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654133237544191:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:40.019469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004827/r3tmp/tmpd1FTzb/pdisk_1.dat 2024-11-21T08:57:40.075070Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11634, node 1 2024-11-21T08:57:40.104447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:40.104465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:40.104468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:40.104508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:40.119886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:40.119919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:40.120933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21430 TClient is connected to server localhost:21430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:40.171260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:40.180228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:40.194382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.194475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.194539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.194569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.194594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.194616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.194637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.194663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.194686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.194709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.194733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.194760Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654133237544828:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.198808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.198840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.198884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.198903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.198920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.198939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.198957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.198974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.198992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.199014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.199037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.199054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654133237544829:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.202762Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.202788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.202834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.202858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.202879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.202897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.202913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.203002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654133237544827:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.203035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:74396541332375448 ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.142976Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.299280Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.299305Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.381843Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.381878Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.464251Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.464276Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.546666Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.546695Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.619124Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:54.640078Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:54.701774Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.701798Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.783960Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.783983Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.866173Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.866196Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:54.948625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:54.948650Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.030846Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.030869Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.062714Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:55.198865Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.198891Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. 
TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.302065Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.302090Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.384495Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.384524Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.466853Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.466880Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.549107Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.549132Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWY2ZjQ5MWEtNTAxYWMzYy1jMmRhNTIwZi00NzcwODYzOQ==. TraceId : 01jd6z24km8pc88qdk0vbn5jsp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:55.610892Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:55.631744Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapSysView::StatsSysViewBytesPackActualization [GOOD] >> KqpDecimalColumnShard::TestGroupByDecimal >> test.py::test[pg-select_columnref1-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ExtractRanges [GOOD] Test command err: Trying to start YDB, gRPC: 29052, MsgBus: 23441 2024-11-21T08:57:55.837256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654198333083147:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:55.837451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b7/r3tmp/tmp43bBF9/pdisk_1.dat 2024-11-21T08:57:55.876965Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29052, node 1 2024-11-21T08:57:55.885629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:55.885641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:55.885643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:55.885669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23441 TClient is connected to server localhost:23441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:55.938653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:55.938679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:55.939741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:55.959595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:55.969929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:55.979781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:55.979842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:55.979880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:55.979905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:55.979924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:55.979943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:55.979961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:55.979980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:55.980005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:55.980028Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.980049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:55.980068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654198333083805:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:55.980497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:55.980511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:55.980522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:55.980527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:55.980541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:55.980545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:55.980555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:55.980559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:55.980568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:55.980571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:55.980578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:55.980581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:55.980642Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:55.980653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:55.980669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:55.980673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.980684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:55.980689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:55.980705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:55.980712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:55.980722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:55.980727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:55.983482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654198333083807:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:55.983500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654198333083807:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:55.983528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654198333083807:2289];tablet_id=7207518622 ... 
88;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:55.990505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:55.990535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:55.990541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:55.990546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:55.990549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:55.990555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:55.990560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:55.990563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:55.990566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:55.990601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:55.990608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:55.990620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:55.990622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.990628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:55.990632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:55.990644Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:55.990647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:55.990654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:55.990657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:55.991387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:55.991396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:55.991401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:55.991404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:55.991412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:55.991414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:55.991419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:55.991422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:55.991426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:55.991430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:55.991433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:55.991435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:55.991451Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:55.991458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:55.991468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:55.991470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.991476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:55.991483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:55.991492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:55.991494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:55.991500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:55.991506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:56.027487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; ==================================== QUERY: SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp` < CAST(1000100 AS Timestamp) AND `timestamp` > CAST(1000095 AS Timestamp)) OR (`timestamp` <= CAST(1001000 AS Timestamp) AND `timestamp` >= CAST(1000999 AS Timestamp)) OR (`timestamp` > CAST(1002000 AS Timestamp)) ORDER BY `timestamp` LIMIT 1000; RESULT: 2024-11-21T08:57:56.069973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654202628051437:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.069996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654202628051428:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.070014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.070657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:56.072388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654202628051442:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:56.219313Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179476125, txId: 18446744073709551615] shutting down timestamp: 1970-01-01T00:00:01.000096Z timestamp: 1970-01-01T00:00:01.000097Z timestamp: 1970-01-01T00:00:01.000098Z timestamp: 1970-01-01T00:00:01.000099Z timestamp: 1970-01-01T00:00:01.000999Z timestamp: 1970-01-01T00:00:01.001000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesPackActualization [GOOD] Test command err: Trying to start YDB, gRPC: 18869, MsgBus: 64610 2024-11-21T08:57:28.567256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654084124451784:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:28.567272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487b/r3tmp/tmpb65llM/pdisk_1.dat 2024-11-21T08:57:28.614562Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18869, node 1 2024-11-21T08:57:28.619781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.619796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.619798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.619833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64610 TClient is connected to server localhost:64610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:28.667543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:28.667563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:28.668670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:28.691420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:28.702952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:28.714624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.714689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.714728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.714756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.714779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.714803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.714823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.714846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.714865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.714894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.714915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.714938Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654084124452440:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:28.715384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.715399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.715411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.715420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.715437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.715447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.715457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.715467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.715481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.715489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.715498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.715506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.715561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.715572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.715590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.715598Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.715609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.715647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.715670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.715679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.715694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.715702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:28.718628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654084124452442:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.718651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654084124452442:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.718684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654084124452442:2291];tablet_id=7207518622 ... abletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6400144;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6400144;columns=1; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:33.567828Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654084124451784:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:33.567868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: 2024-11-21T08:57:38.891890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654127074125903:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.891890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654127074125895:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.891904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:38.892571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:38.894203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654127074125909:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:39.030278Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179459007, txId: 281474976715662] shutting down Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Wait changes: 3210584/6400000 2024-11-21T08:57:43.608660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:43.608675Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: 2024-11-21T08:57:44.066488Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179464053, txId: 281474976715664] shutting down Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 3210584/6400000 2024-11-21T08:57:44.069926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:57:44.077175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000008s 2024-11-21T08:57:44.760142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ac551f96-a7e611ef-88072686-5b6c0a6b;fline=with_appended.cpp:80;portions=2,;task_id=ac551f96-a7e611ef-88072686-5b6c0a6b; 2024-11-21T08:57:44.761820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ac550dbc-a7e611ef-bdd838cc-e14bcd8d;fline=with_appended.cpp:80;portions=2,;task_id=ac550dbc-a7e611ef-bdd838cc-e14bcd8d; 2024-11-21T08:57:44.775111Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;task_id=ac5850a8-a7e611ef-b6c086a5-c3bd6d72;fline=with_appended.cpp:80;portions=2,;task_id=ac5850a8-a7e611ef-b6c086a5-c3bd6d72; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:276352:0] EntityType: COL BlobRangeSize: 276352 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:277656:0] EntityType: COL BlobRangeSize: 277656 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:276904:0] EntityType: COL BlobRangeSize: 276904 PathId: 3 2024-11-21T08:57:45.123266Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179465110, txId: 281474976715668] shutting down Wait changes: 830912/6400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:276352:0] EntityType: COL BlobRangeSize: 276352 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:276904:0] EntityType: COL BlobRangeSize: 276904 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:277656:0] EntityType: COL BlobRangeSize: 277656 PathId: 3 2024-11-21T08:57:50.163841Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179470149, txId: 281474976715670] shutting down 830912/6400000 2024-11-21T08:57:50.167210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T08:57:50.174443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715673:0, at schemeshard: 72057594046644480 waiting actualization: 3/0.000007s 2024-11-21T08:57:50.754208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=afe90de8-a7e611ef-bd907eee-7344bfde;fline=with_appended.cpp:80;portions=3,;task_id=afe90de8-a7e611ef-bd907eee-7344bfde; 2024-11-21T08:57:50.754211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=afe8face-a7e611ef-b700bf4d-f122023b;fline=with_appended.cpp:80;portions=3,;task_id=afe8face-a7e611ef-b700bf4d-f122023b; 2024-11-21T08:57:50.777280Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;task_id=afec4224-a7e611ef-aabecc76-171ca3d3;fline=with_appended.cpp:80;portions=3,;task_id=afec4224-a7e611ef-aabecc76-171ca3d3; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: 2024-11-21T08:57:51.225891Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179471209, txId: 281474976715674] shutting down Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Wait changes: 3210584/6400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('pk_int') RESULT: Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:3:5:0:1072784:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:3:5:0:1070008:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 3 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:3:5:0:1067792:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 3210584/6400000 2024-11-21T08:57:56.278424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179476257, txId: 281474976715676] shutting down >> KqpOlapWrite::TierDraftsGCWithRestart >> KqpOlapAggregations::Aggregation_Some_GroupBy [GOOD] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 18809, MsgBus: 1311 2024-11-21T08:57:55.943985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654196600178474:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:55.943997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b6/r3tmp/tmp2lWvCX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18809, node 1 
2024-11-21T08:57:55.999529Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:55.999717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:55.999726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:55.999728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:55.999759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1311 TClient is connected to server localhost:1311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:56.041328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:56.044772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:56.044789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:56.045896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:56.051749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:56.058280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:56.058329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:56.058352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:56.058374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:56.058388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:56.058404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:56.058417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:56.058431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:56.058446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:56.058462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:56.058476Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:56.058485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654200895146407:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:56.060646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:56.060667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:56.060697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:56.060720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:56.060740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:56.060761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:56.060778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:56.060795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:56.060822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:56.060841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:56.060861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:56.060882Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654200895146416:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:56.061357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:56.061377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:56.061389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:56.061399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:56.061417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:56.061426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:56.061439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:56.061454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:56.061466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:56.061474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:56.061485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891; ... 
ablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:56.066497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:56.066503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:56.066508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:56.066514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:56.066521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:56.066526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:56.066530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:56.066536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:56.066539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:56.066545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:56.066548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:56.066550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:56.066562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:56.066568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:56.066575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:56.066581Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:56.066588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:56.066593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:56.066600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:56.066606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:56.066611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:56.066616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2024-11-21T08:57:56.172366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654200895146700:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.172381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654200895146708:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.172384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.173006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:56.174379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654200895146714:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:56.794003Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179476230, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '829) '('"_id" '"df0a9c24-61f0beda-95182ba0-da02912d") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $23 (Int32 '1)) (let $24 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $23) $24))) (RangeCreate (AsList '($24 '((Just (Int32 '"5")) $23)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $1)) (let $11 '('('"_logical_id" '888) '('"_id" '"97abeb12-45a352b5-eb58e6b2-319e0f00") 
'('"_wide_channels" (StructType '('_yql_agg_0 $2) $10)))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id")) (let $27 '('('"UsedKeyColumns" $26) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $28 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $27 (lambda '($29) (TKqpOlapAgg $29 '('('_yql_agg_0 'some '"level")) $26)))) (return (FromFlow $28)) ))) $11)) (let $13 (DqCnHashShuffle (TDqOutput $12 '0) '('1))) (let $14 (StructType '('"column1" $2) $10)) (let $15 '('('"_logical_id" '1311) '('"_id" '"1f7eb1dc-7a920a2d-5f62098d-c599b186") '('"_wide_channels" $14))) (let $16 (DqPhyStage '($13) (lambda '($30) (block '( (let $31 (lambda '($42 $43) $43 $42)) (let $32 (WideCombiner (ToFlow $30) '"" (lambda '($33 $34) $34) (lambda '($35 $36 $37) $36) (lambda '($38 $39 $40 $41) (Coalesce $41 $39)) $31)) (return (FromFlow (WideSort $32 '('('1 (Bool 'true)))))) ))) $15)) (let $17 (DqCnMerge (TDqOutput $16 '0) '('('1 '"Asc")))) (let $18 (DqPhyStage '($17) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46) (AsStruct '('"column1" $45) '('"id" $46)))))) '('('"_logical_id" '1323) '('"_id" '"8b1086ac-b317d56c-6dba28e4-7f46ca4f")))) (let $19 '($12 $16 $18)) (let $20 (DqCnResult (TDqOutput $18 '0) '('"id" '"column1"))) (let $21 (KqpTxResultBinding $9 '0 '0)) (let $22 (KqpPhysicalTx $19 '($20) '('($7 $21)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $22) '((KqpTxResultBinding (ListType $14) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapSparsed::SwitchingStandalone ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 7584, MsgBus: 20079 2024-11-21T08:57:44.178740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654150608520215:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.178875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004805/r3tmp/tmpnyqyBZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7584, node 1 2024-11-21T08:57:44.231611Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:44.236334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:44.236345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:44.236346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:44.236370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20079 TClient is connected to server localhost:20079 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:44.279357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.279380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.280482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:44.309531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:44.325626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:44.333751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.333805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.333833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.333850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.333864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.333878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.333891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.333906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.333922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.333938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.333952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.333970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150608520870:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.335869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.335890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.335909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.335919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.335932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.335944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.335957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.335970Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.335984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.335996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.336013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.336033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654150608520872:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.337909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.337924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.337944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.337958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.337970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.337982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.337995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.338007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654150608520873:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.338022Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654150608520873 ... TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.391407Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.546412Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.546436Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.628816Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.628838Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.711212Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.711236Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.793510Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.793534Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:55.865727Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:55.886665Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:55.948412Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:55.948433Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.030502Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.030525Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.112673Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.112699Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.194901Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.194928Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.277086Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. 
Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.277116Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.308986Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:56.445605Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.445647Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.549100Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.549127Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.631459Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.631484Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.713935Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.713960Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. 
SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.796308Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.796345Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z27arer0g5d0r09azaecz. SessionId : ydb://session/3?node_id=2&id=ODFlOGJlMDYtNjc3ODVkNGYtOGMyYzFiZjYtZDQ1YWY1YmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:56.858181Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:56.879099Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> test.py::test[pg-select_columnref1-default.txt-Debug] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] >> KqpOlap::OlapRead_StreamGenericQuery >> KqpOlapSysView::StatsSysViewRanges [GOOD] >> KqpOlap::ManyColumnShardsWithRestarts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewRanges [GOOD] Test command err: Trying to start YDB, gRPC: 20487, MsgBus: 26452 2024-11-21T08:57:41.146189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654137485897515:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:41.146339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004823/r3tmp/tmpgtvhzp/pdisk_1.dat 2024-11-21T08:57:41.191953Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20487, node 1 2024-11-21T08:57:41.196447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:41.196457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:41.196459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:41.196491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26452 TClient is connected to server localhost:26452 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:41.241618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:41.246991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:41.247021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:41.248127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:41.251891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:41.262885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:41.262943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:41.262973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:41.262989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:41.263005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:41.263022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:41.263039Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:41.263056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:41.263078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:41.263094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:41.263108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:41.263130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654137485898181:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:41.263490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:41.263503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:41.263510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:41.263516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:41.263525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:41.263528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:41.263536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:41.263544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:41.263548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:41.263551Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:41.263555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:41.263557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:41.263606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:41.263616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:41.263628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:41.263636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:41.263646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:41.263654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:41.263669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:41.263677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:41.263687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:41.263694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:41.266256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654137485898172:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:41.266278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654137485898172:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:41.266311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654137485898172:2289];tablet_id=7207518622 ... 
BlobId: [72075186224037888:1:12:14:0:61176:0] EntityType: COL BlobRangeSize: 14056 PathId: 5 Rows: 1681 RawBytes: 6724 BlobRangeOffset: 57104 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 12 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:12:14:0:61176:0] EntityType: COL BlobRangeSize: 4072 PathId: 5 Rows: 1681 RawBytes: 1912255 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 12 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:12:14:0:61176:0] EntityType: COL BlobRangeSize: 23488 PathId: 5 Rows: 1638 RawBytes: 13104 BlobRangeOffset: 48968 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 15 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:15:17:0:59736:0] EntityType: COL BlobRangeSize: 6768 PathId: 5 Rows: 1638 RawBytes: 12728 BlobRangeOffset: 36680 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 15 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:15:17:0:59736:0] EntityType: COL BlobRangeSize: 12288 PathId: 5 Rows: 1638 RawBytes: 24570 BlobRangeOffset: 22952 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 15 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:15:17:0:59736:0] EntityType: COL BlobRangeSize: 13728 PathId: 5 Rows: 1638 RawBytes: 6552 BlobRangeOffset: 55736 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 15 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:15:17:0:59736:0] EntityType: COL BlobRangeSize: 4000 PathId: 5 Rows: 1638 RawBytes: 1866609 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 15 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:15:17:0:59736:0] EntityType: COL BlobRangeSize: 22952 PathId: 5 Rows: 1638 RawBytes: 13104 BlobRangeOffset: 48984 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 18 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:18:20:0:59680:0] EntityType: COL BlobRangeSize: 6760 PathId: 5 Rows: 1638 RawBytes: 12745 BlobRangeOffset: 36664 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 18 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:18:20:0:59680:0] EntityType: COL BlobRangeSize: 12320 PathId: 5 Rows: 1638 RawBytes: 24570 BlobRangeOffset: 22928 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 18 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:18:20:0:59680:0] EntityType: COL BlobRangeSize: 13736 PathId: 5 Rows: 1638 RawBytes: 6552 BlobRangeOffset: 55744 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 18 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:18:20:0:59680:0] EntityType: COL BlobRangeSize: 3936 PathId: 5 Rows: 1638 RawBytes: 1866087 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 18 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:18:20:0:59680:0] EntityType: COL BlobRangeSize: 22928 PathId: 5 Rows: 1698 RawBytes: 13584 BlobRangeOffset: 50704 TierName: __DEFAULT Activity:  
TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 21 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:21:23:0:61920:0] EntityType: COL BlobRangeSize: 7008 PathId: 5 Rows: 1698 RawBytes: 13193 BlobRangeOffset: 37920 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 21 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:21:23:0:61920:0] EntityType: COL BlobRangeSize: 12784 PathId: 5 Rows: 1698 RawBytes: 25470 BlobRangeOffset: 23712 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 21 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:21:23:0:61920:0] EntityType: COL BlobRangeSize: 14208 PathId: 5 Rows: 1698 RawBytes: 6792 BlobRangeOffset: 57712 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 21 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:21:23:0:61920:0] EntityType: COL BlobRangeSize: 4208 PathId: 5 Rows: 1698 RawBytes: 1935412 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 21 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:21:23:0:61920:0] EntityType: COL BlobRangeSize: 23712 PathId: 5 Rows: 1720 RawBytes: 13760 BlobRangeOffset: 51272 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 24 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:24:26:0:62480:0] EntityType: COL BlobRangeSize: 7096 PathId: 5 Rows: 1720 RawBytes: 13359 BlobRangeOffset: 38368 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 24 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:24:26:0:62480:0] EntityType: COL BlobRangeSize: 12904 PathId: 5 Rows: 1720 RawBytes: 25800 BlobRangeOffset: 23976 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 24 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:24:26:0:62480:0] EntityType: COL BlobRangeSize: 14392 PathId: 5 Rows: 1720 RawBytes: 6880 BlobRangeOffset: 58368 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 24 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:24:26:0:62480:0] EntityType: COL BlobRangeSize: 4112 PathId: 5 Rows: 1720 RawBytes: 1960413 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 24 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:24:26:0:62480:0] EntityType: COL BlobRangeSize: 23976 PathId: 5 Rows: 1718 RawBytes: 13744 BlobRangeOffset: 51272 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 27 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:27:29:0:62600:0] EntityType: COL BlobRangeSize: 7080 PathId: 5 Rows: 1718 RawBytes: 13386 BlobRangeOffset: 38328 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 27 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:27:29:0:62600:0] EntityType: COL BlobRangeSize: 12944 PathId: 5 Rows: 1718 RawBytes: 25770 BlobRangeOffset: 23960 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 27 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:27:29:0:62600:0] EntityType: 
COL BlobRangeSize: 14368 PathId: 5 Rows: 1718 RawBytes: 6872 BlobRangeOffset: 58352 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 27 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:27:29:0:62600:0] EntityType: COL BlobRangeSize: 4248 PathId: 5 Rows: 1718 RawBytes: 1958292 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 27 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:27:29:0:62600:0] EntityType: COL BlobRangeSize: 23960 PathId: 5 Rows: 1648 RawBytes: 13184 BlobRangeOffset: 49328 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: timestamp PortionId: 30 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:30:32:0:60264:0] EntityType: COL BlobRangeSize: 6808 PathId: 5 Rows: 1648 RawBytes: 12830 BlobRangeOffset: 36896 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: resource_id PortionId: 30 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:30:32:0:60264:0] EntityType: COL BlobRangeSize: 12432 PathId: 5 Rows: 1648 RawBytes: 24720 BlobRangeOffset: 23088 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: uid PortionId: 30 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037888:1:30:32:0:60264:0] EntityType: COL BlobRangeSize: 13808 PathId: 5 Rows: 1648 RawBytes: 6592 BlobRangeOffset: 56136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: level PortionId: 30 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037888:1:30:32:0:60264:0] EntityType: COL BlobRangeSize: 4128 PathId: 5 Rows: 1648 RawBytes: 1879929 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: message PortionId: 30 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037888:1:30:32:0:60264:0] EntityType: COL BlobRangeSize: 23088 PathId: 5 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Activity == 1 GROUP BY TabletId, PathId, Kind ORDER BY TabletId, Kind RESULT: 2024-11-21T08:57:54.011510Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179472161, txId: 281474976710668] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2024-11-21T08:57:55.867209Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179474039, txId: 281474976710670] shutting down ==================================== QUERY: SELECT PathId, Kind, TabletId FROM 
`/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId > UInt64("0") AND PathId < UInt32("4") OR PathId > UInt64("4") AND PathId <= UInt64("5") GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: 2024-11-21T08:57:56.185463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:56.185478Z node 1 :IMPORT WARN: Table profiles were not loaded TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2024-11-21T08:57:57.758473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179475906, txId: 281474976710672] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:57:20.072671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:20.072692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:20.072697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:20.072701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:20.072707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:20.072711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:20.072719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:20.072796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:20.081756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:20.081780Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: 
[1:15:2062] 2024-11-21T08:57:20.087100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:20.087211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:20.087254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:20.105357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:20.105502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:20.105628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:20.105909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:20.106820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:20.107133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:20.107144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:20.107157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:20.107164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:20.107171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:20.107219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:57:20.108714Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:57:20.126397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:20.126490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.126567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:20.126616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:20.126625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.127488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:20.127514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:20.127597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.127608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:20.127612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:20.127617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:20.129211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.129233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:20.129239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:20.129741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.129755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.129762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:20.129770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:20.130422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:20.130956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:20.131004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:20.131163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:20.131185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:20.131191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:20.131247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:20.131252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:20.131275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:20.131284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:20.131640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:20.131647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:20.131679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:20.131682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:20.131743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:20.131748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:20.131755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:20.131758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:20.131762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:20.131765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:20.131767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:20.131770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:20.131777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:20.131781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:20.131783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:57.496756Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:57:57.496833Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 85us result status StatusSuccess 2024-11-21T08:57:57.497024Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } 
Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:57.517638Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2823] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T08:57:57.517698Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:999:2823] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T08:57:57.517736Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2823] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179477490591 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179477490591 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732179477490591 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:57:57.518668Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2823] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T08:57:57.518696Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:999:2823] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpOlapBlobsSharing::TableReshardingConsistency64 >> KqpOlap::OlapRead_StreamGenericQuery [GOOD] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_StreamGenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 6478, MsgBus: 27953 2024-11-21T08:57:57.959777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654208167778752:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:57.959794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a9/r3tmp/tmpAvIe8W/pdisk_1.dat 2024-11-21T08:57:58.012231Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6478, node 1 2024-11-21T08:57:58.018287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:58.018297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:58.018298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:58.018329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27953 TClient is connected to server localhost:27953 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:58.060953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:58.060992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:58.062045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:58.089023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:58.092475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:58.101151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:58.101220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:58.101265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:58.101290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:58.101311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:58.101338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:58.101360Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:58.101384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:58.101405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:58.101428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.101450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:58.101473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654212462746691:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:58.101964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:58.101978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:58.101989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:58.101993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:58.102008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:58.102017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:58.102026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:58.102039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:58.102047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:58.102056Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:58.102062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:58.102085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:58.102146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:58.102158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:58.102174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:58.102178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.102189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:58.102198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:58.102213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:58.102222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:58.102233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:58.102241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:58.105280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654212462746692:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:58.105306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654212462746692:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:58.105345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654212462746692:2289];tablet_id=720751862240 ... 
62746960:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:58.172907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439654212462746960:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:58.173340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:58.173353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:58.173364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:58.173375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:58.173388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:58.173397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:58.173405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:58.173415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:58.173427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:58.173436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:58.173446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:58.173454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:58.173496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:58.173518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:58.173539Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:58.173548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.173562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:58.173571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:58.173589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:58.173597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:58.173613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:58.173621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:58.173766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:58.173777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:58.173786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:58.173795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:58.173815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:58.173823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:58.173831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:58.173840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:58.173853Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:58.173862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:58.173869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:58.173877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:58.173906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:58.173915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:58.173931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:58.173939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.173949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:58.173957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:58.173973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:58.173981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:58.173991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:58.173998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:58.211973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T08:57:58.233214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654212462747206:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.233243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654212462747195:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.233258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.233916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:57:58.235469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654212462747209:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Plan] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 23547, MsgBus: 17028 2024-11-21T08:57:45.544619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654156837800397:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.544871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ef/r3tmp/tmpcDL5p5/pdisk_1.dat 2024-11-21T08:57:45.607611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23547, node 1 2024-11-21T08:57:45.617627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.617642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.617645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.617687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17028 2024-11-21T08:57:45.645751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.645789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.646922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.685521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:45.692136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.703148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.703215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.703268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.703290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.703310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.703334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.703356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.703380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.703410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.703433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.703447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.703462Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654156837801060:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.706267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.706292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.706326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.706341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.706357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.706377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.706394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.706412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.706433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.706451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.706468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.706484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654156837801059:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.706876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.706887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.706895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.706898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.706908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.706914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.706920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.706927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.706933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.706935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.706939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:56.996285Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.140247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.140272Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:57.222087Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.222107Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.304046Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.304066Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.386255Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.386286Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.448182Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:57.468914Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:57.530514Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.530534Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.612479Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. 
CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.612498Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.694519Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.694545Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.776599Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.776624Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.869807Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:57.869846Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:57.890292Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:58.025853Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.025881Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.128687Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.128713Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.210732Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.210750Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.292811Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.292834Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.374834Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.374859Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2JlNGU2ZWItZDkwNTg0YjgtNTBmNjcwM2EtYTQ0ZmU0ZjM=. CustomerSuppliedId : . TraceId : 01jd6z28zb4t87fx38w5tg4dvt. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:58.426281Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:58.447009Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] >> KqpOlap::SimpleQueryOlapDiagnostics |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 30024, MsgBus: 25949 2024-11-21T08:57:56.618985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654202393530903:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:56.619102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b3/r3tmp/tmpKZrlRy/pdisk_1.dat 2024-11-21T08:57:56.659566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30024, node 1 2024-11-21T08:57:56.667999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:56.668011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:56.668012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:56.668040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25949 TClient is connected to server localhost:25949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:56.720621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:56.720642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:56.721694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:56.741020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:56.859081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654202393531519:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.859102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.880032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:56.887001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:56.887043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:56.887087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:56.887118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:56.887134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:56.887157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:56.887179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:56.887198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:56.887221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:56.887246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:56.887268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:56.887292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654202393531595:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:56.887747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:56.887763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:56.887776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:56.887785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:56.887805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:56.887814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:56.887826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:56.887836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:56.887844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:56.887853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:56.887859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:56.887868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:56.887923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:56.887937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:56.887960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:56.887969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:56.887983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:56.887992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:56.888013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:56.888021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:56.888037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:56.888045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALL ... .223471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654208030607463:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.223486Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439654208030607463:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.223844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:57.223859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:57.223872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:57.223875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:57.223890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:57.223899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:57.223908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:57.223916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:57.223921Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:57.223926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:57.223930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:57.223932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:57.223972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:57.223982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:57.223992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:57.223997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.224004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:57.224006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:57.224018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:57.224023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:57.224030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:57.224033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T08:57:57.316373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654206688499056:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:57.316402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:57.316401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654206688499061:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:57.317215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:57.318827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654206688499063:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:58.045084Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179477371, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.046670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654212325574884:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.046692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.046710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654212325574889:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:58.047220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:58.048620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439654212325574891:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:58.801668Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478099, txId: 18446744073709551615] shutting down >> KqpOlap::CompactionPlanner [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Analyze] >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL [GOOD] >> KqpOlapAggregations::AggregationCountGroupByPushdown >> KqpOlap::SimpleQueryOlapDiagnostics [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompactionPlanner [GOOD] Test command err: Trying to start YDB, gRPC: 8933, MsgBus: 7347 2024-11-21T08:57:43.015402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654146751514857:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:43.015612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004817/r3tmp/tmpqND7e3/pdisk_1.dat 2024-11-21T08:57:43.054122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8933, node 1 2024-11-21T08:57:43.065979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:43.065992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:43.065993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:43.066026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7347 TClient is connected to server localhost:7347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:43.116353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:43.116376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:43.117543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:43.138776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:43.149140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.158049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.158103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.158126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.158142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.158154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.158173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.158188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.158203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.158219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.158234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.158249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.158266Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654146751515517:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.160198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.160231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.160261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.160274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.160287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.160299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.160315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.160324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.160333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.160342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.160357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.160368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654146751515519:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.160676Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:43.160687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:43.160695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:43.160704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:43.160713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:43.160719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:43.160725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:43.160728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:43.160733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.160736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.160740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;pr ... 
s=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:43.165335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:43.165338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:43.165340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:43.165352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:43.165358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:43.165365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:43.165371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.165376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:43.165378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:43.165386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:43.165392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:43.165397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:43.165400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:43.206097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:57:43.274605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654146751515811:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.274624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:43.292677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:48.015547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654146751514857:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:48.015572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:57:48.429539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654168226352493:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.429567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:48.431739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T08:57:48.554839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ae926c14-a7e611ef-871e1887-21aedc7b;fline=with_appended.cpp:80;portions=4,;task_id=ae926c14-a7e611ef-871e1887-21aedc7b; 2024-11-21T08:57:48.555796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ae92a972-a7e611ef-86c94c74-9757dd44;fline=with_appended.cpp:80;portions=4,;task_id=ae92a972-a7e611ef-86c94c74-9757dd44; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T08:57:48.593689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ae9bac7a-a7e611ef-88ed1b95-8be71375;fline=with_appended.cpp:80;portions=4,;task_id=ae9bac7a-a7e611ef-88ed1b95-8be71375; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 2024-11-21T08:57:53.616585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654189701189159:2530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:53.616607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:53.618180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T08:57:53.739363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b1a911a0-a7e611ef-b1c9d567-71e08674;fline=with_appended.cpp:80;portions=7,;task_id=b1a911a0-a7e611ef-b1c9d567-71e08674; 2024-11-21T08:57:53.741869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b1a91916-a7e611ef-be15cd71-7984b197;fline=with_appended.cpp:80;portions=7,;task_id=b1a91916-a7e611ef-be15cd71-7984b197; 2024-11-21T08:57:53.791051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b1b30714-a7e611ef-a3e2e871-252f3b85;fline=with_appended.cpp:80;portions=7,;task_id=b1b30714-a7e611ef-a3e2e871-252f3b85; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=82565408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=82565408;columns=5; 2024-11-21T08:57:54.336076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b1fe34a0-a7e611ef-8be98fee-135606c8;fline=with_appended.cpp:80;portions=9,;task_id=b1fe34a0-a7e611ef-8be98fee-135606c8; WAIT_COMPACTION: 9 2024-11-21T08:57:54.404476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b1fe37a2-a7e611ef-bbb5a8e9-d3541087;fline=with_appended.cpp:80;portions=9,;task_id=b1fe37a2-a7e611ef-bbb5a8e9-d3541087; 2024-11-21T08:57:54.439256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b20ddfe0-a7e611ef-ad8df745-fd5d45f7;fline=with_appended.cpp:80;portions=9,;task_id=b20ddfe0-a7e611ef-ad8df745-fd5d45f7; WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 WAIT_COMPACTION: 9 2024-11-21T08:57:58.054344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:58.054363Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_COMPACTION: 9 2024-11-21T08:57:59.357982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654215470993156:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.358007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.360352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654215470993165:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.360373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.360502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654215470993170:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.361235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T08:57:59.362629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654215470993172:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T08:57:59.528235Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479415, txId: 18446744073709551615] shutting down >> KqpOlapCompression::TestAlterCompressionTableInTableStore >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapDiagnostics [GOOD] Test command err: Trying to start YDB, gRPC: 6748, MsgBus: 26946 2024-11-21T08:57:59.411972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654216145074887:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:59.412279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a0/r3tmp/tmpEfYurP/pdisk_1.dat 2024-11-21T08:57:59.464893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6748, node 1 2024-11-21T08:57:59.471580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:59.471595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:59.471597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:59.471641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26946 TClient is connected to server localhost:26946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:59.513399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:59.513424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:59.514536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:59.540999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:59.547109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:59.557959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.558025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.558060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:59.558082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:59.558101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:59.558120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:59.558146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:59.558171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:59.558201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:59.558224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.558239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:59.558262Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654216145075549:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:59.558618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:59.558628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:59.558637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:59.558646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.558664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.558672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.558679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:59.558686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.558692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.558697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.558702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.558708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.558765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.558778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.558794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.558802Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.558813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.558821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.558837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.558846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.558860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.558867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.561122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654216145075550:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.561141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654216145075550:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.561161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654216145075550:2289];tablet_id=720751862240 ... 
teSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.567031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:59.567041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:59.567048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:59.567051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.567064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.567071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.567079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:59.567086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.567093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.567101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.567106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.567114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.567140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.567148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.567166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.567173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.567183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.567191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.567203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.567211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.567220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.567227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.568246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:59.568257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:59.568272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:59.568281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.568300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.568308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.568315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:59.568324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.568336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.568345Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.568355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.568364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.568396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.568405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.568424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.568432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.568442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.568450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.568462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.568470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.568479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.568486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.606294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T08:57:59.679876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654216145075846:2377], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.679898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654216145075872:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.679904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.680439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:59.682265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654216145075875:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:57:59.823039Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479737, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.855747Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479842, txId: 18446744073709551615] shutting down >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MinR_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 9270, MsgBus: 1527 2024-11-21T08:57:39.273952Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654128044997334:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:39.274100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004833/r3tmp/tmpYtCgzR/pdisk_1.dat 2024-11-21T08:57:39.320048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9270, node 1 2024-11-21T08:57:39.328612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:39.328629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:39.328630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:39.328679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1527 TClient is connected to server localhost:1527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:39.370269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:39.375175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:39.375197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:39.376324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:39.383145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:39.392498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.392563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.392606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.392624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.392639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.392660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.392680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.392725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.392751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.392772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.392793Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.392812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654128044997980:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.396251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:39.396275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:39.396307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:39.396323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:39.396343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:39.396363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:39.396383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:39.396403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:39.396428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:39.396446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:39.396461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:39.396475Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654128044997981:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:39.396921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:39.396933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:39.396945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:39.396953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:39.396968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:39.396978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:39.396987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:39.396992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:39.397000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:39.397008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:39.397014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;pr ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.079415Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.224146Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 65538 2024-11-21T08:57:58.224169Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.306218Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.306250Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.388498Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.388518Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.470758Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.470786Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.542866Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:58.563760Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:57:58.625533Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.625581Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. 
TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.707791Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.707820Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.790085Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.790111Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.872408Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.872436Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.954609Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:58.954638Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:58.986676Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:59.123306Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:59.123329Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:59.226430Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:59.226464Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:59.308695Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:59.308732Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:59.391024Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:59.391053Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:57:59.473332Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:57:59.473354Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmU1NDA2M2UtYjUzMmM0MTUtOTQ4Y2QwNWMtZDY5OTJjYTM=. TraceId : 01jd6z23w932x62tta6f80qev1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:57:59.535013Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:57:59.555823Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> test.py::test[pg-select_having_no_from-default.txt-Analyze] [GOOD] >> KqpOlap::DuplicatesInIncomingBatch >> KqpOlapCompression::TestAlterCompressionTableInTableStore [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] Test command err: 2024-11-21T08:57:44.174060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:57:44.176136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:44.176241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:44.176274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:44.176522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:44.176530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00480b/r3tmp/tmprxdVQE/pdisk_1.dat 2024-11-21T08:57:44.249728Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:44.330394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:57:44.416797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.416823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.417734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.417750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.428908Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:57:44.429009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:44.429083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:44.761355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:44.810741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.810787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.810832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.810852Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.810871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.810888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.810911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.810930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.810949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.810967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.810989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.811007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.815961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.815982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.816006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.816020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.816030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.816042Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.816057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.816073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.816092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.816103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.816114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.816124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.823732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.823761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.823800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.823817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.823834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.823851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.823865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.823884Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.823901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.823916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.823931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.823946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:129 ... nent=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T08:57:57.529602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b3a13046-a7e611ef-8aafcc79-a31bf9f0;fline=with_appended.cpp:80;portions=67,;task_id=b3a13046-a7e611ef-8aafcc79-a31bf9f0; 2024-11-21T08:57:57.573184Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b3a6ef4a-a7e611ef-b9736aca-71a308bd;fline=with_appended.cpp:80;portions=67,;task_id=b3a6ef4a-a7e611ef-b9736aca-71a308bd; 2024-11-21T08:57:57.608244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b3abd8a2-a7e611ef-8283cf90-331e946c;fline=with_appended.cpp:80;portions=67,;task_id=b3abd8a2-a7e611ef-8283cf90-331e946c; 2024-11-21T08:57:57.641147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=b3b10b1a-a7e611ef-a6129ab5-71b20b37;fline=with_appended.cpp:80;portions=67,;task_id=b3b10b1a-a7e611ef-a6129ab5-71b20b37; 2024-11-21T08:57:57.675036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=b3b7b96a-a7e611ef-b5568a8a-e7b546aa;fline=with_appended.cpp:80;portions=67,;task_id=b3b7b96a-a7e611ef-b5568a8a-e7b546aa; 2024-11-21T08:57:57.708950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=b3bcf8c6-a7e611ef-831902a6-cd13de44;fline=with_appended.cpp:80;portions=67,;task_id=b3bcf8c6-a7e611ef-831902a6-cd13de44; 2024-11-21T08:57:57.754192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=b3ddf27e-a7e611ef-b87d38dd-a37c15c2;fline=with_appended.cpp:80;portions=67,;task_id=b3ddf27e-a7e611ef-b87d38dd-a37c15c2; 2024-11-21T08:57:57.787961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=b3e4830a-a7e611ef-86d3d18f-e3aab497;fline=with_appended.cpp:80;portions=67,;task_id=b3e4830a-a7e611ef-86d3d18f-e3aab497; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T08:57:58.020006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=b3f57c00-a7e611ef-99552869-6328d54b;fline=with_appended.cpp:80;portions=68,;task_id=b3f57c00-a7e611ef-99552869-6328d54b; 2024-11-21T08:57:58.063933Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;task_id=b3fc2604-a7e611ef-935cb983-f3f894a4;fline=with_appended.cpp:80;portions=68,;task_id=b3fc2604-a7e611ef-935cb983-f3f894a4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T08:57:58.225131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b401791a-a7e611ef-8060fcce-b7ce4259;fline=with_appended.cpp:80;portions=70,;task_id=b401791a-a7e611ef-8060fcce-b7ce4259; 2024-11-21T08:57:58.263009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b4067e24-a7e611ef-855bc215-bd06f048;fline=with_appended.cpp:80;portions=70,;task_id=b4067e24-a7e611ef-855bc215-bd06f048; 2024-11-21T08:57:58.305934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b40bacbe-a7e611ef-8c10e8c9-7d938efe;fline=with_appended.cpp:80;portions=70,;task_id=b40bacbe-a7e611ef-8c10e8c9-7d938efe; 2024-11-21T08:57:58.344682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=b410d716-a7e611ef-98484a80-756ebb53;fline=with_appended.cpp:80;portions=70,;task_id=b410d716-a7e611ef-98484a80-756ebb53; 2024-11-21T08:57:58.381693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=b417bd2e-a7e611ef-9887e02b-83ca4a98;fline=with_appended.cpp:80;portions=70,;task_id=b417bd2e-a7e611ef-9887e02b-83ca4a98; 2024-11-21T08:57:58.418186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=b41ce484-a7e611ef-8dc2f4bc-a78456e2;fline=with_appended.cpp:80;portions=70,;task_id=b41ce484-a7e611ef-8dc2f4bc-a78456e2; 2024-11-21T08:57:58.488802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=b4405144-a7e611ef-b5e0e625-1ba82908;fline=with_appended.cpp:80;portions=70,;task_id=b4405144-a7e611ef-b5e0e625-1ba82908; 2024-11-21T08:57:58.526337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=b4470656-a7e611ef-b5e60d26-675b2118;fline=with_appended.cpp:80;portions=70,;task_id=b4470656-a7e611ef-b5e60d26-675b2118; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T08:57:58.783740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=b45fa3f0-a7e611ef-bb333a3f-b742f1f3;fline=with_appended.cpp:80;portions=71,;task_id=b45fa3f0-a7e611ef-bb333a3f-b742f1f3; 2024-11-21T08:57:58.840637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=b4656498-a7e611ef-9b11686b-97f3d042;fline=with_appended.cpp:80;portions=71,;task_id=b4656498-a7e611ef-9b11686b-97f3d042; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T08:57:58.972047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b46bf042-a7e611ef-897b4833-3713fc81;fline=with_appended.cpp:80;portions=73,;task_id=b46bf042-a7e611ef-897b4833-3713fc81; 2024-11-21T08:57:59.037913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b471d5d4-a7e611ef-814b8558-892cc60f;fline=with_appended.cpp:80;portions=73,;task_id=b471d5d4-a7e611ef-814b8558-892cc60f; 2024-11-21T08:57:59.081976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;task_id=b47784ca-a7e611ef-a9199ace-d9343a71;fline=with_appended.cpp:80;portions=73,;task_id=b47784ca-a7e611ef-a9199ace-d9343a71; 2024-11-21T08:57:59.123911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=b47d148a-a7e611ef-9e01bad7-15762f24;fline=with_appended.cpp:80;portions=73,;task_id=b47d148a-a7e611ef-9e01bad7-15762f24; 2024-11-21T08:57:59.160744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=b487d83e-a7e611ef-96bc52ce-7ba41e35;fline=with_appended.cpp:80;portions=73,;task_id=b487d83e-a7e611ef-96bc52ce-7ba41e35; 2024-11-21T08:57:59.197233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=b48d9224-a7e611ef-9c7a8b08-6d6a8800;fline=with_appended.cpp:80;portions=73,;task_id=b48d9224-a7e611ef-9c7a8b08-6d6a8800; 2024-11-21T08:57:59.245483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=b4b4dbb8-a7e611ef-a279b573-c1b90455;fline=with_appended.cpp:80;portions=73,;task_id=b4b4dbb8-a7e611ef-a279b573-c1b90455; 2024-11-21T08:57:59.294126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=b4bd8c68-a7e611ef-838f8825-ad772ce6;fline=with_appended.cpp:80;portions=73,;task_id=b4bd8c68-a7e611ef-838f8825-ad772ce6; 2024-11-21T08:57:59.352434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7249:6433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.352461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7260:6438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.352486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.355930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:59.372079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7263:6441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:57:59.569533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=b4d19b04-a7e611ef-9c88a7d3-dbf55fd7;fline=with_appended.cpp:80;portions=74,;task_id=b4d19b04-a7e611ef-9c88a7d3-dbf55fd7; 2024-11-21T08:57:59.613596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=b4dba522-a7e611ef-a73c8eb0-13561416;fline=with_appended.cpp:80;portions=74,;task_id=b4dba522-a7e611ef-a73c8eb0-13561416; 2024-11-21T08:57:59.733609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b4e25e94-a7e611ef-bed5f6bf-a968d4dc;fline=with_appended.cpp:80;portions=75,;task_id=b4e25e94-a7e611ef-bed5f6bf-a968d4dc; 2024-11-21T08:57:59.784521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b4e8c068-a7e611ef-a369b946-53fd9843;fline=with_appended.cpp:80;portions=75,;task_id=b4e8c068-a7e611ef-a369b946-53fd9843; 2024-11-21T08:57:59.827552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b4ee5da2-a7e611ef-8e90d6d9-eb580406;fline=with_appended.cpp:80;portions=75,;task_id=b4ee5da2-a7e611ef-8e90d6d9-eb580406; 2024-11-21T08:57:59.840794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=b4f3f460-a7e611ef-90196570-2d6f0db7;fline=with_appended.cpp:80;portions=75,;task_id=b4f3f460-a7e611ef-90196570-2d6f0db7; 2024-11-21T08:57:59.853866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=b4fb52b4-a7e611ef-ac7592bc-b76cf648;fline=with_appended.cpp:80;portions=75,;task_id=b4fb52b4-a7e611ef-ac7592bc-b76cf648; 2024-11-21T08:57:59.866770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=b502ba0e-a7e611ef-983f1aa7-ab19a34e;fline=with_appended.cpp:80;portions=75,;task_id=b502ba0e-a7e611ef-983f1aa7-ab19a34e; 2024-11-21T08:57:59.962275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=b52cbf0c-a7e611ef-b6553ac2-55fb218e;fline=with_appended.cpp:80;portions=75,;task_id=b52cbf0c-a7e611ef-b6553ac2-55fb218e; 2024-11-21T08:57:59.975259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=b5337ae0-a7e611ef-b4e59f78-9753d309;fline=with_appended.cpp:80;portions=75,;task_id=b5337ae0-a7e611ef-b4e59f78-9753d309; 2024-11-21T08:57:59.988030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=b545c998-a7e611ef-ae136621-85aab061;fline=with_appended.cpp:80;portions=75,;task_id=b545c998-a7e611ef-ae136621-85aab061; 2024-11-21T08:58:00.012288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=b54d8bd8-a7e611ef-b8813552-dfb283b5;fline=with_appended.cpp:80;portions=75,;task_id=b54d8bd8-a7e611ef-b8813552-dfb283b5; -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 2024-11-21T08:58:00.126410Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6z2jsp5jrmth1410knfphq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRjZTVlNDgtM2EyODQzNjQtNTY0YzZmNjctODU4ZTM0OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"level","type":{"optional_type":{"item":{"type_id":1}}}},{"name":"message","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"resource_id","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"timestamp","type":{"type_id":50}},{"name":"uid","type":{"type_id":4608}}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":65} 2024-11-21T08:58:00.178152Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2500, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::TestAlterCompressionTableInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 1508, MsgBus: 7210 2024-11-21T08:58:00.292868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654219619367766:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:00.293000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00479d/r3tmp/tmp84KyfZ/pdisk_1.dat 2024-11-21T08:58:00.332074Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1508, node 1 2024-11-21T08:58:00.344828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:00.344840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:00.344843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:00.344875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7210 TClient is connected to server localhost:7210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:00.394291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:00.394320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:00.395435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:00.417330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:00.533155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654219619368378:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.533182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.559897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:00.567763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:00.567799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:00.567830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:00.567846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:00.567860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:00.567875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:00.567889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:00.567904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:00.567920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:00.567937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:00.567951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:00.567966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654219619368445:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:58:00.568327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:00.568340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:00.568351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:00.568360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:00.568370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:00.568376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:00.568381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:00.568385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:00.568395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:00.568397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:00.568401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:00.568403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:00.568445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:00.568456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:00.568467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:00.568475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:58:00.568482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:00.568488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:00.568498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:00.568504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:00.568511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:00.568517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE `/Root/TableStoreTest/ColumnTableTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:00.616956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654219619368525:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.616974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.618996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T08:58:00.628981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654219619368625:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.629000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] >> KqpOlapAggregations::Json_GetValue_Minus >> KqpOlap::EmptyRange >> KqpOlap::DuplicatesInIncomingBatch [GOOD] >> KqpOlapSysView::StatsSysViewColumns [GOOD] >> KqpOlapAggregations::BlockGenericSelectAll ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 1350, MsgBus: 22815 2024-11-21T08:58:00.080317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654220537715266:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:00.080475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00479f/r3tmp/tmpH0pjQH/pdisk_1.dat 2024-11-21T08:58:00.135114Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1350, node 1 2024-11-21T08:58:00.142860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:00.142876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:00.142878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:00.142922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22815 TClient is connected to server localhost:22815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:00.181864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:00.181891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:00.183053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:00.211995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:00.221608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:00.231970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:00.232050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:00.232105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:00.232134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:00.232161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:00.232185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:00.232238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:00.232262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:00.232288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:00.232315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:00.232338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:00.232362Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654220537715928:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:00.232892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:00.232912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:00.232925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:00.232938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:00.232955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:00.232966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:00.232977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:00.232983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:00.232992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:00.233008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:00.233016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:00.233031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:00.233097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:00.233111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:00.233128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:00.233133Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:00.233144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:00.233148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:00.233164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:00.233174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:00.233185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:00.233193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:00.236167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654220537715920:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:00.236193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654220537715920:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:00.236245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654220537715920:2288];tablet_id=720751862240 ... 
zation_finished; 2024-11-21T08:58:00.244554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:00.244565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:00.244580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:00.244588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:00.244601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:00.244609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:00.244617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:00.244626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:00.244633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:00.244641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:00.244647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:00.244656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:00.244685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:00.244695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:00.244710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:00.244718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:00.244729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:00.244738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:00.244755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:00.244763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:00.244773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:00.244781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:00.278663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:58:00.488104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654220537716361:2463], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.488105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654220537716369:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.488127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:00.488800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:00.490369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654220537716375:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:58:01.168628Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480542, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}],"KeyColumns":[{"Id":4}]}},{"Projection":{"Columns":[{"Id":6},{"Id":4}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 '('"level" (OptionalType (DataType 'Int32)))) (let $3 '('('"_logical_id" '579) '('"_id" '"cabdf8d6-1b9aec59-23aed168-98c88e10") '('"_wide_channels" (StructType '('_yql_agg_0 $1) $2)))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $13 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $14 '('"level")) (let $15 (KqpWideReadOlapTableRanges $13 (Void) $14 '() '() (lambda '($16) (TKqpOlapAgg $16 '('('_yql_agg_0 'count '"level")) $14)))) (return (FromFlow $15)) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('1))) (let $6 (StructType '('"column1" $1) $2)) (let $7 '('('"_logical_id" '995) '('"_id" '"9f3ef868-1d44c835-be24b625-6dab0e5d") '('"_wide_channels" $6))) (let $8 (DqPhyStage '($5) (lambda '($17) (block '( (let $18 (lambda '($29 $30) $30 $29)) (let $19 (WideCombiner (ToFlow $17) '"" (lambda '($20 $21) $21) (lambda '($22 $23 $24) $23) (lambda '($25 $26 $27 $28) (AggrAdd $26 $28)) $18)) (return (FromFlow (WideSort $19 '('('1 (Bool 'true)))))) ))) $7)) (let $9 (DqCnMerge (TDqOutput $8 '0) '('('1 '"Asc")))) (let $10 (DqPhyStage '($9) (lambda '($31) (FromFlow (NarrowMap (ToFlow $31) (lambda '($32 $33) (AsStruct '('"column1" $32) 
'('"level" $33)))))) '('('"_logical_id" '1007) '('"_id" '"5ee63e60-53c01d0d-c5698963-f20903a7")))) (let $11 '($4 $8 $10)) (let $12 (DqCnResult (TDqOutput $10 '0) '('"level" '"column1"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $11 '($12) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType $6) '0 '0)) '('('"type" '"scan_query")))) ) |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> test.py::test[pg-select_having_no_from-default.txt-Debug] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-ForceBlocks] |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::DuplicatesInIncomingBatch [GOOD] Test command err: Trying to start YDB, gRPC: 3204, MsgBus: 28294 2024-11-21T08:58:00.847574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654220759404391:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:00.847631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00479a/r3tmp/tmpfpR1aJ/pdisk_1.dat 2024-11-21T08:58:00.892753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3204, node 1 2024-11-21T08:58:00.906686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:00.906707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:00.906709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:00.906748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28294 TClient is connected to server localhost:28294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:00.948696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:00.948729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:00.949818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:58:00.974671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Utf8 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:01.121527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654225054372297:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.121570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.147839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:01.154498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.154545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.154585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.154610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.154631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.154654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.154675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.154696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.154717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.154739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.154760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.154782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225054372374:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:58:01.155242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:01.155254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:01.155262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:01.155269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:01.155279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:01.155286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:01.155292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:01.155296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:01.155305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:01.155311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.155315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.155321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.155367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.155378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.155389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:01.155395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:58:01.155405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.155412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.155421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.155427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.155434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.155439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=472;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=472;columns=4; 2024-11-21T08:58:01.219072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654225054372518:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.219104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654225054372523:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.219109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.219759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:01.221110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654225054372525:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:01.336568Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481277, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewColumns [GOOD] Test command err: Trying to start YDB, gRPC: 10901, MsgBus: 6248 2024-11-21T08:57:48.989471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654169461758341:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:48.989487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ce/r3tmp/tmp9k3qda/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10901, node 1 2024-11-21T08:57:49.056047Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:49.056185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:49.056194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:49.056195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:49.056241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6248 TClient is connected to server localhost:6248 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:57:49.090783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:49.090805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:57:49.091894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:49.101186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:49.111280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:49.119270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.119324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.119356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.119372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.119388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.119400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.119414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.119429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.119444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.119461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.119475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.119489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654173756726293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.119792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:49.119802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:49.119810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:49.119813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:49.119824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:49.119830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:49.119835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:49.119838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:49.119843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:49.119846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:49.119851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:49.119857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:49.119894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:49.119901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:49.119911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:49.119917Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.119924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:49.119930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:49.119940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:49.119946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:49.119952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:49.119958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:49.121807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654173756726294:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.121823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654173756726294:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.121839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654173756726294:2289];tablet_id=7207518622403 ... 
et_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.126166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654173756726327:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.126182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654173756726327:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.126534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:49.126542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:49.126548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:49.126550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:49.126560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:49.126566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:49.126570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:49.126572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:49.126577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:49.126580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:49.126584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:49.126586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:49.126604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 
2024-11-21T08:57:49.126613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:49.126624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:49.126630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.126636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:49.126640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:49.126648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:49.126651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:49.126658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:49.126661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:49.168298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T08:57:53.990054Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654169461758341:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:53.990105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT TabletId, PathId, Kind FROM `/Root/olapStore/.sys/store_primary_index_stats` ORDER BY PathId, Kind, TabletId LIMIT 4; RESULT: 2024-11-21T08:57:59.335700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654216706399922:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.335704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654216706399933:2577], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.335724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.336366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T08:57:59.337710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654216706399936:2578], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T08:57:59.456102Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479439, txId: 281474976710662] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT SUM(BlobRangeSize) as Bytes, SUM(Rows) as Rows, PathId, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, TabletId ORDER BY Bytes RESULT: 2024-11-21T08:58:00.309794Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479496, txId: 281474976710664] shutting down TabletId: 72075186224037889 Rows: 33005 PathId: 3 Bytes: 246760 TabletId: 72075186224037890 Rows: 33490 PathId: 3 Bytes: 249560 TabletId: 72075186224037888 Rows: 33505 PathId: 3 Bytes: 250032 ==================================== QUERY: SELECT Sum(Rows) as Rows, Kind, Sum(ColumnRawBytes) as RawBytes, PathId FROM `/Root/olapStore/.sys/store_primary_index_portion_stats` WHERE Activity == 1 GROUP BY Kind, PathId ORDER BY PathId, Kind, Rows RESULT: 2024-11-21T08:58:01.350268Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480351, txId: 281474976710666] shutting down Rows: 20000 PathId: 3 Kind: INSERTED RawBytes: 23478900 >> KqpOlap::EmptyRange [GOOD] >> KqpOlapStatistics::StatsUsage >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 20395, MsgBus: 10090 2024-11-21T08:58:01.442346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654223679481281:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:01.442523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004797/r3tmp/tmpSioyOe/pdisk_1.dat 2024-11-21T08:58:01.494407Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20395, node 1 2024-11-21T08:58:01.504534Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:01.504546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:01.504547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:01.504572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10090 TClient is connected to server localhost:10090 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:01.543632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:01.543661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:01.544759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:01.574542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:01.586102Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:01.601253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:01.618863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.618948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.619008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.619043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.619069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.619094Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.619117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.619138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.619228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.619253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.619285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.619308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654223679481926:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:01.623406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.623438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.623478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.623506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.623541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.623575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.623598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.623626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.623654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.623677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.623700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.623724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654223679481925:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:01.627631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.627666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.627706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.627731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.627754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.627777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.627798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.627823Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654223679481937:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... :62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.632989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:01.633056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:01.633060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:01.633068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:01.633072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:01.633086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:01.633089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:01.633095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:01.633098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:01.633104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:01.633107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.633112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.633115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.633149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.633155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.633168Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:01.633172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.633181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.633185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.633198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.633201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.633210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.633213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:01.633281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:01.633286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:01.633293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:01.633297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:01.633310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:01.633316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:01.633325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:01.633357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:01.633364Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:01.633369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.633374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.633377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.633427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.633432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.633450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:01.633459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.633468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.633477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.633491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.633499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.633522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.633530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:01.640760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 2024-11-21T08:58:01.710748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654223679482243:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.710772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.710908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654223679482270:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.711620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:01.719459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654223679482272:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:58:01.880556Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481774, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] Test command err: Trying to start YDB, gRPC: 16900, MsgBus: 1856 2024-11-21T08:58:01.370331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654225813352420:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:01.370513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004799/r3tmp/tmpr9GhlH/pdisk_1.dat 2024-11-21T08:58:01.419113Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16900, node 1 2024-11-21T08:58:01.425459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:01.425472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:01.425473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:01.425516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1856 TClient is connected to server localhost:1856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:01.470442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:58:01.471450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:01.471473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T08:58:01.472543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:01.478937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:01.489209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.489289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.489339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.489364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.489391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.489423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.489445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.489470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.489495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.489532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.489554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.489578Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654225813353061:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:01.490065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:01.490080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:01.490091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:01.490097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:01.490113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:01.490123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:01.490133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:01.490147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:01.490156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:01.490165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.490171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.490183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.490247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.490258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.490275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:01.490284Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.490296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.490305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.490321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.490330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.490340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.490349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:01.493590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225813353062:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.493614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225813353062:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.493654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654225813353062:2289];tablet_id=7207518622403 ... 
ute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.502029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.502037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.502073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.502084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.502101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:01.502110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.502121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.502129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.502144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.502152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.502168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.502176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; 2024-11-21T08:58:01.693681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654225813353342:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.693705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654225813353361:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.693712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.694501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:01.696068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654225813353371:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:58:01.897759Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481746, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val-abc","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val-abc","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1286) '('"_id" '"ab99ad60-65b3d434-bfb6e93a-78dc5683") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '1)) (let $20 (Just $19)) (let $21 '($20 $19)) (let $22 (If (== $19 (Int32 '2147483647)) $21 '((+ $20 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $28 (ResourceType '"JsonNode")) (let $29 (OptionalType $28)) (let $30 '((ResourceType '"JsonPath"))) (let $31 (DictType $10 $28)) (let $32 '($31)) (let $33 (CallableType '() $27 '($29) $30 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($54) (block '( (let $55 '((DataType 'Json) '"" '1)) (let $56 (CallableType '() '($28) $55)) (let $57 (Udf '"Json2.Parse" (Void) (VoidType) '"" $56 (VoidType) '"" '())) (return (Just (Apply $57 $54))) )))) (let $37 (Nothing $29)) (let $38 (CallableType '() $30 '($10))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.'col-abc'"))) (let $41 (Dict $31)) (let $42 (lambda '($58) (Nothing $11))) (let $43 (lambda '($59) $59)) (let $44 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (String '"val-abc"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($60 $61 $62) (block '( (let $63 (IfPresent $62 $36 $37)) (let $64 (Apply $35 $63 $40 $41)) (let $65 (Visit $64 '0 $42 '1 $43)) (let $66 (CallableType '() $27 '($26) $30 $32)) (let $67 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $66 (VoidType) '"" $34)) (let $68 (Apply $67 $61 $40 $41)) (let $69 (Visit $68 '0 $42 '1 $43)) (return (AsStruct '('"column1" $65) '('"column2" $69) '('"id" $60))) )))))) ))) '('('"_logical_id" '1357) '('"_id" '"3afda080-75f294b2-eb8c9c0b-8af2c70f")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($70) $70) '('('"_logical_id" '2217) '('"_id" '"fb25a96b-f12eb700-aabeaa1b-3be6aa98")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapStatistics::StatsUsage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 2182, MsgBus: 10544 2024-11-21T08:58:01.693953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654224709557863:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:01.694061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004796/r3tmp/tmpp8h8w4/pdisk_1.dat 2024-11-21T08:58:01.750009Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2182, node 1 2024-11-21T08:58:01.763051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:01.763063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:01.763064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:01.763104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10544 2024-11-21T08:58:01.794160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:01.794186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:01.795272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:01.828002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:01.841884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:01.850893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.850960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.851013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.851038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.851058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.851079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.851105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.851124Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.851148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.851170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.851195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.851217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654224709558350:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:01.853582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.853602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.853632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.853644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.853654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.853668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.853677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.853690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.853706Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:01.853716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.853725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:01.853735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654224709558348:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:01.855837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:01.855846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:01.855866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:01.855876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:01.855886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:01.855894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:01.855902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:01.855914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:01.855930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654224709558349 ... 
=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.860150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:01.860268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:01.860281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:01.860290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:01.860295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:01.860309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:01.860318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:01.860326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:01.860336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:01.860348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:01.860358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:01.860364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:01.860373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:01.860407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:01.860418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:01.860434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:58:01.860444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:01.860455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:01.860465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:01.860481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:01.860489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:01.860498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:01.860506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; 2024-11-21T08:58:01.998186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654224709558654:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.998206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654224709558644:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.998219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:01.998889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:02.000493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654224709558658:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level != 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Sort-Filter-TableFullScan"}],"Node Type":"Merge","SortColumns":["id (Asc)","resource_id (Asc)","level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level != 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 (StructType '('"id" $1) '('"level" (OptionalType $1)) '('"resource_id" (OptionalType (DataType 'Utf8))))) (let $3 '('('"_logical_id" '880) '('"_id" '"e45b4141-d3562f2e-d60210f2-9d9325") '('"_wide_channels" $2))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $10 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $11 '('"id" '"level" '"resource_id")) (let $12 (KqpBlockReadOlapTableRanges $10 (Void) $11 '() '() (lambda '($15) (block '( (let $16 '('neq '"level" (Int32 '"5"))) (let $17 '('?? $16 (Bool 'false))) (return (KqpOlapFilter $15 (KqpOlapOr $17 '('empty '"level")))) ))))) (let $13 (Bool 'true)) (let $14 '('('0 $13) '('2 $13) '('1 $13))) (return (FromFlow (WideFromBlocks (WideSortBlocks $12 $14)))) ))) $3)) (let $5 '('('0 '"Asc") '('2 '"Asc") '('1 '"Asc"))) (let $6 (DqCnMerge (TDqOutput $4 '0) $5)) (let $7 (DqPhyStage '($6) (lambda '($18) (FromFlow (NarrowMap (ToFlow $18) (lambda '($19 $20 $21) (AsStruct '('"id" $19) '('"level" $20) '('"resource_id" $21)))))) '('('"_logical_id" '892) '('"_id" '"86d023d9-1105ad8a-6220b38f-6fcd9345")))) (let $8 '('"id" '"resource_id" '"level")) (let $9 (DqCnResult (TDqOutput $7 '0) $8)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $7) '($9) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $2) '"0" '"0")) '('('"type" '"query")))) ) >> test.py::test[pg-select_having_no_from-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Plan] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 4685, MsgBus: 61334 2024-11-21T08:57:59.314009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654214940559783:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:59.314021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a3/r3tmp/tmpU3kiGw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4685, node 1 2024-11-21T08:57:59.361166Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:59.363066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:59.363077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:59.363079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:59.363111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61334 TClient is connected to server localhost:61334 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:59.407009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:59.415207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:59.415228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:59.416341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:59.418724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:59.426980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.427058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.427114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:59.427140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:59.427158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:59.427181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:59.427206Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:59.427229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:59.427254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:59.427277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.427300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:59.427321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654214940560430:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:59.427821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:59.427837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:59.427850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:59.427857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.427878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.427887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.427898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:59.427907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.427921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.427930Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.427937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.427945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.428003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.428013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.428031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.428039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.428051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.428059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.428076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.428085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.428103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.428111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.431116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654214940560428:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.431141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654214940560428:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.431181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654214940560428:2288];tablet_id=720751862240 ... 
888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.437554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.437560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.437564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:59.437567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.437570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.437576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.437579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.437581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.437594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.437596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.437607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.437613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.437619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.437622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.437629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.437635Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.437641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.437643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T08:57:59.562768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654214940560724:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.562787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654214940560733:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.562793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:59.563327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:59.564609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654214940560738:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:58:02.074954Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479618, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"6b8cf7f1-325f7684-62a011b-1506e8f4") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '0)) (let $27 '((Nothing $2) $26)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType 
(TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"b0c88e2e-d533f2c5-16ab01f-1cfe0814") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"9c25265a-17b01ca4-9642b68d-bbdaff7") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"77c01d7f-113de4d3-d559953f-ca29431d")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsage [GOOD] Test command err: Trying to start YDB, gRPC: 22224, MsgBus: 8832 2024-11-21T08:58:02.107205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654228426931044:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:02.107409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004794/r3tmp/tmpg3vY5K/pdisk_1.dat 2024-11-21T08:58:02.158167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22224, node 1 2024-11-21T08:58:02.165745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:02.165759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:02.165761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T08:58:02.165793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8832 TClient is connected to server localhost:8832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:02.207427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:02.207450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:02.208677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:02.209190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:02.221077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:02.232125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:02.232176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:02.232234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:02.232263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:02.232285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:02.232308Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:02.232331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:02.232358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:02.232390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:02.232416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.232443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:02.232463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654228426931683:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:02.236034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:02.236065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:02.236100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:02.236126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:02.236147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:02.236166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:02.236183Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:02.236220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:02.236241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:02.236263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.236283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:02.236303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654228426931684:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:02.239538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:02.239564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:02.239597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:02.239621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:02.239643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:02.239663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:02.239681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:02.239701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654228426931685:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:02.239726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654228426931685: ... 186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:02.244640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:02.244648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:02.244655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:02.244663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:02.244669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:02.244677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:02.244682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:02.244689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:02.244716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:02.244724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:02.244742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:02.244751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.244761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:02.244769Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:02.244783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:02.244791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:02.244799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:02.244806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:02.244864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:02.244872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:02.244879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:02.244888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:02.244904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:02.244912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:02.244926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:02.244934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:02.244943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:02.244951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:02.244957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:02.244966Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:02.244989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:02.244997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:02.245017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:02.245025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.245041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:02.245050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:02.245068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:02.245081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:02.245089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:02.245092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:02.279716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:58:02.380321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654228426931977:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.380359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.407743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:58:02.418352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654228426932031:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.418389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.422541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654228426932046:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.422560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.427598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654228426932061:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.427615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.427914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710663:0, at schemeshard: 72057594046644480 >> KqpOlapAggregations::Aggregation_Sum_GroupByNull |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |91.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> KqpOlapAggregations::AggregationCountPushdown >> KqpOlap::BlockChannelAuto >> test.py::test[pg-select_having_no_from-default.txt-Results] [GOOD] >> test.py::test[pg-select_where-default.txt-Analyze] >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix [GOOD] >> KqpOlapBlobsSharing::MultipleSchemaVersions >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 >> KqpOlapIndexes::CountMinSketchIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 19406, MsgBus: 2642 2024-11-21T08:58:02.249120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654227860054141:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:02.249484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004793/r3tmp/tmpO4pHqZ/pdisk_1.dat 2024-11-21T08:58:02.305940Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19406, node 1 2024-11-21T08:58:02.315291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:02.315309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:02.315311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:02.315363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2642 2024-11-21T08:58:02.350084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:02.350124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:02.351078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2642 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:02.381111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:02.385257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:02.397832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:02.397922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:02.397972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:02.397992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:02.398005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:02.398027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:02.398045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:02.398057Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:02.398071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:02.398087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.398108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:02.398124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654227860054783:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:02.398588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:02.398604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:02.398616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:02.398626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:02.398642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:02.398650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:02.398660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:02.398677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:02.398690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:02.398699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:02.398705Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:02.398709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:02.398777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:02.398789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:02.398806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:02.398815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.398826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:02.398834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:02.398851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:02.398865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:02.398875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:02.398878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:02.401268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654227860054784:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:02.401294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654227860054784:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:02.401323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654227860054784:2290];tablet_id=7207518622403 ... 
;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:02.407417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:02.407431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:02.407439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:02.407447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:02.407455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:02.407462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:02.407470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:02.407475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:02.407480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:02.407509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:02.407518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:02.407531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:02.407538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:02.407547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:02.407556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:02.407568Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:02.407571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:02.407580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:02.407588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 5 AND 6 GROUP BY level ORDER BY level; 2024-11-21T08:58:02.522687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654227860055061:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.522713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.522767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654227860055088:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:02.523502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:02.527122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T08:58:02.527289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654227860055090:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:03.461556Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482579, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 5 AND 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '985) '('"_id" '"d20889bb-ba186de2-539bcb40-80fcb22a") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $25 (Int32 '1)) (let $26 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"5")) $25) $26))) (RangeCreate (AsList '($26 '((Just (Int32 '"6")) $25)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType 
$2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"level" $2)) (let $11 (StructType '('_yql_agg_0 $1) '('_yql_agg_1 $2) $10)) (let $12 '('('"_logical_id" '1044) '('"_id" '"660d7c35-6e027b9f-9056ee8c-5ddf84a8") '('"_wide_channels" $11))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $27 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $27 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (block '( (let $31 '('_yql_agg_0 'some '"id")) (let $32 '('_yql_agg_1 'some '"level")) (return (TKqpOlapAgg $30 '($31 $32) '('"level"))) ))))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('2))) (let $15 (StructType '('"column1" $1) '('"column2" $2) $10)) (let $16 '('('"_logical_id" '1582) '('"_id" '"46075396-16038caa-49ba3cd7-255df476") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (lambda '($41 $42 $43 $44) $42 $43)) (let $35 (lambda '($45 $46 $47 $48 $49 $50) $49 (Coalesce $50 $47))) (let $36 (lambda '($51 $52 $53) $52 $53 $51)) (let $37 (WideCombiner (ToFlow $33) '"" (lambda '($38 $39 $40) $40) $34 $35 $36)) (return (FromFlow (WideSort $37 '('('2 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('2 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($54) (FromFlow (NarrowMap (ToFlow $54) (lambda '($55 $56 $57) (AsStruct '('"column1" $55) '('"column2" $56) '('"level" $57)))))) '('('"_logical_id" '1594) '('"_id" '"442a8099-59abf42a-a34aabee-dc88665a")))) (let $20 '($13 $17 $19)) (let $21 '('"level" '"column1" '"column2")) (let $22 (DqCnResult (TDqOutput $19 '0) $21)) (let $23 (KqpTxResultBinding $9 '0 '0)) (let $24 (KqpPhysicalTx $20 '($22) '('($7 $23)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $24) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_Sum_GroupByNull [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy [GOOD] >> test.py::test[pg-select_where-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_where-default.txt-Debug] >> KqpOlap::BlockChannelAuto [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 4605, MsgBus: 63169 2024-11-21T08:58:03.066008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654230673630144:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.066122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004792/r3tmp/tmpEW3aGo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4605, node 1 2024-11-21T08:58:03.116379Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:03.120455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.120468Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.120469Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.120508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:63169 TClient is connected to server localhost:63169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:03.167681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:03.167712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:03.168780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:03.195716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.206921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:03.213615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.213676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.213707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.213724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.213735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.213749Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.213766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.213781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.213796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.213814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.213828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.213843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654230673630786:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.216134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.216160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.216192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.216219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.216236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.216250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.216265Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.216281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.216297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.216320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.216334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.216344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654230673630789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.218386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.218406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.218430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.218443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.218459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.218473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.218497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.218518Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654230673630785:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.218542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654230673630785 ... cess=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:03.221803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:03.221805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:03.221814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:03.221816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:03.221821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:03.221823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:03.221827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:03.221833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:03.221836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:03.221838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:03.221853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:03.221859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:03.221873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:03.221879Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.221885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:03.221887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:03.221897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:03.221903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:03.221908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:03.221910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; 2024-11-21T08:58:03.313644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654230673631073:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.313672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.313750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654230673631094:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.314469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:03.316137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654230673631096:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:04.153961Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483370, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SUM(id), SUM(level) FROM `/Root/tableWithNulls` WHERE id > 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1214) '('"_id" '"63f80006-b9fc2aae-63096548-21bb06c7") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Int64)) (let $11 
(OptionalType $10)) (let $12 '('"level" $2)) (let $13 (StructType '('_yql_agg_0 $10) '('_yql_agg_1 $11) $12)) (let $14 '('('"_logical_id" '1273) '('"_id" '"cea16b5a-1113bfbe-62e5a702-3d9075c7") '('"_wide_channels" $13))) (let $15 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $29 (lambda '($31) (block '( (let $32 '('_yql_agg_0 'sum '"id")) (let $33 '('_yql_agg_1 'sum '"level")) (return (TKqpOlapAgg $31 '($32 $33) '('"level"))) ))))) (return (FromFlow $30)) ))) $14)) (let $16 (DqCnHashShuffle (TDqOutput $15 '0) '('2))) (let $17 (StructType '('"column1" $10) '('"column2" $11) $12)) (let $18 '('('"_logical_id" '2113) '('"_id" '"4a627300-48f69af8-54702e36-8b039214") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($16) (lambda '($34) (block '( (let $35 (lambda '($42 $43 $44 $45) $43 $44)) (let $36 (lambda '($46 $47 $48 $49 $50 $51) (AggrAdd $47 $50) (AggrAdd $48 $51))) (let $37 (lambda '($52 $53 $54) $53 $54 $52)) (let $38 (WideCombiner (ToFlow $34) '"" (lambda '($39 $40 $41) $41) $35 $36 $37)) (return (FromFlow (WideSort $38 '('('2 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('2 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($55) (FromFlow (NarrowMap (ToFlow $55) (lambda '($56 $57 $58) (AsStruct '('"column1" $56) '('"column2" $57) '('"level" $58)))))) '('('"_logical_id" '2125) '('"_id" '"7034f3ae-b7316939-77e4a5aa-7034108f")))) (let $22 '($15 $19 $21)) (let $23 '('"level" '"column1" '"column2")) (let $24 (DqCnResult (TDqOutput $21 '0) $23)) (let $25 (KqpTxResultBinding $9 '0 '0)) (let $26 (KqpPhysicalTx $22 '($24) '('($7 $25)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $26) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 29023, MsgBus: 19292 2024-11-21T08:58:03.162817Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654232668096218:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.163084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004791/r3tmp/tmpIIGpqS/pdisk_1.dat 2024-11-21T08:58:03.215826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29023, node 1 2024-11-21T08:58:03.227971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.227987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.227990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.228030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19292 TClient is connected to server localhost:19292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:03.263860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:03.263883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:03.264939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:03.296460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.298110Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:03.301808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:03.314550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.314625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.314659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.314679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.314693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.314711Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.314728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.314743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.314762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.314776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.314791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.314810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232668096857:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.318081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.318096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.318129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.318145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.318160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.318175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.318187Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.318207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.318230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.318245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.318259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.318273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232668096858:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.325239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.325274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.325340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.325363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.325386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.325409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.325432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.325454Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654232668096859:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... ess=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:03.330868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:03.330871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:03.330878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:03.330881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:03.330901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:03.330904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:03.330910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:03.330913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:03.330919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:03.330922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:03.330926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:03.330929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:03.330950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:03.330953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:03.330966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T08:58:03.330975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.330984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:03.330987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:03.331000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:03.331002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:03.331010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:03.331012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T08:58:03.437437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654232668097163:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.437455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654232668097154:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.437524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.438106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:03.439358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654232668097168:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:04.142713Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483489, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, SUM(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '972) '('"_id" '"6efeb490-d71c554f-c5786dc4-d7f8b188") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '0)) (let $25 '((Nothing $2) $24)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (OptionalType (DataType 'Int64))) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '1031) '('"_id" 
'"78644046-5a3b69c-b708cf2-40acee89") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'sum '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1598) '('"_id" '"32e9ec2e-97d7d69c-eae9ec04-c0949497") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1610) '('"_id" '"60fde8e6-fc56e54c-d9cfd32d-fcda600f")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean Test command err: Trying to start YDB, gRPC: 7346, MsgBus: 3932 2024-11-21T08:57:57.819766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654204759491452:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:57.819863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ab/r3tmp/tmpnAf0rF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7346, node 1 2024-11-21T08:57:57.863230Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:57.867294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:57.867305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:57.867307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:57.867336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3932 TClient is connected to server localhost:3932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:57.921253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:57.921277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:57.922374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:57.941061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:57.951411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:57.957724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:57.960511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:57.960568Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:57:57.961135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.961190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.961239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.961260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.961278Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.961301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.961315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.961335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.961353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.961377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.961396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.961415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.961977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:57:57.963838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:57:57.963866Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T08:57:57.964481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.964502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.964536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.964555Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.964577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.964599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.964618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.964639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.964656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.964679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.964711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.964730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.965099Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2024-11-21T08:57:57.965125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:57:57.965128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:57:57.965146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:57:57.965173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:57.965190Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:57.965197Z node 1 :TX_COLUMNSHARD NOTICE: tablet_i ... A_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654204759491452:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:02.819961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:58:02.967890Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654204759492093:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:02.967907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654204759492092:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:02.969932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T08:58:02.970441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T08:58:02.970464Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:58:02.970475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:02.970504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:02.970528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:02.970545Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:02.970553Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.970563Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.970570Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:02.970588Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:02.970627Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179483000 at tablet 72075186224037889 2024-11-21T08:58:02.970638Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:02.970642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:02.970645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:02.970649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:02.970653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.970655Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.970658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:02.970663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654204759492093:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:02.970937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:02.970951Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:58:02.970959Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:58:02.970964Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:02.970973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:02.970982Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:02.970993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:02.970996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.971000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.971004Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:02.971013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:02.971025Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179483000 at tablet 72075186224037888 2024-11-21T08:58:02.971035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:02.971039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:02.971042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:02.971045Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:02.971048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.971051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:02.971054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:02.971058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654204759492092:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:216: Execute_ @ 0x124D20C7 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F24BF6B9D8F 11. ??:0: ?? @ 0x7F24BF6B9E3F 12. ??:0: ?? 
@ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelAuto [GOOD] Test command err: Trying to start YDB, gRPC: 8635, MsgBus: 62137 2024-11-21T08:58:03.655518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654233317864307:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.655763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478e/r3tmp/tmpHljOsW/pdisk_1.dat 2024-11-21T08:58:03.710968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8635, node 1 2024-11-21T08:58:03.730067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.730081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.730083Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.730123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62137 2024-11-21T08:58:03.756866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:03.756893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:03.757971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:03.789097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.794838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.810589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:03.826558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.834996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.942458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654233317865860:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.942533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.948415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.959508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.965902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.972902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.980247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.986938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T08:58:03.995561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654233317866361:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.995605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.995618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654233317866366:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:03.996178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:58:04.000346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654233317866368:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:58:04.182664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T08:58:04.190042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.190081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.190115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.190135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.190149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.190165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.190178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.190193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.190209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.190212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654237612834041:2462];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.190224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.190227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654237612834041:2462];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.190263Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.190271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654237612834041:2462];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.190281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439654237612834039:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.190288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654237612834041:2462];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.190328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654237612834041:2462];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08 ... 04.193696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:04.193699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:04.193705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:04.193708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:04.193713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:04.193717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:04.193722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:04.193724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:04.193755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:04.193763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:04.193774Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:04.193777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.193784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:04.193787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:04.193797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:04.193800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:04.193807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:04.193809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:04.194024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.194050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.194089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.194119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.194146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.194176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.194232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2024-11-21T08:58:04.194255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.194278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.194305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.194331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.194354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439654237612834054:2463];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.194853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:04.194869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:04.194880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:04.194884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:04.194901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:04.194914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:04.194923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:04.194930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:04.194944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:04.194948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2024-11-21T08:58:04.194961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:04.194966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:04.195012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:04.195025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:04.195039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:04.195050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.195063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:04.195073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:04.195090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:04.195101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:04.195113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:04.195122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpOlapBlobsSharing::HugeSchemeHistory >> test.py::test[pg-select_where-default.txt-Debug] [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] >> KqpOlapBlobsSharing::TableReshardingModuloN >> KqpOlapAggregations::JsonDoc_GetValue_ToInt >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] >> KqpOlapWrite::TestRemoveTableBeforeIndexation >> KqpOlapSysView::StatsSysViewBytesDictStatActualization ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 8776, MsgBus: 27160 2024-11-21T08:57:40.503174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654135647369620:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:40.503313Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004825/r3tmp/tmpIzbXcP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8776, node 1 2024-11-21T08:57:40.553256Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:40.555145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:40.555157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:40.555158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:40.555188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27160 TClient is connected to server localhost:27160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:40.597099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:40.600626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:40.603996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:40.604020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:40.605200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:40.611706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.611766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.611807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.611835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.611856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.611879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.611898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.611923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.611948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.611968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.611988Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.612012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654135647370272:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.614179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.614198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.614227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.614249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.614269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.614287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.614305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.614325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.614343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:40.614362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:40.614382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:40.614400Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654135647370273:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:40.616261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:40.616279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:40.616305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:40.616324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:40.616343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:40.616360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:40.616377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:40.616396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:40.616414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654135647370274 ... 
8:57:58.093104Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.193334Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478057, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.296603Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478176, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.410021Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478274, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.517829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478379, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.628726Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478498, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.736465Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478603, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.850567Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478715, txId: 18446744073709551615] shutting down 2024-11-21T08:57:58.950882Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179478820, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.053221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479000, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.171410Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479037, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.279937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479135, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.394490Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479261, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.511786Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479359, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.626487Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479492, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.760595Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479597, txId: 18446744073709551615] shutting down 2024-11-21T08:57:59.896453Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479716, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.025576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479863, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.151558Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.259529Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our 
snapshot: [step: 1732179480122, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.390321Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480234, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.511368Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480346, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.615811Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480472, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.752199Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480598, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.865285Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480724, txId: 18446744073709551615] shutting down 2024-11-21T08:58:00.982140Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179480836, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.130178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.274952Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481074, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.402395Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481228, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.511203Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481368, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.648255Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481480, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.806278Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481599, txId: 18446744073709551615] shutting down 2024-11-21T08:58:01.944904Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481746, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.063677Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.192620Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482040, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.302103Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482166, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.424220Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482278, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.562607Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482390, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.698912Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482516, txId: 18446744073709551615] shutting down 
2024-11-21T08:58:02.806877Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482663, txId: 18446744073709551615] shutting down 2024-11-21T08:58:02.938334Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179482782, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.056293Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.164146Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483027, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.289812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483139, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.431251Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483251, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.557252Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483391, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.657424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483538, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.783427Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483636, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.890396Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483748, txId: 18446744073709551615] shutting down 2024-11-21T08:58:03.998774Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179483867, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.116161Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.231989Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484084, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.346659Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484203, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.467323Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484322, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.582512Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484434, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.695111Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484553, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.805159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484672, txId: 18446744073709551615] shutting down 2024-11-21T08:58:04.912729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484784, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.022091Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding 
snapshot; our snapshot: [step: 1732179484889, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.173009Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485000, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.329993Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485106, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.472383Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485281, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.595876Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485421, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.704553Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485568, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.821432Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485680, txId: 18446744073709551615] shutting down 2024-11-21T08:58:05.947866Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485792, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_where-default.txt-Plan] [GOOD] >> test.py::test[pg-select_where-default.txt-Results] >> KqpOlapAggregations::Aggregation_MaxL [GOOD] >> KqpOlapStats::DescibeTableStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxL [GOOD] Test command err: Trying to start YDB, gRPC: 20135, MsgBus: 23546 2024-11-21T08:57:54.096251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654195773615197:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:54.096365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047bc/r3tmp/tmprAnISr/pdisk_1.dat 2024-11-21T08:57:54.139171Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20135, node 1 2024-11-21T08:57:54.148170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:54.148181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:54.148183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:54.148242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23546 TClient is connected to server localhost:23546 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:54.197762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:54.197783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:54.198845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:54.221140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:54.230792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:54.240291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.240361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.240405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:54.240421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:54.240441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:54.240460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:54.240481Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:54.240506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:54.240524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:54.240548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.240567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:54.240586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654195773615852:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:54.241095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:54.241109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:54.241119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:54.241128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:54.241142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:54.241150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:54.241158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:54.241170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:54.241182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:54.241189Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:54.241194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:54.241201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:54.241250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:54.241259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:54.241273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:54.241281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:54.241290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:54.241294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:54.241307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:54.241315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:54.241326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:54.241333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:54.243871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654195773615862:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:54.243889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654195773615862:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:54.243909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654195773615862:2291];tablet_id=7207518622 ... 
DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:04.739681Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:04.929233Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:04.929259Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.022069Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.022098Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.104950Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.104982Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.192598Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.192633Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.296533Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:05.317655Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:05.350980Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.351013Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.433483Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.433530Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.516036Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.516058Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.598674Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.598702Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.681292Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. 
SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.681320Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.744395Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:05.786974Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.786992Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:05.935080Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:05.935106Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.028463Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.028489Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.110561Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.110584Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.192575Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.192596Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1882:3003], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jd6z2h367xgcdzqa3nrfhwgm. SessionId : ydb://session/3?node_id=2&id=NGZhOGFmZjgtOWU3MmY1NDYtMWQyNmMwZTUtMzViNGI1MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.285016Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:06.305753Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 11321, MsgBus: 23449 2024-11-21T08:58:06.008801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654246072660607:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:06.008904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004787/r3tmp/tmpnEWxA9/pdisk_1.dat 2024-11-21T08:58:06.064286Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11321, node 1 2024-11-21T08:58:06.073329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:06.073344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:06.073346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:06.073384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23449 TClient is connected to server localhost:23449 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:58:06.108128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:06.108157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:58:06.109295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:06.139312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:06.149223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:06.158033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.158075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.158108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.158133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.158154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.158176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.158198Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.158233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.158255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.158273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.158298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.158322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654246072661100:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.161528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.161550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.161575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.161591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.161607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.161623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.161644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.161664Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.161685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.161705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.161725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.161745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654246072661101:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.164796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.164813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.164836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.164856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.164879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.164899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.164920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.164939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654246072661104:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.164960Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:74396542460726611 ... line=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:06.169567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:06.169590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:06.169598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:06.169611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:06.169619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.169629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:06.169636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:06.169649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:06.169656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:06.169665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:06.169672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; 2024-11-21T08:58:06.308192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654246072661381:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:06.308238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:06.308315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654246072661408:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:06.309080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:06.311238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654246072661410:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:06.476190Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179486366, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 
'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1290) '('"_id" '"cf377c-bb5e2286-5d3743db-5f421849") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $20 (Int32 '"6")) (let $21 (Just $20)) (let $22 (Int32 '1)) (let $23 '($21 $22)) (let $24 (If (== $20 (Int32 '2147483647)) $23 '((+ $21 $22) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($23 $24)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $6)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id" '"jsondoc" '"jsonval")) (let $27 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $28 (OptionalType (DataType 'JsonDocument))) (let $29 (DataType 'Json)) (let $30 (TupleType (DataType 'Uint8) (DataType 'String))) (let $31 '((ResourceType '"JsonPath"))) (let $32 (ResourceType '"JsonNode")) (let $33 (DictType $11 $32)) (let $34 '($33)) (let $35 (CallableType '() '((VariantType (TupleType $30 (OptionalType (DataType 'Double))))) '($28) $31 $34)) (let $36 '('('"strict"))) (let $37 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $35 (VoidType) '"" $36)) (let $38 (CallableType '() $31 '($11))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.obj.obj_col2_int"))) (let $41 (Dict $33)) (let $42 (lambda '($53) (block '( (let $54 (Nothing $7)) (return $54) )))) (let $43 (lambda '($55) (If (Exists $55) (SafeCast $55 $7) (Nothing $7)))) (let $44 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 $26 '() $27 (lambda '($45) (block '( (let $46 (StructType $10 '('"jsondoc" $28) '('"jsonval" (OptionalType $29)))) (let $47 (KqpOlapApply $46 '('"jsondoc") (lambda '($50) (block '( (let $51 (Apply $37 $50 $40 $41)) (let $52 (Nothing $7)) (return (Visit $51 '0 $42 '1 $43)) ))))) (let $48 '('eq $47 (Int32 '"16"))) (let $49 '('?? 
$48 (Bool 'false))) (return (KqpOlapFilter $45 $49)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($56 $57 $58) (block '( (let $59 (OptionalType $32)) (let $60 (CallableType '() '((VariantType (TupleType $30 $12))) '($59) $31 $34)) (let $61 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $60 (VoidType) '"" $36)) (let $62 (IfPresent $58 (lambda '($67) (block '( (let $68 '($29 '"" '1)) (let $69 (CallableType '() '($32) $68)) (let $70 (Udf '"Json2.Parse" (Void) (VoidType) '"" $69 (VoidType) '"" '())) (return (Just (Apply $70 $67))) ))) (Nothing $59))) (let $63 (Apply $61 $62 $40 $41)) (let $64 (Visit $63 '0 (lambda '($71) (Nothing $12)) '1 (lambda '($72) $72))) (let $65 (Apply $37 $57 $40 $41)) (let $66 (Visit $65 '0 $42 '1 $43)) (return (AsStruct '('"column1" $64) '('"column2" $66) '('"id" $56))) )))))) ))) '('('"_logical_id" '1361) '('"_id" '"490d3fc7-3a4fc96d-5bdbcb1c-67a28a5b")))) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($73) $73) '('('"_logical_id" '2334) '('"_id" '"8f2e7014-af351b0b-282cb5fd-69e8e528")))) (let $16 '('"id" '"column1" '"column2")) (let $17 (DqCnResult (TDqOutput $15 '0) $16)) (let $18 (KqpTxResultBinding $9 '0 '0)) (let $19 (KqpPhysicalTx '($13 $15) '($17) '('($5 $18)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $19) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $7) $10)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::DescibeTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 18693, MsgBus: 31481 2024-11-21T08:57:56.511252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654201225395772:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:56.511378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b4/r3tmp/tmp6fFKda/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18693, node 1 2024-11-21T08:57:56.565858Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:56.565993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:56.566003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:56.566004Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:56.566028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31481 TClient is connected to server localhost:31481 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:56.612163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:56.612188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:56.613334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:56.636229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:56.772643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654201225396381:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.772692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:56.774961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:56.781529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:56.781583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:56.781632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:56.781663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:56.781694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:56.781723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:56.781756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:56.781784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:56.781815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:56.781845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:56.781873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:56.781905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654201225396448:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:56.782378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:56.782394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:56.782410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:56.782423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:56.782440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:56.782451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:56.782461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:56.782474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:56.782484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:56.782487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:56.782499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:56.782503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:56.782561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:56.782575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:56.782590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:56.782601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:56.782612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:56.782618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:56.782634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:56.782644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:56.782655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:56.782662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE ... olumnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=25392;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=25392;columns=3; 2024-11-21T08:58:01.511608Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654201225395772:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:01.511631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Analyze] |91.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |91.4%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |91.4%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> KqpOlapAggregations::Aggregation_ProjectionOrder [GOOD] >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters >> TFileStoreWithReboots::SimultaneousCreateDropNfs >> test.py::test[pg-select_win_ntile-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2024-11-21T08:55:21.349888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653537896227918:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:21.349906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b73/r3tmp/tmp6TR0vZ/pdisk_1.dat 2024-11-21T08:55:21.476354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:55:21.489440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:55:21.489479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:55:21.490926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24465, node 1 2024-11-21T08:55:21.560556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:55:21.560571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:55:21.560573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:55:21.560616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:55:21.611578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:21.612826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:55:21.612851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:55:21.614554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T08:55:21.614637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T08:55:21.614647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T08:55:21.615478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T08:55:21.615490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T08:55:21.615907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:55:21.620907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:55:21.621223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732179321663, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T08:55:21.621236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T08:55:21.621305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T08:55:21.623237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T08:55:21.623317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T08:55:21.623337Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T08:55:21.623357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T08:55:21.623374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T08:55:21.623396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T08:55:21.623956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T08:55:21.623990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T08:55:21.623998Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T08:55:21.624022Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T08:55:26.349989Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439653537896227918:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:55:26.350009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:55:36.465096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:55:36.465113Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:21.490369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T08:56:21.490430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T08:56:21.490455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2024-11-21T08:57:21.490654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T08:57:21.490700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T08:57:21.490732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ProjectionOrder [GOOD] >> KqpOlapAggregations::Aggregation_Sum [GOOD] Test command err: Trying to start YDB, gRPC: 61746, MsgBus: 21794 2024-11-21T08:57:50.169075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654175819760969:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:50.169375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047c2/r3tmp/tmp3RgMfF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61746, node 1 2024-11-21T08:57:50.226554Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:50.228463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:50.228474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:50.228476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:50.228513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21794 TClient is connected to server localhost:21794 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:50.270050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:50.270074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:50.271228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:50.295607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:50.303141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:50.313843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:50.313909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:50.313959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:50.313985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:50.314008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:50.314031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:50.314052Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:50.314074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:50.314109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:50.314131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:50.314152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:50.314173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654175819761626:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:50.317648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:50.317674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:50.317714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:50.317734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:50.317758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:50.317779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:50.317798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:50.317819Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:50.317840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:50.317860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:50.317880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:50.317901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654175819761627:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:50.318338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:50.318350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:50.318365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:50.318374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:50.318392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:50.318401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:50.318410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:50.318420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:50.318428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:50.318434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:50.318440Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.587641Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.725105Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.725140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.787419Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.787451Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.850367Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.850390Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:06.912339Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:06.912372Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. 
Database : . }. wakeup with tag 2 2024-11-21T08:58:06.974421Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:06.995211Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:07.046696Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.046726Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.108399Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.108425Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.170071Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.170114Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.231990Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.232021Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.293870Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.293898Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.319680Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:07.446432Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.446457Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.543200Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.543230Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.626048Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.626074Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.712536Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.712574Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. 
TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.796908Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.796946Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NDg4NjkwMDgtN2RkYTY3ZjUtYTM4NTM0NjgtMmZjMjY3OTQ=. TraceId : 01jd6z2eajayfwbfvzgp2eqa6c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.849101Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:07.880232Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |91.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_DifferentLvlOfFilters [GOOD] Test command err: Trying to start YDB, gRPC: 23971, MsgBus: 20538 2024-11-21T08:58:08.303305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654255641267821:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:08.303489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004783/r3tmp/tmp7i4fxN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23971, node 1 2024-11-21T08:58:08.350333Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:08.354057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:08.354073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:08.354074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:08.354105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20538 TClient is connected to server localhost:20538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:08.404821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:08.404849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:08.405937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:08.434038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:08.440388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:08.465872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:08.482501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:08.482574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:08.482617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:08.482641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:08.482668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:08.482687Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:08.482708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:08.482731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:08.482757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:08.482782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:08.482808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:08.482829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654255641268467:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:08.488561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:08.488583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:08.488606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:08.488616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:08.488626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:08.488636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:08.488648Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:08.488661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:08.488676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:08.488687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:08.488701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:08.488711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654255641268468:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:08.490653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:08.490669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:08.490689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:08.490700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:08.490713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:08.490725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:08.490738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:08.490750Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654255641268469:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... tion=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:08.493351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:08.493353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:08.493442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:08.493449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:08.493454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:08.493456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:08.493464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:08.493469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:08.493474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:08.493476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:08.493480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:08.493483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:08.493486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:08.493488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:08.493501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:08.493515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:08.493523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:08.493525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:08.493531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:08.493537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:08.493545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:08.493551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:08.493557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:08.493560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:08.493588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:08.493594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:08.493599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:08.493601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:08.493608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:08.493614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:08.493619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:08.493621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:08.493624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:08.493627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:08.493630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:08.493632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:08.493645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:08.493650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:08.493658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:08.493662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:08.493668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:08.493671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:08.493679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:08.493682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:08.493688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:08.493690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:08.531694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 
SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5800;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2024-11-21T08:58:08.582326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654255641268808:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.582351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654255641268804:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.582417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.583025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:08.584620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654255641268818:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum [GOOD] Test command err: Trying to start YDB, gRPC: 1853, MsgBus: 18306 2024-11-21T08:57:55.490087Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654199580833141:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:55.490104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b8/r3tmp/tmp3x11PK/pdisk_1.dat 2024-11-21T08:57:55.534815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1853, node 1 2024-11-21T08:57:55.545474Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:55.545486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:55.545487Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:55.545521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18306 TClient is connected to server localhost:18306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:55.590969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:55.591000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:55.591192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:55.592123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T08:57:55.603268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:55.611901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:55.611954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:55.611980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:55.611996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:55.612010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:55.612024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:55.612038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:55.612057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:55.612073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:55.612090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.612107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:55.612121Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654199580833794:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:55.612545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:55.612560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:55.612571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:55.612580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:55.612601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:55.612610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:55.612619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:55.612628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:55.612642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:55.612651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:55.612662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:55.612671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:55.612719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:55.612730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:55.612749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:55.612757Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:55.612772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:55.612781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:55.612800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:55.612809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:55.612819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:55.612827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:55.614587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654199580833795:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:55.614609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654199580833795:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:55.614623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654199580833795:2289];tablet_id=720751862240 ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.033542Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.188893Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.188918Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:58:07.271248Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.271276Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.354434Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.354465Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.436920Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.436949Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.510435Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:07.532515Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:07.600180Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.600230Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.684625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. 
TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.684655Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.767757Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.767790Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.850116Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.850140Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.936662Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:07.936694Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:07.979762Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:08.119574Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:08.119599Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:58:08.223015Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:08.223038Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:08.305309Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:08.305342Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:08.387703Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:08.387728Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:08.470360Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:08.470391Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGI3Nzk0MjEtOTZiOTQ0MS0yMTlkY2Q3Mi1jNTY2MWRiZg==. TraceId : 01jd6z2jgnfbf9m20n884j4dmv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:58:08.532204Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:08.553024Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[pg-select_win_ntile-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-ForceBlocks] |91.4%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |91.4%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> KqpOlapCompression::OffCompression >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |91.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> KqpOlapAggregations::JsonDoc_Exists >> test.py::test[pg-select_win_ntile-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Results] |91.4%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 12848, MsgBus: 3881 2024-11-21T08:58:07.710765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654247977018816:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:07.710825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004784/r3tmp/tmpBexvHE/pdisk_1.dat 2024-11-21T08:58:07.768297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12848, node 1 2024-11-21T08:58:07.780379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:07.780394Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:07.780396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T08:58:07.780440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3881 2024-11-21T08:58:07.812201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:07.812245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:07.813471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:07.829369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:07.838190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:07.847735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:07.847793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:07.847848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:07.847876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:07.847895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:07.847915Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:07.848541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:07.848572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:07.848590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:07.848614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:07.848631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:07.848658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654247977019313:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:07.849230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:07.849239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:07.849250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:07.849257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:07.849271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:07.849276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:07.849284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:07.849296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:07.849305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:07.849309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:07.849314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:07.849318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:07.849377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:07.849383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:07.849399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:07.849403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:07.849418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:07.849423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:07.849438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:07.849442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:07.849451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:07.849454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:07.853320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654247977019314:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:07.853342Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654247977019314:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:07.853386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654247977019314:2289];tablet_id=7207518622403 ... s=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:07.862656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:07.862659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:07.862666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:07.862670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:07.862677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:07.862680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:07.862685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:07.862688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:07.862725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:07.862729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:07.862743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:07.862746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:07.862757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:07.862760Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:07.862774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:07.862777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:07.862786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:07.862789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T08:58:08.010071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654252271986900:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.010102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.010197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654252271986914:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:08.011011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:08.013168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654252271986916:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:58:10.685826Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179488067, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"39186e29-e4604ecf-1caf9956-953c33b3") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType 
(ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"733f6966-7849b36e-982870f-4ff76c69") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"5809e6f7-20105cf9-bb215e91-8d748cc8") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"d45c8507-28c98da6-976cdaf-21a1fd8f")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index >> KqpOlap::PredicatePushdownCastErrors [GOOD] >> KqpOlapCompression::OffCompression [GOOD] |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index >> KqpOlapAggregations::Aggregation_Sum_NullMix >> KqpOlapBlobsSharing::BlobsSharingSplit1_1 >> KqpOlapAggregations::JsonDoc_Exists [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapCompression::OffCompression [GOOD] Test command err: Trying to start YDB, gRPC: 26479, MsgBus: 6937 2024-11-21T08:58:11.002067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654267475895220:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:11.002230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004782/r3tmp/tmpbH6ygF/pdisk_1.dat 2024-11-21T08:58:11.053831Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26479, node 1 2024-11-21T08:58:11.062743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:11.062759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:11.062761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:11.062800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6937 TClient is connected to server localhost:6937 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T08:58:11.103625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:11.103657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:58:11.104804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:11.132128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/StandaloneTable` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) PARTITION BY HASH(pk_int) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:11.279279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654267475895832:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.279313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.311157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:11.318166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.318198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.318232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.318251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.318269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.318288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.318298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.318319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.318342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.318362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.318380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.318400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267475895909:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:58:11.318870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.318884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.318896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.318900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.318922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.318926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.318934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.318945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.318954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.318958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.318963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.318965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:11.319016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:11.319032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:11.319045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:11.319048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:58:11.319057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:11.319061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:11.319076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:11.319079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:11.319088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:11.319096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:11.368321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654267475896040:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.368344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.369748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 CREATE TABLESTORE `/Root/TableStoreTest` (pk_int Uint64 NOT NULL, PRIMARY KEY (pk_int)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:11.380200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654267475896067:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.380240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.382192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T08:58:11.386830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.386868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.386917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.386942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.386962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.386978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.386997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.387018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.387042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.387065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.387092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.387113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267475896092:2371];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:58:11.387629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.387644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.387655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.387659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.387679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.387683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.387691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.387702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.387710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.387719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.387725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.387733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:11.387775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:11.387786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:11.387801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:11.387805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:58:11.387816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:11.387826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:11.387842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:11.387851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:11.387862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:11.387871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:11.438195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654267475896131:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.438223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.439810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownCastErrors [GOOD] Test command err: Trying to start YDB, gRPC: 5120, MsgBus: 32569 2024-11-21T08:57:44.801921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654150109211885:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.802162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047fd/r3tmp/tmp6jHozM/pdisk_1.dat 2024-11-21T08:57:44.869386Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5120, node 1 2024-11-21T08:57:44.875619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:44.875631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:44.875632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:44.875678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32569 2024-11-21T08:57:44.902502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.902534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.903600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:44.921225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:44.930252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:44.937638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:44.937692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:44.937717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:44.937739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:44.937758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:44.937772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:44.937788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:44.937799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:44.937818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:44.937843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.937860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:44.937871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654150109212509:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:44.938198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:44.938211Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:44.938219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:44.938222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:44.938235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:44.938243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:44.938249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:44.938252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:44.938259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:44.938268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:44.938274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:44.938283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:44.938326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:44.938336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:44.938350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:44.938359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:44.938370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:44.938378Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:44.938390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:44.938397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:44.938403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:44.938409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:44.987458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732179464967 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ... otation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int16 '==' Optional 2024-11-21T08:58:11.199874Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjgzZWM1NmYtZDVmZWEyZGQtNmY0ODc1ZjItYzQ4ZGIxODQ=, ActorId: [1:7439654266073415122:11021], ActorState: ExecuteState, TraceId: 01jd6z2ybx200wg7aqpjxqsqn1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.204115Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415136:11003], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int32 '==' Optional 2024-11-21T08:58:11.204231Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2NlYzlhOWMtM2VkYjAxZmQtNGFkOTY2ZmEtYmQ0M2Y3ODI=, ActorId: [1:7439654266073415134:11002], ActorState: ExecuteState, TraceId: 01jd6z2yc09a9m6249xmmc8q52, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.208078Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415148:10983], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int64 '==' Optional 2024-11-21T08:58:11.208181Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWFkN2E5MGQtNDk0YzFjNWMtNmU3ZGJlNS03ZWRjZjJjZQ==, ActorId: [1:7439654266073415146:10971], ActorState: ExecuteState, TraceId: 01jd6z2yc5ahptz4j8181ng1fd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.215954Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415160:10981], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint8 '==' Optional 2024-11-21T08:58:11.216105Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmNjMmFmZTAtNDYwYmMzMjUtNmQ5MGI5YzUtNmRmOTQ1MGQ=, ActorId: [1:7439654266073415158:10968], ActorState: ExecuteState, TraceId: 01jd6z2ycc34awvw6nx70r98yz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt16; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.220181Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415172:10965], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint16 '==' Optional 2024-11-21T08:58:11.220289Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzdiNDBhMTEtZDk4NmQ5YmEtMmM2OThjOTItMmE1OWZkODE=, ActorId: [1:7439654266073415170:10995], ActorState: ExecuteState, TraceId: 01jd6z2ych7rhd7jgv1ef4ghv3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.224292Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415184:10961], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint32 '==' Optional 2024-11-21T08:58:11.224882Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjljODUzZDctNjAxYTM1NDEtYWRjM2QyZWUtYTdiZWQ4Nw==, ActorId: [1:7439654266073415182:10984], ActorState: ExecuteState, TraceId: 01jd6z2ycn27bx5tsp10t7ykqj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.229243Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415196:10944], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint64 '==' Optional 2024-11-21T08:58:11.229348Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM0OWExNmQtYjUzY2I2MDYtOTI3MDQ2NDktYzMxNGFmYmQ=, ActorId: [1:7439654266073415194:10959], ActorState: ExecuteState, TraceId: 01jd6z2ycs18wjr8nhcd8pbt2p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Double; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.233304Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415208:10956], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
<main>:4:31: Error: At function: Filter, At function: Coalesce
<main>:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Double '==' Optional 2024-11-21T08:58:11.233394Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njg4ZjQ3N2QtZDEzYTIxZmUtNzJhYTdlZS1iZmEyNDQ0YQ==, ActorId: [1:7439654266073415206:10942], ActorState: ExecuteState, TraceId: 01jd6z2ycy7km0sck1faby7cs2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Float; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.237327Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415220:10939], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Float '==' Optional 2024-11-21T08:58:11.237427Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQyYjkzNmEtODk3YWNkZDQtMTI5YjJhYmItYTE3OGY5NDU=, ActorId: [1:7439654266073415218:10979], ActorState: ExecuteState, TraceId: 01jd6z2yd23dcp54rbg80pr3zj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS String; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.240771Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415232:10922], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: String '==' Optional 2024-11-21T08:58:11.240842Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNhMjU2YjItNGY4MjJmMDItNmNmMzgwOC1mOGU0MTZm, ActorId: [1:7439654266073415230:11000], ActorState: ExecuteState, TraceId: 01jd6z2yd67k6makt5amnagzpq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Utf8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.244088Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654266073415244:10906], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Utf8 '==' Optional 2024-11-21T08:58:11.244171Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmU4YmQxZmEtZmFjYTQ4MmEtZjNkMjA0MTktMjEyZjc3MjI=, ActorId: [1:7439654266073415242:10908], ActorState: ExecuteState, TraceId: 01jd6z2yd9bpt6w696ba6jx2ra, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Timestamp; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.281259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179491203, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Date; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.341256Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179491315, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Datetime; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T08:58:11.396347Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179491378, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 16188, MsgBus: 61369 2024-11-21T08:58:11.290746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654266536465414:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:11.290797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00477e/r3tmp/tmpJcmD7U/pdisk_1.dat 2024-11-21T08:58:11.347656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16188, node 1 2024-11-21T08:58:11.354113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:11.354132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:11.354134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:11.354169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61369 TClient is connected to server localhost:61369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:11.391746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:11.391778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:11.392897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:11.398388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:11.407925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:11.415150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.415212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.415246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.415266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.415281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.415295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.415310Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.415322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.415337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.415356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.415371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.415386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654266536466048:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:11.418468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.418495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.418535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.418556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.418578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.418597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.418616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.418636Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.418657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.418677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.418697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.418717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654266536466049:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:11.419141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.419158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.419169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.419177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.419192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.419200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.419210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.419227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.419241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.419249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.419255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... Id;id=CleanGranuleId; 2024-11-21T08:58:11.426929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.426937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.426943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.426952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.426957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.426965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:11.426988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:11.426998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:11.427011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:11.427019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.427030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:11.427038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:11.427050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:11.427058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:11.427066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:11.427074Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; 2024-11-21T08:58:11.552241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654266536466329:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.552261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654266536466356:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.552265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:11.552783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:11.554091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654266536466358:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:58:11.703518Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179491609, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1256) '('"_id" '"cbe89fe8-3e58b66e-55037a74-c1b5238f") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '"6")) (let $19 (Just $18)) (let $20 (Int32 '1)) (let $21 '($19 $20)) (let $22 (If (== $18 (Int32 '2147483647)) $21 '((+ $19 $20) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply 
(Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (OptionalType (DataType 'Bool))) (let $11 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 (DataType 'Json)) (let $28 '($10)) (let $29 '((ResourceType '"JsonPath"))) (let $30 (DataType 'Utf8)) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $30 $31)) (let $33 '($32)) (let $34 (CallableType '() $28 '($26) $29 $33 $28)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $29 '($30))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (Bool 'false)) (let $42 (Just $41)) (let $43 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $27)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($48) (Apply $36 $48 $39 $40 $42)))) (let $47 '('?? $46 $41)) (return (KqpOlapFilter $44 $47)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($49 $50 $51) (block '( (let $52 (OptionalType $31)) (let $53 (CallableType '() $28 '($52) $29 $33 $28)) (let $54 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $53 (VoidType) '"" $35)) (let $55 (IfPresent $51 (lambda '($58) (block '( (let $59 '($27 '"" '1)) (let $60 (CallableType '() '($31) $59)) (let $61 (Udf '"Json2.Parse" (Void) (VoidType) '"" $60 (VoidType) '"" '())) (return (Just (Apply $61 $58))) ))) (Nothing $52))) (let $56 (Apply $54 $55 $39 $40 $42)) (let $57 (Apply $36 $50 $39 $40 $42)) (return (AsStruct '('"column1" $56) '('"column2" $57) '('"id" $49))) )))))) ))) '('('"_logical_id" '1326) '('"_id" '"1d3ac257-f708ac2-f7487c4f-dc896450")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($62) $62) '('('"_logical_id" '2016) '('"_id" '"6c8c620e-3a970ed7-f57c94fe-e7a6c51a")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $8 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $10) '('"column2" $10) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> test.py::test[pg-select_win_ntile-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Analyze] >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 4572, MsgBus: 3739 2024-11-21T08:58:11.833624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654267149256050:2053];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T08:58:11.833834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00477a/r3tmp/tmp1mR69g/pdisk_1.dat 2024-11-21T08:58:11.877823Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4572, node 1 2024-11-21T08:58:11.891823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:11.891839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:11.891840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:11.891871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3739 TClient is connected to server localhost:3739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:11.934739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:11.934773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:11.935807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:11.961289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:11.967056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:11.975472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.975551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.975601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.975624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.975643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.975666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.975688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.975708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.975727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.975748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.975766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.975785Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654267149256694:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:11.976312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.976329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.976340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.976345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.976360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.976370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.976380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.976394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.976408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.976417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.976424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.976432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:11.976497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:11.976508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:11.976526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:11.976534Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.976544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:11.976552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:11.976568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:11.976577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:11.976587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:11.976595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:11.979627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267149256696:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.979649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267149256696:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.979691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654267149256696:2289];tablet_id=720751862240378 ... 
;tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.987673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654267149256740:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.987693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654267149256740:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.987712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654267149256740:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:11.988173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.988187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.988198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.988225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.988246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.988255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.988264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.988273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.988282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.988289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.988296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.988305Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:11.988348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:11.988360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:11.988375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:11.988384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.988395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:11.988403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:11.988418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:11.988428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:11.988438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:11.988446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; 2024-11-21T08:58:12.144526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654271444224291:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:12.144550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654271444224287:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:12.144567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:12.145286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:12.146785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654271444224301:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T08:58:12.392289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179492197, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int64)))))) (let $1 (OptionalType (DataType 'Int64))) (let $2 '('('"_logical_id" '663) '('"_id" '"c115bf15-cca2aba1-f5fbaf8a-e27cc843") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'sum '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Sum0 $28)))) (Nothing (OptionalType (StructType '('Sum0 $1)))) (lambda '($29 $30) $20) (lambda '($31 
$32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Member $33 'Sum0)))))))) ))) '('('"_logical_id" '1334) '('"_id" '"994186c3-f5f35eb4-b0ff5647-d2e2308a")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1430) '('"_id" '"67193028-3b339760-af1d4f20-8ee5407c") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 >> KqpOlap::OlapRead_ScanQuery >> test.py::test[pg-sublink_projection_array-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Debug] >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 10703, MsgBus: 25725 2024-11-21T08:58:12.653019Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654269958661130:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:12.653374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004771/r3tmp/tmpk3faLn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10703, node 1 2024-11-21T08:58:12.729853Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:12.733267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:12.733276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:12.733278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:12.733321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25725 2024-11-21T08:58:12.754260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:12.754288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:12.755392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25725 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:12.792835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:12.795021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:12.801883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:12.818027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:12.818114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:12.818172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:12.818199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:12.818222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:12.818243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:12.818265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:12.818286Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:12.818319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:12.818343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:12.818366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:12.818387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654269958661787:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:12.826352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:12.826390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:12.826447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:12.826466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:12.826485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:12.826502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:12.826517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:12.826535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:12.826551Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:12.826568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:12.826590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:12.826607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654269958661788:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:12.830267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:12.830300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:12.830340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:12.830369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:12.830386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:12.830409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:12.830426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:12.830451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654269958661789:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
d=Chunks; 2024-11-21T08:58:12.837791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:12.837795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:12.837812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:12.837816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:12.837823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:12.837828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:12.837838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:12.837842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:12.837848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:12.837852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:12.837886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:12.837891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:12.837908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:12.837912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:12.837923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:12.837928Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:12.837944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:12.837948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:12.837959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:12.837962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:12.860643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:58:13.107658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654274253629524:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.107665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654274253629534:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.107695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.108251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:13.109806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654274253629538:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:58:13.248939Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179493163, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"25c2283a-59b376ac-1a5f445a-a05632f3") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"96e7db27-5f32a0f3-bcea037d-fedf722")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"e0d1c629-7b6921a8-c923a9f-7c2a145") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::OlapRead_ScanQuery [GOOD] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_ScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 7010, MsgBus: 11472 2024-11-21T08:58:13.352780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654277271873318:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:13.352795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004769/r3tmp/tmp8OsOow/pdisk_1.dat 2024-11-21T08:58:13.399490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7010, node 1 2024-11-21T08:58:13.410827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:13.410839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:13.410841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:13.410875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11472 TClient is connected to server localhost:11472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:13.454036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:13.454069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:13.455164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:13.483081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:13.495472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:13.504169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.504252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.504290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:13.504310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:13.504322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:13.504346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:13.504364Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:13.504380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:13.504406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:13.504434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.504456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:13.504487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654277271873961:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:13.507450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.507486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.507526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:13.507549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:13.507565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:13.507583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:13.507609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:13.507626Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:13.507643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:13.507666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.507683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:13.507704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654277271873962:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:13.510984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.511012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.511047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:13.511069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:13.511097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:13.511118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:13.511141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:13.511161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654277271873963:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:13.511184Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654277271873963 ... s; 2024-11-21T08:58:13.539895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:13.539897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:13.539905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:13.539910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:13.539916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:13.539918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:13.542047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.542068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.542089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:13.542100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:13.542110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:13.542123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:13.542139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:13.542153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:13.542169Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:13.542182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.542195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:13.542208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654277271874215:2363];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:13.542728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:13.542740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:13.542749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:13.542753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:13.542775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:13.542784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:13.542789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:13.542792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:13.542797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:13.542800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:13.542804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:13.542807Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:13.542836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:13.542844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:13.542853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:13.542855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.542862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:13.542868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:13.542878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:13.542887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:13.542898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:13.542905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:13.546975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T08:58:13.625626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654277271874461:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.625679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.632263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654277271874478:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.632286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654277271874483:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.632288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:13.632953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T08:58:13.634274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654277271874485:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T08:58:13.783103Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179493688, txId: 18446744073709551615] shutting down >> test.py::test[pg-sublink_projection_array-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-ForceBlocks] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpOlapStats::AddRowsTableStandalone >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> KqpOlapAggregations::BlockGenericWithDistinct >> KqpOlapTiering::Eviction [GOOD] >> KqpOlapStatistics::StatsUsageWithTTL >> test.py::test[pg-sublink_projection_array-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Plan] >> test.py::test[pg-sublink_projection_array-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Results] |91.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapTiering::Eviction [GOOD] Test command err: Trying to start YDB, gRPC: 6173, MsgBus: 2855 2024-11-21T08:57:47.619857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654163864505692:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:47.619956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047db/r3tmp/tmpS7rUPW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6173, node 1 2024-11-21T08:57:47.672558Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:47.676925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:47.676937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:47.676938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:47.676971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2855 TClient is connected to server localhost:2855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:47.720756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:47.720782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:47.721900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:47.750820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:47.870839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:47.884145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.884172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.884183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.884225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.884232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.884249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.884253Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.884269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.884272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.884280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.884289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.884294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.884305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.884307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.884322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.884327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.884369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.884369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.884403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.884406Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.884427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.884427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.884467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654163864506382:2299];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.884467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654163864506381:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:47.886879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.886900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.886926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.886947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.886967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.886986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.887010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.887030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:2301];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.887051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654163864506416:23 ... 
BACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1172808;columns=5; 2024-11-21T08:57:51.872493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b0959d60-a7e611ef-86e08209-fb81eda0;fline=with_appended.cpp:80;portions=297,;task_id=b0959d60-a7e611ef-86e08209-fb81eda0; 2024-11-21T08:57:51.872980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b095b0de-a7e611ef-b46985f3-6592fc1e;fline=with_appended.cpp:80;portions=297,;task_id=b095b0de-a7e611ef-b46985f3-6592fc1e; 2024-11-21T08:57:51.873475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b095ad82-a7e611ef-814ed93c-4d8efd45;fline=with_appended.cpp:80;portions=297,;task_id=b095ad82-a7e611ef-814ed93c-4d8efd45; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1172808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1172808;columns=5; 2024-11-21T08:57:51.898135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b0998772-a7e611ef-9025c4d6-3fefaf3;fline=with_appended.cpp:80;portions=300,;task_id=b0998772-a7e611ef-9025c4d6-3fefaf3; 2024-11-21T08:57:51.898584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b0998768-a7e611ef-b8691018-1557205e;fline=with_appended.cpp:80;portions=300,;task_id=b0998768-a7e611ef-b8691018-1557205e; 2024-11-21T08:57:51.899640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b099988e-a7e611ef-9647c9b8-df6e3ce6;fline=with_appended.cpp:80;portions=300,;task_id=b099988e-a7e611ef-9647c9b8-df6e3ce6; WAIT_COMPACTION: 300 2024-11-21T08:57:52.620024Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654163864505692:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:52.620071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 waiting actualization: 0/0.000011s waiting actualization: 0/1.000091s waiting actualization: 0/2.000185s waiting actualization: 0/3.000273s waiting actualization: 0/4.000364s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2024-11-21T08:58:02.667951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:02.667975Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:02.913566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179481938, txId: 281474976715826] shutting down TierName: __DEFAULT RawBytes: 118826890 2024-11-21T08:58:02.918436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715831:0, at schemeshard: 72057594046644480 waiting actualization: 0/0.000011s waiting actualization: 0/1.000101s 2024-11-21T08:58:04.918554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b85b0b84-a7e611ef-a8385008-173852dd;fline=with_appended.cpp:80;portions=301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,;task_id=b85b0b84-a7e611ef-a8385008-173852dd; 2024-11-21T08:58:04.920609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b85ad06a-a7e611ef-92e61341-d9d0d48c;fline=with_appended.cpp:80;portions=301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,;task_id=b85ad06a-a7e611ef-92e61341-d9d0d48c; 2024-11-21T08:58:04.924239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b85b1f5c-a7e611ef-b5c3a41d-475b4913;fline=with_appended.cpp:80;portions=301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,;task_id=b85b1f5c-a7e611ef-b5c3a41d-475b4913; waiting actualization: 0/2.000185s 2024-11-21T08:58:05.919542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b8f38abc-a7e611ef-bfce0765-6f60fa19;fline=with_appended.cpp:80;portions=344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,;task_id=b8f38abc-a7e611ef-bfce0765-6f60fa19; 2024-11-21T08:58:05.920878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b8f3b082-a7e611ef-97f0bb5b-9dc5e354;fline=with_appended.cpp:80;portions=344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,;task_id=b8f3b082-a7e611ef-97f0bb5b-9dc5e354; waiting actualization: 0/3.000298s 2024-11-21T08:58:05.926390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b8f3c982-a7e611ef-b2884c7a-d92f2162;fline=with_appended.cpp:80;portions=344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,;task_id=b8f3c982-a7e611ef-b2884c7a-d92f2162; 2024-11-21T08:58:06.912708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b98c4586-a7e611ef-a8b7f6b0-d3d064c7;fline=with_appended.cpp:80;portions=387,388,389,390,391,392,393,394,395,396,397,398,399,400,;task_id=b98c4586-a7e611ef-a8b7f6b0-d3d064c7; 2024-11-21T08:58:06.913587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=b98c5904-a7e611ef-a33eb639-e1957f87;fline=with_appended.cpp:80;portions=387,388,389,390,391,392,393,394,395,396,397,398,399,400,;task_id=b98c5904-a7e611ef-a33eb639-e1957f87; 2024-11-21T08:58:06.915266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b98c6c00-a7e611ef-a7512c91-b9006058;fline=with_appended.cpp:80;portions=387,388,389,390,391,392,393,394,395,396,397,398,399,400,;task_id=b98c6c00-a7e611ef-a7512c91-b9006058; waiting actualization: 0/4.000391s ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM 
`/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2024-11-21T08:58:08.913681Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179487965, txId: 281474976715849] shutting down TierName: tier1 RawBytes: 118826890 2024-11-21T08:58:08.928079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715853:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 300 2024-11-21T08:58:09.932833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=bb56e3da-a7e611ef-b56afdc1-939f203c;fline=with_appended.cpp:80;portions=401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,;task_id=bb56e3da-a7e611ef-b56afdc1-939f203c; 2024-11-21T08:58:09.934079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=bb56ce54-a7e611ef-9907a2a5-5741890b;fline=with_appended.cpp:80;portions=401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,;task_id=bb56ce54-a7e611ef-9907a2a5-5741890b; 2024-11-21T08:58:09.936331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=bb56ce4a-a7e611ef-98baf2cf-93ce904d;fline=with_appended.cpp:80;portions=401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,;task_id=bb56ce4a-a7e611ef-98baf2cf-93ce904d; WAIT_COMPACTION: 300 2024-11-21T08:58:10.931454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=bbefa2b4-a7e611ef-9a064d65-99e31d0d;fline=with_appended.cpp:80;portions=444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,;task_id=bbefa2b4-a7e611ef-9a064d65-99e31d0d; 2024-11-21T08:58:10.933219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=bbef86e4-a7e611ef-a0f8db6b-5548840a;fline=with_appended.cpp:80;portions=444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,;task_id=bbef86e4-a7e611ef-a0f8db6b-5548840a; 2024-11-21T08:58:10.938435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=bbef86da-a7e611ef-a18c07cd-65fe3a25;fline=with_appended.cpp:80;portions=444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,;task_id=bbef86da-a7e611ef-a18c07cd-65fe3a25; WAIT_COMPACTION: 300 2024-11-21T08:58:11.921288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=bc88449c-a7e611ef-9166bdfc-9b55eacb;fline=with_appended.cpp:80;portions=487,488,489,490,491,492,493,494,495,496,497,498,499,500,;task_id=bc88449c-a7e611ef-9166bdfc-9b55eacb; 2024-11-21T08:58:11.922094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=bc8845f0-a7e611ef-a02c5153-f75af75a;fline=with_appended.cpp:80;portions=487,488,489,490,491,492,493,494,495,496,497,498,499,500,;task_id=bc8845f0-a7e611ef-a02c5153-f75af75a; 2024-11-21T08:58:11.923207Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;task_id=bc8854dc-a7e611ef-acb900ad-c3b38ba3;fline=with_appended.cpp:80;portions=487,488,489,490,491,492,493,494,495,496,497,498,499,500,;task_id=bc8854dc-a7e611ef-acb900ad-c3b38ba3; WAIT_COMPACTION: 300 WAIT_COMPACTION: 300 ==================================== QUERY: SELECT TierName, SUM(ColumnRawBytes) As RawBytes FROM `/Root/olapStore/olapTable/.sys/primary_index_portion_stats` WHERE Activity == 1 GROUP BY TierName RESULT: 2024-11-21T08:58:15.180609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179493986, txId: 281474976715872] shutting down TierName: __DEFAULT RawBytes: 118826890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037891 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037890 >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling |91.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> KqpOlapStatistics::StatsUsageWithTTL [GOOD] >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Results] [GOOD] >> KqpOlapStats::AddRowsTableStandalone [GOOD] |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Analyze] |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> BsControllerConfig::MergeBoxes [GOOD] >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] >> KqpOlapIndexes::CountMinSketchIndex [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Analyze] [GOOD] |91.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> KqpOlapSysView::StatsSysViewAggregation [GOOD] >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Debug] >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 [GOOD] >> KqpOlapBlobsSharing::TableReshardingConsistency64 [FAIL] >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] >> KqpOlapBlobsSharing::ChangeSchemaAndSplit [FAIL] >> 
test.py::test[pg-sublink_where_any_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-ForceBlocks] >> test.py::test[pg-sublink_where_any_corr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Results] >> test.py::test[pg-sublink_where_any_corr-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Analyze] >> test.py::test[pg-tpcds-q05-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Debug] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-tpcds-q05-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Analyze] >> test.py::test[pg-tpcds-q16-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Debug] >> test.py::test[pg-tpcds-q16-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::CountMinSketchIndex [GOOD] Test command err: Trying to start YDB, gRPC: 63372, MsgBus: 16843 2024-11-21T08:58:03.974667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654233465150721:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.974684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478a/r3tmp/tmpc5LkSb/pdisk_1.dat 2024-11-21T08:58:04.022715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63372, node 1 2024-11-21T08:58:04.033569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:04.033585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:04.033587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:04.033624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16843 TClient is connected to server localhost:16843 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:04.075684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:04.075709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:04.076830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:04.077718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:04.088643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:04.098596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.098661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.098715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.098742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.098757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.098774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.098794Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.098819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.098842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.098858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.098873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.098888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.102248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.102273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.102307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.102334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.102355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.102378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.102399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.102420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.102445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.102466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.102488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.102508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654237760118651:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.102887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:04.102901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:04.102910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:04.102913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:04.102923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:04.102929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:04.102935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:04.102945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:04.102951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:04.102957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:04.102961Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... 18446744073709551615;;current_snapshot_ts=1732179484385; 2024-11-21T08:58:16.474729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179196000;tx_id=18446744073709551615;;current_snapshot_ts=1732179484385; 2024-11-21T08:58:16.474734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.474734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.474744Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.474757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474760Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474765Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.474784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.474793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.474808Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179496000 at tablet 72075186224037888 2024-11-21T08:58:16.474818Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179496000 at 
tablet 72075186224037889 2024-11-21T08:58:16.474819Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:16.474821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:16.474824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.474825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.474828Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:58:16.474829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T08:58:16.474833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179196000;tx_id=18446744073709551615;;current_snapshot_ts=1732179484385; 2024-11-21T08:58:16.474833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732179196000;tx_id=18446744073709551615;;current_snapshot_ts=1732179484385; 2024-11-21T08:58:16.474835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.474835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.474838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474840Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474840Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474843Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.474843Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.474847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.474848Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654237760118650:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.474854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654237760118671:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.616489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654237760118650:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.616494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654237760118651:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.619051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654237760118671:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.116739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654237760118650:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.116743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654237760118651:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.119238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654237760118671:2290];fline=actor.cpp:33;event=skip_flush_writing; TClient::Ls request: /Root/olapTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "olapTable" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732179484196 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 19 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 19 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 ColumnTableVersion: 6 ColumnTableSchemaVersion: 6 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 8504856 RowCount: 230000 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ... (TRUNCATED) >>> cms_res_id of class name COUNT_MIN_SKETCH >>> cms_uid of class name COUNT_MIN_SKETCH >>> cms_level of class name COUNT_MIN_SKETCH >>> cms_message of class name COUNT_MIN_SKETCH >>> cms_ts of class name COUNT_MIN_SKETCH >>> shard actual id: 72075186224037888 >>> path id: 2 >>> shard actual id: 72075186224037889 >>> path id: 2 >>> shard actual id: 72075186224037890 >>> path id: 2 >>> sketch.GetElementCount() = 39870 >>> sketch.GetElementCount() = 80070 >>> sketch.GetElementCount() = 120000 2024-11-21T08:58:17.281408Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654237760118698:2293];tablet_id=72075186224037889;parent=[1:7439654237760118650:2288];fline=manager.h:99;event=ask_data;request=request_id=124;2={portions_count=2};; 2024-11-21T08:58:17.281548Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654237760118705:2296];tablet_id=72075186224037890;parent=[1:7439654237760118651:2289];fline=manager.h:99;event=ask_data;request=request_id=126;2={portions_count=2};; 2024-11-21T08:58:17.281629Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654237760118712:2299];tablet_id=72075186224037888;parent=[1:7439654237760118671:2290];fline=manager.h:99;event=ask_data;request=request_id=128;2={portions_count=2};; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::TableReshardingConsistency64 [FAIL] Test command err: Trying to start YDB, gRPC: 23204, MsgBus: 19517 2024-11-21T08:57:58.622722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654209625634278:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:58.622738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a5/r3tmp/tmprORAv8/pdisk_1.dat 2024-11-21T08:57:58.667730Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23204, node 1 2024-11-21T08:57:58.681772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:58.681784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:58.681786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:58.681822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19517 TClient is connected to server localhost:19517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:58.723027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:58.723049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:58.724269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:58.751159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:58.757001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:58.779028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:58.779092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:58.779136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:58.779161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:58.779182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:58.779195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:58.779208Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:58.779222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:58.779239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:58.779259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.779274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:58.779287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654209625635062:2293];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:58.779689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:58.779702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:58.779713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:58.779721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:58.779737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:58.779745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:58.779753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:58.779761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:58.779769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:58.779776Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:58.779782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:58.779789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:58.779838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:58.779849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:58.779863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:58.779867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:58.779876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:58.779885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:58.779899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:58.779907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:58.779916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:58.779924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:58.782299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654209625635063:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:58.782320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654209625635063:2294];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:58.782350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654209625635063:2294];tablet_id=7207518622 ... 
t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:00.973574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T08:58:00.977671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:00.984117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) 2024-11-21T08:58:02.888197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 2024-11-21T08:58:02.894913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T08:58:02.902225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) 2024-11-21T08:58:03.623287Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654209625634278:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.623316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; RESHARDING_WAIT_FINISHED... (SPLIT:0) 2024-11-21T08:58:04.889550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T08:58:04.897337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:04.903926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) RESHARDING_WAIT_FINISHED... (SPLIT:0) 2024-11-21T08:58:06.891300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T08:58:06.899593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2024-11-21T08:58:06.905878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720668:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:0) RESHARDING_WAIT_FINISHED... (SPLIT:0) 2024-11-21T08:58:08.894060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720669:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:0) RESHARDING_FINISHED 2024-11-21T08:58:09.016020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T08:58:09.017113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725657:0, at schemeshard: 72057594046644480 2024-11-21T08:58:09.078616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:09.099735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725659:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:10.896298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725660:0, at schemeshard: 72057594046644480 2024-11-21T08:58:10.903657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725661:0, at schemeshard: 72057594046644480 2024-11-21T08:58:10.910561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725662:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:12.893496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725663:0, at schemeshard: 72057594046644480 2024-11-21T08:58:12.899050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:12.912531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725665:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:13.667716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:13.667742Z node 1 :IMPORT WARN: Table profiles were not loaded RESHARDING_WAIT_FINISHED... 
(SPLIT:1) 2024-11-21T08:58:14.895252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725666:0, at schemeshard: 72057594046644480 2024-11-21T08:58:14.900560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725667:0, at schemeshard: 72057594046644480 2024-11-21T08:58:14.909359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725668:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:16.897338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725669:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.902926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725670:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.909378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725671:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:18.900147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725672:0, at schemeshard: 72057594046644480 2024-11-21T08:58:18.912108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725673:0, at schemeshard: 72057594046644480 2024-11-21T08:58:18.918062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725674:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:20.898144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725675:0, at schemeshard: 72057594046644480 2024-11-21T08:58:20.906680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725676:0, at schemeshard: 72057594046644480 2024-11-21T08:58:20.912997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725677:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... 
(SPLIT:1) 2024-11-21T08:58:22.897943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725678:0, at schemeshard: 72057594046644480 2024-11-21T08:58:22.901222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725679:0, at schemeshard: 72057594046644480 2024-11-21T08:58:22.907666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725680:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... (SPLIT:1) 2024-11-21T08:58:24.900004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725681:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_FINISHED 2024-11-21T08:58:25.195499Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179505000, txId: 18446744073709551615] shutting down [[57685u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57685u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x124D56CA 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:401: Execute_ @ 0x124D39B7 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7EFE2175CD8F 11. ??:0: ?? @ 0x7EFE2175CE3F 12. ??:0: ?? @ 0x11815028 |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 24835, MsgBus: 63881 2024-11-21T08:58:03.919803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654231078411344:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.919820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478b/r3tmp/tmp5UoRx2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24835, node 1 2024-11-21T08:58:03.975367Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:03.980351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.980371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.980374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.980411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63881 TClient is connected to server localhost:63881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:04.021309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:04.021339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:04.022418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:04.050964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:04.054891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:04.065647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.065716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.065774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.065800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.065822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.065845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.065872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.065904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.065930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.065953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.065979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.066009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654235373379302:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.069214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.069241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.069277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.069303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.069320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.069340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.069364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.069392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.069411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.069429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.069453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.069472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654235373379308:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.069942Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:04.069957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:04.069970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:04.069979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:04.069999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:04.070008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:04.070018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:04.070026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:04.070040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:04.070049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:04.070055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.312256Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:15.477085Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.477119Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:58:15.572193Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.572242Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:15.658811Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.658841Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:15.743737Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.743770Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:15.828701Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:15.855964Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:15.930668Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:15.930702Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.023884Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.023917Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.118642Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.118674Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.200745Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.200775Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.283916Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.283940Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.317181Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.459030Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.459062Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.562891Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.562915Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.645519Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.645554Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.728234Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.728261Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:16.810860Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:16.810893Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z2tk50czb0jq78c2sr7tt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2ZlNTVlYTUtOWQ1YTBkMWItM2QwYmM3NjMtOTliNjE2ZDg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:58:16.875741Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.896952Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:56:52.000614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:52.000631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:52.000635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:52.000638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:52.000642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:52.000646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:52.000654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:52.000721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:52.007822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:52.007838Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:56:52.009542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:52.009627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:52.009664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:56:52.011563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:52.011636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T08:56:52.011713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:52.011908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:52.012513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:52.012757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:52.012764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:52.012773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:52.012778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:52.012783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:52.012812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:56:52.014301Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:56:52.031020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:56:52.031100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.031163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:56:52.031210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:56:52.031218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.032144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:52.032168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:56:52.032247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.032259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T08:56:52.032264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:52.032269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:52.032690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.032701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:56:52.032705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:52.033013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.033020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.033026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:52.033033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:52.033672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:52.034036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:52.034083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:56:52.034287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:56:52.034310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:56:52.034316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:52.034368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:52.034374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:56:52.034403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:56:52.034414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:56:52.034783Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:56:52.034791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:56:52.034834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:56:52.034842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:56:52.034924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:56:52.034930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:52.034941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:52.034964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:52.034970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:52.034975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:52.034980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:52.034984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:52.034994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:56:52.035000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:56:52.035005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
aCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 
MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:27.538710Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:27.538779Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 88us result status StatusSuccess 2024-11-21T08:58:27.538912Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 
} Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:27.549213Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T08:58:27.549246Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:756:2653] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T08:58:27.549275Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179507536456 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732179507536456 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179507536456 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:58:27.550145Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T08:58:27.550166Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:756:2653] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsageWithTTL [GOOD] Test command err: Trying to start YDB, gRPC: 3481, MsgBus: 22389 2024-11-21T08:58:15.663317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654282663919355:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:15.663490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004763/r3tmp/tmp9Ndy7A/pdisk_1.dat 2024-11-21T08:58:15.716188Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3481, node 1 2024-11-21T08:58:15.728688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:15.728699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:15.728701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:15.728735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22389 TClient is connected to server localhost:22389 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:15.784872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:15.790215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T08:58:15.794695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:15.794720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:15.796483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.796732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:15.807714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:15.807774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:15.807821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:15.807846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:15.807865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:15.807879Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:15.807894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:15.807908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:15.807932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:15.807948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.807964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:15.807980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654282663920009:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:15.808447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:15.808458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:15.808469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:15.808474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:15.808488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:15.808492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.808502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.808508Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.808517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.808521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.808526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.808529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.808585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.808590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.808607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.808611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.808625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.808629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.808647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.808668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.808680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:15.808683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:15.811131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654282663920008:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:15.811142Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654282663920008:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS ... ine=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:15.820999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:15.821011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:15.821016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:15.821034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:15.821040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.821051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.821057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.821069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.821075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.821083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.821087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.821135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.821141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.821157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.821164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.821173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.821177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.821189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.821192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.821201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:15.821203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:15.821311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:15.821315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:15.821321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:15.821324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:15.821336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:15.821338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.821344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.821347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.821353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.821356Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.821360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.821362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.821385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.821389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.821401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.821407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.821416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.821424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.821435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.821437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.821445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:15.821447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:58:15.863845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.984661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654282663920301:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.984690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.013445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.028765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654286958887651:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.028794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.033252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.040771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654286958887704:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.040794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] Test command err: Trying to start YDB, gRPC: 11085, MsgBus: 28109 2024-11-21T08:58:06.137008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654243519247456:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:06.137258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004786/r3tmp/tmpwY6GOL/pdisk_1.dat 2024-11-21T08:58:06.189549Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11085, node 1 2024-11-21T08:58:06.195554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:06.195568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:06.195569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:06.195601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28109 TClient is connected to server localhost:28109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:06.238559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:06.238584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:06.239622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:06.266576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:06.270791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:06.281441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.281493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.281552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.281568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.281579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.281594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.281615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.281629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.281643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.281657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.281667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.281678Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654243519248108:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.283958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.283979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.284008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.284020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.284034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.284048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.284071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.284086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.284102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.284121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.284135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.284150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.287255Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.287278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.287316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.287335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.287356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.287376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.287396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.287417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.287439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396542435192481 ... 
olumnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302359Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.302389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.302412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:16.302417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:58:16.302419Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:58:16.302423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:16.302430Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.302434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:16.302438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.302440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302456Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.302475Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179496000 at tablet 72075186224037890 2024-11-21T08:58:16.302477Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:16.302481Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.302484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:16.302488Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179496000 at tablet 72075186224037889 2024-11-21T08:58:16.302491Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:16.302494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.302495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.302496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:16.302500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.302503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654243519248142:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.302506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302511Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179496000 at tablet 72075186224037888 2024-11-21T08:58:16.302513Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:16.302516Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:16.302516Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439654243519248110:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:16.302518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:16.302521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:16.302523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:16.302528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:16.302532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654243519248109:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] Test command err: Trying to start YDB, gRPC: 25311, MsgBus: 65003 2024-11-21T08:58:03.164228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654231251158621:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.164378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004790/r3tmp/tmpoS6OBm/pdisk_1.dat 2024-11-21T08:58:03.208957Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25311, node 1 2024-11-21T08:58:03.221991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.222005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2024-11-21T08:58:03.222006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.222040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65003 TClient is connected to server localhost:65003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:03.265021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:03.265038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:03.266191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:03.291561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:03.296565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:03.307961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.308025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.308062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.308086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.308105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.308119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.308133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.308154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.308168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.308182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.308195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.308314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654231251159285:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.308805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:03.308813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:03.308823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:03.308827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:03.308840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:03.308843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:03.308852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:03.308856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:03.308864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:03.308867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:03.308874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:03.308883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:03.308933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:03.308938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:03.308952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:03.308959Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.308969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:03.308975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:03.308990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:03.308993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:03.309002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:03.309004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:03.311735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654231251159284:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.311746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654231251159284:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.311785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654231251159284:2288];tablet_id=7207518622 ... 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.003462Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.137371Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.137393Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:58:24.219313Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.219335Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.301146Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.301182Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.383055Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.383079Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.465023Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:24.475404Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1215:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:24.526578Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.526600Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.608318Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. 
SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.608345Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.690298Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.690319Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.772389Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.772414Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.854454Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:24.854479Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:24.896433Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:25.021603Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:25.021630Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:25.124157Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:25.124180Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:25.206097Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:25.206122Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:25.288028Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:25.288050Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T08:58:25.369558Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T08:58:25.369585Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1837:3094], TxId: 281474976715662, task: 161. Ctx: { CustomerSuppliedId : . TraceId : 01jd6z2wpr098p2fmqpmmpf0z2. SessionId : ydb://session/3?node_id=2&id=YmMzYTM2N2QtNDY3ZDQ5NDgtNjQ0OTE3N2EtNzU2YmUzYmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T08:58:25.431092Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1215:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:25.451970Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1215:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1 Test command err: Trying to start YDB, gRPC: 7143, MsgBus: 26952 2024-11-21T08:58:11.845884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654267266058990:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:11.845905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004779/r3tmp/tmpSzBaNz/pdisk_1.dat 2024-11-21T08:58:11.893425Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7143, node 1 2024-11-21T08:58:11.907794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:11.907807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:11.907809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:11.907845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26952 TClient is connected to server localhost:26952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:11.946652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:11.946690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:11.947770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:11.974675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:11.983769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:11.989979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:58:11.992365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:58:11.992416Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T08:58:11.993148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:11.993191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:11.993250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:11.993277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:11.993294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:11.993313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:11.993345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:11.993384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:11.993414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:11.993442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:11.993479Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:11.993501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654267266059639:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:11.994044Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2024-11-21T08:58:11.994073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:58:11.994077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:58:11.994096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:58:11.994121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:11.994135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:11.994138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:58:11.994145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:58:11.994157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:11.994163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:11.994167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:58:11.994181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:58:11.994187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:11.994194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:11.994196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:58:11.994204Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:58:11.994213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:11.994223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:11.994230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:58:11.994241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:11.994250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:11.994253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:58:11.994260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:11.994270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Res ... ip_reason=no_changes; 2024-11-21T08:58:17.012006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:17.012013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T08:58:17.012022Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:58:17.012029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:17.012037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:17.012044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:17.012051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:17.012054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.012056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.012058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:17.012067Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:17.012078Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179497000 at tablet 72075186224037888 2024-11-21T08:58:17.012081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:17.012084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:17.012087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:17.012090Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:17.012092Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.012094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.012096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:17.012100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439654267266059640:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:17.013786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T08:58:17.014812Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:58:17.014836Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T08:58:17.014842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:17.014856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:17.014879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:17.014893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:17.014907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.014915Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.014924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:17.014945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:17.014972Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179497000 at tablet 72075186224037890 2024-11-21T08:58:17.014980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:17.014983Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T08:58:17.014986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:17.014989Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:17.014992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.014993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:17.014996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:17.015002Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654267266059673:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:17.187876Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654267266059673:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.187895Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654267266059640:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.187900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654267266059639:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:17.187904Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654267266059641:2290];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x124D1530 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:208: Execute_ @ 0x124CEE7D 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FFA9D9C4D8F 11. ??:0: ?? @ 0x7FFA9D9C4E3F 12. ??:0: ?? 
@ 0x11815028 >> test.py::test[pg-tpcds-q16-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Analyze] >> test.py::test[pg-tpcds-q30-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Debug] >> test.py::test[pg-tpcds-q30-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:09.806169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:09.806209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:09.806215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:09.806219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:09.815934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:09.815968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:09.815998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:09.816099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:09.856890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:09.856930Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:09.864450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:09.864594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:09.864633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:09.876779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:09.876899Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:09.882302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.892556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:09.893633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:09.916418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:09.916425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:09.916477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:09.918782Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:09.952106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:09.970450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.970581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:09.970638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:09.970650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:09.971902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 
ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:09.971917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:09.971922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:09.973884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.973909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:09.973917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:09.974581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.974596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.974603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:09.974610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:09.984640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:09.985547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:09.994203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:09.994560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.994611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:10.014206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:10.014339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:10.014350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:10.014409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:10.014431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:10.015425Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:10.015494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:10.015591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015600Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:10.015614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:10.015618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:10.015625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:10.015631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:10.015636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:10.015640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:10.015659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:10.015665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:10.015669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
pdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:58:30.745202Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:58:30.745204Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:58:30.745206Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:58:30.745209Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:58:30.745215Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:58:30.745386Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:58:30.745392Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:58:30.745395Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:58:30.745473Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:58:30.745634Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:58:30.745675Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:30.745714Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:58:30.745767Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:58:30.745945Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:58:30.745964Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:58:30.746075Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:58:30.746112Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:30.746118Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:58:30.746124Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409548 2024-11-21T08:58:30.746181Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:58:30.746195Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2024-11-21T08:58:30.746301Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:30.746305Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:30.746322Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:30.746347Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:58:30.746360Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:58:30.746838Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:58:30.746852Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:58:30.746864Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:58:30.746866Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:58:30.746874Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T08:58:30.746882Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:58:30.746885Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:58:30.746903Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:30.746907Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:30.746917Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:58:30.746955Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:58:30.747219Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1003 2024-11-21T08:58:30.747257Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:58:30.747262Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T08:58:30.747274Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:58:30.747279Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:58:30.747325Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:58:30.747338Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:58:30.747343Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:58:30.747346Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [84:527:2484] 2024-11-21T08:58:30.747355Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:58:30.747357Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [84:527:2484] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T08:58:30.747394Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:58:30.747402Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:58:30.747407Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T08:58:30.747450Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:30.747470Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 28us result status StatusPathDoesNotExist 2024-11-21T08:58:30.747496Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:58:30.747523Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T08:58:30.747533Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 11us result status StatusSuccess 2024-11-21T08:58:30.747570Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:09.806171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:09.806205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:09.806214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:09.806218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:09.815917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:09.815955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2024-11-21T08:58:09.815990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:09.816098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:09.852626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:09.852665Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:09.864238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:09.864348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:09.864403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:09.876628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:09.876737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:09.882302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.891955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:09.893332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:09.916403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:09.916417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:09.916425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:09.916478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:09.918378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:09.952116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:09.970466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.970582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:09.970634Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:09.970655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:09.971887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.971898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:09.971902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:09.971905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:09.974810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.974829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:09.974835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:09.975407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.975416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:09.975419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:09.975426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:09.984653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:09.986125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:09.994204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:09.994560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:09.994626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:10.014210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:10.014349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:10.014362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:10.014409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:10.014431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:10.015501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:10.015561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:10.015652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:10.015660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:10.015672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:10.015676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:10.015682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:10.015687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:10.015694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:10.015698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:10.015711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:10.015717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:10.015732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157132Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:22.157135Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:58:22.157141Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T08:58:22.157144Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:58:22.157148Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T08:58:22.157187Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:58:22.157193Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T08:58:22.157202Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:58:22.157206Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:58:22.157210Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:58:22.157214Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:58:22.157217Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:58:22.157220Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:58:22.157239Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:58:22.157243Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T08:58:22.157247Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T08:58:22.157250Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:58:22.157253Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T08:58:22.157256Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T08:58:22.157314Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157322Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157325Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 
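A side note on the conspicuous Version value in the adjacent TEvUpdateAck entries: 18446744073709551615 is simply the maximum 64-bit unsigned integer. It is attached to the paths being dropped (LocalPathId 3, 4, 5), while the root path (LocalPathId 1) is published with an ordinary version, so it reads like a "path dropped" sentinel; that interpretation is inferred from the log, not confirmed against the schemeshard sources. A trivial standalone check of the arithmetic:

```cpp
#include <cassert>
#include <cstdint>
#include <limits>

int main() {
    // The Version published for the dropped paths in the log is just the
    // largest value a 64-bit unsigned integer can hold (2^64 - 1).
    constexpr uint64_t kDroppedVersion = 18446744073709551615ULL;
    static_assert(kDroppedVersion == std::numeric_limits<uint64_t>::max());
    assert(kDroppedVersion == ~static_cast<uint64_t>(0));
    return 0;
}
```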
2024-11-21T08:58:22.157329Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:58:22.157332Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:58:22.157449Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157457Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157462Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:22.157466Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T08:58:22.157469Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:58:22.157667Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157677Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157680Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:22.157684Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:58:22.157688Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:58:22.157742Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157749Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.157752Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:22.157755Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:58:22.157758Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:58:22.157765Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T08:58:22.157769Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:301:2293] 2024-11-21T08:58:22.157928Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [49:213:2213] sender: [49:339:2058] recipient: [49:15:2062] 2024-11-21T08:58:22.158198Z node 49 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T08:58:22.158247Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:22.158289Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:58:22.158361Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:22.158368Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:58:22.158376Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:58:22.158380Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:58:22.158384Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:22.158388Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:22.158392Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:22.158441Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.158498Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.158734Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.158748Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:22.158757Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:58:22.158762Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:302:2294] 2024-11-21T08:58:22.159049Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:58:22.159078Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 
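The txId 1003 entries above trace schemeshard's publication bookkeeping end to end: four path updates are announced ("Publication still in progress ... publications: 4, subscribers: 1"), each incoming TEvUpdateAck lowers the in-flight counter (count: 4, 3, 2, 1), and only when the last ack lands does the tablet log "Publication complete, notify & remove" and send TEvNotifyTxCompletionResult to the waiting subscriber, which then satisfies the test's waiter. The sketch below is a minimal standalone model of that counting scheme; the class and method names are illustrative and are not YDB's actual types.

```cpp
#include <cstdint>
#include <functional>
#include <unordered_map>
#include <utility>
#include <vector>

// Per-transaction publication countdown as suggested by the log: N path
// updates go out, every ack decrements the counter, and subscribers are
// notified exactly once, when the counter reaches zero.
class PublicationTracker {
public:
    using Subscriber = std::function<void(uint64_t /*txId*/)>;

    void StartPublication(uint64_t txId, uint32_t pathCount, Subscriber s) {
        auto& tx = Txs_[txId];
        tx.InFlight = pathCount;                     // "publications: 4"
        tx.Subscribers.push_back(std::move(s));      // "subscribers: 1"
    }

    // One call per TEvUpdateAck-like event.
    void Ack(uint64_t txId) {
        auto it = Txs_.find(txId);
        if (it == Txs_.end() || it->second.InFlight == 0)
            return;                                  // duplicate or late ack
        if (--it->second.InFlight == 0) {            // "Publication complete, notify & remove"
            for (auto& notify : it->second.Subscribers)
                notify(txId);
            Txs_.erase(it);
        }
    }

private:
    struct TxState {
        uint32_t InFlight = 0;                       // "Publication in-flight, count: N"
        std::vector<Subscriber> Subscribers;
    };
    std::unordered_map<uint64_t, TxState> Txs_;
};
```

With pathCount = 4 and a single subscriber, four Ack(1003) calls reproduce the 4, 3, 2, 1 countdown and the single notification logged above.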
TestWaitNotification: OK eventTxId 1003 2024-11-21T08:58:22.159145Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:22.159166Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 28us result status StatusPathDoesNotExist 2024-11-21T08:58:22.159193Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10917:2156] recipient: [1:10713:2165] 2024-11-21T08:57:17.535480Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T08:57:17.536312Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T08:57:17.536716Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T08:57:17.536808Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T08:57:17.536978Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.536984Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T08:57:17.537042Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T08:57:17.538036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T08:57:17.538067Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T08:57:17.538107Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T08:57:17.538127Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.538139Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T08:57:17.538148Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10942:2156] recipient: [1:110:2157] 2024-11-21T08:57:17.549220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T08:57:17.549277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.559690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T08:57:17.559747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.559764Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T08:57:17.559776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.559805Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T08:57:17.559815Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.559821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T08:57:17.559830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.570381Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T08:57:17.570451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T08:57:17.570725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T08:57:17.570733Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T08:57:17.570759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T08:57:17.573211Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" 
Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12097 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12098 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12099 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12100 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: SSD } } } } Command { QueryBaseConfig { ... 
OTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2024-11-21T08:58:09.152260Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1001 Path# /dev/disk1 2024-11-21T08:58:09.152263Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1001 Path# /dev/disk1 2024-11-21T08:58:09.152266Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1000 Path# /dev/disk2 2024-11-21T08:58:09.152269Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1002 Path# /dev/disk2 2024-11-21T08:58:09.152274Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1001 Path# /dev/disk1 2024-11-21T08:58:09.152278Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1000 Path# /dev/disk3 2024-11-21T08:58:09.152281Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1000 Path# /dev/disk3 2024-11-21T08:58:09.152284Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2024-11-21T08:58:09.152287Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2024-11-21T08:58:09.152290Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1002 Path# /dev/disk1 2024-11-21T08:58:09.152293Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1000 Path# /dev/disk3 2024-11-21T08:58:09.152296Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2024-11-21T08:58:09.152299Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1001 Path# /dev/disk1 2024-11-21T08:58:09.152302Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1000 Path# /dev/disk3 2024-11-21T08:58:09.152305Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1002 Path# /dev/disk1 2024-11-21T08:58:09.152308Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2024-11-21T08:58:09.152312Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1002 Path# /dev/disk2 2024-11-21T08:58:09.152314Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1002 Path# /dev/disk1 2024-11-21T08:58:09.152317Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1000 Path# /dev/disk3 2024-11-21T08:58:09.152321Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2024-11-21T08:58:09.152324Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2024-11-21T08:58:09.152327Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1002 Path# /dev/disk1 2024-11-21T08:58:09.152330Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1000 Path# /dev/disk3 2024-11-21T08:58:09.152334Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2024-11-21T08:58:09.152337Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1001 Path# /dev/disk1 2024-11-21T08:58:09.152340Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1000 Path# /dev/disk3 2024-11-21T08:58:09.152343Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2024-11-21T08:58:09.152347Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2024-11-21T08:58:09.152350Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1002 Path# /dev/disk1 2024-11-21T08:58:09.152353Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1000 Path# /dev/disk3 2024-11-21T08:58:09.152356Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2024-11-21T08:58:09.152359Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2024-11-21T08:58:09.152362Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1002 Path# /dev/disk1 2024-11-21T08:58:09.152365Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1000 Path# /dev/disk3 2024-11-21T08:58:09.152368Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2024-11-21T08:58:09.152372Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2024-11-21T08:58:09.152375Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1002 Path# /dev/disk1 2024-11-21T08:58:09.152379Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1000 Path# /dev/disk3 2024-11-21T08:58:09.152382Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1000 Path# /dev/disk3 2024-11-21T08:58:09.152387Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2024-11-21T08:58:09.152392Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1002 Path# /dev/disk1 2024-11-21T08:58:09.152397Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1000 Path# /dev/disk3 2024-11-21T08:58:09.152401Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2024-11-21T08:58:09.152406Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2024-11-21T08:58:09.152411Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1002 Path# /dev/disk1 2024-11-21T08:58:09.152413Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1001 Path# /dev/disk2 2024-11-21T08:58:09.152417Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1000 Path# /dev/disk3 2024-11-21T08:58:09.152420Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2024-11-21T08:58:09.152423Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2024-11-21T08:58:09.152426Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1002 Path# /dev/disk1 2024-11-21T08:58:09.152429Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1000 Path# /dev/disk3 2024-11-21T08:58:09.152432Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2024-11-21T08:58:09.152435Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1002 Path# /dev/disk1 2024-11-21T08:58:09.152438Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1000 Path# /dev/disk3 2024-11-21T08:58:09.152442Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2024-11-21T08:58:09.152445Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1000 Path# /dev/disk1 2024-11-21T08:58:09.152448Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1000 Path# /dev/disk3 2024-11-21T08:58:09.152451Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2024-11-21T08:58:09.152454Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1000 Path# /dev/disk3 2024-11-21T08:58:09.152457Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1000 Path# /dev/disk1 2024-11-21T08:58:09.152460Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1000 Path# /dev/disk3 2024-11-21T08:58:09.152464Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1001 Path# /dev/disk2 2024-11-21T08:58:09.152467Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1000 Path# /dev/disk1 2024-11-21T08:58:09.152470Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1001 Path# /dev/disk3 2024-11-21T08:58:09.152473Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1002 Path# /dev/disk1 2024-11-21T08:58:09.152476Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1001 Path# /dev/disk3 2024-11-21T08:58:09.152479Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1001 Path# /dev/disk3 2024-11-21T08:58:09.152482Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1002 Path# /dev/disk1 2024-11-21T08:58:09.152486Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1002 Path# /dev/disk2 2024-11-21T08:58:09.152489Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2024-11-21T08:58:09.152493Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1002 Path# /dev/disk1 2024-11-21T08:58:09.152496Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1002 Path# /dev/disk2 2024-11-21T08:58:09.152499Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1001 Path# /dev/disk1 2024-11-21T08:58:09.152503Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1002 Path# /dev/disk2 2024-11-21T08:58:09.152506Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1002 Path# /dev/disk1 2024-11-21T08:58:09.152510Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1002 Path# /dev/disk2 2024-11-21T08:58:09.152513Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1002 Path# /dev/disk2 2024-11-21T08:58:09.201465Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2024-11-21T08:58:09.217419Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:57:52.100266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:52.100286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:52.100289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:52.100292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:52.100296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:52.100298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:52.100305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:52.100377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:52.108988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:52.109008Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:57:52.111307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:52.111420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:52.111459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:52.113831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:52.113901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:52.113973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:52.114142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:52.114633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:52.114834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:52.114840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:52.114849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:52.114854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:52.114858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:52.114886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:57:52.115883Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:57:52.126781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:52.126841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.126904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:52.126954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:52.126962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.127558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:52.127576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:52.127623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.127630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:52.127633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:52.127637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:52.127899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.127905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:52.127908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:52.128135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.128141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.128145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:52.128151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:52.128714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:52.129105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:52.129156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:52.129351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:52.129379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:52.129385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:52.129442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:52.129449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:52.129478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:52.129491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:52.129902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:52.129911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:52.129957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:52.129962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:52.130044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:52.130050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:52.130061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:52.130065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:52.130070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:52.130075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:52.130079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:52.130083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:52.130095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:52.130101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:52.130105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
emeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.498871Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:26.498959Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T08:58:26.498966Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:26.499012Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:58:26.499027Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2024-11-21T08:58:26.499029Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2024-11-21T08:58:26.499032Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2024-11-21T08:58:26.499097Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499103Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499105Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:26.499260Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499267Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499270Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:26.499272Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T08:58:26.499274Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T08:58:26.499282Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T08:58:26.499401Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:58:26.499406Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:26.499434Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:58:26.499445Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1003:0 progress is 3/3 2024-11-21T08:58:26.499447Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:58:26.499450Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T08:58:26.499452Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T08:58:26.499455Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:58:26.499460Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:58:26.499471Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:58:26.499474Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T08:58:26.499476Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T08:58:26.499478Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:58:26.499480Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T08:58:26.499482Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T08:58:26.499485Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T08:58:26.499549Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499802Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499812Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499815Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.499822Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.500313Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:26.500593Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 489626274069 } TabletId: 72075186233409546 State: 4 2024-11-21T08:58:26.500603Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:58:26.500820Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:58:26.500871Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:58:26.500914Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2024-11-21T08:58:26.500947Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T08:58:26.501013Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:26.501016Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:58:26.501022Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:58:26.501025Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:58:26.501028Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:58:26.501592Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:58:26.501602Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T08:58:26.501623Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:58:26.501654Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:58:26.501658Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:58:26.501734Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:58:26.501752Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:58:26.501755Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:615:2544] 2024-11-21T08:58:26.502354Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 489626274070 } TabletId: 72075186233409547 State: 4 2024-11-21T08:58:26.502365Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:58:26.502563Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:58:26.502607Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2024-11-21T08:58:26.502648Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:26.502675Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:58:26.502931Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:26.502937Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:26.502944Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:26.503331Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:58:26.503340Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2024-11-21T08:58:26.503393Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:58:26.503427Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:58:26.503435Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] Test command err: Trying to start YDB, gRPC: 7807, MsgBus: 6668 2024-11-21T08:58:15.516767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654282606775332:2068];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004764/r3tmp/tmp7QA9EP/pdisk_1.dat 2024-11-21T08:58:15.582618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:15.599102Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7807, node 1 2024-11-21T08:58:15.616368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:15.616382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:15.616384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:15.616425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6668 TClient is connected to server localhost:6668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:15.674176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:15.674202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:15.674982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.675223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T08:58:15.677424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:15.682593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:15.689976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:15.690030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:15.690055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:15.690070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:15.690092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:15.690102Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:15.690120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:15.690132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:15.690150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:15.690167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.690181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:15.690195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654282606775949:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:15.690551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:15.690566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:15.690574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:15.690577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:15.690586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:15.690593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.690599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.690606Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.690615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.690617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.690622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.690628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.690672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.690690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.690715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.690724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.690735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.690743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.690759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.690767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.690779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:15.690787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:15.693192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654282606775948:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:15.693215Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654282606775948:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NA ... 24-11-21T08:58:15.706882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.706891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.706895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.706904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.706911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.706917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.706926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.706955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.706964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.706980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.706988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.707000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.707009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.707024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.707032Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.707043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:15.707051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; 2024-11-21T08:58:15.834664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654282606776250:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.834692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654282606776242:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.834760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.835500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T08:58:15.837106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654282606776256:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Filter"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (Bool 'false)) (let $2 (lambda '($20) $20)) (let $3 '('('"_logical_id" '1067) '('"_id" '"e9c80dc9-d1246b24-7ff2addc-ece41a35") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $14 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $15 (KqpBlockReadOlapTableRanges $14 (Void) '('"id" '"level") '() '() (lambda '($16) (block '( (let $17 '('eq '"level" (Int32 '"5"))) (let $18 '('?? $17 $1)) (return (TKqpOlapExtractMembers (KqpOlapFilter $16 $18) '('"id"))) ))))) (return (FromFlow (WideCombiner (WideFilter (WideFromBlocks $15) (lambda '($19) (== (SafeCast $19 (DataType 'String)) (String '"5")))) '-1073741824 $2 (lambda '($21 $22) $21) (lambda '($23 $24 $25) $25) (lambda '($26 $27) $27)))) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('0))) (let $6 (Uint64 '1)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1772) '('"_id" '"2dc53026-3bb0cd1f-ecccc06a-d0df3190") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($5) (lambda '($28) (block '( (let $29 (lambda '($32 $33))) (let $30 (WideCombiner (ToFlow $28) '"" $2 $29 $29 $2)) (let $31 (Condense1 (NarrowMap $30 (lambda '($34) (AsStruct '('"id" $34)))) (lambda '($35) $6) (lambda '($36 $37) $1) (lambda '($38 $39) (Inc $39)))) (return (FromFlow (ExpandMap $31 (lambda '($40) (Just $40))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($41) (block '( (let $42 (WideCondense1 (ToFlow $41) (lambda '($44) $44) (lambda '($45 $46) $1) (lambda '($47 $48) (IfPresent $47 (lambda '($49) (IfPresent $48 (lambda '($50) (Just (AggrAdd $49 $50))) $47)) $48)))) (let $43 (Condense (NarrowMap (Take $42 $6) (lambda '($51) (AsStruct '('Count0 (Unwrap $51))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($52 $53) $1) (lambda '($54 $55) (Just $54)))) (return (FromFlow (Map $43 (lambda '($56) (AsStruct '('"column0" (Coalesce (Member $56 'Count0) (Uint64 '0)))))))) ))) '('('"_logical_id" '2507) '('"_id" '"7f6ee5a0-2b0cfe76-6354380b-a3bf3c49")))) (let $12 '($4 $9 $11)) (let $13 (DqCnResult (TDqOutput $11 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $12 '($13) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $7))) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 28424, MsgBus: 23116 2024-11-21T08:58:16.245489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654289388854038:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:16.245753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/004760/r3tmp/tmpwqRTEI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28424, node 1 2024-11-21T08:58:16.305428Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:16.306477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:16.306494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:16.306496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:16.306538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23116 TClient is connected to server localhost:23116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:16.346772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:16.346797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:16.347920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:16.374908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:16.377012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:16.381541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:16.391779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:16.391846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:16.391895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:16.391922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:16.391946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:16.391965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:16.391982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:16.392005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:16.392030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:16.392058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:16.392083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:16.392106Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654289388854692:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:16.392595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:16.392610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:16.392622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:16.392627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:16.392643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:16.392652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:16.392663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:16.392673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:16.392682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:16.392691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:16.392697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:16.392705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:16.392756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:16.392767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:16.392783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:16.392792Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:16.392802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:16.392810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:16.392826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:16.392834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:16.392845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:16.392849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:16.395653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654289388854693:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:16.395670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654289388854693:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... 
ss=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:16.403444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:16.403449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:16.403451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:16.403456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:16.403458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:16.403462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:16.403469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:16.403484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:16.403491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:16.403504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:16.403513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:16.403522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:16.403530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:16.403542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:16.403550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:16.403559Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:16.403566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:16.403608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:16.403616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:16.403620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:16.403623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:16.403633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:16.403639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:16.403644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:16.403647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:16.403654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:16.403662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:16.403667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:16.403672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:16.403697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:16.403706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:16.403715Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:16.403721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:16.403727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:16.403730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:16.403738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:16.403744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:16.403749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:16.403751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:16.441557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:58:16.675401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654289388855141:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.675415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654289388855132:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.675429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:16.676053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:16.677531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654289388855146:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } >> KqpOlapBlobsSharing::MultipleMerge [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::ChangeSchemaAndSplit [FAIL] Test command err: Trying to start YDB, gRPC: 9212, MsgBus: 15658 2024-11-21T08:57:45.121360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654154569325306:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.121484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047f7/r3tmp/tmpO8qns4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9212, node 1 2024-11-21T08:57:45.167935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.167947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.167949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.167979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:45.168601Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15658 TClient is connected to server localhost:15658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.222842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.222864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.223885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:45.243089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:45.352791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.948732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.948732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.948757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.948772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.948818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.948818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.948856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.948859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.948877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.948878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.948904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.948905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.948926Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.948926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.948942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.948944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.948961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.948963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.948983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.948985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.949003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.949003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.949020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654154569328099:2296];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.949022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654154569328345:2298];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.952277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654154569328865:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.952277Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038900;self_id=[1:7439654154569328868:2310];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.952288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654154569328868:2310];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.952295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654154569328865:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.952321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654154569328865:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.952322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654154569328868:2310];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.952338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654154569328865:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.952338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654154569328868:2310];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.952353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654154569328868:2310];tablet_id=72075186224038900;process=TTxInitSchema:: ... 
rt;last_saved_id=16; 2024-11-21T08:58:28.030463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038605;self_id=[1:7439654334958027364:12975];tablet_id=72075186224038605;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.032146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038715;self_id=[1:7439654334958027334:12973];tablet_id=72075186224038715;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.032540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038796;self_id=[1:7439654334958027405:12984];tablet_id=72075186224038796;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.034562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038364;self_id=[1:7439654334958027426:12988];tablet_id=72075186224038364;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.034780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038321;self_id=[1:7439654334958027469:12995];tablet_id=72075186224038321;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.036590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7439654334958027457:12994];tablet_id=72075186224038074;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.037457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;self_id=[1:7439654334958027554:13010];tablet_id=72075186224038735;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.038809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038322;self_id=[1:7439654334958027583:13019];tablet_id=72075186224038322;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.039387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038372;self_id=[1:7439654334958027523:13005];tablet_id=72075186224038372;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.040974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7439654334958027672:13039];tablet_id=72075186224038077;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.041318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038429;self_id=[1:7439654334958027720:13049];tablet_id=72075186224038429;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.042825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038382;self_id=[1:7439654334958027594:13020];tablet_id=72075186224038382;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.043259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038361;self_id=[1:7439654334958027826:13074];tablet_id=72075186224038361;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.045101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038099;self_id=[1:7439654334958027764:13059];tablet_id=72075186224038099;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.045282Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038303;self_id=[1:7439654334958027515:13004];tablet_id=72075186224038303;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.047282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654334958027636:13030];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.047588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038868;self_id=[1:7439654334958027678:13040];tablet_id=72075186224038868;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.049288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038875;self_id=[1:7439654334958027736:13050];tablet_id=72075186224038875;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.049607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038314;self_id=[1:7439654334958027896:13091];tablet_id=72075186224038314;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.051264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;self_id=[1:7439654334958027783:13063];tablet_id=72075186224038723;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.051546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038666;self_id=[1:7439654334958027946:13101];tablet_id=72075186224038666;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.053308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038282;self_id=[1:7439654334958027799:13065];tablet_id=72075186224038282;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.053542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038869;self_id=[1:7439654334958027629:13029];tablet_id=72075186224038869;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.055360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038826;self_id=[1:7439654334958027992:13114];tablet_id=72075186224038826;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.055668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038460;self_id=[1:7439654334958028058:13130];tablet_id=72075186224038460;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.057706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038678;self_id=[1:7439654334958027939:13100];tablet_id=72075186224038678;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.058555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038242;self_id=[1:7439654334958028096:13140];tablet_id=72075186224038242;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.059804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038378;self_id=[1:7439654334958028115:13145];tablet_id=72075186224038378;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.060928Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038822;self_id=[1:7439654334958027855:13081];tablet_id=72075186224038822;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.061931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038294;self_id=[1:7439654334958028134:13149];tablet_id=72075186224038294;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.063185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038779;self_id=[1:7439654334958028163:13155];tablet_id=72075186224038779;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.063941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038536;self_id=[1:7439654334958027985:13113];tablet_id=72075186224038536;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.065377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038454;self_id=[1:7439654334958028242:13175];tablet_id=72075186224038454;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.065972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7439654334958028214:13166];tablet_id=72075186224038053;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.067638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038237;self_id=[1:7439654334958027891:13090];tablet_id=72075186224038237;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.067885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038253;self_id=[1:7439654334958027852:13080];tablet_id=72075186224038253;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.069859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038653;self_id=[1:7439654334958028060:13131];tablet_id=72075186224038653;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.070253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7439654334958028246:13176];tablet_id=72075186224038097;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.071990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038473;self_id=[1:7439654334958028020:13120];tablet_id=72075186224038473;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.072444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7439654334958028170:13156];tablet_id=72075186224038096;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.074399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038606;self_id=[1:7439654334958028283:13185];tablet_id=72075186224038606;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.074781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[1:7439654334958028030:13124];tablet_id=72075186224038076;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.076617Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038469;self_id=[1:7439654334958028202:13165];tablet_id=72075186224038469;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.184284Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179508000, txId: 18446744073709551615] shutting down [[9974u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[9974u]]|[[20000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DABB0 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:528: Execute_ @ 0x124DABB0 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F95B9A70D8F 11. ??:0: ?? @ 0x7F95B9A70E3F 12. ??:0: ?? @ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_OrderT_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 24938, MsgBus: 22065 2024-11-21T08:58:13.315867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654274234952619:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:13.316073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00476f/r3tmp/tmpl7LcxX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24938, node 1 2024-11-21T08:58:13.370016Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:13.374265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:13.374280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:13.374282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:13.374322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22065 TClient is connected to server localhost:22065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:13.416386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:13.416410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:13.417584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:13.447571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:13.459794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:13.474341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.474423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.474473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:13.474497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:13.474522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:13.474544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:13.474563Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:13.474586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:13.474606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:13.474625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.474645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:13.474667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654274234953263:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:13.475151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:13.475167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:13.475179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:13.475186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:13.475202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:13.475206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:13.475215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:13.475227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:13.475236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:13.475245Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:13.475251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:13.475259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:13.475320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:13.475335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:13.475348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:13.475356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:13.475366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:13.475374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:13.475388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:13.475396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:13.475405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:13.475408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:13.478576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654274234953264:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:13.478605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654274234953264:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:13.478640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654274234953264:2289];tablet_id=7207518622 ... TxId: 281474976715662, task: 65. 
Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.355184Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:24.509858Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.509886Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:24.591736Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.591760Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:24.673796Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.673815Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:24.755900Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.755928Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T08:58:24.827966Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:24.848721Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T08:58:24.910343Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.910374Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:24.992551Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:24.992575Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.074693Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.074716Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.156761Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.156785Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.238819Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. 
SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.238840Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.270449Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:25.406055Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.406080Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.509573Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.509597Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.591845Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.591868Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.673873Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.673900Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. 
CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.755814Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T08:58:25.755839Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { TraceId : 01jd6z344k3wt4ks41g0fbqj41. SessionId : ydb://session/3?node_id=2&id=NTdjNzAwYjQtNzYwMzM1YzctNmU3Mzg2ZjItZDNiZmU1Zjg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T08:58:25.817132Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:25.837858Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] Test command err: Trying to start YDB, gRPC: 4131, MsgBus: 16178 2024-11-21T08:57:26.934772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654071888470765:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:26.934879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004885/r3tmp/tmpUdGGS2/pdisk_1.dat 2024-11-21T08:57:27.003473Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4131, node 1 2024-11-21T08:57:27.015995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:27.016011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:27.016012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:27.016046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16178 2024-11-21T08:57:27.036066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:27.036092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:27.037223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16178 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:27.072922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:27.080414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:27.090876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.090947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.090994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:27.091021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:27.091045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:27.091077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:27.091104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:27.091130Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:27.091152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:27.091178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.091205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:27.091224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654076183438571:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:27.091755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:27.091772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:27.091784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:27.091788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:27.091812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:27.091823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:27.091864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:27.091877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:27.091887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:27.091891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:27.091897Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:27.091901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:27.091969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:27.091982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:27.092002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:27.092013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:27.092026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:27.092037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:27.092062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:27.092072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:27.092084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:27.092093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:27.095098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076183438573:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:27.095121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076183438573:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:27.095144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654076183438573:2289];tablet_id=720751862240 ... 
1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Wait changes: 18421472/51200000 2024-11-21T08:57:54.322710Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179474310, txId: 281474976715667] shutting down 2024-11-21T08:57:57.661741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:57:57.661759Z node 2 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 18421472/51200000 2024-11-21T08:57:59.359758Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179479345, txId: 281474976715669] shutting down 2024-11-21T08:57:59.362662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715671:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: 2024-11-21T08:58:09.977921Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179489943, txId: 281474976715673] shutting down Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 
72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 1069880 PathId: 3 Wait changes: 22830328/102400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: Rows: 266065 RawBytes: 14899640 
BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 1069880 PathId: 3 2024-11-21T08:58:15.023472Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179495009, txId: 
281474976715675] shutting down 22830328/102400000 ==================================== QUERY: SELECT COUNT(*), MAX(pk_int), MIN(pk_int) FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T08:58:15.123739Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179495067, txId: 18446744073709551615] shutting down column2: 0 column0: 1600000 column1: 1599999 column2: int64_value: 0 column0: uint64_value: 1600000 column1: int64_value: 1599999 2024-11-21T08:58:16.228975Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179495109, txId: 18446744073709551615] shutting down count=1600000;min_count=3124;max_count=3126;groups_count=512; 2024-11-21T08:58:16.237052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715682:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 unpacked data: 44800000 / 15210888 packed data: 44800000 / 1198272 frq_diff: 0.07877725482 frq_compression: 0.02674714286 pk_size : 6400000 / 3210584 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] Test command err: Trying to start YDB, gRPC: 22589, MsgBus: 65432 2024-11-21T08:57:56.930350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654201832866531:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:56.930830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b0/r3tmp/tmpaTlFs6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22589, node 1 2024-11-21T08:57:56.983725Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:56.987547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:56.987558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:56.987560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:56.987591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65432 TClient is connected to server localhost:65432 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:57.030773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:57.030801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:57.031883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:57.062003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:57.070202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:57.078976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.079027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.079050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.079066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.079080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.079095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.079108Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.079124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.079146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.079161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.079176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.079191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206127834481:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.079485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:57.079494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:57.079502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:57.079507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:57.079517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:57.079519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:57.079524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:57.079529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:57.079538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:57.079540Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:57.079543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:57.079545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:57.079581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:57.079584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:57.079595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:57.079597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.079605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:57.079608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:57.079618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:57.079623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:57.079629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:57.079631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:57.081324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654206127834480:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.081338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654206127834480:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.081356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654206127834480:2288];tablet_id=7207518622 ... 
NSHARD DEBUG: Index: tables 1 inserted portions=2;blobs=2;rows=7196;bytes=263888;raw_bytes=8451436; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 72075186224037890 2024-11-21T08:58:15.847452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T08:58:15.847461Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=3; 2024-11-21T08:58:15.847469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T08:58:15.847482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037890; 2024-11-21T08:58:15.847498Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T08:58:15.847500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:15.847506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:58:15.847508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:15.847512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:15.847518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:15.847523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:15.847525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:15.847528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:15.847534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:15.847537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:15.847591Z node 1 :TX_COLUMNSHARD DEBUG: fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T08:58:15.847611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:58:15.847641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T08:58:15.847646Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T08:58:15.847648Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T08:58:15.847650Z node 1 :TX_COLUMNSHARD DEBUG: Create periodic stats pipe to 72057594046644480 at tablet 72075186224037890 2024-11-21T08:58:15.847667Z node 1 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2024-11-21T08:58:15.847675Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T08:58:15.847679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:58:15.847681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:15.847684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:15.847687Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:15.847691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:15.847693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:15.847695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:15.847699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:15.847702Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:15.847713Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T08:58:15.847724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T08:58:15.847730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=88; 2024-11-21T08:58:15.847735Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=2;path_id=3; 2024-11-21T08:58:15.847755Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=2;path_id=3; 2024-11-21T08:58:15.847768Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=3; 2024-11-21T08:58:15.847770Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; 2024-11-21T08:58:15.848016Z node 1 :TX_COLUMNSHARD DEBUG: Registered with mediator time cast at tablet 72075186224037890 2024-11-21T08:58:15.848027Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732179495000 at tablet 72075186224037890 2024-11-21T08:58:15.848033Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T08:58:15.848042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T08:58:15.848045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:15.848049Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T08:58:15.848056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T08:58:15.848060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T08:58:15.848063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 
2024-11-21T08:58:15.848080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T08:58:15.848090Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439654283437249786:3720];tablet_id=72075186224037890;parent=[1:7439654283437249739:3701];fline=manager.h:99;event=ask_data;request=request_id=937;3={portions_count=2};; 2024-11-21T08:58:15.848095Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439654283437249739:3701];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T08:58:15.848599Z node 1 :TX_COLUMNSHARD DEBUG: Connected to 72057594046644480 at tablet 72075186224037890 2024-11-21T08:58:16.341241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439654283437249703:3698];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.343266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439654283437249704:3699];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.343282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439654283437249705:3700];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T08:58:16.346308Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439654283437249739:3701];fline=actor.cpp:33;event=skip_flush_writing; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 26798, MsgBus: 28524 2024-11-21T08:58:15.207943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654282790001947:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:15.208058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004768/r3tmp/tmpa9z63a/pdisk_1.dat 2024-11-21T08:58:15.275544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26798, node 1 2024-11-21T08:58:15.284387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:15.284402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:15.284404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:15.284440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28524 2024-11-21T08:58:15.310551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:15.310575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:15.311965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:15.351927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:15.354539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:58:15.561192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654282790002558:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.561213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:15.594556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.602117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:15.602159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:15.602205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:15.602220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:15.602236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:15.602250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:15.602269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:15.602285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:15.602304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:15.602319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:15.602334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:15.602356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654282790002634:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:58:15.605902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:15.605923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:15.605934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:15.605941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:15.605960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:15.605966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:15.605976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:15.605984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:15.605993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:15.606000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:15.606008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:15.606013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:15.606081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:15.606087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:15.606106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:15.606112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:58:15.606123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:15.606135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:15.606153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:15.606158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:15.606171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024 ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=24288;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewAggregation [GOOD] Test command err: Trying to start YDB, gRPC: 24991, MsgBus: 22540 2024-11-21T08:57:49.940478Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654173065230820:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:49.940608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047c5/r3tmp/tmpUwkLU9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24991, node 1 2024-11-21T08:57:49.994013Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:49.998712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:49.998735Z node 
1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:49.998736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:49.998768Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22540 TClient is connected to server localhost:22540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:50.041361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:50.041382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:50.042522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:50.073150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:50.086187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:50.096731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:50.096797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:50.096841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:50.096867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:50.096888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:50.096910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:50.096927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:50.096950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:50.096980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:50.097003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:50.097026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:50.097048Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654177360198764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:50.099389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:50.099407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:50.099423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:50.099433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:50.099441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:50.099449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:50.099457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:50.099472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:50.099485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:50.099498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:50.099511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:50.099524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654177360198766:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:50.101390Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:50.101404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:50.101420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:50.101429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:50.101441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:50.101449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:50.101461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:50.101473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654177360198765:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:50.101486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74396541773601987 ... 
CTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3522808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3522808;columns=5; WAIT_COMPACTION: 0 2024-11-21T08:57:54.940566Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654173065230820:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:54.940606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT SUM(Rows) as rows, FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind != 'INACTIVE' RESULT: 2024-11-21T08:58:04.507853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654237489747296:6389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:04.507853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654237489747304:6392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:04.507873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:04.508662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T08:58:04.510251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654237489747310:6393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2024-11-21T08:58:04.730648Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484666, txId: 281474976715666] shutting down rows: 3000000 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind != 'INACTIVE' GROUP BY PathId ORDER BY PathId RESULT: 2024-11-21T08:58:04.989573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:04.989590Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:05.471559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179484768, txId: 281474976715669] shutting down rows: 500000 PathId: 3 rows: 1000000 PathId: 4 rows: 1500000 PathId: 5 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: 2024-11-21T08:58:07.940710Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179485540, txId: 281474976715671] shutting down rows: 1500000 bytes: 11064488 bytes_raw: 352239000 PathId: 5 rows: 1000000 bytes: 7465168 bytes_raw: 234789000 PathId: 4 rows: 500000 bytes: 3848160 bytes_raw: 117339000 PathId: 3 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: rows: 500000 bytes: 3848160 bytes_raw: 117339000 PathId: 3 ==================================== QUERY: SELECT PathId, SUM(Rows) as rows, SUM(BlobRangeSize) as bytes, SUM(RawBytes) as bytes_raw FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId >= UInt64("4") AND Kind IN ('INSERTED', 'SPLIT_COMPACTED', 'COMPACTED') GROUP BY PathId ORDER BY rows DESC LIMIT 10 RESULT: 2024-11-21T08:58:10.463176Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179488054, txId: 281474976715673] shutting down 2024-11-21T08:58:12.944728Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179490553, txId: 281474976715675] shutting down rows: 1500000 bytes: 11064488 bytes_raw: 352239000 PathId: 5 rows: 1000000 bytes: 7465168 bytes_raw: 234789000 PathId: 4 ==================================== QUERY: SELECT PathId, TabletId, Kind FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId, TabletId, Kind RESULT: TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT count(distinct(PathId)) as PathsCount, count(distinct(Kind)) as KindsCount, count(distinct(TabletId)) as TabletsCount FROM 
`/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 RESULT: 2024-11-21T08:58:15.265400Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179492981, txId: 281474976715677] shutting down 2024-11-21T08:58:17.048685Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179495416, txId: 281474976715679] shutting down PathsCount: 3 TabletsCount: 4 KindsCount: 1 ==================================== QUERY: SELECT PathId, count(*), sum(Rows), sum(BlobRangeSize), sum(RawBytes) FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE Activity == 1 GROUP BY PathId ORDER BY PathId RESULT: 2024-11-21T08:58:18.155537Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179497104, txId: 281474976715682] shutting down column2: 500000 column3: 3848160 column1: 1500 column4: 117339000 PathId: 3 column2: 1000000 column3: 7465168 column1: 1500 column4: 234789000 PathId: 4 column2: 1500000 column3: 11064488 column1: 1500 column4: 352239000 PathId: 5 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] Test command err: Trying to start YDB, gRPC: 29219, MsgBus: 25292 2024-11-21T08:58:05.857199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654242286053638:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:05.857215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004788/r3tmp/tmpeKpg0Y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29219, node 1 2024-11-21T08:58:05.913888Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:05.914938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:05.914949Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:05.914951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:05.914984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25292 TClient is connected to server localhost:25292 WaitRootIsUp 'Root'... 
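[Editorial note on the KqpOlapSysView::StatsSysViewAggregation output above] The QUERY/RESULT pairs are plain SUM/GROUP BY aggregations over `/Root/olapStore/.sys/store_primary_index_stats`, filtered on Kind and grouped by PathId. As a minimal sketch of what those queries compute — not the test's actual code; TStatsRow, TTotals and AggregateByPath below are hypothetical names — grouping per-portion rows by PathId and summing Rows/BlobRangeSize/RawBytes reproduces the per-path totals printed in the RESULT blocks (e.g. PathId 3: rows 500000, bytes 3848160, bytes_raw 117339000):

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Hypothetical flattened row of store_primary_index_stats; field names follow
// the columns referenced by the queries in the log above.
struct TStatsRow {
    uint64_t PathId;
    std::string Kind;      // e.g. "INSERTED", "SPLIT_COMPACTED", "INACTIVE"
    uint64_t Rows;
    uint64_t BlobRangeSize;
    uint64_t RawBytes;
};

struct TTotals {
    uint64_t Rows = 0, Bytes = 0, RawBytes = 0;
};

// Sketch of: SELECT PathId, SUM(Rows), SUM(BlobRangeSize), SUM(RawBytes)
//            FROM `.sys/store_primary_index_stats`
//            WHERE Kind != 'INACTIVE' GROUP BY PathId
std::map<uint64_t, TTotals> AggregateByPath(const std::vector<TStatsRow>& rows) {
    std::map<uint64_t, TTotals> byPath;
    for (const auto& r : rows) {
        if (r.Kind == "INACTIVE") {
            continue;  // mirrors the WHERE Kind != 'INACTIVE' filter
        }
        auto& t = byPath[r.PathId];
        t.Rows += r.Rows;
        t.Bytes += r.BlobRangeSize;
        t.RawBytes += r.RawBytes;
    }
    return byPath;
}

int main() {
    // Toy input only; the real test reads these rows from the sys view.
    // The two PathId 3 portions add up to the totals shown in the log.
    std::vector<TStatsRow> rows = {
        {3, "INSERTED", 250000, 1924080, 58669500},
        {3, "INSERTED", 250000, 1924080, 58669500},
        {4, "INSERTED", 1000000, 7465168, 234789000},
        {5, "SPLIT_COMPACTED", 1500000, 11064488, 352239000},
        {5, "INACTIVE", 42, 1, 1},  // filtered out
    };
    for (const auto& [pathId, t] : AggregateByPath(rows)) {
        std::cout << "PathId " << pathId << ": rows=" << t.Rows
                  << " bytes=" << t.Bytes << " bytes_raw=" << t.RawBytes << "\n";
    }
}

Run standalone, the sketch prints one totals line per PathId, matching the shape of the rows/bytes/bytes_raw lines in the RESULT blocks above. The log now continues with the next test case.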
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:58:05.958828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:05.958848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:05.959933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:05.991434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:05.995771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:06.005151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:06.029961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.029995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.030023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.030041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.030055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.030067Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.030083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.030097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.030112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.030127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.030141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.030157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7439654246581021763:2300];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.030457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:06.030468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:06.030475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:06.030478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:06.030486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:06.030488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:06.030493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:06.030496Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:06.030500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:06.030503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:06.030507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:06.030509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:06.030546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:06.030554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:06.030563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:06.030565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.030576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:06.030582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:06.030591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:06.030597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:06.030603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:06.030609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:06.033001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439654246581021850:2310];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.033018Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439654246581021850:2310];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T08:58:24.118848Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T08:58:24.118962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725680:0, at schemeshard: 72057594046644480 2024-11-21T08:58:24.119133Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 
72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T08:58:24.124693Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T08:58:24.126007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725681:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
(SPLIT:1) 2024-11-21T08:58:25.113347Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2024-11-21T08:58:25.113459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725682:0, at schemeshard: 72057594046644480 2024-11-21T08:58:25.113764Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2024-11-21T08:58:25.118628Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 
} TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_FINISHED 2024-11-21T08:58:25.683795Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179505175, txId: 18446744073709551615] shutting down [[57643u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57643u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x124D56CA 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:405: Execute_ @ 0x124D7E3C 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F788EA51D8F 11. ??:0: ?? @ 0x7F788EA51E3F 12. ??:0: ?? @ 0x11815028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] Test command err: Trying to start YDB, gRPC: 22470, MsgBus: 10127 2024-11-21T08:57:42.647688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654141195045287:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:42.647862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00481a/r3tmp/tmpiK34Q1/pdisk_1.dat 2024-11-21T08:57:42.707065Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22470, node 1 2024-11-21T08:57:42.715108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:42.715123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:42.715125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:42.715150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10127 2024-11-21T08:57:42.748992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:42.749028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server 2024-11-21T08:57:42.750084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected localhost:10127 WaitRootIsUp 'Root'... 
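[Editorial note on the KqpOlapBlobsSharing::TableReshardingModuloN failure above] The TX_COLUMNSHARD ERROR lines dump a HashSharding config with ModuloPartsCount: 16 and per-tablet AppropriateMods lists; in the intermediate dumps remainder 7 is listed for both 72075186224037891 and the newly added 72075186224037903, and only after the split (SPLIT:1) completes does each remainder map to a single tablet again. A minimal sketch of that routing rule, assuming the straightforward reading of the dumped proto (hash % ModuloPartsCount selects every tablet whose AppropriateMods contains that remainder) — TModuloSharding and ResolveTablets are hypothetical names, not the YDB implementation:

#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

// Hypothetical in-memory form of the HashSharding config dumped in the log:
// ModuloPartsCount plus, per tablet, the set of remainders it serves.
struct TModuloSharding {
    uint32_t ModuloPartsCount = 16;
    std::map<uint64_t, std::vector<uint32_t>> AppropriateModsByTablet;

    // Returns every tablet whose AppropriateMods list contains hash % ModuloPartsCount.
    // During resharding a remainder may temporarily map to more than one tablet.
    std::vector<uint64_t> ResolveTablets(uint64_t hash) const {
        const uint32_t mod = static_cast<uint32_t>(hash % ModuloPartsCount);
        std::vector<uint64_t> result;
        for (const auto& [tabletId, mods] : AppropriateModsByTablet) {
            for (uint32_t m : mods) {
                if (m == mod) {
                    result.push_back(tabletId);
                    break;
                }
            }
        }
        return result;
    }
};

int main() {
    TModuloSharding sharding;
    sharding.AppropriateModsByTablet = {
        {72075186224037891ull, {7, 15}},  // old tablet, still listed for mod 7 mid-split
        {72075186224037903ull, {7}},      // new tablet added by the split
        {72075186224037888ull, {12}},
    };
    std::vector<uint64_t> hashes = {7, 12, 23};  // 23 % 16 == 7
    for (uint64_t hash : hashes) {
        std::cout << "hash " << hash << " ->";
        for (uint64_t tabletId : sharding.ResolveTablets(hash)) {
            std::cout << ' ' << tabletId;
        }
        std::cout << '\n';
    }
}

The failed CompareYson check afterwards (([[57643u]]|[[230000u]])) then records that the two YSON row-count strings differ (57643 vs 230000), which is why the test is reported as [FAIL]. The log now continues with the next test case.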
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:42.781607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:42.861840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:43.410747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.410780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.410807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.410823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.410838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.410853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.410867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.410882Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.410898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.410913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.410930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.410945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439654145490016239:2298];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.411036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.411068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.411096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.411115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.411128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.411142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.411153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.411166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.411181Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:43.411194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:43.411208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:43.411221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439654145490016049:2296];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:43.412977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:43.412993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:43.413014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:43.413025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:43.413038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:43.413053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:43.413066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:43.413080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:7439654145490016338:2307];tablet_id=72075186224038893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:43.413094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038893;self_id=[1:74396541454900163 ... 
:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.390689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038381;self_id=[1:7439654338763686535:13926];tablet_id=72075186224038381;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.391820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038389;self_id=[1:7439654338763686094:12937];tablet_id=72075186224038389;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.393319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7439654338763686562:6675];tablet_id=72075186224038035;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.394050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038296;self_id=[1:7439654338763686515:11625];tablet_id=72075186224038296;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.395796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038243;self_id=[1:7439654338763686609:11507];tablet_id=72075186224038243;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.396686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038356;self_id=[1:7439654338763686584:6349];tablet_id=72075186224038356;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.397978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038337;self_id=[1:7439654338763686658:5933];tablet_id=72075186224038337;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.399308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038421;self_id=[1:7439654338763686628:12145];tablet_id=72075186224038421;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.401074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038649;self_id=[1:7439654338763686691:6414];tablet_id=72075186224038649;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.401774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038877;self_id=[1:7439654338763686742:4465];tablet_id=72075186224038877;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.403898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038557;self_id=[1:7439654338763686811:12236];tablet_id=72075186224038557;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.404067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038460;self_id=[1:7439654338763686881:12171];tablet_id=72075186224038460;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.406882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038430;self_id=[1:7439654338763686842:2806];tablet_id=72075186224038430;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.409057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038480;self_id=[1:7439654338763686854:4565];tablet_id=72075186224038480;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.411244Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038102;self_id=[1:7439654338763686795:12239];tablet_id=72075186224038102;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.414622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038766;self_id=[1:7439654338763686952:12155];tablet_id=72075186224038766;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.417586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038116;self_id=[1:7439654338763686910:7679];tablet_id=72075186224038116;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.420077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038804;self_id=[1:7439654338763686992:2929];tablet_id=72075186224038804;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.422226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038716;self_id=[1:7439654338763686907:5965];tablet_id=72075186224038716;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.424267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7439654338763687004:4510];tablet_id=72075186224038063;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.426425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7439654338763687312:6701];tablet_id=72075186224038071;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.428576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038651;self_id=[1:7439654338763687073:12160];tablet_id=72075186224038651;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.430862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038418;self_id=[1:7439654338763687203:12079];tablet_id=72075186224038418;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.432363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038363;self_id=[1:7439654338763687092:12066];tablet_id=72075186224038363;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.433061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7439654338763687350:12115];tablet_id=72075186224038072;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.434582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038565;self_id=[1:7439654338763687050:5327];tablet_id=72075186224038565;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.435301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038373;self_id=[1:7439654338763687402:11785];tablet_id=72075186224038373;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.436812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038219;self_id=[1:7439654338763687183:3418];tablet_id=72075186224038219;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.437483Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038198;self_id=[1:7439654338763687287:11940];tablet_id=72075186224038198;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.439011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038856;self_id=[1:7439654338763687223:6375];tablet_id=72075186224038856;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.439583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038284;self_id=[1:7439654338763687327:12140];tablet_id=72075186224038284;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.441283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038433;self_id=[1:7439654338763687245:3064];tablet_id=72075186224038433;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.441788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[1:7439654338763687390:11984];tablet_id=72075186224038040;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.443463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038682;self_id=[1:7439654338763687031:6439];tablet_id=72075186224038682;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.444159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038299;self_id=[1:7439654338763686968:3437];tablet_id=72075186224038299;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.445624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038164;self_id=[1:7439654338763687126:3093];tablet_id=72075186224038164;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.446528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038532;self_id=[1:7439654338763687163:12089];tablet_id=72075186224038532;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.447854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038888;self_id=[1:7439654338763687268:12191];tablet_id=72075186224038888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.448912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038600;self_id=[1:7439654338763687357:3716];tablet_id=72075186224038600;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.450248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038568;self_id=[1:7439654338763687132:11994];tablet_id=72075186224038568;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.451226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038778;self_id=[1:7439654338763687469:11614];tablet_id=72075186224038778;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.452364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038831;self_id=[1:7439654338763687430:14336];tablet_id=72075186224038831;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.453554Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038051;self_id=[1:7439654338763687441:7406];tablet_id=72075186224038051;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:28.525421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179508000, txId: 18446744073709551615] shutting down [[0u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DE4A5 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:663: Execute_ @ 0x124DE4A5 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FABA204ED8F 11. ??:0: ?? @ 0x7FABA204EE3F 12. ??:0: ?? @ 0x11815028 |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |91.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |91.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |91.5%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |91.5%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |91.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::MoveMigratedTable |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] >> ColumnShardTiers::DSConfigs >> TSchemeShardMoveTest::Boot |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |91.5%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Analyze] >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> KqpOlap::PushdownFilter >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::Index [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMerge [FAIL] Test command err: Trying to start YDB, gRPC: 20455, MsgBus: 61862 2024-11-21T08:57:48.774435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654167867049265:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:48.774637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047d1/r3tmp/tmpYpqmZv/pdisk_1.dat 2024-11-21T08:57:48.812164Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20455, node 1 2024-11-21T08:57:48.825061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:48.825080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2024-11-21T08:57:48.825082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:48.825119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61862 TClient is connected to server localhost:61862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:48.875510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:48.875543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:48.876675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:48.898900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:48.991253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:49.571080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.571116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.571148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.571160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.571173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.571184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.571197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.571210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.571225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.571240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.571257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.571269Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038015;self_id=[1:7439654172162021131:2297];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.572013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:49.572050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:49.572072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:49.572089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:49.572101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:49.572115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:49.572125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:49.572137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:49.572153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:49.572165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:49.572187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:49.572219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;self_id=[1:7439654172162021130:2296];tablet_id=72075186224038905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:49.572558Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:49.572569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:49.572576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:49.572578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:49.572588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:49.572590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:49.572596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:49.572601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:49.572610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:49.572613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:49.572617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240389 ... 
on_start;last_saved_id=16; 2024-11-21T08:58:29.827075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038653;self_id=[1:7439654343960776629:12706];tablet_id=72075186224038653;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.829159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038743;self_id=[1:7439654343960776709:12722];tablet_id=72075186224038743;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.829738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038362;self_id=[1:7439654343960776488:12672];tablet_id=72075186224038362;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.831492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;self_id=[1:7439654343960776632:12707];tablet_id=72075186224038695;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.833112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038378;self_id=[1:7439654343960776803:12738];tablet_id=72075186224038378;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.833849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038237;self_id=[1:7439654343960776858:12748];tablet_id=72075186224038237;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.835599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038638;self_id=[1:7439654343960776857:12747];tablet_id=72075186224038638;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.836129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038667;self_id=[1:7439654343960776802:12737];tablet_id=72075186224038667;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.838802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7439654343960776929:12758];tablet_id=72075186224038033;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.840683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038130;self_id=[1:7439654343960776928:12757];tablet_id=72075186224038130;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.841917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7439654343960777027:12776];tablet_id=72075186224038073;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.842956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038393;self_id=[1:7439654343960777003:12768];tablet_id=72075186224038393;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.844533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7439654343960776991:12767];tablet_id=72075186224038095;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.845332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038421;self_id=[1:7439654343960777108:12794];tablet_id=72075186224038421;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.846882Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038401;self_id=[1:7439654343960777091:12792];tablet_id=72075186224038401;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.847684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038131;self_id=[1:7439654343960777051:12782];tablet_id=72075186224038131;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.849230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038201;self_id=[1:7439654343960777143:12803];tablet_id=72075186224038201;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.850152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038277;self_id=[1:7439654343960777072:12787];tablet_id=72075186224038277;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.851609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038356;self_id=[1:7439654343960777185:12812];tablet_id=72075186224038356;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.852392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038898;self_id=[1:7439654343960777230:12823];tablet_id=72075186224038898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.854141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038190;self_id=[1:7439654343960777210:12818];tablet_id=72075186224038190;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.854614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038755;self_id=[1:7439654343960777165:12808];tablet_id=72075186224038755;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.856517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038185;self_id=[1:7439654343960777270:12832];tablet_id=72075186224038185;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.856880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038147;self_id=[1:7439654343960777359:12852];tablet_id=72075186224038147;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.858845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038637;self_id=[1:7439654343960777251:12828];tablet_id=72075186224038637;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.859241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038757;self_id=[1:7439654343960777319:12843];tablet_id=72075186224038757;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.861209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038652;self_id=[1:7439654343960777340:12848];tablet_id=72075186224038652;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.861689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038873;self_id=[1:7439654343960777442:12873];tablet_id=72075186224038873;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.863541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038694;self_id=[1:7439654343960777522:12893];tablet_id=72075186224038694;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.864082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038429;self_id=[1:7439654343960777482:12884];tablet_id=72075186224038429;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.865868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038774;self_id=[1:7439654343960777383:12858];tablet_id=72075186224038774;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.866475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038710;self_id=[1:7439654343960777419:12867];tablet_id=72075186224038710;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.868103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038236;self_id=[1:7439654343960777601:12913];tablet_id=72075186224038236;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.868840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038169;self_id=[1:7439654343960777562:12903];tablet_id=72075186224038169;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.870406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038282;self_id=[1:7439654343960777533:12894];tablet_id=72075186224038282;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.871152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038330;self_id=[1:7439654343960777607:12914];tablet_id=72075186224038330;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.872884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038764;self_id=[1:7439654343960777395:12859];tablet_id=72075186224038764;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.873494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7439654343960777573:12904];tablet_id=72075186224038071;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.875250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038642;self_id=[1:7439654343960777481:12883];tablet_id=72075186224038642;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.875782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;self_id=[1:7439654343960777299:12838];tablet_id=72075186224038269;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.877572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038265;self_id=[1:7439654343960777645:12924];tablet_id=72075186224038265;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.878217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038842;self_id=[1:7439654343960777640:12923];tablet_id=72075186224038842;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.879881Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038285;self_id=[1:7439654343960777449:12874];tablet_id=72075186224038285;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:29.971144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179509144, txId: 18446744073709551615] shutting down [[0u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DCE91 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:580: Execute_ @ 0x124DCE91 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FE33ABF1D8F 11. ??:0: ?? @ 0x7FE33ABF1E3F 12. ??:0: ?? @ 0x11815028 >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Debug] |91.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:58:32.811867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.811885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.811889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.811892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.811896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.811899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.811905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.811990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.819526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T08:58:32.819542Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:32.822271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.822979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.823012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:32.824381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.824538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.824604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.824659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.825451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.825679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.825687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.825717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.825722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.825726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.825735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.826584Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.841306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.841372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.841452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.841505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.841512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.842133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.842154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T08:58:32.842192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.842201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.842206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.842211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.842681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.842691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.842696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.843009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.843016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.843021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.843028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.843599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.843947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.843996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.844160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.844264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.844272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.844302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.844315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.844739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.844781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:32.844834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.844849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.844853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.844857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.844863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.844867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.844870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.844882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.844888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.844892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:58:32.845099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.845107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.845110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:58:32.845114Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:58:32.845116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.845125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
{ Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:33.423736Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.423759Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 26us result status StatusSuccess 2024-11-21T08:58:33.423814Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:33.423865Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.423895Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 31us result status StatusSuccess 2024-11-21T08:58:33.424036Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T08:58:33.424125Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.424154Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 32us result status StatusSuccess 2024-11-21T08:58:33.424290Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |91.6%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:58:32.829576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.829591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.829595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.829598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.829602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.829605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.829611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.829674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.839025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:32.839044Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:32.840937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.841531Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.841564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:32.842792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.843001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.843088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.843147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.844229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.844549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.844555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.844560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.844571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.845696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.857160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.857236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.857291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.857332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.857337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.858322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.858350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:32.858396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.858406Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.858410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.858415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.858875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.858886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.858891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.859218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.859227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.859232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.859238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.859656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.859934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.859973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.860106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.860122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.860129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.860169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.860174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.860195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.860224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.860509Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.860514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.860548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.860552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:32.860609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.860613Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.860621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.860624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.860627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.860630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.860633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.860635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.860642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.860646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.860649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:58:32.860835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.860843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.860846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:58:32.860850Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:58:32.860852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.860862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:33.445046Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.445058Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2024-11-21T08:58:33.445088Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:33.445117Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.445132Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 16us result status StatusSuccess 2024-11-21T08:58:33.445240Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: 
EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T08:58:33.445284Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:33.445304Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 22us result status StatusSuccess 2024-11-21T08:58:33.445387Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:58:32.924592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.924616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.924621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.924626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.924632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.924636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.924644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.924725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.934744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:32.934758Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:32.937508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.938359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.938393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T08:58:32.939836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.940031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.940121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.940184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.941148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.941403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.941415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.941451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.941459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.941465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.941477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.942722Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.957174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.957235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.957333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.957338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:32.957982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.957991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.957994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.958318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.958327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.958332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.958606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.958612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.958616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.958621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.959023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.959395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.959444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.959622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.959648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.959657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.959708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.959715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.959744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.959756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.960203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.960230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.960271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.960276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:32.960331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.960336Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.960344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.960347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.960351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.960354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.960357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.960360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.960368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.960373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.960375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:58:32.960586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.960595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.960598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:58:32.960602Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:58:32.960604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.960615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 97 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:58:33.491637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 186 } } 2024-11-21T08:58:33.491641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2024-11-21T08:58:33.491649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 186 } } 2024-11-21T08:58:33.491656Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 186 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T08:58:33.491850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.491857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T08:58:33.491870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.491876Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:58:33.491883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.491896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:33.491900Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.491904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:58:33.491910Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 102:0 129 -> 240 2024-11-21T08:58:33.491982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.491987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2024-11-21T08:58:33.491998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.492002Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:58:33.492009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T08:58:33.492015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492018Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492028Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2024-11-21T08:58:33.492501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492913Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:58:33.492926Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 2/3 2024-11-21T08:58:33.492929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2024-11-21T08:58:33.492932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2024-11-21T08:58:33.492955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at 
schemeshard: 72057594046678944 2024-11-21T08:58:33.492985Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:33.492987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:58:33.492991Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/3 2024-11-21T08:58:33.492993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2024-11-21T08:58:33.492996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2024-11-21T08:58:33.493006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:383:2348] message: TxId: 102 2024-11-21T08:58:33.493009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2024-11-21T08:58:33.493013Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T08:58:33.493016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T08:58:33.493031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:58:33.493034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:58:33.493036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T08:58:33.493038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T08:58:33.493042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:58:33.493047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:58:33.493049Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T08:58:33.493051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T08:58:33.493056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T08:58:33.493058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:58:33.493098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:33.493102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:58:33.493109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:33.493113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:33.493116Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:58:33.493119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:58:33.493122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:33.493680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:58:33.493690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:466:2430] 2024-11-21T08:58:33.493773Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut >> KqpOlapSparsed::Switching [GOOD] |91.6%| [LD] {RESULT} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:58:32.955879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.955902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.955907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.955912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.955918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.955921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.955929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.956003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.964385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:32.964400Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:32.966490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.967034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.967058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T08:58:32.968056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.968198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.968282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.968322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.969087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.969282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.969290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.969313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.969318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.969321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.969330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.970178Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.983247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.983294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.983327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.983352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.983357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.983845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.983859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:32.983880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.983884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.983887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.983889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.984176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.984182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.984185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.984634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.984643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.984648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.984651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.985011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.985267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.985294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.985394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.985408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.985414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.985445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.985449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.985465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.985472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.985768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.985773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.985794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.985798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:32.985837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.985841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.985848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.985850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.985854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.985857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.985860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.985862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.985868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.985872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.985874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:58:32.986048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.986055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:32.986058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:58:32.986061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:58:32.986063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.986073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 174 } } 2024-11-21T08:58:33.545273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2024-11-21T08:58:33.545282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 174 } } 2024-11-21T08:58:33.545290Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 174 } } FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T08:58:33.545481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2024-11-21T08:58:33.545529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545535Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:58:33.545539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 8589936896 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545551Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2024-11-21T08:58:33.545666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2024-11-21T08:58:33.545677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545680Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:58:33.545684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 321 RawX2: 8589936897 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T08:58:33.545687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545692Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:58:33.545696Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T08:58:33.546057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.546578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.546615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.546696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T08:58:33.546701Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:33.546704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T08:58:33.546713Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2024-11-21T08:58:33.546716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2024-11-21T08:58:33.546721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2024-11-21T08:58:33.547059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.547181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T08:58:33.547187Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:33.547190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: 
[OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T08:58:33.547197Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2024-11-21T08:58:33.547199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2024-11-21T08:58:33.547202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2024-11-21T08:58:33.547205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2024-11-21T08:58:33.547209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T08:58:33.547212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T08:58:33.547229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T08:58:33.547232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:58:33.547240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T08:58:33.547247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T08:58:33.547252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T08:58:33.547256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:58:33.547259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T08:58:33.547262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T08:58:33.547269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T08:58:33.547272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:58:33.547479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:33.547487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:58:33.547498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:33.547502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:33.547508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:58:33.547511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:58:33.547516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:33.548273Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T08:58:33.548309Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2024-11-21T08:58:33.601539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T08:58:33.601552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T08:58:33.601626Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T08:58:33.601640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T08:58:33.601643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:665:2553] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::Switching [GOOD] Test command err: Trying to start YDB, gRPC: 3789, MsgBus: 62036 2024-11-21T08:57:36.391475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654118405764685:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:36.391709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00484a/r3tmp/tmpyZDnkw/pdisk_1.dat 2024-11-21T08:57:36.455066Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3789, node 1 2024-11-21T08:57:36.462414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:36.462427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:36.462429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:36.462460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62036 2024-11-21T08:57:36.492739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:36.492769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:36.493782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62036 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:36.532690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:36.537023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:36.545796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:36.545858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:36.545884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:36.545905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:36.545921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:36.545935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:36.545949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:36.545963Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:36.545981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:36.545995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:36.546008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:36.546022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:36.547959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:36.547979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:36.547995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:36.548008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:36.548019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:36.548034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:36.548052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:36.548069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:36.548089Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:36.548102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:36.548112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:36.548120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:36.549850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:36.549869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:36.549887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:36.549896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:36.549904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:36.549911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:36.549918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:36.549926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:36.549939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654118405765350 ... 
icWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.263582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.263590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.301954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.302003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.363869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.363892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.363927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.363928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.402306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.402355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.464281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.464340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.464374Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.464451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.502719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.502801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.564587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.564626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.564647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.564690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; Timing: wait took 5 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` WHERE field == 'abcde' RESULT: 2024-11-21T08:58:27.603010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.603065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.665091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.665157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.665183Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.665207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.687998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179507000, txId: 18446744073709551615] shutting down count: 12726 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T08:58:27.703352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.703409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.765342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.765423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.765457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.765509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:58:27.786206Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179507674, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 0 seconds WAIT_COMPACTION: 9 2024-11-21T08:58:27.803851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654118405765375:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:58:27.815218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=c600e222-a7e611ef-b0267c03-114336d4;fline=with_appended.cpp:80;portions=9,;task_id=c600e222-a7e611ef-b0267c03-114336d4; 2024-11-21T08:58:27.865790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654118405765346:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:58:27.865837Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654118405765343:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:58:27.878526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=c60a5866-a7e611ef-8d3e3178-659991af;fline=with_appended.cpp:80;portions=9,;task_id=c60a5866-a7e611ef-8d3e3178-659991af; 2024-11-21T08:58:27.880043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=c60a555a-a7e611ef-94aa39e0-4d44069a;fline=with_appended.cpp:80;portions=9,;task_id=c60a555a-a7e611ef-94aa39e0-4d44069a; WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` WHERE field == 'abcde' RESULT: 2024-11-21T08:58:33.881146Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179513000, txId: 18446744073709551615] shutting down count: 12726 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T08:58:33.963870Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179513869, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 0 seconds Timing: wait took 0 seconds >> KqpOlapBlobsSharing::SplitEmpty [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] >> KqpOlap::PushdownFilter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::SplitEmpty [GOOD] Test command err: Trying to start YDB, gRPC: 16811, MsgBus: 18210 2024-11-21T08:57:56.511288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654203998300580:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:56.511482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b5/r3tmp/tmpizdefN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16811, node 1 2024-11-21T08:57:56.569171Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:56.569328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:56.569341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:56.569343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:56.569378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18210 TClient is connected to server localhost:18210 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:56.610442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:56.612100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:56.612130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:56.613281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:56.692201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:57.219265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.219295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.219298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.219320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.219329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.219341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.219345Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.219353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.219356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.219364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.219367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.219395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.219397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.219416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.219434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.219434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.219468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.219474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.219490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.219492Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.219513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.219514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.219548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654208293270335:2311];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.219553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038908;self_id=[1:7439654208293270314:2307];tablet_id=72075186224038908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.221691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.221707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.221728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.221743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.221757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.221773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.221785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.221791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439654208293270289:2301];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.221798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654208293270327:2310];tablet_id=7207518622403 ... 
unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720705:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.421063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720706:0, at schemeshard: 72057594046644480 2024-11-21T08:58:15.427494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720707:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:16.314395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720708:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.322574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720709:0, at schemeshard: 72057594046644480 2024-11-21T08:58:16.329637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720710:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:17.858202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720711:0, at schemeshard: 72057594046644480 2024-11-21T08:58:17.868916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720712:0, at schemeshard: 72057594046644480 2024-11-21T08:58:17.875601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720713:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:18.938501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720714:0, at schemeshard: 72057594046644480 2024-11-21T08:58:18.947917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720715:0, at schemeshard: 72057594046644480 2024-11-21T08:58:18.953334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720716:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:19.338704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720717:0, at schemeshard: 72057594046644480 2024-11-21T08:58:19.345145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720718:0, at schemeshard: 72057594046644480 2024-11-21T08:58:19.353052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720719:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T08:58:20.980101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720720:0, at schemeshard: 72057594046644480 2024-11-21T08:58:20.991400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720721:0, at schemeshard: 72057594046644480 2024-11-21T08:58:20.997571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720722:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:21.290749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720723:0, at schemeshard: 72057594046644480 2024-11-21T08:58:21.298967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720724:0, at schemeshard: 72057594046644480 2024-11-21T08:58:21.305900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720725:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:23.056073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720726:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:23.062860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720727:0, at schemeshard: 72057594046644480 2024-11-21T08:58:23.070002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720728:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:24.063733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720729:0, at schemeshard: 72057594046644480 2024-11-21T08:58:24.070608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720730:0, at schemeshard: 72057594046644480 2024-11-21T08:58:24.077090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720731:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T08:58:25.503067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720732:0, at schemeshard: 72057594046644480 2024-11-21T08:58:25.512824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720733:0, at schemeshard: 72057594046644480 2024-11-21T08:58:25.520493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720734:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:26.468410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720735:0, at schemeshard: 72057594046644480 2024-11-21T08:58:26.478936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720736:0, at schemeshard: 72057594046644480 2024-11-21T08:58:26.485142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720737:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:27.716810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720738:0, at schemeshard: 72057594046644480 2024-11-21T08:58:27.724169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720739:0, at schemeshard: 72057594046644480 2024-11-21T08:58:27.731219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720740:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:29.467676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720741:0, at schemeshard: 72057594046644480 2024-11-21T08:58:29.474208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720742:0, at schemeshard: 72057594046644480 2024-11-21T08:58:29.481476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720743:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T08:58:30.251787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720744:0, at schemeshard: 72057594046644480 2024-11-21T08:58:30.258619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720745:0, at schemeshard: 72057594046644480 2024-11-21T08:58:30.265135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720746:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:31.259555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720747:0, at schemeshard: 72057594046644480 2024-11-21T08:58:31.266163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720748:0, at schemeshard: 72057594046644480 2024-11-21T08:58:31.273126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720749:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:32.231958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720750:0, at schemeshard: 72057594046644480 2024-11-21T08:58:32.240714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720751:0, at schemeshard: 72057594046644480 2024-11-21T08:58:32.247169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720752:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T08:58:33.394702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720753:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() RESHARDING_FINISHED 2024-11-21T08:58:34.214537Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179514000, txId: 18446744073709551615] shutting down [[0u]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PushdownFilter [GOOD] Test command err: Trying to start YDB, gRPC: 22453, MsgBus: 27221 2024-11-21T08:58:33.513779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654361250838377:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:33.513792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00475c/r3tmp/tmpUy3lam/pdisk_1.dat 2024-11-21T08:58:33.559259Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22453, node 1 2024-11-21T08:58:33.566852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:33.566862Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:33.566863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:33.566889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27221 TClient is connected to server localhost:27221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:33.608477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:33.614458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:33.614489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:33.615563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:33.621702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:33.627234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:58:33.629300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:58:33.629337Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:58:33.629753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:33.629799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:33.629831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:33.629851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:33.629876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:33.629895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:33.629917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:33.629933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:33.629956Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:33.629971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:33.629985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:33.630001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654361250839028:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:33.630324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T08:58:33.630339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:58:33.630346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:58:33.630359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:58:33.630386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:33.630398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:33.630405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:58:33.630412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:58:33.630422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:33.630429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:33.630431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:58:33.630440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:58:33.630448Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:33.630452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:33.630453Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T08:58:33.630458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T08:58:33.630464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:33.630468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:33.630473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T08:58:33.630479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:33.630486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:33.630487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T08:58:33.630491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:33.630497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=R ... s } 2024-11-21T08:58:34.890202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439654365510398127:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:34.890259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:34.890857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:58:34.892694Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439654365510398156:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:58:34.999537Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732179515000:max} readable: {1732179515038:max} at tablet 72075186224037890 2024-11-21T08:58:34.999587Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732179515000:max} readable: {1732179515038:max} at tablet 72075186224037888 2024-11-21T08:58:34.999588Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037890 2024-11-21T08:58:34.999618Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037888 2024-11-21T08:58:34.999706Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7439654365510397903:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T08:58:34.999707Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7439654365510397869:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? 
\002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T08:58:35.004982Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7439654365510397903:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T08:58:35.005086Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7439654365510397869:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T08:58:35.005278Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732179515000:max} readable: {1732179515038:max} at tablet 72075186224037889 2024-11-21T08:58:35.005321Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037889 2024-11-21T08:58:35.005456Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7439654365510397868:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 
4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T08:58:35.005577Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7439654365510397868:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732179515000:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T08:58:35.005644Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439654365510397924:2297];tablet_id=72075186224037888;parent=[3:7439654365510397869:2289];fline=manager.h:99;event=ask_data;request=request_id=7;3={portions_count=1};; 2024-11-21T08:58:35.005720Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439654365510397938:2303];tablet_id=72075186224037890;parent=[3:7439654365510397903:2291];fline=manager.h:99;event=ask_data;request=request_id=8;3={portions_count=1};; 2024-11-21T08:58:35.005839Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439654365510397917:2294];tablet_id=72075186224037889;parent=[3:7439654365510397868:2288];fline=manager.h:99;event=ask_data;request=request_id=9;3={portions_count=1};; 2024-11-21T08:58:35.005852Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:58:35.005860Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:58:35.005912Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:58:35.005934Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:58:35.005945Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:58:35.006104Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:58:35.006211Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T08:58:35.006276Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T08:58:35.006336Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T08:58:35.006354Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2024-11-21T08:58:35.006596Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2024-11-21T08:58:35.008945Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179515000, txId: 18446744073709551615] shutting down >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] |91.6%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/pgwire/pgwire |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |91.6%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire >> KqpOlapSysView::StatsSysViewBytesDictStatActualization [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesDictStatActualization [GOOD] Test command err: Trying to start YDB, gRPC: 10019, MsgBus: 28223 2024-11-21T08:58:06.402819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654244184900145:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:06.403183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004785/r3tmp/tmpBwiYOY/pdisk_1.dat 2024-11-21T08:58:06.469950Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10019, node 1 2024-11-21T08:58:06.480425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:06.480443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:06.480445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:06.480493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28223 2024-11-21T08:58:06.503659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:06.503684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:06.507077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:06.551725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:06.556410Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:58:06.567883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:06.579393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.579452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.579481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.579501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.579514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.579528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.579549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.579571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.579590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.579604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.579617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.579631Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654244184900798:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.579971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:06.579983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:06.579990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:06.579993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:06.580004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:06.580006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:06.580011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:06.580017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:06.580022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:06.580029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:06.580032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:06.580034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:06.580070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:06.580078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:06.580088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:06.580094Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.580101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:06.580108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:06.580117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:06.580123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:06.580129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:06.580131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T08:58:06.582305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654244184900799:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.582319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654244184900799:2291];tablet_id=720751862 ... 
LUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.842993Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 8 Optimized: Stats: 2024-11-21T08:58:35.843055Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843140Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843244Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; PathId: 3 Rows: 2024-11-21T08:58:35.843334Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 266065 TierName2024-11-21T08:58:35.843400Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843464Z node 1 :TX_COLUMNSHARD CRIT: 
fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843526Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843596Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843707Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; : __DEFAULT Activity:  ColumnRawBytes: 2024-11-21T08:58:35.843814Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 17028160 2024-11-21T08:58:35.843893Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.843957Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844030Z node 1 :TX_COLUMNSHARD CRIT: 
fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; Details: {"primary_key_max":"799999;","primary_key_min":"2;","snapshot_min":{"plan_step":1732179487000,"tx_id":18446744073709551615},"snapshot_max":{"plan_step":1732179487000,"tx_id":18446744073709551615}} TabletId: 72075186224037890 Kind: INSERTED PortionId: 4 ColumnBlobBytes: 1211576 CompactionLevel: 0 IndexRawBytes: 8 IndexBlobBytes2024-11-21T08:58:35.844117Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844183Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; : 8 Optimized: Stats: PathId: 3 2024-11-21T08:58:35.844263Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844328Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844390Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; Rows: 267322 2024-11-21T08:58:35.844452Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844513Z node 1 
:TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.844608Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; TierName2024-11-21T08:58:35.844709Z node 1 :TX_COLUMNSHARD CRIT: fline=program.cpp:410;event=program_arg_is_missing;program=[{assignes=[{op=Constant;const=1;column=G:16;};{yql_op=Equals;arguments=[G:10;G:16;];kernel=local_function;column=G:17;};{op=Constant;const=0;column=G:18;};{yql_op=Coalesce;arguments=[G:17;G:18;];kernel=local_function;column=G:19;};];filters=[G:19;];projections=[G:10;G:7;G:5;G:14;G:15;G:8;G:6;pk_int;G:13;field;G:9;G:4;G:12;ts;G:11;];};]; 2024-11-21T08:58:35.854443Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179515837, txId: 281474976715681] shutting down : __DEFAULT Activity:  ColumnRawBytes: 17108608 Details: {"primary_key_max":"799997;","primary_key_min":"1;","snapshot_min":{"plan_step":1732179487000,"tx_id":18446744073709551615},"snapshot_max":{"plan_step":1732179487000,"tx_id":18446744073709551615}} TabletId: 72075186224037889 Kind: INSERTED PortionId: 4 ColumnBlobBytes: 1217248 CompactionLevel: 0 IndexRawBytes: 8 IndexBlobBytes: 8 Optimized: Stats: PathId: 3 |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[pg-tpcds-q78-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Debug] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-tpcds-q78-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> Trace::SkipSpaces [GOOD] >> Trace::NextToken [GOOD] >> Trace::TTraceEvent [GOOD] >> Trace::TExpectedTraceEvent [GOOD] >> Trace::TExpectedTrace [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart >> TxUsage::WriteToTopic_Demo_4 >> TxUsage::WriteToTopic_Demo_1 >> TxUsage::WriteToTopic_Demo_3 >> TxUsage::WriteToTopic_Demo_2 >> TxUsage::WriteToTopic_Two_WriteSession |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> BasicUsage::WriteRead >> BasicUsage::ConnectToYDB >> TxUsage::WriteToTopic_Demo_21_RestartNo >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Results] |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |91.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait [FAIL] |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |91.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Analyze] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[pg-tpcds-q85-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Debug] |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |91.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMergesWithRestartsWhenWait [FAIL] Test command err: Trying to start YDB, gRPC: 28292, MsgBus: 1323 2024-11-21T08:57:45.221753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654155313450638:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:45.222001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047f5/r3tmp/tmpJpxTk8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28292, node 1 2024-11-21T08:57:45.275354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:45.277044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:45.277058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:45.277060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:45.277094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1323 TClient is connected to server localhost:1323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:45.323365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:45.323401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:45.324686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:45.355185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:45.443611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:46.014901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.014948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.014988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.015023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.015044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.015089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.015109Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.015144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:46.015175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:46.015197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.015222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:46.015249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654155313453580:2296];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:46.017235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.017274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.017309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.017331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.017355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.017371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.017385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.017403Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:46.017427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:46.017449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:46.017469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:46.017492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439654155313453663:2297];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:46.019297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:46.019321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:46.019356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:46.019378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:46.019401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:46.019425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:46.019446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:46.019467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654155313453843:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:46.019490Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654155313453843: ... line=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.288191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038164;self_id=[1:7439654378651919701:11074];tablet_id=72075186224038164;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.290530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038302;self_id=[1:7439654378651919890:8738];tablet_id=72075186224038302;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.292946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038386;self_id=[1:7439654378651919848:8266];tablet_id=72075186224038386;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.295537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038742;self_id=[1:7439654378651919984:7102];tablet_id=72075186224038742;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.298176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038278;self_id=[1:7439654378651919940:8240];tablet_id=72075186224038278;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.301505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038704;self_id=[1:7439654378651919875:14566];tablet_id=72075186224038704;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.305408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038172;self_id=[1:7439654378651919965:8778];tablet_id=72075186224038172;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.309343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7439654378651919741:8648];tablet_id=72075186224038000;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.314414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7439654378651920037:14499];tablet_id=72075186224038043;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.315228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038331;self_id=[1:7439654378651920051:14498];tablet_id=72075186224038331;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.317891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7439654378651920182:9062];tablet_id=72075186224038009;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.318845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654378651920131:5039];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.320068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038750;self_id=[1:7439654378651920130:6014];tablet_id=72075186224038750;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.322736Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038687;self_id=[1:7439654378651920433:8073];tablet_id=72075186224038687;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.322779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7439654378651920406:5843];tablet_id=72075186224038082;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.325945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038357;self_id=[1:7439654378651920314:3167];tablet_id=72075186224038357;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.326484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7439654378651920353:3234];tablet_id=72075186224038046;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.328327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7439654378651920368:7207];tablet_id=72075186224038067;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.330038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038287;self_id=[1:7439654378651920469:9285];tablet_id=72075186224038287;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.330600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038369;self_id=[1:7439654378651920267:7136];tablet_id=72075186224038369;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.333405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038188;self_id=[1:7439654378651920260:6144];tablet_id=72075186224038188;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.334981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038182;self_id=[1:7439654378651920183:5054];tablet_id=72075186224038182;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.335990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038791;self_id=[1:7439654378651920634:6038];tablet_id=72075186224038791;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.338360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038152;self_id=[1:7439654378651920249:4427];tablet_id=72075186224038152;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.338580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038836;self_id=[1:7439654378651920766:5267];tablet_id=72075186224038836;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.340906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038099;self_id=[1:7439654378651920339:3534];tablet_id=72075186224038099;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.342202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038537;self_id=[1:7439654378651920501:6353];tablet_id=72075186224038537;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.343274Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038617;self_id=[1:7439654378651920389:7706];tablet_id=72075186224038617;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.345616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038448;self_id=[1:7439654378651920542:8519];tablet_id=72075186224038448;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.345960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038637;self_id=[1:7439654378651920578:8019];tablet_id=72075186224038637;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.348416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038481;self_id=[1:7439654378651920309:6285];tablet_id=72075186224038481;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.349287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038269;self_id=[1:7439654378651920448:10624];tablet_id=72075186224038269;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.350991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038195;self_id=[1:7439654378651920500:4349];tablet_id=72075186224038195;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.352824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038803;self_id=[1:7439654378651920660:3580];tablet_id=72075186224038803;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.353443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038826;self_id=[1:7439654378651920553:7935];tablet_id=72075186224038826;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.355932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038198;self_id=[1:7439654378651920720:5138];tablet_id=72075186224038198;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.356374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038854;self_id=[1:7439654378651920619:4430];tablet_id=72075186224038854;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.358303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7439654378651920613:5099];tablet_id=72075186224038058;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.359833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038787;self_id=[1:7439654378651920692:4414];tablet_id=72075186224038787;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.360680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[1:7439654378651920785:8542];tablet_id=72075186224037986;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.362932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038784;self_id=[1:7439654378651920724:5085];tablet_id=72075186224038784;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.363430Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038387;self_id=[1:7439654378651920812:4035];tablet_id=72075186224038387;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.365401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038222;self_id=[1:7439654378651920754:4790];tablet_id=72075186224038222;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:37.434880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179517000, txId: 18446744073709551615] shutting down [[0u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DE8F1 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:679: Execute_ @ 0x124DE8F1 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FAE39021D8F 11. ??:0: ?? @ 0x7FAE39021E3F 12. ??:0: ?? @ 0x11815028 |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ydb_stress_tool |91.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> test.py::test[pg-tpcds-q85-default.txt-Debug] [GOOD] |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/benchmark/benchmark |91.6%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/benchmark |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/benchmark >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] 
recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:58:43.315919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:43.315938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:43.315942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:43.315945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:43.315949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:43.315953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:43.315959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:43.316041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:43.323417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:43.323434Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:43.325502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:43.326038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:43.326066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:43.326956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:43.327095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:43.327157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:43.327202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:43.327828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:43.328023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:43.328030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:43.328055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:43.328059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:43.328064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:43.328072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.328933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] 
recipient: [1:15:2062] 2024-11-21T08:58:43.339371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:43.339475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.339538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:43.339588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:43.339597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.340476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:43.340500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:43.340539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.340549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:43.340552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:43.340556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:43.340902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.340909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:43.340912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:43.341143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.341149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.341153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:43.341157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:43.341683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:43.342040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2024-11-21T08:58:43.342088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:43.342248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:43.342267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:43.342271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:43.342314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:43.342319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:43.342345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:43.342355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:43.342659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:43.342666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:43.342709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:43.342712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:43.342777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.342781Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:43.342789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:43.342792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:43.342795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:43.342798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:43.342802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:43.342804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:43.342811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:43.342815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T08:58:43.342818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:58:43.343053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:43.343063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:58:43.343066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:58:43.343070Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:58:43.343073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:43.343082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 294Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:58:43.749304Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:58:43.749346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T08:58:43.749367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:58:43.749385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T08:58:43.749388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T08:58:43.749392Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T08:58:43.749434Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:43.749447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:43.749452Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T08:58:43.749455Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T08:58:43.749712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T08:58:43.749718Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T08:58:43.749727Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T08:58:43.749730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T08:58:43.749735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T08:58:43.749744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2024-11-21T08:58:43.749749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T08:58:43.749753Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T08:58:43.749757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T08:58:43.749764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T08:58:43.749997Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T08:58:43.750006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T08:58:43.750011Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T08:58:43.750019Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read 
rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:58:43.750326Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:58:43.750349Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:58:43.750358Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T08:58:43.750721Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T08:58:43.750739Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T08:58:43.750742Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T08:58:43.750760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T08:58:43.750764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:614:2567] TestWaitNotification: OK eventTxId 102 2024-11-21T08:58:43.750857Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:43.750899Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 52us result status StatusSuccess 
2024-11-21T08:58:43.750996Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] Test command err: Trying to start YDB, gRPC: 24116, MsgBus: 3587 2024-11-21T08:58:03.902701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654231066347641:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.902884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478c/r3tmp/tmpDSBGsf/pdisk_1.dat 2024-11-21T08:58:03.946864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24116, node 1 2024-11-21T08:58:03.959518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.959533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.959535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.959570Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3587 TClient is connected to server localhost:3587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:04.004065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:04.004095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:04.005143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:04.029516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:04.114781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:04.689607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.689645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.689658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.689666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.689758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.689762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.689829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.689832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.689869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.689873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.689900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.689914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:04.689929Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.689931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:04.689955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.689956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:04.690019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.690031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:04.690072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.690079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:04.690154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.690158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:04.690208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654235361317352:2301];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.690224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654235361317359:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:04.692542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439654235361317353:2302];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.692563Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038910;self_id=[1:7439654235361317353:2302];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.692575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654235361317360:2309];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:04.692583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439654235361317353:2302];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.692583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654235361317360:2309];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:04.692602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439654235361317353:2302];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:04.692602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654235361317360:2309];tablet_id=72075186224038015;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:04.692613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439654235361317353:2302];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:04.692613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7439654235361317360:2309];tablet_id=72075186224038015;process=TTxInitSchema:: ... 
zation_start;last_saved_id=16; 2024-11-21T08:58:41.778676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038210;self_id=[1:7439654394275193399:12444];tablet_id=72075186224038210;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.780150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038134;self_id=[1:7439654394275193628:12499];tablet_id=72075186224038134;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.780924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038221;self_id=[1:7439654394275193752:12529];tablet_id=72075186224038221;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.782624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038662;self_id=[1:7439654394275193428:12453];tablet_id=72075186224038662;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.783317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038840;self_id=[1:7439654394275193698:12518];tablet_id=72075186224038840;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.785080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038652;self_id=[1:7439654394275193658:12508];tablet_id=72075186224038652;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.785641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038392;self_id=[1:7439654394275193006:12356];tablet_id=72075186224038392;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.787685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038429;self_id=[1:7439654394275193781:12538];tablet_id=72075186224038429;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.787926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038218;self_id=[1:7439654394275193301:12424];tablet_id=72075186224038218;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.790149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038822;self_id=[1:7439654394275193384:12443];tablet_id=72075186224038822;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.791059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038686;self_id=[1:7439654394275193585:12492];tablet_id=72075186224038686;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.792761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038713;self_id=[1:7439654394275193468:12463];tablet_id=72075186224038713;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.793387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038472;self_id=[1:7439654394275194053:12599];tablet_id=72075186224038472;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.795435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7439654394275193566:12488];tablet_id=72075186224038085;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.795505Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038367;self_id=[1:7439654394275193713:12519];tablet_id=72075186224038367;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.797880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038279;self_id=[1:7439654394275193955:12578];tablet_id=72075186224038279;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.797941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038774;self_id=[1:7439654394275193872:12558];tablet_id=72075186224038774;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.800076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038739;self_id=[1:7439654394275194212:12638];tablet_id=72075186224038739;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.800350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038370;self_id=[1:7439654394275193215:12403];tablet_id=72075186224038370;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.802272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038717;self_id=[1:7439654394275194133:12619];tablet_id=72075186224038717;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.804479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038373;self_id=[1:7439654394275194171:12628];tablet_id=72075186224038373;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.804695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038754;self_id=[1:7439654394275194264:12649];tablet_id=72075186224038754;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.806733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038202;self_id=[1:7439654394275193921:12569];tablet_id=72075186224038202;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.807405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[1:7439654394275193914:12568];tablet_id=72075186224037997;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.809261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038655;self_id=[1:7439654394275193524:12477];tablet_id=72075186224038655;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.809893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038377;self_id=[1:7439654394275193961:12579];tablet_id=72075186224038377;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.811516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038303;self_id=[1:7439654394275194000:12589];tablet_id=72075186224038303;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.812382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038135;self_id=[1:7439654394275193741:12528];tablet_id=72075186224038135;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.813856Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038324;self_id=[1:7439654394275193613:12498];tablet_id=72075186224038324;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.814881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038109;self_id=[1:7439654394275193840:12549];tablet_id=72075186224038109;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.816339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038797;self_id=[1:7439654394275194087:12608];tablet_id=72075186224038797;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.817539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;self_id=[1:7439654394275193343:12433];tablet_id=72075186224038697;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.818660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7439654394275194088:12609];tablet_id=72075186224038020;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.819808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038807;self_id=[1:7439654394275194130:12618];tablet_id=72075186224038807;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.821074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038337;self_id=[1:7439654394275193999:12588];tablet_id=72075186224038337;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.822042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038865;self_id=[1:7439654394275193828:12548];tablet_id=72075186224038865;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.823688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;self_id=[1:7439654394275193792:12539];tablet_id=72075186224038734;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.824419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038862;self_id=[1:7439654394275193875:12559];tablet_id=72075186224038862;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.827129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038266;self_id=[1:7439654394275194291:12658];tablet_id=72075186224038266;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.827352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038305;self_id=[1:7439654394275194219:12639];tablet_id=72075186224038305;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.829783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7439654394275194173:12629];tablet_id=72075186224038034;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.829882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;self_id=[1:7439654394275194047:12598];tablet_id=72075186224038701;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.832199Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038639;self_id=[1:7439654394275194253:12648];tablet_id=72075186224038639;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:41.910675Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179521002, txId: 18446744073709551615] shutting down [[2u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[2u]]|[[3u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DBBA9 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:545: Execute_ @ 0x124DBBA9 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F38128A9D8F 11. ??:0: ?? @ 0x7F38128A9E3F 12. ??:0: ?? @ 0x11815028 |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> test.py::test[pg-tpch-q18-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Debug] |91.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> test.py::test[pg-tpch-q18-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] >> KqpOlap::BulkUpsertUpdate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BulkUpsertUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 27238, MsgBus: 5020 2024-11-21T08:57:46.694760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654158334729164:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:46.694947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047e3/r3tmp/tmpRGsVIV/pdisk_1.dat 2024-11-21T08:57:46.749476Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27238, node 1 2024-11-21T08:57:46.760658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:46.760689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:46.760692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:46.760727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5020 TClient is connected to server localhost:5020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:46.796406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:46.796434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:46.799295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:46.805955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, value Int32 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:47.013793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654162629697077:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.013828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.044346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:47.051640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:47.051672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:47.051707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:47.051723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:47.051738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:47.051752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:47.051766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:47.051780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:47.051794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:47.051813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:47.051827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:47.051842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:47.052435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:47.052444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:47.052454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:47.052459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:47.052477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:47.052481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:47.052489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:47.052497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:47.052504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:47.052507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:47.052512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:47.052515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:47.052562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:47.052566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:47.052579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:47.052582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:47.052591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:47.052595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:47.052607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:47.052610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:47.052621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:47.052624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2024-11-21T08:57:47.120591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654162629697297:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.120639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.120743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654162629697302:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:47.121542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T08:57:47.123324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654162629697304:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T08:57:47.273919Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467179, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2024-11-21T08:57:47.322377Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179467298, txId: 18446744073709551615] shutting down 2024-11-21T08:57:51.695066Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654158334729164:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:51.695101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:58:01.746636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:01.746655Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:47.054529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654162629697153:2303];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:47.057728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=d17a4bde-a7e611ef-8cf1b09f-27facf36;fline=with_appended.cpp:80;portions=3,;task_id=d17a4bde-a7e611ef-8cf1b09f-27facf36; 2024-11-21T08:58:47.368273Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179527000, txId: 18446744073709551615] shutting down >> TxUsage::WriteToTopic_Two_WriteSession [GOOD] >> TTablesWithReboots::CopyAlterWithReboots >> TTablesWithReboots::AlterTableSchemaWithReboots >> TTablesWithReboots::SimultaneousDropForceDrop |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> BasicUsage::ConnectToYDB [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_5 >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] >> TxUsage::SessionAbort |91.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay/ydb_query_replay |91.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay >> TTablesWithReboots::TwiceRmDirWithReboots >> TTablesWithReboots::CopyWithRebootsAtCommit >> TTablesWithReboots::AlterTableConfigWithReboots |91.7%| [LD] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |91.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TAccessServiceTest::PassRequestId >> TUserAccountServiceTest::Get >> FolderServiceTest::TFolderService >> FolderServiceTest::TFolderServiceTransitional >> FolderServiceTest::TFolderServiceAdapter |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate >> TServiceAccountServiceTest::Get [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TAccessServiceTest::PassRequestId [GOOD] >> TAccessServiceTest::Authenticate [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_21_RestartNo [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2024-11-21T08:58:49.773419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654428440781007:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.773440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00312e/r3tmp/tmp8cTEvp/pdisk_1.dat 2024-11-21T08:58:49.826658Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.874205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.874248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.875330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:49.905207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2024-11-21T08:58:49.841920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654431012535266:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.842217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030ca/r3tmp/tmpMbaXY7/pdisk_1.dat 2024-11-21T08:58:49.904324Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.943252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.943282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.944341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:49.983101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:49.987114Z node 1 :GRPC_CLIENT DEBUG: [164c7f082b10] Connect to grpc://localhost:11231 2024-11-21T08:58:49.987297Z node 1 :GRPC_CLIENT DEBUG: [164c7f082b10] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T08:58:49.989319Z node 1 :GRPC_CLIENT DEBUG: [164c7f082b10] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2024-11-21T08:58:49.989534Z node 1 :GRPC_CLIENT DEBUG: [164c7f083f50] Connect to grpc://localhost:27139 2024-11-21T08:58:49.989615Z node 1 :GRPC_CLIENT DEBUG: [164c7f083f50] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T08:58:49.990822Z node 1 :GRPC_CLIENT DEBUG: [164c7f083f50] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2024-11-21T08:58:49.990937Z node 1 :GRPC_CLIENT DEBUG: [164c7f083f50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T08:58:49.991386Z node 1 :GRPC_CLIENT DEBUG: [164c7f083f50] Status 5 Not Found 2024-11-21T08:58:49.991521Z node 1 :GRPC_CLIENT DEBUG: [164c7f082b10] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T08:58:49.991850Z node 1 :GRPC_CLIENT DEBUG: [164c7f082b10] Status 5 Not Found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2024-11-21T08:58:49.832777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654428826812526:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.832894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030e4/r3tmp/tmprFKHz4/pdisk_1.dat 2024-11-21T08:58:49.880390Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.904550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:49.909206Z node 1 :GRPC_CLIENT DEBUG: [55c67f0816d0]{trololo} Connect to grpc://localhost:27723 2024-11-21T08:58:49.909673Z node 1 :GRPC_CLIENT DEBUG: [55c67f0816d0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2024-11-21T08:58:49.911410Z node 1 :GRPC_CLIENT DEBUG: [55c67f0816d0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2024-11-21T08:58:49.934302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.934334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.935441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2024-11-21T08:58:49.874539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654429197065065:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.874558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030d2/r3tmp/tmpsZ6nx7/pdisk_1.dat 2024-11-21T08:58:49.932423Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.975253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.975288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.976485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:50.007028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:50.012233Z node 1 :GRPC_CLIENT DEBUG: [15763f081490] Connect to grpc://localhost:24001 2024-11-21T08:58:50.012749Z node 1 :GRPC_CLIENT DEBUG: [15763f081490] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2024-11-21T08:58:50.014799Z node 1 :GRPC_CLIENT DEBUG: [15763f081490] Status 7 Permission Denied 2024-11-21T08:58:50.014925Z node 1 :GRPC_CLIENT DEBUG: [15763f081490] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2024-11-21T08:58:50.015420Z node 1 :GRPC_CLIENT DEBUG: [15763f081490] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> LocalPartition::Restarts >> TTablesWithReboots::LostBorrowAckWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2024-11-21T08:58:49.801498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654431709802186:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.801767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030d5/r3tmp/tmpyidRST/pdisk_1.dat 2024-11-21T08:58:49.851417Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.879442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:49.902279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.902303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.903333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:50.080621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654433625134801:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:50.080637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030d5/r3tmp/tmpykqTLZ/pdisk_1.dat 2024-11-21T08:58:50.087421Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:50.182881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:50.182911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:50.183253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:50.183873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected >> TTablesWithReboots::AlterCopyWithReboots |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Analyze] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> FolderServiceTest::TFolderService [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2024-11-21T08:58:49.850649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654428460929738:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.850880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030d0/r3tmp/tmpBDViOL/pdisk_1.dat 2024-11-21T08:58:49.894743Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.921661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:49.924234Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Connect to grpc://localhost:14652 2024-11-21T08:58:49.925734Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T08:58:49.927250Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14652: Failed to connect to remote host: Connection refused 2024-11-21T08:58:49.927606Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T08:58:49.927722Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14652: Failed to connect to remote host: Connection refused 2024-11-21T08:58:49.951865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.951890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.952995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:50.927931Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T08:58:50.928223Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14652: Failed to connect to remote host: Connection refused 2024-11-21T08:58:51.928649Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T08:58:51.929641Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Status 5 Not Found 2024-11-21T08:58:51.929784Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T08:58:51.930354Z node 1 :GRPC_CLIENT DEBUG: [547bbf0816d0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2024-11-21T08:58:49.822022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654431965797808:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:49.822042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00313f/r3tmp/tmpmsNp3l/pdisk_1.dat 2024-11-21T08:58:49.864727Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15310 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:49.922615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:49.922643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:49.923752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:49.947121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:49.958125Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Connect to grpc://localhost:30746 2024-11-21T08:58:49.960008Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T08:58:49.961351Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30746: Failed to connect to remote host: Connection refused 2024-11-21T08:58:49.961790Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T08:58:49.961900Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30746: Failed to connect to remote host: Connection refused 2024-11-21T08:58:50.962140Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T08:58:50.962428Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:30746: Failed to connect to remote host: Connection refused 2024-11-21T08:58:51.962686Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T08:58:51.963627Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Status 5 Not Found 2024-11-21T08:58:51.963735Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T08:58:51.964271Z node 1 :GRPC_CLIENT DEBUG: [1684ff0816d0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TTablesWithReboots::ParallelCreateDrop >> TTablesWithReboots::DropCopyWithRebootsAtCommit >> TTablesWithReboots::CreateDroppedTableAndDropWithReboots >> TTablesWithReboots::CopyTableWithReboots |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TA] 
$(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateDroppedTableWithReboots >> test.py::test[pg-tpch-q21-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Debug] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::Fake [GOOD] >> TxUsage::WriteToTopic_Demo_3 [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::Fake [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_26 >> TxUsage::SessionAbort [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::TwoSessionOneConsumer >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TTablesWithReboots::AlterAndForceDrop |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> test.py::test[pg-tpch-q21-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] >> TTablesWithReboots::CreateWithRebootsAtCommit >> KqpOlapSparsed::SwitchingStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] Test command err: 2024-11-21T08:57:58.999354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:57:59.001784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:59.001884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:59.001915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:59.002188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:59.002198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a6/r3tmp/tmpgPhvlN/pdisk_1.dat 2024-11-21T08:57:59.071714Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:59.152688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:57:59.239128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:59.239156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:59.239950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:59.239962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:59.251128Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:57:59.251231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:59.251302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:59.590399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:59.710108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.710156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.710199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:59.710219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2024-11-21T08:57:59.710236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:59.710255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:59.710273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:59.710291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:59.710311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:59.710330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.710352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:59.710370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:59.716822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:59.716849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:59.716863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:59.716870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:59.716884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:59.716888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:59.716898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 
2024-11-21T08:57:59.716905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:59.716914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:59.716918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:59.716924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:59.716928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:59.716974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:59.716980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:59.716993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:59.716999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:59.717013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:59.717020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:59.717038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:59.717042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:59.717052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:59.717056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:59.740498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:59.740543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:59.740582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:59.740602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:59.740620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGra ... 6224037978;self_id=[2:23117:9396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.768806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:23119:9398];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037980;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.772027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:23121:9400];tablet_id=72075186224037982;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.773168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:23114:9393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037976;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.777710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:23122:9401];tablet_id=72075186224037984;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.780782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:23132:9404];tablet_id=72075186224037986;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.785405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:23133:9405];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.786457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:23117:9396];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.787666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.788731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:23119:9398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037980;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.789784Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037986;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.792936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:23137:9406];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.795668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.796675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.798650Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.799077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:23121:9400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037982;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.799145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:23132:9404];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.799709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:23133:9405];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037943;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.800226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:23122:9401];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037984;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.800594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;self_id=[2:23137:9406];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037945;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.803586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:23164:9407];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.807068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.810832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:23164:9407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.821784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:23168:9410];tablet_id=72075186224037952;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.824954Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.826770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[2:23168:9410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.833382Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037972;self_id=[2:23170:9412];tablet_id=72075186224037972;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.837442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:23171:9413];tablet_id=72075186224037970;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.839967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.840992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.846365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:23175:9416];tablet_id=72075186224037966;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.846901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:23170:9412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.846946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:23171:9413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037970;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.848969Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.849903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:23171:9413];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037970;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.853323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:23175:9416];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037966;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.857014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[2:23178:9418];tablet_id=72075186224037960;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.861438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.866186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:23186:9421];tablet_id=72075186224037962;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.867693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[2:23178:9418];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037960;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.869672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.872834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:23186:9421];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037962;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.875690Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037964;self_id=[2:23189:9422];tablet_id=72075186224037964;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.877452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:23192:9424];tablet_id=72075186224037968;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:58:47.879676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.880868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.882276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:23192:9424];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.882853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:23189:9422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037964;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T08:58:47.883852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:23192:9424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; 2024-11-21T08:58:55.114625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6z3z2q51v45tsx9ywk113a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFiMTBjNy0zZTIyNGYyNC00OWMzZDE0ZC1hNGVkYWY0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"type_id":4}}],"rows":[{"items":[{"uint64_value":200000}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T08:58:55.117307Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jd6z3z2q51v45tsx9ywk113a", SessionId: ydb://session/3?node_id=1&id=NmFiMTBjNy0zZTIyNGYyNC00OWMzZDE0ZC1hNGVkYWY0NA==, Slow query, duration: 10.422098s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT COUNT(*) FROM `/Root/largeOlapStore/largeOlapTable`;", parameters: 0b 2024-11-21T08:58:55.118106Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 5500, txId: 18446744073709551615] shutting down >> TxUsage::WriteToTopic_Demo_2 [GOOD] >> TTablesWithReboots::CopyTableAndDropWithReboots >> TxUsage::WriteToTopic_Demo_1 [GOOD] >> TxUsage::WriteToTopic_Demo_4 [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_10 >> TxUsage::WriteToTopic_Demo_34 >> TxUsage::WriteToTopic_Demo_18_RestartNo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 32716, MsgBus: 15609 2024-11-21T08:57:57.482445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654206020059273:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:57.482586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ae/r3tmp/tmpp9XRU7/pdisk_1.dat 2024-11-21T08:57:57.524019Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32716, node 1 2024-11-21T08:57:57.535933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:57.535946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:57.535947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:57.535977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15609 TClient is connected to server localhost:15609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:57:57.575331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:57.583464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:57.583489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:57.584622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:57.755781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654206020059885:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:57.755829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:57.759239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:57.787620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.787657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.787680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.787696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.787710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.787727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.787743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.787760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.787760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:57.787775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.787789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.787793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:57.787804Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.787819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.787834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:57.787862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:57.787885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:57.787902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:57.787919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:57.787938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:57.787954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:57.787964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:57.787975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:57.787989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:57.788162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:57.788176Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:57.788192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:57.788201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:57.788238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:57.788253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:57.788267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:57.788274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:57.788288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720 ... SHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654206020060401:2316];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[1:7439654206020060446:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654206020060461:2334];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439654206020060366:2306];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[1:7439654206020060358:2303];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439654206020060359:2304];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.732951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7439654206020060479:2352];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.733003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7439654206020060473:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.733015Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654206020060397:2312];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.733981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[1:7439654206020060400:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.734430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;task_id=d312be04-a7e611ef-bcabca2a-a994efbd;fline=with_appended.cpp:80;portions=9,;task_id=d312be04-a7e611ef-bcabca2a-a994efbd; 2024-11-21T08:58:49.735217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;task_id=d312cf02-a7e611ef-8e2a22a8-679d8dad;fline=with_appended.cpp:80;portions=9,;task_id=d312cf02-a7e611ef-8e2a22a8-679d8dad; 2024-11-21T08:58:49.735656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;task_id=d312d02e-a7e611ef-a8592abf-914586b1;fline=with_appended.cpp:80;portions=9,;task_id=d312d02e-a7e611ef-a8592abf-914586b1; 2024-11-21T08:58:49.735851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;task_id=d312ff68-a7e611ef-b74fccc8-ed7497f1;fline=with_appended.cpp:80;portions=9,;task_id=d312ff68-a7e611ef-b74fccc8-ed7497f1; 2024-11-21T08:58:49.735943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;task_id=d313003a-a7e611ef-88f7f4e9-67d5f138;fline=with_appended.cpp:80;portions=9,;task_id=d313003a-a7e611ef-88f7f4e9-67d5f138; 2024-11-21T08:58:49.736711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654206020060448:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.737309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=d312fc70-a7e611ef-8df4df3d-79b18e8b;fline=with_appended.cpp:80;portions=9,;task_id=d312fc70-a7e611ef-8df4df3d-79b18e8b; 2024-11-21T08:58:49.737414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;task_id=d312fb1c-a7e611ef-95dcd781-87d17f85;fline=with_appended.cpp:80;portions=9,;task_id=d312fb1c-a7e611ef-95dcd781-87d17f85; 2024-11-21T08:58:49.737775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;task_id=d312fdf6-a7e611ef-881aadba-d6e8f7d;fline=with_appended.cpp:80;portions=9,;task_id=d312fdf6-a7e611ef-881aadba-d6e8f7d; 2024-11-21T08:58:49.737786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;task_id=d312fd38-a7e611ef-b3d3d983-35be9601;fline=with_appended.cpp:80;portions=9,;task_id=d312fd38-a7e611ef-b3d3d983-35be9601; 2024-11-21T08:58:49.737863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=d313036e-a7e611ef-977d5fe7-adbfe98a;fline=with_appended.cpp:80;portions=9,;task_id=d313036e-a7e611ef-977d5fe7-adbfe98a; 2024-11-21T08:58:49.737898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;task_id=d3130300-a7e611ef-a03149c1-93da6229;fline=with_appended.cpp:80;portions=9,;task_id=d3130300-a7e611ef-a03149c1-93da6229; 2024-11-21T08:58:49.738293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=d313937e-a7e611ef-b542d9c9-3d4f664a;fline=with_appended.cpp:80;portions=9,;task_id=d313937e-a7e611ef-b542d9c9-3d4f664a; 2024-11-21T08:58:49.738307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;task_id=d31328f8-a7e611ef-bf8feffe-f1b3233b;fline=with_appended.cpp:80;portions=9,;task_id=d31328f8-a7e611ef-bf8feffe-f1b3233b; 2024-11-21T08:58:49.746080Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037931;self_id=[1:7439654206020060447:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.748037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;task_id=d3150042-a7e611ef-b653e052-1be40e9c;fline=with_appended.cpp:80;portions=9,;task_id=d3150042-a7e611ef-b653e052-1be40e9c; 2024-11-21T08:58:49.767607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[1:7439654206020060427:2321];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.768016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[1:7439654206020060386:2308];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.769074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439654206020060462:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.769098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7439654206020060403:2317];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.770020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;task_id=d31848ba-a7e611ef-92f1ce1e-91ed3a8;fline=with_appended.cpp:80;portions=9,;task_id=d31848ba-a7e611ef-92f1ce1e-91ed3a8; 2024-11-21T08:58:49.770097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;task_id=d3185a44-a7e611ef-8c2f5745-f7b22d1f;fline=with_appended.cpp:80;portions=9,;task_id=d3185a44-a7e611ef-8c2f5745-f7b22d1f; 2024-11-21T08:58:49.771124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[1:7439654206020060404:2318];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.771126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439654206020060531:2360];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.771135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;task_id=d31884f6-a7e611ef-9a8288ed-cb393da3;fline=with_appended.cpp:80;portions=9,;task_id=d31884f6-a7e611ef-9a8288ed-cb393da3; 2024-11-21T08:58:49.771141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=d318837a-a7e611ef-b5ea4b0c-b7f959d9;fline=with_appended.cpp:80;portions=9,;task_id=d318837a-a7e611ef-b5ea4b0c-b7f959d9; 2024-11-21T08:58:49.771980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[1:7439654206020060426:2320];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.772182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439654206020060383:2307];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.772312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[1:7439654206020060476:2349];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.772335Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7439654206020060459:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.772386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439654206020060472:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.773687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;task_id=d318d3c0-a7e611ef-b489a385-f5a6b973;fline=with_appended.cpp:80;portions=9,;task_id=d318d3c0-a7e611ef-b489a385-f5a6b973; 2024-11-21T08:58:49.773796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=d318d3d4-a7e611ef-9d97dee8-43a3e2e5;fline=with_appended.cpp:80;portions=9,;task_id=d318d3d4-a7e611ef-9d97dee8-43a3e2e5; 2024-11-21T08:58:49.774668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;task_id=d318f508-a7e611ef-b5ec58c0-b1cb57ac;fline=with_appended.cpp:80;portions=9,;task_id=d318f508-a7e611ef-b5ec58c0-b1cb57ac; 2024-11-21T08:58:49.775225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;task_id=d318fd28-a7e611ef-bfb6e7f4-7d20b741;fline=with_appended.cpp:80;portions=9,;task_id=d318fd28-a7e611ef-bfb6e7f4-7d20b741; 2024-11-21T08:58:49.775231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;task_id=d3190278-a7e611ef-a0a37071-5b657ef2;fline=with_appended.cpp:80;portions=9,;task_id=d3190278-a7e611ef-a0a37071-5b657ef2; 2024-11-21T08:58:49.775343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;task_id=d3190372-a7e611ef-8d27fb81-b7f98ada;fline=with_appended.cpp:80;portions=9,;task_id=d3190372-a7e611ef-8d27fb81-b7f98ada; 2024-11-21T08:58:49.775376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;task_id=d319028c-a7e611ef-ba314e80-321151a;fline=with_appended.cpp:80;portions=9,;task_id=d319028c-a7e611ef-ba314e80-321151a; 2024-11-21T08:58:49.785026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439654206020060486:2359];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T08:58:49.786940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;task_id=d31af1a0-a7e611ef-b206ac50-df2ef39a;fline=with_appended.cpp:80;portions=9,;task_id=d31af1a0-a7e611ef-b206ac50-df2ef39a; WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` WHERE field == 'abcde' RESULT: 2024-11-21T08:58:55.787611Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179535000, txId: 18446744073709551615] shutting down count: 12748 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` RESULT: 2024-11-21T08:58:55.887737Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179535765, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 0 seconds Timing: wait took 0 seconds |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots >> TTablesWithReboots::DropTableWithReboots >> 
TTablesWithReboots::TwiceRmDirWithReboots [GOOD] >> TTablesWithReboots::AlterTableFollowersWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::TwiceRmDirWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.476256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.476273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.476277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.476281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.476295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:49.476297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.476304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.476376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.484473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.484491Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.486307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.486385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.486410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.488463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.488526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.488603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.488784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.489533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.489763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.489771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.489780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.489784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.489789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.489816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.490612Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.501856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.501927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.501981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.502023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.502028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.502571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.502590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.502619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.502628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.502631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.502634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.502866Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.502882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.502884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.503113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.503119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.503123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.503128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.503516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.503877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.503937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.504103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.504125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.504130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.504180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.504185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.504223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.504235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.504572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.504580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.504610Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.504616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.504673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.504677Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.504686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.504689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.504692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.504695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.504699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.504701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.504709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.504713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.504715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T08:58:57.734198Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734209Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 150323857504 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734213Z node 35 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1003:0, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734225Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734229Z node 35 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:58:57.734232Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:58:57.734237Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:57.734243Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:57.734246Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T08:58:57.734250Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 
2024-11-21T08:58:57.734252Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:58:57.734257Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:58:57.734264Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:58:57.734268Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T08:58:57.734271Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T08:58:57.734275Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:58:57.734504Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734540Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734718Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734723Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:57.734738Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:58:57.734753Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734755Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:58:57.734758Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T08:58:57.734823Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734829Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734832Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:57.734834Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:58:57.734837Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:58:57.734870Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734875Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.734877Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:58:57.734879Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:58:57.734881Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:58:57.734890Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T08:58:57.734905Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:58:57.734908Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:58:57.734913Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:57.735277Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.735645Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:58:57.735658Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T08:58:57.735686Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:58:57.735689Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T08:58:57.735697Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:58:57.735699Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:58:57.736006Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "Victim" } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:57.736020Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/Victim, pathId: 0, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736031Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1004:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736121Z node 35 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736171Z node 35 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736403Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1004, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736419Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1004, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/Victim 2024-11-21T08:58:57.736432Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:58:57.736438Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:348:2340] 2024-11-21T08:58:57.736452Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:58:57.736454Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [35:348:2340] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2024-11-21T08:58:57.736501Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Victim" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:58:57.736515Z node 35 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Victim" took 16us result status StatusPathDoesNotExist 2024-11-21T08:58:57.736533Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Victim" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_5 [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> TxUsage::WriteToTopic_Demo_6 >> BasicUsage::WriteRead [GOOD] >> Describe::Basic |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] 
recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:32.815350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.815368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.815371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.815374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.815379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.815381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.815388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.815455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.822615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:32.822636Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.824325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.824393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.824422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:32.826192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.826263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.826331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.826481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.826969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.827164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.827170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.827179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.827183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.827188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.827218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:32.828174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.838172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.838220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.838264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.838296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.838301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.838782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.838800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:32.838834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.838841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.838844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.838847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.839096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.839102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.839105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.839304Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.839309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.839312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.839317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.839707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.840013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.840049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.840188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.840221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.840228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.840263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.840267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.840286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.840294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.840567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.840573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.840607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.840612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.840678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.840683Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.840690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.840692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.840696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.840699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.840702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.840704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.840710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.840714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.840716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... cent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:58.507837Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:58:58.507868Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 38us result status StatusSuccess 2024-11-21T08:58:58.507953Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:58.518185Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T08:58:58.518211Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:760:2657] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T08:58:58.518237Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179538506003 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732179538506003 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179538506003 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T08:58:58.519003Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T08:58:58.519027Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:760:2657] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit [GOOD] >> TTablesWithReboots::SimpleDropTableWithReboots2 >> TxUsage::WriteToTopic_Demo_22_RestartNo >> TTablesWithReboots::CopyTableAndDropWithReboots2 >> TxUsage::TwoSessionOneConsumer [GOOD] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Analyze] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Debug] >> TxUsage::WriteToTopic_Demo_34 [GOOD] >> TxUsage::WriteToTopic_Demo_35 >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-ForceBlocks] >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] >> 
test.py::test[pg_catalog-pg_stat_activity-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Results] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Analyze] >> TxUsage::WriteToTopic_Demo_26 [GOOD] >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_11 >> TxUsage::WriteToTopic_Demo_27 >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit >> Describe::Basic [GOOD] >> Describe::Statistics >> test.py::test[pg_catalog-pg_stat_database-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Debug] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction [GOOD] >> LocalPartition::Restarts [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> TxUsage::ReadRuleGeneration >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 10846, MsgBus: 22479 2024-11-21T08:57:28.107057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654081000139460:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:28.107078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487d/r3tmp/tmprzOORX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10846, node 1 2024-11-21T08:57:28.161829Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:28.167186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:28.167202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:28.167205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:28.167246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22479 TClient is connected to server localhost:22479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:57:28.208066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:28.208096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:28.209181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:28.220863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T08:57:28.375806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654081000140062:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.375833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:28.395811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:28.403326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:28.403363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:28.403400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:28.403417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:28.403428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:28.403442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:28.403456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:28.403476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:28.403492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:28.403507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:28.403523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:28.403539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654081000140130:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:28.403881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:28.403895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:28.403907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:28.403912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:28.403926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:28.403931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:28.403941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:28.403953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:28.403963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:28.403972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:28.403978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:28.403988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:28.404041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:28.404052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:28.404068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:28.404077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:28.404088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:28.404097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:28.404111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:28.404120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:28.404131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:28.404139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=55496;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=55496;columns=3; >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots >> TxUsage::WriteToTopic_Demo_22_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_35 [GOOD] >> TxUsage::WriteToTopic_Demo_36 >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit |91.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] [GOOD] >> TxUsage::WriteToTopic_Demo_6 [GOOD] >> TxUsage::WriteToTopic_Demo_7 >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |91.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |91.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] Test command err: Trying to start YDB, gRPC: 8704, MsgBus: 9625 2024-11-21T08:58:05.253269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654238998578539:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:05.254036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004789/r3tmp/tmpoiDKrz/pdisk_1.dat 2024-11-21T08:58:05.308144Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8704, node 1 2024-11-21T08:58:05.320764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:05.320777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:05.320778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:05.320811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9625 2024-11-21T08:58:05.355142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:05.355201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:05.356242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:05.386690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:05.497787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:06.116833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.116881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.116902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.116916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.116932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.116942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.116959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.116977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.116993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.117006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.117029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.117049Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038902;self_id=[1:7439654243293549459:2296];tablet_id=72075186224038902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:06.117204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:06.117231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:06.117265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:06.117289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:06.117312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:06.117326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:06.117346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:06.117348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:06.117353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:06.117361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:06.117364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:06.117365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:06.117378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:06.117381Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:06.117387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:06.117393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:06.117396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:06.117399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:06.117404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:06.117406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:06.117411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654243293549474:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:06.117411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:06.117414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038902;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42; ... 
rmalization_start;last_saved_id=16; 2024-11-21T08:59:06.516810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038406;self_id=[1:7439654500992362823:12000];tablet_id=72075186224038406;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.520282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038106;self_id=[1:7439654500992362918:12019];tablet_id=72075186224038106;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.521971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7439654500992362880:12010];tablet_id=72075186224038056;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.523670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7439654500992362828:12001];tablet_id=72075186224038013;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.525325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7439654500992363152:12070];tablet_id=72075186224038001;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.526780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038425;self_id=[1:7439654500992362984:12031];tablet_id=72075186224038425;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.530091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038165;self_id=[1:7439654500992363014:12040];tablet_id=72075186224038165;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.534381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038238;self_id=[1:7439654500992362967:12030];tablet_id=72075186224038238;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.536285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038861;self_id=[1:7439654500992363063:12050];tablet_id=72075186224038861;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.538180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038302;self_id=[1:7439654500992363024:12041];tablet_id=72075186224038302;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.541240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038314;self_id=[1:7439654500992363155:12071];tablet_id=72075186224038314;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.541806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038123;self_id=[1:7439654500992363119:12061];tablet_id=72075186224038123;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.545736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038830;self_id=[1:7439654500992363238:12090];tablet_id=72075186224038830;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.547180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038409;self_id=[1:7439654500992363274:12099];tablet_id=72075186224038409;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.549362Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038476;self_id=[1:7439654500992362933:12021];tablet_id=72075186224038476;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.552835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038777;self_id=[1:7439654500992363111:12060];tablet_id=72075186224038777;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.553249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038671;self_id=[1:7439654500992363196:12081];tablet_id=72075186224038671;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.557912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038716;self_id=[1:7439654500992363379:12125];tablet_id=72075186224038716;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.558933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038839;self_id=[1:7439654500992363239:12091];tablet_id=72075186224038839;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.562025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038265;self_id=[1:7439654500992363346:12116];tablet_id=72075186224038265;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.566051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038792;self_id=[1:7439654500992363079:12051];tablet_id=72075186224038792;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.569823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[1:7439654500992363389:12126];tablet_id=72075186224037988;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.573773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038438;self_id=[1:7439654500992363340:12115];tablet_id=72075186224038438;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.577725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;self_id=[1:7439654500992363312:12106];tablet_id=72075186224038726;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.581477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038743;self_id=[1:7439654500992363498:12151];tablet_id=72075186224038743;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.585172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038871;self_id=[1:7439654500992363447:12140];tablet_id=72075186224038871;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.589350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038179;self_id=[1:7439654500992363533:12161];tablet_id=72075186224038179;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.593066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038704;self_id=[1:7439654500992363492:12150];tablet_id=72075186224038704;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.597577Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038626;self_id=[1:7439654500992363195:12080];tablet_id=72075186224038626;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.601626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038382;self_id=[1:7439654500992363294:12104];tablet_id=72075186224038382;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.603968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[1:7439654500992363462:12141];tablet_id=72075186224037994;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.605984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7439654500992363571:12171];tablet_id=72075186224038079;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.609870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038769;self_id=[1:7439654500992363539:12162];tablet_id=72075186224038769;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.610739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038398;self_id=[1:7439654500992363425:12134];tablet_id=72075186224038398;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T08:59:06.717066Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179546000, txId: 18446744073709551615] shutting down [[53u]] 2024-11-21T08:59:07.675413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[1:7439654488107439761:9402];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:07.678826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[1:7439654488107439738:9400];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:07.680408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;task_id=ddc4c888-a7e611ef-bee01324-4712798f;fline=with_appended.cpp:80;portions=5,;task_id=ddc4c888-a7e611ef-bee01324-4712798f; 2024-11-21T08:59:07.683385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;task_id=ddc5506e-a7e611ef-9c4c123e-f16aa746;fline=with_appended.cpp:80;portions=6,;task_id=ddc5506e-a7e611ef-9c4c123e-f16aa746; 2024-11-21T08:59:07.691119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7439654488107439788:9404];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:07.694223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439654488107439419:9377];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:07.694644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;task_id=ddc73028-a7e611ef-b84f31be-610af4d0;fline=with_appended.cpp:80;portions=4,;task_id=ddc73028-a7e611ef-b84f31be-610af4d0; 2024-11-21T08:59:07.697728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;task_id=ddc7a97c-a7e611ef-a47b6f01-53a9a98;fline=with_appended.cpp:80;portions=3,;task_id=ddc7a97c-a7e611ef-a47b6f01-53a9a98; 2024-11-21T08:59:07.700591Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7439654488107439557:9387];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:07.704359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=ddc8a14c-a7e611ef-9c29c808-8785d551;fline=with_appended.cpp:80;portions=3,;task_id=ddc8a14c-a7e611ef-9c29c808-8785d551; strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[53u]]|[[100u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x12839BF8 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x241C11B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x124E6444 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124DBDC7 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:564: Execute_ @ 0x124DBDC7 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FBFC2F91D8F 11. ??:0: ?? @ 0x7FBFC2F91E3F 12. ??:0: ?? @ 0x11815028 >> Describe::Statistics [GOOD] >> Describe::Location ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.514319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.514341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.514346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.514351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.514366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:49.514370Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.514380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.514461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.521670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.521685Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.523210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.523309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.523333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.525602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.525651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.525724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.525856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.526326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.526531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.526538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.526546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.526551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.526555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.526582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.527493Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.541365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.541428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.541476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.541518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.541540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.542128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.542137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.542143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.542385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.542624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.542635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.542640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.543064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.543344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.543377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.543515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.543531Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.543535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.543587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.543592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.543610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.543620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.543907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.543913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.543939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.543942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.544008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.544012Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.544019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.544022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.544025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.544028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.544031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.544033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.544040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.544043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.544046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
tionId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 79000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 25 } } 2024-11-21T08:59:10.527492Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#1004:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 79000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 25 } } 2024-11-21T08:59:10.527495Z node 86 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:59:10.527503Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 1004:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.527506Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 3 -> 128 2024-11-21T08:59:10.527716Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.527732Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.527735Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:10.527741Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1004 ready parts: 1/1 2024-11-21T08:59:10.527759Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:10.527940Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T08:59:10.527955Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000005 2024-11-21T08:59:10.528001Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528012Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 369367189608 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528016Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 HandleReply 
TEvOperationPlan, operationId: 1004:0, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528059Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 129 2024-11-21T08:59:10.528070Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T08:59:10.528857Z node 86 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528863Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:10.528896Z node 86 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528900Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [86:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T08:59:10.528945Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.528950Z node 86 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:59:10.529172Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:10.529182Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:10.529184Z node 86 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:10.529188Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T08:59:10.529191Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:10.529200Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:59:10.529322Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 2024-11-21T08:59:10.529329Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:10.529339Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 2024-11-21T08:59:10.529346Z node 86 :FLAT_TX_SCHEMESHARD INFO: 
Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 2024-11-21T08:59:10.529390Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:10.529393Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:10.529400Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:10.529403Z node 86 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:10.529408Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:10.529414Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:10.529416Z node 86 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.529418Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:10.529421Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:59:10.529915Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:10.529967Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.529980Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.530032Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:10.530038Z node 86 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:59:10.530052Z node 86 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:10.530056Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:10.530061Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:10.530065Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:10.530069Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 
2024-11-21T08:59:10.530072Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:10.530090Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:10.530475Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:10.530480Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:10.530518Z node 86 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:10.530528Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:10.530531Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [86:494:2469] TestWaitNotification: OK eventTxId 1004 >> TAsyncIndexTests::CreateTable |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TAsyncIndexTests::CreateTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T08:59:11.718081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:11.718100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:11.718103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:11.718106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:11.718110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:11.718113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:11.718119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:11.718182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:11.725557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:11.725574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:11.727612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:11.728114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:11.728146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:11.729503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T08:59:11.729680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:11.729747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:11.729811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:11.730691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:11.730895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:11.730902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:11.730932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:11.730937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:11.730942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:11.730951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.731922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T08:59:11.742621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:11.742693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.742744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:11.742778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:11.742784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.743418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:11.743436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:11.743479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.743486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:11.743489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:11.743492Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:11.743778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.743784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:11.743787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:11.744028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.744033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.744037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:11.744043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:11.744507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:11.744874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:11.744915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:11.745053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:11.745069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:11.745073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:11.745111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:11.745115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:11.745159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:11.745169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:11.745491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:11.745497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T08:59:11.745548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:11.745552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:59:11.745619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.745624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:11.745633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:11.745636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:11.745640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:11.745643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:11.745646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:11.745649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:11.745656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:11.745660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:11.745663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T08:59:11.745858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:59:11.745867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T08:59:11.745869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T08:59:11.745873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T08:59:11.745876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:11.745883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
.813348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:59:11.813354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813357Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T08:59:11.813426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:59:11.813430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:11.813435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:59:11.813438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:11.813442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T08:59:11.813445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813447Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:11.813451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T08:59:11.814209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:59:11.814228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:59:11.814234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:59:11.814801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T08:59:11.814832Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:59:11.814868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.814897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:59:11.814939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.814976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T08:59:11.814981Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T08:59:11.814988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T08:59:11.814990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T08:59:11.814993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T08:59:11.815030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T08:59:11.815035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T08:59:11.815038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T08:59:11.815040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:59:11.815043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T08:59:11.815053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T08:59:11.815056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T08:59:11.815059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T08:59:11.815062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T08:59:11.815107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T08:59:11.815110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T08:59:11.815112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T08:59:11.815115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:11.815117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T08:59:11.815118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T08:59:11.815123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:11.815557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T08:59:11.815565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK 
eventTxId 101 2024-11-21T08:59:11.815638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:59:11.815673Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 43us result status StatusSuccess 2024-11-21T08:59:11.815817Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateTableWithReboots >> TTablesWithReboots::SimpleDropTableWithReboots |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_36 [GOOD] >> TxUsage::WriteToTopic_Demo_37 >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo >> TTablesWithReboots::AlterAndForceDrop [GOOD] >> TxUsage::ReadRuleGeneration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterAndForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:55.328396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:55.328411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:55.328414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:55.328417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T08:58:55.328426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:55.328428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:55.328433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:55.328480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:55.336122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:55.336134Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:55.337386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:55.337441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:55.337455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:55.338966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:55.339029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:55.339104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.339215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:55.339669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.339829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:55.339835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.339842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:55.339846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:55.339850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:55.339869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:55.340660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:55.350608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:55.350647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.350674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:55.350706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:55.350710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:55.351101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:55.351109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:55.351115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:55.351353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:55.351535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.351542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.351545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.351908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:55.352155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:55.352183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:55.352371Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.352385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:55.352390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.352426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:55.352436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.352450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:55.352457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:55.352720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:55.352726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:55.352743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.352746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:55.352785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.352788Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:55.352795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:55.352797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.352800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:55.352803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.352806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:55.352808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:55.352814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:55.352817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:55.352819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
X_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:59:15.537610Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:15.537613Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:59:15.537615Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T08:59:15.537617Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 2 2024-11-21T08:59:15.537669Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:15.537673Z node 83 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T08:59:15.537681Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:15.537683Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:15.537689Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T08:59:15.537692Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:15.537694Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:59:15.537697Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:15.537712Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T08:59:15.537715Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T08:59:15.537717Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T08:59:15.537720Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T08:59:15.537722Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T08:59:15.537823Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.537832Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.537836Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:15.537840Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T08:59:15.537844Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:15.537969Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.537980Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.537984Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:15.537989Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:15.537993Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:59:15.538268Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.538278Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.538280Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:15.538283Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T08:59:15.538288Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:59:15.538296Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T08:59:15.538500Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:15.538605Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.538625Z node 83 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:59:15.538680Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:15.538722Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:59:15.539253Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:15.539260Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 
72057594046678944 2024-11-21T08:59:15.539268Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:59:15.539272Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T08:59:15.539274Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:15.539578Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.539634Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:15.539870Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:15.539881Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:15.539908Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1003 2024-11-21T08:59:15.539943Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:59:15.539947Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T08:59:15.539957Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:15.539959Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:15.539998Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:59:15.540011Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:59:15.540015Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:477:2452] 2024-11-21T08:59:15.540023Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:15.540033Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:15.540035Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [83:477:2452] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted 2024-11-21T08:59:15.540069Z node 83 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2024-11-21T08:59:15.540108Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:15.540125Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 
2024-11-21T08:59:15.540181Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2024-11-21T08:58:39.370911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654387751748277:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.371478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.495119Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ebc/r3tmp/tmpF3vgbF/pdisk_1.dat 2024-11-21T08:58:39.745306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.745344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.756400Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.764688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4102, node 1 2024-11-21T08:58:39.824391Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.824411Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.228826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003ebc/r3tmp/yandex7ULzAt.tmp 2024-11-21T08:58:40.228841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003ebc/r3tmp/yandex7ULzAt.tmp 2024-11-21T08:58:40.228884Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003ebc/r3tmp/yandex7ULzAt.tmp 2024-11-21T08:58:40.228924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.393592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392046716169:2293], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.393616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.394034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392046716181:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.483520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.527610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654392046716183:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.576628Z INFO: TTestServer started on Port 65218 GrpcPort 4102 TClient is connected to server localhost:65218 PQClient connected to localhost:4102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:58:40.836568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:58:40.840904Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.867562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:41.047745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:41.052059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2024-11-21T08:58:41.390304Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654392046716258:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.390923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWU2MzNhYmEtODc0NmU3NzEtZGZjZWUwYjMtYTUzODUzMzE=, ActorId: [1:7439654392046716158:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw91m6ace3ffy17qdzj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400598Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.571477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.631571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654400636651321:2626] 2024-11-21T08:58:44.368266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654387751748277:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.368304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.879642Z :ValidateSettingsFailOnStart INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900247Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960543Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960635Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978662Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654422111488174:2826] connected; active server actors: 1 2024-11-21T08:58:47.978806Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987582Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987675Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988090Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988142Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988151Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988155Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996561Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996611Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996855Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005299Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005319Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005368Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005377Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014048Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422111488187:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014069Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014075Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422111488173:2825], now have 1 active actors on pipe 2024-11-21T08:58:48.014080Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:58:48.015841Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439654387751748650:2169] txId 281474976710674 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChan ... 
cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T08:59:14.088443Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 1a77240a-6891e7-cac613c1-3955da5f has messages 1 2024-11-21T08:59:14.088463Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 read done: guid# 1a77240a-6891e7-cac613c1-3955da5f, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 199 2024-11-21T08:59:14.088474Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 response to read: guid# 1a77240a-6891e7-cac613c1-3955da5f 2024-11-21T08:59:14.088538Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 Process answer. Aval parts: 0 2024-11-21T08:59:14.088604Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Got ReadResponse, serverBytesSize = 199, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428601 2024-11-21T08:59:14.088624Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428601 2024-11-21T08:59:14.088683Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2024-11-21T08:59:14.088694Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Returning serverBytesSize = 199 to budget 2024-11-21T08:59:14.088698Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] In ContinueReadingDataImpl, ReadSizeBudget = 199, ReadSizeServerDelta = 52428601 2024-11-21T08:59:14.088807Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T08:59:14.088845Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (3-3) 2024-11-21T08:59:14.088858Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] The application data is transferred to the client. Number of messages 1, size 9 bytes 2024-11-21T08:59:14.088852Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 grpc read done: success# 1, data# { read_request { bytes_size: 199 } } 2024-11-21T08:59:14.088866Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Returning serverBytesSize = 0 to budget 0 1 2024-11-21T08:59:14.088881Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Commit offsets [3, 4). 
Partition stream id: 1 2024-11-21T08:59:14.088886Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 got read request: guid# db52f44-b032488c-3c6f4a67-dc79e70d 2024-11-21T08:59:14.088975Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2024-11-21T08:59:14.089013Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) committing to position 4 prev 3 end 4 by cookie 2 2024-11-21T08:59:14.089058Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T08:59:14.089066Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T08:59:14.089086Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 offset is set to 4 (startOffset 0) session consumer-1_5_2_5035203630251109458_v1 2024-11-21T08:59:14.089098Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:59:14.089337Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 readTimeStamp for offset 4 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2024-11-21T08:59:14.089353Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:59:14.089358Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T08:59:14.089376Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T08:59:14.089384Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) commit done to position 4 endOffset 4 with cookie 2 2024-11-21T08:59:14.089388Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 4 2024-11-21T08:59:14.089489Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 4 } } 2024-11-21T08:59:15.068347Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T08:59:15.068756Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0 describe result for acl check 2024-11-21T08:59:15.087245Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 checking auth because of timeout 2024-11-21T08:59:15.087274Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 auth for : consumer-1 2024-11-21T08:59:15.087365Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2024-11-21T08:59:15.087379Z 
:INFO: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1001 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:59:15.087375Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 Handle describe topics response 2024-11-21T08:59:15.087396Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 auth is DEAD 2024-11-21T08:59:15.087414Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 auth ok: topics# 1, initDone# 1 2024-11-21T08:59:16.086430Z :INFO: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] Closing read session. Close timeout: 0.000000s 2024-11-21T08:59:16.086451Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2024-11-21T08:59:16.086459Z :INFO: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T08:59:16.086484Z :NOTICE: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T08:59:16.086492Z :DEBUG: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] [] Abort session to cluster 2024-11-21T08:59:16.086712Z :NOTICE: [/Root] [/Root] [8a4cdd8d-1caef297-2b30c3a0-864defe5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T08:59:16.086854Z :INFO: [/Root] SessionId [test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T08:59:16.086857Z :INFO: [/Root] SessionId [test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T08:59:16.086861Z :DEBUG: [/Root] SessionId [test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T08:59:16.086925Z :INFO: [/Root] SessionId [test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T08:59:16.086929Z :DEBUG: [/Root] SessionId [test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T08:59:16.086939Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 grpc read done: success# 0, data# { } 2024-11-21T08:59:16.086961Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 grpc read failed 2024-11-21T08:59:16.086969Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 grpc closed 2024-11-21T08:59:16.086991Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_5035203630251109458_v1 is DEAD 2024-11-21T08:59:16.087098Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:16.087118Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session consumer-1_5_2_5035203630251109458_v1 2024-11-21T08:59:16.087125Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654535845852658:2513] destroyed 2024-11-21T08:59:16.087159Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer-1_5_2_5035203630251109458_v1 2024-11-21T08:59:16.087176Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0 grpc read done: success: 0 data: 2024-11-21T08:59:16.087184Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0 grpc read failed 2024-11-21T08:59:16.087190Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0 grpc closed 2024-11-21T08:59:16.087194Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|b153025d-2ce5eaec-9a8a49f4-32697fd9_0 is DEAD 2024-11-21T08:59:16.087376Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439654535845852655:2510] disconnected; active server actors: 1 2024-11-21T08:59:16.087390Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439654535845852655:2510] client consumer-1 disconnected session consumer-1_5_2_5035203630251109458_v1 2024-11-21T08:59:16.087514Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:16.087586Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:16.087612Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe 
[5:7439654527255917846:2460] destroyed 2024-11-21T08:59:16.087634Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit >> KqpOlap::CheckEarlyFilterOnEmptySelect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CheckEarlyFilterOnEmptySelect [GOOD] Test command err: Trying to start YDB, gRPC: 32304, MsgBus: 13245 2024-11-21T08:57:25.286623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654069467905482:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:25.286687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004890/r3tmp/tmpVtWgUF/pdisk_1.dat 2024-11-21T08:57:25.335469Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32304, node 1 2024-11-21T08:57:25.353451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:25.353464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:25.353467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:25.353527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13245 2024-11-21T08:57:25.384833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:25.384860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:25.388330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:25.420887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:25.435026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:25.445869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.445927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.445976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.446006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.446025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.446048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.446070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.446095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.446123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.446151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.446173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.446195Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654069467905981:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.449750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:25.449778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:25.449821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:25.449839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:25.449862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:25.449885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:25.449906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:25.449931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:25.449949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:25.449972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:25.450000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:25.450022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654069467905982:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:25.450464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:25.450481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:25.450493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:25.450497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:25.450518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:25.450525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:25.450530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:25.450534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:25.450544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:25.450546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:25.450556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=22056;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=22056;columns=5; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable` WHERE uid='dsfdfsd' LIMIT 10; RESULT: 2024-11-21T08:59:15.508419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654541914492231:8281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:15.508438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654541914492236:6145], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:15.508443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:15.509125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:59:15.510815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654541914492245:2925], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:59:17.307537Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179555561, txId: 18446744073709551615] shutting down 0 >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> TxUsage::WriteToTopic_Demo_7 [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> TxUsage::WriteToTopic_Demo_8 >> TxUsage::WriteToTopic_Demo_37 [GOOD] >> TxUsage::WriteToTopic_Demo_38 >> Describe::Location [GOOD] >> Describe::DescribePartitionPermissions >> TTablesWithReboots::AlterTableSchemaWithReboots [GOOD] >> THiveTest::TestCreate100Tablets >> THiveTest::TestDrain >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableSchemaWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.294385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.294435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.294448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.294459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:49.294462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.294468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.336621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.336644Z node 1 :IMPORT WARN: Table profiles were not 
loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.338699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.338770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.338792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.351180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.351269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.365050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.366562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.369754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.371889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.371895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.371923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.377260Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.404676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.413240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.413289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.414129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.414141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.414146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.414535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.414878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.414900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.415588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.416008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.425107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.425412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.425451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.425460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.425594Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.425630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.426418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.426459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.426524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.426541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.426545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.426555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.426564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.426575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.426580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.426584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
tatus: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 168 } } 2024-11-21T08:59:19.685329Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:19.685340Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 168 } } 2024-11-21T08:59:19.685348Z node 124 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 168 } } FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:59:19.685404Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 91 } } 2024-11-21T08:59:19.685407Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:19.685414Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 91 } } 2024-11-21T08:59:19.685420Z node 124 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 91 } } 2024-11-21T08:59:19.685468Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 334 RawX2: 532575947023 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685471Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:19.685481Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 334 RawX2: 532575947023 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685486Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:19.685492Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 334 RawX2: 
532575947023 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685500Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:19.685504Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T08:59:19.685566Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 532575947027 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685570Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:19.685577Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 532575947027 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685579Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:19.685592Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 340 RawX2: 532575947027 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:19.685597Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:19.685599Z node 124 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:19.685602Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:19.685604Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:19.685607Z node 124 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:59:19.685941Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:19.686300Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:19.686321Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:19.686334Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:19.686347Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:19.686417Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at 
schemeshard: 72057594046678944 2024-11-21T08:59:19.686424Z node 124 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:59:19.686435Z node 124 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:19.686439Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:19.686444Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:19.686449Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:19.686453Z node 124 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:59:19.686457Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:19.686479Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:19.687016Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:19.687022Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:19.687059Z node 124 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:19.687069Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:19.687072Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [124:558:2523] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:59:19.687117Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:19.687140Z node 124 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 31us result status StatusSuccess 2024-11-21T08:59:19.687218Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key1" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "add_2" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 2 KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |91.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication >> THiveTest::TestUpdateChannelValues >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TxUsage::WriteToTopic_Demo_27 [GOOD] >> TxUsage::WriteToTopic_Demo_28 >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> TTablesWithReboots::AlterCopyWithReboots [GOOD] >> THiveTest::TestLocalReplacement >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterCopyWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:50.987196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:50.987214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:50.987218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:50.987222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:50.987236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:50.987240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:50.987247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:50.987310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:50.997516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:50.997545Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:50.999390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:50.999454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:50.999470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:51.000991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:51.001035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:51.001093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:51.001215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:51.001646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:51.001795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:51.001800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:51.001806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:51.001810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:51.001813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:51.001836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T08:58:51.002656Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:51.013559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:51.013607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.013641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:51.013682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:51.013686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:51.014168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:51.014176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:51.014178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:51.014413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:51.014638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.014645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:51.014649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:51.015010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:51.015250Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:51.015279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:51.015383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:51.015398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:51.015402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:51.015432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:51.015437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:51.015451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:51.015459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:51.015744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:51.015749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:51.015767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:51.015770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:51.015809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:51.015813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:51.015819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:51.015822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:51.015825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:51.015828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:51.015831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:51.015833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:51.015839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:51.015843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:51.015845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1006 Step: 5000007 OrderId: 1006 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 111 } } 2024-11-21T08:59:22.878347Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } Origin: 72075186233409547 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T08:59:22.878350Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1006, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:22.878356Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1006:0, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } Origin: 72075186233409547 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T08:59:22.878359Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:59:22.878737Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.878748Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:59:22.878754Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1006:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:59:22.878758Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1006, done: 0, blocked: 1 2024-11-21T08:59:22.878768Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1006 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:59:22.878782Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 137 -> 129 2024-11-21T08:59:22.878792Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:22.878797Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:22.878840Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.878877Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879087Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879092Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:22.879109Z node 132 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:59:22.879123Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879126Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T08:59:22.879128Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T08:59:22.879178Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879182Z node 132 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1006:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:22.879190Z node 132 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879193Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1006:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879196Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 129 -> 240 2024-11-21T08:59:22.879258Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.879264Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.879267Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:59:22.879269Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:22.879272Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:22.879377Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.879383Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.879386Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:59:22.879388Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:22.879390Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:22.879399Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is 
published: true 2024-11-21T08:59:22.879744Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879752Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1006:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:22.879800Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:22.879816Z node 132 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:59:22.879818Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:59:22.879821Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: true 2024-11-21T08:59:22.879824Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:59:22.879826Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:59:22.879828Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:59:22.879841Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:59:22.879946Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.880108Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:59:22.880705Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } TabletId: 72075186233409547 State: 4 2024-11-21T08:59:22.880717Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:22.880911Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:22.880959Z node 132 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:59:22.881006Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:59:22.881034Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:59:22.881286Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:22.881291Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:22.881298Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:22.881684Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:59:22.881693Z node 132 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:59:22.881733Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:59:22.881768Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:59:22.881771Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:59:22.881807Z node 132 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:59:22.881818Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:59:22.881821Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [132:709:2673] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:59:22.881857Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:22.881865Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.294384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.294422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.294434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.294446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T08:58:49.294449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.294459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.336640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.336659Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.338538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.338606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.338629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.351296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.351389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.365059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.366575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.369792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.371861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.371867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.371909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.377337Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.404794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.413233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.413285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.414129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.414139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.414143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.414534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.414929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.414950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.415566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.415852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.425100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.425409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T08:58:49.425451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.425459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.425570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.425613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.426246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.426310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.426401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426407Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.426418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.426421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.426428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.426435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.426444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.426449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.426452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
wnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:23.083110Z node 139 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:59:23.083112Z node 139 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:23.083115Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:23.083119Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:59:23.083379Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:23.083389Z node 139 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:23.083449Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:23.083468Z node 139 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:59:23.083470Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:23.083474Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:59:23.083476Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:23.083479Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:59:23.083481Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:59:23.083493Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:59:23.083883Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:23.083922Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:23.084816Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 597000456459 } TabletId: 72075186233409546 State: 4 2024-11-21T08:59:23.084830Z node 139 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:23.085110Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:23.085167Z node 139 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:59:23.085218Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:23.085255Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2024-11-21T08:59:23.085307Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:23.085310Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:59:23.085317Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409546 2024-11-21T08:59:23.086114Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:23.086124Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:23.086176Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:59:23.086213Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:59:23.086217Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T08:59:23.086225Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:23.086227Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:23.086776Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpForceDropUnsafe Drop { Id: 3 } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:23.086789Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: TDropForceUnsafe Propose, path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:23.086792Z node 139 :FLAT_TX_SCHEMESHARD WARN: UNSAFE DELETION IS CALLED. TDropForceUnsafe is UNSAFE operation. Usually it is called for deleting user's DB (tenant). But it could be triggered by administrator for special emergency cases. And there is that case. I hope you are aware of the problems with it. 1: Shared transactions among the tables could be broken if one of the tables is force dropped. Dependent transactions on other tables could be blocked forever. 2: Loans are going to be lost. Force dropped tablets are never return loans. Some tablets would be waiting for borrowed blocks forever. 
Details: path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:23.086800Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1004:1, propose status:StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, at schemeshard: 72057594046678944 2024-11-21T08:59:23.086922Z node 139 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:59:23.086991Z node 139 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:23.087185Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1004, response: Status: StatusNameConflict Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:23.087196Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1004, subject: , status: StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, operation: DROP PATH UNSAFE, path: 2024-11-21T08:59:23.087213Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:59:23.087217Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:475:2450] 2024-11-21T08:59:23.087229Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:23.087231Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [139:475:2450] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:59:23.087277Z node 139 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:23.087286Z node 139 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T08:59:23.087323Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:23.087341Z node 139 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 20us result status StatusPathDoesNotExist 2024-11-21T08:59:23.087364Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:59:23.087393Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:23.087404Z node 139 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2024-11-21T08:59:23.087446Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestLockTabletExecution >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> TTablesWithReboots::CopyAlterWithReboots [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> TxUsage::WriteToTopic_Demo_38 [GOOD] >> Describe::DescribePartitionPermissions [GOOD] >> LocalPartition::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyAlterWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE 
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.294377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.294411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.294419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.294432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:49.294434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.294441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.294525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.336686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.336704Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.338716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.338801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.338823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.351179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.351268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.365052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.366572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.370116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.371847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.371855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.371861Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.371903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.377261Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.404680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.413233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.413286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.413295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.414128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.414140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.414143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.414540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.414943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.414956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 
2024-11-21T08:58:49.414964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.415458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.415818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.425100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.425408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.425450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.425459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.425581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.425610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.425623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.426260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.426320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.426404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.426411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.426422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.426427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426432Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.426438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.426443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.426446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.426461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.426467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.426471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 7594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 566935685386 } Origin: 72075186233409546 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T08:59:24.423348Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1006, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:24.423355Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1006:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 566935685386 } Origin: 72075186233409546 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T08:59:24.423358Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:59:24.423625Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.423633Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:59:24.423637Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1006:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:59:24.423639Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1006, done: 0, blocked: 1 2024-11-21T08:59:24.423645Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1006 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:59:24.423657Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 137 -> 129 2024-11-21T08:59:24.423667Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:24.423672Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:24.423990Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424008Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424055Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424059Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T08:59:24.424074Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:24.424086Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424089Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T08:59:24.424092Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2024-11-21T08:59:24.424133Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424137Z node 132 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1006:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:24.424145Z node 132 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424147Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1006:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424150Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 129 -> 240 2024-11-21T08:59:24.424226Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.424233Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.424235Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:59:24.424238Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:24.424241Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:24.424323Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.424330Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.424332Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T08:59:24.424334Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:24.424336Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:24.424341Z node 132 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T08:59:24.424727Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424735Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1006:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:24.424774Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:24.424788Z node 132 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T08:59:24.424790Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:59:24.424794Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: true 2024-11-21T08:59:24.424796Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T08:59:24.424799Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T08:59:24.424801Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T08:59:24.424810Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:59:24.424999Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.425012Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T08:59:24.425638Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 566935685386 } TabletId: 72075186233409546 State: 4 2024-11-21T08:59:24.425649Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:24.425860Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:24.425908Z node 132 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:59:24.425952Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:24.425980Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2024-11-21T08:59:24.426215Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:24.426219Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:59:24.426226Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:24.426614Z node 132 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:24.426621Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:24.426685Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T08:59:24.426717Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T08:59:24.426723Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T08:59:24.426756Z node 132 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T08:59:24.426765Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T08:59:24.426768Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [132:713:2677] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T08:59:24.426806Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:24.426813Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:24.426819Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:24.426823Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 >> TxUsage::WriteToTopic_Demo_39 >> TTablesWithReboots::DropTableWithReboots [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestReCreateTablet >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:57.560900Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:57.560915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:57.560918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:57.560921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:57.560933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:57.560936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:57.560942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:57.560988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:57.568530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:57.568546Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:57.570020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:57.570083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:57.570099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:57.571762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:57.571819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:57.571885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.572026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:57.572552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.572729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:57.572735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.572743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:57.572747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:57.572751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:57.572773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is 
[1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:57.573621Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:57.585379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:57.585454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.585498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:57.585552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:57.585557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.585989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:57.586033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:57.586045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:57.586048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:57.586377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:57.586652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.586662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.586666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.587061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2024-11-21T08:58:57.587336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:57.587368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:57.587497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.587518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:57.587524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.587568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:57.587574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.587594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:57.587603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:57.587924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:57.587931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:57.587953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.587958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:57.588015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.588020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:57.588028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:57.588031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.588034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:57.588037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.588040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:57.588043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T08:58:57.588050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:57.588054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:57.588056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... Id: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:25.806059Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:25.806062Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:25.806064Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:25.806134Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:25.806140Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:25.806142Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:25.806144Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:25.806146Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T08:59:25.806151Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:59:25.806538Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:25.806546Z node 117 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:25.806600Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T08:59:25.806615Z node 117 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:25.806618Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:25.806621Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:25.806624Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:25.806626Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:59:25.806628Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:25.806644Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:25.806888Z node 117 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:25.806901Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:25.808077Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 596 RawX2: 502511176180 } TabletId: 72075186233409549 State: 4 2024-11-21T08:59:25.808090Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:25.808133Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 599 RawX2: 502511176181 } TabletId: 72075186233409550 State: 4 2024-11-21T08:59:25.808139Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:25.808188Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 592 RawX2: 502511176178 } TabletId: 72075186233409548 State: 4 2024-11-21T08:59:25.808193Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:25.808463Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:25.808638Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:59:25.808731Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:59:25.808776Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409549 2024-11-21T08:59:25.809101Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:25.809264Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T08:59:25.809297Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:59:25.809324Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409550 2024-11-21T08:59:25.809694Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:25.809717Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:59:25.809739Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:59:25.809762Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409548 2024-11-21T08:59:25.809999Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:25.810005Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:25.810012Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:25.810379Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:59:25.810386Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:59:25.810419Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:59:25.810422Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:59:25.810581Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:59:25.810585Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:59:25.810605Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:25.810644Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:25.810647Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:25.810682Z node 117 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:25.810692Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:25.810695Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [117:842:2772] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:59:25.810738Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:25.810756Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 25us result status StatusPathDoesNotExist 2024-11-21T08:59:25.810780Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" 
LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2024-11-21T08:59:25.810819Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:25.810826Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:25.810830Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:25.810835Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:59:25.810840Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:59:25.810845Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T08:59:25.810850Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2024-11-21T08:56:27.115589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:27.115617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.115622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:27.115627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:27.115641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:27.115645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:27.115655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.115725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:27.117764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:27.117786Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2024-11-21T08:56:27.119563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:27.119717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:27.119730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:27.121273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:27.121342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:27.121390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.121482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.121955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.122175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.122180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.122189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:27.122194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.122198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:27.122208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:27.156671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.156711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:27.156752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:27.156759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:27.157495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157503Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:27.157508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:27.157512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:27.157934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:27.157950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:27.158324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.158335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.158343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.158357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.158919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:27.159287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:27.159337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:27.159497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.159503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:27.159507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.323323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:27.323331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.323351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.323357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.323753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.323788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:27.323839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.323844Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:27.323863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:27.323866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.323870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:27.323873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.323875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:27.323877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:27.323886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:27.323889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:27.323891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:27.324122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.324131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.324134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:27.324136Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:27.324139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.324147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:27.324150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T0 ... 
update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:02.066562Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046578944 2024-11-21T08:59:02.066608Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046578944 2024-11-21T08:59:02.066625Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046578944 2024-11-21T08:59:02.551153Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T08:59:02.551206Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T08:59:02.551229Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T08:59:02.697201Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:02.697384Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:02.698812Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:02.698860Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:06.282517Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:06.282730Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:06.284462Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:06.284559Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:10.029837Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:10.030014Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:10.031427Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:10.031494Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:13.846119Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:13.846305Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:13.847751Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:13.847804Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:17.491469Z node 100 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:17.491698Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:17.493625Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:17.493721Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:21.277280Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:21.277575Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:21.279460Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:21.279522Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:24.383260Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046578944 2024-11-21T08:59:24.383315Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046578944 2024-11-21T08:59:24.383333Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046578944 2024-11-21T08:59:24.878867Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T08:59:24.878914Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T08:59:24.878941Z node 100 :FLAT_TX_SCHEMESHARD INFO: 
TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T08:59:25.013642Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T08:59:25.013836Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T08:59:25.015329Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T08:59:25.015393Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit [GOOD] >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestHiveBalancer >> TxUsage::WriteToTopic_Demo_23_RestartNo >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD] >> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> TxUsage::WriteToTopic_Demo_8 [GOOD] >> TxUsage::WriteToTopic_Demo_9 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: 
[1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:57.382774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:57.382791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:57.382794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:57.382797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:57.382809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:57.382811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:57.382817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:57.382866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:57.390183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:57.390196Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:57.391951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:57.392040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:57.392062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:57.394036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:57.394085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:57.394143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.394270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:57.394710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.394878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:57.394884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.394892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:57.394896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:57.394900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:57.394923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] 
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:57.395706Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:57.406136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:57.406188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:57.406258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:57.406263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:57.406695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:57.406706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:57.406709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:57.406954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:57.406964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:57.407168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.407173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.407177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.407181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.407537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:57.407774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:57.407801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:57.407908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:57.407923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:57.407927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.407958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:57.407962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:57.407977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:57.407985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:57.408257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:57.408264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:57.408282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:57.408285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:57.408325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:57.408330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:57.408337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:57.408339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.408342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:57.408345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:57.408348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 1:0 2024-11-21T08:58:57.408350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:57.408357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:57.408360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:57.408363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... } } 2024-11-21T08:59:27.381352Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381355Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:27.381363Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381368Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:27.381373Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381379Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:27.381381Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T08:59:27.381403Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381406Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:27.381412Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381414Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:27.381418Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:27.381421Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:27.381423Z node 124 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.381426Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:27.381428Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:27.381431Z node 124 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:59:27.381757Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:27.382047Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382063Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382071Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382081Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382093Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382097Z node 124 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:59:27.382105Z node 124 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:27.382107Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:27.382111Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:27.382114Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:27.382117Z node 124 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:59:27.382119Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:27.382135Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:27.382542Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:27.382547Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:27.382584Z node 124 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382594Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:27.382597Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [124:566:2529] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:59:27.382638Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:27.382661Z node 124 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Table" took 30us result status StatusSuccess 2024-11-21T08:59:27.382786Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key1" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 2 KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 
6 } ExecutorCacheSize: 42 TxReadSizeLimit: 100 PartitioningPolicy { MinPartitionsCount: 2 } FreezeState: Unfreeze } TableSchemaVersion: 3 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\010\000\000\000\000\000\000\000\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:53.585834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:53.585850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:53.585854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:53.585857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using 
default configuration 2024-11-21T08:58:53.585869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:53.585871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:53.585878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:53.585931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:53.593341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:53.593355Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:53.594852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:53.594926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:53.594946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:53.596657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:53.596702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:53.596767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:53.596903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:53.597437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:53.597669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:53.597677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:53.597686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:53.597691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:53.597695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:53.597721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:53.598567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:53.609679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:53.609738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.609782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:53.609824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:53.609829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:53.610370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:53.610380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:53.610386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:53.610610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:53.610804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.610814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:53.610819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:53.611193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:53.611425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:53.611461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:53.611638Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:53.611661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:53.611669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:53.611719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:53.611726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:53.611752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:53.611765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:53.612179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:53.612191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:53.612248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:53.612255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:53.612344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:53.612352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:53.612364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:53.612369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:53.612375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:53.612381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:53.612386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:53.612390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:53.612401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:53.612407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:53.612411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:27.560170Z node 140 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560173Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560176Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560178Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560180Z node 140 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T08:59:27.560299Z node 140 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.560307Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.560310Z node 140 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:27.560313Z node 140 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:27.560316Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:27.560358Z node 140 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.560364Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.560366Z node 140 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:27.560368Z node 140 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:27.560370Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T08:59:27.560377Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:59:27.560778Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560789Z node 140 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:27.560847Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 
2024-11-21T08:59:27.560866Z node 140 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:59:27.560868Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:27.560872Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T08:59:27.560875Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:27.560878Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:59:27.560880Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:59:27.560895Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:27.561174Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.561190Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:27.562291Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 601295423987 } TabletId: 72075186233409548 State: 4 2024-11-21T08:59:27.562304Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:27.562342Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 597 RawX2: 601295423989 } TabletId: 72075186233409549 State: 4 2024-11-21T08:59:27.562345Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:27.562364Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 599 RawX2: 601295423990 } TabletId: 72075186233409550 State: 4 2024-11-21T08:59:27.562367Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:27.562641Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:27.562852Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T08:59:27.562898Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:59:27.562934Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409548 2024-11-21T08:59:27.563300Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:27.563329Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 
2024-11-21T08:59:27.563342Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2024-11-21T08:59:27.563376Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:59:27.563402Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:59:27.563444Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T08:59:27.563650Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:59:27.563670Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:59:27.563933Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:27.563941Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:27.563949Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:27.564295Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:59:27.564303Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:59:27.564528Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:59:27.564533Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:59:27.564541Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:59:27.564545Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:59:27.564752Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:59:27.564789Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:59:27.564793Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:59:27.564831Z node 140 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:59:27.564844Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:59:27.564847Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [140:840:2771] TestWaitNotification: OK 
eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2024-11-21T08:59:27.564896Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:27.564905Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:27.564908Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:27.564913Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:59:27.564918Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:59:27.564923Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T08:59:27.564928Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD] >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:00.197137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:00.197154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:00.197157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:00.197161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T08:59:00.197173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:00.197176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:00.197182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:00.197238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:00.204532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:00.204546Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:00.206113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:00.206181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:00.206202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:00.208158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:00.208225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:00.208299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:00.208457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:00.208952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.209134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:00.209140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.209147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:00.209151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:00.209155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:00.209179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:00.210112Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:00.220708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:00.220767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.220809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:00.220852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:00.220858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:00.221333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:00.221343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:00.221346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:00.221631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:00.221875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.221884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.221888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.222270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:00.222527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:00.222558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:00.222676Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:00.222693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:00.222698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.222730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:00.222734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.222748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:00.222757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:00.223036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:00.223043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:00.223063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.223068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:00.223111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.223115Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:00.223122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:00.223124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.223128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:00.223131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.223134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:00.223136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:00.223143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:00.223146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:00.223148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
8944 2024-11-21T08:59:28.408074Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408079Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:28.408103Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:28.408126Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408130Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [117:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:59:28.408135Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [117:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:59:28.408192Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408199Z node 117 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:28.408233Z node 117 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408238Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408242Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:28.408246Z node 117 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T08:59:28.408386Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.408400Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.408404Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:59:28.408409Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:59:28.408414Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:28.408591Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.408605Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.408613Z node 117 :FLAT_TX_SCHEMESHARD INFO: 
Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:59:28.408617Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:28.408621Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:59:28.408632Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:59:28.408992Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:28.409003Z node 117 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:28.409069Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:28.409091Z node 117 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:59:28.409096Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:28.409101Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:59:28.409105Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:28.409110Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:59:28.409114Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:59:28.409135Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:28.409520Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.409806Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:28.411190Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 341 RawX2: 502511175956 } TabletId: 72075186233409546 State: 4 2024-11-21T08:59:28.411210Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:28.411266Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 502511175957 } TabletId: 72075186233409547 State: 4 2024-11-21T08:59:28.411273Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:28.411751Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:28.411840Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:28.411897Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 
TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:59:28.411949Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:28.411999Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2024-11-21T08:59:28.412472Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T08:59:28.412551Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:59:28.412587Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:59:28.412970Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:28.412981Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:59:28.412993Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:28.413463Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:28.413475Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:28.413725Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:59:28.413733Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:59:28.413760Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:59:28.413794Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:59:28.413798Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:59:28.413838Z node 117 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:59:28.413852Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:59:28.413855Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [117:541:2504] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T08:59:28.413889Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:28.413896Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 
72075186233409547 2024-11-21T08:59:28.413953Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:28.413977Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 33us result status StatusPathDoesNotExist 2024-11-21T08:59:28.414004Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestRestartsWithFollower >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD] >> BasicUsage::ReadSessionCorrectClose >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> THiveTest::TestLocalDisconnect >> TxUsage::WriteToTopic_Demo_28 [GOOD] >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> TTablesWithReboots::CreateTableWithReboots [GOOD] >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestHiveRestart >> TopicAutoscaling::PartitionSplit_PQv1 >> TxUsage::WriteToTopic_Demo_29 >> LocalPartition::DiscoveryServiceBadNodeId [GOOD] >> LocalPartition::WithoutPartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: (1,1): 1 on 2 (1,1): 1 on 1 RemoveNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): 1 on 9 RemoveNode 0 (1,3): 1 on 9 RemoveNode 2 (1,3): 1 on 3 (1,3): 1 on 4 (1,1): -1 on 0 (1,2): 1 on 6 RemoveNode 1 (1,3): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 8 (1,2): -1 on 6 (1,2): 1 on 6 (1,2): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 5 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 8 (1,2): -1 on 6 RemoveNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 3 AddNode 2 (1,2): 1 on 8 RemoveNode 8 (1,1): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 5 (1,3): 1 on 8 RemoveNode 9 (1,3): 1 on 5 (1,1): 1 on 0 AddNode 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 1 (1,1): 1 on 3 RemoveNode 6 (1,3): 1 on 7 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): -1 on 5 (1,3): 1 on 3 (1,3): -1 on 3 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 (1,1): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 8 (1,3): 1 on 3 (1,3): 1 on 0 (1,2): -1 on 7 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 0 
(1,1): 1 on 4 (1,3): 1 on 2 (1,3): 1 on 2 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 1 (1,1): 1 on 3 RemoveNode 2 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 0 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): 1 on 7 (1,1): 1 on 0 (1,1): 1 on 1 AddNode 8 (1,1): -1 on 3 (1,1): -1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 6 RemoveNode 5 (1,3): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): 1 on 2 AddNode 4 (1,2): 1 on 9 (1,1): 1 on 0 (1,2): 1 on 5 (1,1): 1 on 4 AddNode 9 (1,3): -1 on 4 RemoveNode 4 (1,3): 1 on 0 (1,1): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 2 RemoveNode 9 (1,1): -1 on 1 (1,3): -1 on 8 (1,2): 1 on 9 (1,1): 1 on 4 AddNode 4 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 RemoveNode 2 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,3): -1 on 8 (1,2): 1 on 6 (1,3): -1 on 1 (1,2): -1 on 8 RemoveNode 7 (1,2): 1 on 5 RemoveNode 4 (1,1): 1 on 4 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 (1,2): 1 on 7 RemoveNode 2 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 0 (1,2): 1 on 5 AddNode 7 (1,1): 1 on 4 (1,1): 1 on 4 (1,1): -1 on 2 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 AddNode 9 (1,2): 1 on 6 AddNode 4 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 7 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 1 (1,1): 1 on 1 (1,3): -1 on 0 AddNode 2 (1,3): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 4 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 3 (1,2): -1 on 8 (1,2): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 6 (1,2): 1 on 7 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 6 (1,2): 1 on 7 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 1 (1,2): 1 on 7 RemoveNode 8 (1,3): -1 on 6 RemoveNode 7 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): 1 on 1 RemoveNode 4 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 1 AddNode 8 (1,3): -1 on 8 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 3 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 1 (1,3): 1 on 8 RemoveNode 6 (1,1): -1 on 0 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 3 (1,3): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 8 AddNode 7 (1,3): 1 on 5 AddNode 5 (1,1): -1 on 3 RemoveNode 7 (1,3): -1 on 8 AddNode 7 (1,1): -1 on 3 (1,3): 1 on 3 RemoveNode 7 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 3 (1,2): -1 on 9 (1,1): -1 on 2 (1,2): 1 on 9 AddNode 7 (1,2): -1 on 8 AddNode 0 (1,1): 1 on 2 (1,3): 1 on 0 (1,2): 1 on 9 AddNode 2 (1,3): 1 on 0 RemoveNode 7 (1,3): 1 on 8 RemoveNode 2 (1,1): 1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 0 (1,3): -1 on 1 AddNode 2 (1,3): 1 on 2 (1,3): -1 on 7 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 0 (1,2): 1 on 9 RemoveNode 5 (1,1): -1 on 3 (1,3): 1 on 7 (1,1): 1 on 1 (1,2): 1 on 7 AddNode 9 (1,2): 1 on 6 (1,1): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 1 (1,1): -1 on 2 (1,2): -1 on 7 AddNode 4 (1,2): 1 on 8 (1,3): 1 on 5 (1,1): 1 on 0 (1,1): 1 on 4 (1,1): 1 on 1 (1,2): 1 on 7 (1,3): -1 on 2 (1,2): 1 on 9 (1,3): -1 on 5 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): -1 on 0 (1,3): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 7 (1,3): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 (1,2): 1 on 6 RemoveNode 3 (1,1): 1 on 4 (1,2): 1 on 9 (1,2): -1 on 8 (1,3): -1 on 6 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 3 AddNode 1 (1,3): 1 on 4 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 0 RemoveNode 4 (1,2): 1 on 9 (1,3): 1 on 7 
(1,1): 1 on 1 (1,2): -1 on 6 AddNode 5 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 3 (1,3): -1 on 2 (1,3): 1 on 1 RemoveNode 8 (1,1): 1 on 0 (1,2): -1 on 5 AddNode 4 (1,3): -1 on 5 (1,3): 1 on 0 (1,3): -1 on 0 (1,3): 1 on 7 (1,1): 1 on 0 RemoveNode 9 (1,1): -1 on 4 (1,3): 1 on 0 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 5 RemoveNode 1 (1,3): 1 on 4 (1,3): 1 on 6 (1,1): 1 on 0 (1,1): 1 on 1 AddNode 2 (1,2): -1 on 6 AddNode 1 (1,3): 1 on 6 (1,1): 1 on 4 (1,3): -1 on 8 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 1 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 9 (1,1): -1 on 4 (1,3): 1 on 7 (1,2): -1 on 9 (1,3): 1 on 7 RemoveNode 4 (1,3): -1 on 9 AddNode 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 5 (1,2): 1 on 9 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 8 (1,2): 1 on 5 (1,3): 1 on 1 AddNode 7 (1,3): 1 on 4 AddNode 4 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 AddNode 6 (1,1): 1 on 2 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 1 (1,1): -1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 2 (1,3): 1 on 9 (1,2): -1 on 8 (1,1): 1 on 3 RemoveNode 3 (1,3): -1 on 0 (1,2): 1 on 5 RemoveNode 1 (1,2): 1 on 9 AddNode 3 (1,1): -1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,3): 1 on 6 AddNode 1 (1,3): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 (1,1): 1 on 4 (1,2): 1 on 6 (1,1): 1 on 3 (1,3): -1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 RemoveNode 4 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 0 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 8 RemoveNode 8 (1,1): -1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 RemoveNode 0 (1,3): -1 on 2 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 8 RemoveNode 1 (1,2): 1 on 8 (1,1): 1 on 2 (1,1): 1 on 4 AddNode 2 (1,2): 1 on 6 (1,1): -1 on 2 (1,3): 1 on 5 (1,1): 1 on 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 9 (1,3): 1 on 1 RemoveNode 2 (1,3): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 8 (1,2): 1 on 8 (1,3): 1 on 8 AddNode 0 (1,3): 1 on 8 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 3 (1,1): -1 on 3 (1,3): 1 on 4 (1,3): 1 on 5 AddNode 1 (1,2): 1 on 6 (1,2): -1 on 9 (1,1): 1 on 4 (1,3): 1 on 9 (1,3): 1 on 1 (1,3): 1 on 7 (1,2): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): -1 on 9 (1,1): 1 on 1 (1,2): 1 on 5 (1,1): 1 on 3 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): -1 on 6 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 9 (1,3): 1 on 5 (1,3): 1 on 2 AddNode 5 (1,3): 1 on 8 (1,2): 1 on 9 (1,1): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,2): 1 on 6 (1,2): 1 on 6 (1,2): -1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 4 (1,3): 1 on 4 (1,3): -1 on 2 (1,2): -1 on 7 (1,1): 1 on 3 (1,3): -1 on 7 (1,2): 1 on 6 (1,1): 1 on 2 AddNode 6 (1,1): -1 on 0 (1,2): -1 on 5 (1,3): 1 on 6 (1,1): 1 on 1 AddNode 9 (1,1): 1 on 4 (1,1): 1 on 1 AddNode 7 (1,3): 1 on 3 (1,2): -1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 2 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 6 (1,2): 1 on 7 (1,3): 1 on 3 (1,3): 1 on 8 (1,1): 1 on 3 RemoveNode 4 (1,3): 1 on 4 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 8 AddNode 8 (1,3): 1 on 9 (1,1): 1 on 3 (1,2): 1 on 9 AddNode 5 (1,1): 1 on 3 RemoveNode 8 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 0 (1,2): -1 on 9 RemoveNode 1 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 2 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): -1 on 4 (1,3): 1 on 1 (1,2): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,2): 1 on 8 RemoveNode 3 (1,2): 1 on 9 AddNode 1 (1,3): -1 
on 2 (1,2): -1 on 6 (1,2): 1 on 9 (1,3): -1 on 2 AddNode 2 (1,3): 1 on 0 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 6 (1,2): 1 on 9 (1,2): 1 on 9 AddNode 6 (1,2): -1 on 7 RemoveNode 4 (1,2): 1 on 6 AddNode 4 (1,2): 1 on 6 (1,1): 1 on 4 AddNode 0 (1,3): 1 on 4 RemoveNode 9 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,1): 1 on 0 (1,1): 1 on 0 AddNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): -1 on 7 (1,1): -1 on 4 RemoveNode 6 (1,3): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,3): 1 on 3 (1,3): 1 on 2 (1,1): 1 on 4 AddNode 9 (1,2): 1 on 8 (1,1): 1 on 0 RemoveNode 0 (1,2): -1 on 8 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): 1 on 5 AddNode 3 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 2 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 4 (1,2): -1 on 8 (1,2): 1 on 7 RemoveNode 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 8 RemoveNode 9 (1,2): 1 on 5 (1,3): 1 on 9 (1,1): 1 on 2 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 RemoveNode 2 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 3 (1,1): 1 on 1 (1,3): 1 on 8 AddNode 0 (1,2): 1 on 8 RemoveNode 5 (1,3): 1 on 6 AddNode 8 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 2 (1,1): 1 on 2 RemoveNode 2 (1,1): -1 on 4 (1,1): 1 on 1 AddNode 9 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 8 (1,3): 1 on 3 AddNode 7 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 3 (1,3): 1 on 3 AddNode 4 (1,2): 1 on 5 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 5 RemoveNode 4 (1,3): -1 on 6 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 9 (1,1): -1 on 0 (1,2): 1 on 7 (1,1): 1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 (1,3): 1 on 6 (1,2): 1 on 8 AddNode 2 (1,3): 1 on 0 (1,2): 1 on 8 RemoveNode 7 (1,1): 1 on 3 (1,1): 1 on 1 AddNode 1 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 (1,3): 1 on 7 AddNode 8 (1,1): 1 on 4 (1,3): -1 on 7 (1,2): 1 on 8 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 7 (1,1): -1 on 3 (1,2): -1 on 7 (1,2): 1 on 5 AddNode 4 (1,2): -1 on 9 (1,2): -1 on 7 (1,1): -1 on 2 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 8 RemoveNode 1 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,1): -1 on 2 (1,1): 1 on 0 (1,3): -1 on 2 (1,2): 1 on 6 AddNode 6 (1,2): 1 on 5 (1,3): 1 on 6 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 8 (1,2): 1 on 8 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 8 (1,3): -1 on 3 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): -1 on 8 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 3 (1,1): 1 on 0 RemoveNode 8 (1,2): 1 on 7 AddNode 8 (1,3): 1 on 3 (1,1): -1 on 0 RemoveNode 0 (1,2): 1 on 8 (1,2): 1 on 9 RemoveNode 3 (1,1): -1 on 2 RemoveNode 8 (1,1): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 8 (1,3): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 3 (1,2): 1 on 8 AddNode 1 (1,2): 1 on 7 RemoveNode 6 (1,2): 1 on 5 (1,2): -1 on 6 RemoveNode 9 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 0 (1,1): 1 on 4 AddNode 7 (1,3): 1 on 3 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 6 (1,3): -1 on 2 RemoveNode 0 (1,3): 1 on 7 AddNode 0 (1,1): 1 on 1 (1,1): -1 on 2 AddNode 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 2 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 2 (1,2): 1 on 6 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): -1 on 8 (1,2): -1 on 7 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 4 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 2 (1,2): -1 on 7 (1,3): -1 on 9 (1,1): -1 on 4 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 4 (1,1): 1 on 4 
(1,1): 1 on 4 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): 1 on 1 (1,1): 1 on 2 (1,1): 1 on 1 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 6 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 8 (1,1 ... 4 (1,3): 1 on 3 (1,2): 1 on 8 AddNode 5 (1,2): 1 on 7 (1,2): -1 on 8 RemoveNode 9 (1,2): -1 on 6 (1,2): 1 on 6 (1,3): 1 on 9 RemoveNode 6 (1,1): 1 on 2 (1,3): -1 on 2 (1,1): -1 on 4 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 5 AddNode 2 (1,1): 1 on 4 (1,3): 1 on 3 RemoveNode 2 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 2 RemoveNode 7 (1,1): 1 on 3 AddNode 2 (1,2): 1 on 6 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 4 (1,3): 1 on 6 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 5 (1,1): -1 on 0 RemoveNode 3 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 1 RemoveNode 4 (1,3): 1 on 1 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 0 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 1 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 4 (1,1): 1 on 2 (1,3): 1 on 0 AddNode 9 (1,3): -1 on 6 AddNode 7 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 6 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 9 (1,3): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 7 (1,3): -1 on 2 (1,2): -1 on 9 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 9 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 0 AddNode 7 (1,3): 1 on 8 (1,2): 1 on 6 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 0 (1,2): -1 on 6 (1,3): 1 on 5 AddNode 3 (1,1): -1 on 3 AddNode 4 (1,3): 1 on 1 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 9 (1,1): 1 on 4 (1,2): 1 on 6 RemoveNode 7 (1,3): -1 on 5 (1,1): 1 on 1 (1,3): 1 on 6 RemoveNode 9 (1,3): 1 on 9 RemoveNode 8 (1,1): 1 on 2 AddNode 6 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 7 AddNode 8 (1,2): 1 on 5 AddNode 5 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 5 (1,3): 1 on 5 (1,1): 1 on 4 (1,2): -1 on 5 RemoveNode 4 (1,2): 1 on 5 (1,3): 1 on 2 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): -1 on 9 (1,2): -1 on 6 AddNode 4 (1,3): 1 on 9 RemoveNode 4 (1,3): -1 on 1 RemoveNode 0 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 2 (1,3): 1 on 1 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 AddNode 1 (1,2): 1 on 9 (1,2): 1 on 8 (1,1): 1 on 0 (1,3): 1 on 9 RemoveNode 6 (1,2): 1 on 8 AddNode 6 (1,3): -1 on 7 (1,2): 1 on 8 (1,3): -1 on 5 (1,2): 1 on 8 AddNode 0 (1,1): 1 on 2 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 0 (1,3): -1 on 9 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 8 RemoveNode 7 (1,2): -1 on 5 (1,1): 1 on 1 (1,1): -1 on 3 RemoveNode 2 (1,1): 1 on 0 (1,2): -1 on 7 (1,3): 1 on 2 (1,1): 1 on 2 (1,3): 1 on 1 (1,1): -1 on 1 (1,2): 1 on 6 (1,3): 1 on 4 (1,2): 1 on 9 (1,3): -1 on 4 RemoveNode 3 (1,2): 1 on 6 (1,3): 1 on 4 RemoveNode 5 (1,1): 1 on 0 (1,3): 1 on 3 RemoveNode 1 (1,3): -1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 0 (1,2): -1 on 5 AddNode 3 (1,2): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 RemoveNode 0 (1,3): 1 on 8 RemoveNode 2 (1,3): 1 on 0 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 5 (1,1): 1 on 1 AddNode 7 (1,1): 1 on 0 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 8 AddNode 8 (1,2): 1 on 6 RemoveNode 8 (1,3): 1 on 9 AddNode 9 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 9 AddNode 8 (1,2): 1 on 8 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 3 RemoveNode 7 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 0 (1,2): -1 on 9 (1,3): 1 on 4 (1,2): 1 on 7 (1,3): -1 on 5 (1,1): -1 on 1 (1,1): 1 on 2 (1,3): 1 on 6 AddNode 5 (1,3): 1 on 
7 RemoveNode 3 (1,2): 1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,1): 1 on 4 (1,3): 1 on 8 (1,3): 1 on 7 (1,2): -1 on 8 AddNode 3 (1,1): 1 on 0 RemoveNode 0 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): 1 on 8 RemoveNode 4 (1,3): 1 on 2 (1,2): -1 on 6 (1,3): 1 on 3 AddNode 2 (1,3): 1 on 5 (1,1): 1 on 2 (1,3): 1 on 2 RemoveNode 3 (1,3): 1 on 3 (1,2): 1 on 6 RemoveNode 5 (1,2): 1 on 9 (1,3): -1 on 9 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 0 (1,2): 1 on 5 AddNode 3 (1,3): -1 on 4 (1,3): 1 on 7 RemoveNode 2 (1,1): 1 on 1 (1,3): 1 on 5 RemoveNode 8 (1,1): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 0 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 4 (1,1): 1 on 2 (1,1): 1 on 2 (1,2): 1 on 5 (1,1): 1 on 0 (1,2): 1 on 6 (1,3): -1 on 8 (1,3): 1 on 5 (1,3): 1 on 1 (1,1): 1 on 3 AddNode 4 (1,1): -1 on 3 (1,1): 1 on 2 (1,3): -1 on 5 RemoveNode 4 (1,3): 1 on 2 (1,1): 1 on 0 (1,3): -1 on 6 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 6 (1,2): 1 on 7 (1,3): -1 on 3 AddNode 7 (1,2): 1 on 7 RemoveNode 0 (1,3): 1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 RemoveNode 7 (1,2): -1 on 6 AddNode 7 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): -1 on 6 RemoveNode 3 (1,3): 1 on 9 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 7 (1,3): -1 on 7 AddNode 7 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): -1 on 6 AddNode 5 (1,1): 1 on 1 (1,3): 1 on 2 (1,3): 1 on 2 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 9 (1,3): -1 on 2 RemoveNode 4 (1,3): -1 on 4 RemoveNode 7 (1,1): -1 on 4 (1,3): 1 on 2 (1,3): -1 on 2 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): -1 on 7 AddNode 9 (1,1): 1 on 4 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 3 AddNode 2 (1,3): 1 on 2 RemoveNode 0 (1,2): -1 on 7 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): -1 on 6 (1,3): 1 on 0 AddNode 7 (1,1): 1 on 0 (1,3): -1 on 8 RemoveNode 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 2 (1,1): 1 on 0 (1,3): 1 on 4 (1,1): 1 on 0 AddNode 0 (1,1): 1 on 3 RemoveNode 7 (1,3): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 1 RemoveNode 0 (1,2): 1 on 9 (1,2): -1 on 5 AddNode 1 (1,2): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 3 (1,3): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 1 (1,1): 1 on 1 AddNode 9 (1,3): 1 on 2 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): -1 on 1 (1,1): -1 on 3 (1,3): 1 on 2 AddNode 0 (1,2): 1 on 7 (1,3): -1 on 0 (1,1): 1 on 3 AddNode 8 (1,2): 1 on 7 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 2 (1,3): 1 on 2 RemoveNode 0 (1,2): 1 on 5 AddNode 0 (1,1): -1 on 1 RemoveNode 2 (1,1): 1 on 2 (1,1): -1 on 2 (1,3): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 6 (1,3): -1 on 8 (1,1): 1 on 2 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 8 (1,3): 1 on 9 RemoveNode 5 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): -1 on 9 (1,3): 1 on 5 (1,2): 1 on 8 (1,3): 1 on 8 RemoveNode 6 (1,2): -1 on 6 (1,3): 1 on 6 (1,3): 1 on 3 (1,2): 1 on 8 (1,1): 1 on 1 (1,3): 1 on 1 (1,1): 1 on 1 AddNode 6 (1,1): 1 on 4 AddNode 3 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 9 (1,3): 1 on 1 AddNode 2 (1,1): 1 on 0 (1,3): 1 on 7 AddNode 9 (1,1): -1 on 2 AddNode 1 (1,1): -1 on 1 (1,2): 1 on 8 RemoveNode 2 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 4 AddNode 7 (1,1): 1 on 1 RemoveNode 9 (1,2): 1 on 9 (1,3): 1 on 7 AddNode 4 
(1,2): 1 on 6 (1,3): -1 on 7 (1,2): -1 on 6 (1,3): 1 on 5 (1,2): -1 on 8 (1,1): 1 on 3 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 8 (1,3): 1 on 2 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 (1,1): 1 on 4 RemoveNode 8 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 2 (1,1): -1 on 0 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,3): -1 on 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 RemoveNode 1 (1,3): 1 on 3 (1,3): 1 on 0 (1,1): 1 on 3 RemoveNode 2 (1,3): 1 on 5 (1,1): -1 on 2 (1,2): 1 on 8 (1,1): 1 on 1 RemoveNode 7 (1,3): -1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 9 (1,3): -1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,1): -1 on 0 (1,1): 1 on 0 (1,3): -1 on 1 (1,2): 1 on 8 (1,2): -1 on 8 (1,2): 1 on 9 (1,1): -1 on 4 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 5 (1,2): 1 on 9 AddNode 2 (1,2): -1 on 8 RemoveNode 9 (1,3): -1 on 3 (1,3): -1 on 1 RemoveNode 3 (1,1): 1 on 0 AddNode 5 (1,3): 1 on 4 RemoveNode 6 (1,2): 1 on 7 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 1 AddNode 2 (1,3): -1 on 7 (1,2): -1 on 5 (1,1): -1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 AddNode 8 (1,1): 1 on 3 AddNode 3 (1,2): 1 on 9 (1,3): 1 on 5 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 7 AddNode 6 (1,3): 1 on 9 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 9 AddNode 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,3): 1 on 1 (1,2): -1 on 9 (1,1): -1 on 3 RemoveNode 7 (1,2): 1 on 8 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 1 (1,1): -1 on 3 RemoveNode 1 (1,1): 1 on 4 (1,3): 1 on 0 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 8 (1,1): 1 on 2 AddNode 9 (1,3): 1 on 6 (1,3): 1 on 6 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 7 (1,2): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 8 (1,2): -1 on 5 AddNode 4 (1,1): 1 on 0 (1,3): 1 on 5 (1,3): 1 on 2 RemoveNode 4 (1,3): -1 on 9 (1,1): 1 on 4 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 7 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 7 RemoveNode 6 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 5 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): -1 on 3 (1,1): 1 on 1 RemoveNode 5 (1,1): 1 on 4 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,2): 1 on 9 (1,1): 1 on 1 (1,3): 1 on 4 (1,3): 1 on 4 RemoveNode 2 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 4 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): -1 on 9 (1,2): -1 on 7 AddNode 5 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 7 (1,2): -1 on 5 (1,1): 1 on 2 (1,3): 1 on 8 (1,1): -1 on 2 RemoveNode 0 (1,2): 1 on 5 (1,2): -1 on 7 RemoveNode 5 (1,3): 1 on 5 AddNode 2 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 9 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 0 (1,1): 1 on 1 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 3 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 2 (1,2): -1 on 5 (1,2): 1 on 6 AddNode 1 (1,1): -1 on 0 RemoveNode 1 (1,2): -1 on 7 AddNode 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 RemoveNode 3 (1,3): -1 on 6 RemoveNode 8 (1,2): 1 on 8 (1,3): 1 on 7 (1,3): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,3): 1 on 3 AddNode 6 (1,1): 1 on 2 AddNode 5 (1,2): 1 on 6 AddNode 3 (1,3): 1 on 2 RemoveNode 4 (1,3): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 4 AddNode 0 (1,1): -1 on 4 RemoveNode 0 (1,2): 1 on 6 RemoveNode 5 (1,1): 1 on 0 (1,1): -1 on 4 (1,3): 1 on 1 (1,1): 1 on 0 AddNode 8 (1,1): -1 on 2 (1,3): -1 on 0 (1,3): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 7 AddNode 5 (1,2): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 RemoveNode 9 (1,2): 1 on 6 
AddNode 0 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): 1 on 6 (1,1): -1 on 0 (1,2): 1 on 9 (1,3): 1 on 1 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 RemoveNode 5 (1,2): -1 on 6 (1,3): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 3 AddNode 5 (1,1): 1 on 4 (1,3): 1 on 3 (1,1): -1 on 2 (1,3): -1 on 1 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 RemoveNode 0 (1,2): 1 on 5 (1,3): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): -1 on 3 RemoveNode 6 (1,3): 1 on 9 AddNode 9 (1,1): 1 on 0 RemoveNode 5 (1,3): 1 on 0 RemoveNode 3 (1,3): -1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): -1 on 7 (1,3): -1 on 6 Final state: 403 387 397 417 400 0 0 0 0 0 0 0 0 0 0 359 427 442 433 410 192 199 174 233 198 205 200 154 185 175 - - + - - - - + + + Took 2.8314 seconds avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1533 ch.0 max = 1660 ch.0 std-dev = 29.59594567 ch.1 avg = 1600 ch.1 min = 1524 ch.1 max = 1658 ch.1 std-dev = 26.68857433 ch.2 avg = 1600 ch.2 min = 1531 ch.2 max = 1672 ch.2 std-dev = 29.16093277 avg = 1250 std-dev = 0 avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1600 ch.0 max = 1600 ch.0 std-dev = 0 ch.1 avg = 1600 ch.1 min = 1600 ch.1 max = 1600 ch.1 std-dev = 0 ch.2 avg = 1600 ch.2 min = 1600 ch.2 max = 1600 ch.2 std-dev = 0 avg = 1250 std-dev = 0 avg = 4800 min = 4799 max = 4801 std-dev = 0.2449489743 ch.0 avg = 1600 ch.0 min = 1536 ch.0 max = 1679 ch.0 std-dev = 32.67231244 ch.1 avg = 1600 ch.1 min = 1542 ch.1 max = 1685 ch.1 std-dev = 32.25771226 ch.2 avg = 1600 ch.2 min = 1520 ch.2 max = 1682 ch.2 std-dev = 32.38425543 avg = 1250 std-dev = 0 >> TTablesWithReboots::CreateDroppedTableAndDropWithReboots [GOOD] >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestRestartTablets >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:12.681548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:12.681563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:12.681567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:12.681570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:12.681579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:12.681581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:12.681587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:12.681635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:12.688338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:12.688349Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:12.689616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:12.689672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:12.689685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:12.691189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:12.691227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:12.691287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:12.691395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:12.691811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:12.691972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:12.691978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:12.691985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:12.691989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:12.691993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:12.692014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:12.693011Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:12.703139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:12.703188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:12.703252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:12.703256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:12.703710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:12.703717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:12.703720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:12.703938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:12.703946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:12.704135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.704140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.704143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:12.704146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:12.704577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:12.704847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:12.704878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:12.704984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:12.704997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:12.705001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:12.705030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:12.705034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:12.705048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:12.705056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:12.705311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:12.705316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:12.705334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:12.705337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:12.705375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:12.705379Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:12.705385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:12.705387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:12.705390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:12.705393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:12.705396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:12.705398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:12.705403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:12.705406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:12.705408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
EMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T08:59:29.692860Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T08:59:29.693750Z node 71 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:29.693756Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T08:59:29.693772Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:29.693791Z node 71 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:29.693794Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [71:205:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T08:59:29.693797Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [71:205:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T08:59:29.693857Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.693862Z node 71 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T08:59:29.694134Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694142Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694145Z node 71 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:59:29.694148Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T08:59:29.694151Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T08:59:29.694193Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694199Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694216Z node 71 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T08:59:29.694218Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T08:59:29.694220Z node 71 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:29.694225Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T08:59:29.694322Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 121 } } 2024-11-21T08:59:29.694327Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:29.694337Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 121 } } 2024-11-21T08:59:29.694344Z node 71 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 121 } } 2024-11-21T08:59:29.694393Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:29.694396Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:29.694404Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:29.694407Z node 71 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:29.694411Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:29.694416Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:29.694418Z node 71 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.694420Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:29.694425Z node 71 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T08:59:29.694922Z node 71 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694936Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:59:29.694982Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.694993Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.695024Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.695028Z node 71 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T08:59:29.695036Z node 71 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:59:29.695038Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:59:29.695041Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T08:59:29.695048Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [71:304:2296] message: TxId: 1002 2024-11-21T08:59:29.695052Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:59:29.695055Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:59:29.695057Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:59:29.695069Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:29.695348Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:59:29.695354Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [71:305:2297] TestWaitNotification: OK eventTxId 1002 2024-11-21T08:59:29.695412Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:29.695435Z node 71 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/Table1" took 26us result status StatusSuccess 2024-11-21T08:59:29.695494Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/Table1" PathDescription { Self { Name: "Table1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] >> THiveTest::TestCreateTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:55.911569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:55.911585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:55.911588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:55.911591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:55.911603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:55.911606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:55.911612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:55.911667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:55.919243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:55.919257Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:55.920883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:55.920947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:55.920964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:55.922502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:55.922546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:55.922605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.922712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:55.923132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.923299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:55.923304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.923312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:55.923317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:55.923320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:55.923344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:55.924121Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:55.934483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:55.934538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.934573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:55.934614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:55.934619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:55.935076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:55.935085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:55.935088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:55.935353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:55.935603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.935612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.935616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.936010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:55.936310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:55.936349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:55.936521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:55.936563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:55.936571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.936618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:55.936625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:55.936643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:55.936652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:55.936965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:55.936970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:55.936994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:55.936997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:55.937046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:55.937050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:55.937057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:55.937060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.937063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:55.937067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:55.937069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:55.937072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:55.937078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:55.937082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:55.937084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
age: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030108Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:30.030117Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030121Z node 141 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:30.030125Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030132Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030134Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T08:59:30.030163Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030166Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:30.030172Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030174Z node 141 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:30.030178Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T08:59:30.030181Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030184Z node 141 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030188Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030190Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030193Z node 141 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T08:59:30.030289Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:59:30.030299Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T08:59:30.030580Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030595Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030603Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030612Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030650Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.030655Z node 141 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T08:59:30.030664Z node 141 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T08:59:30.030667Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:59:30.030671Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T08:59:30.030674Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T08:59:30.030678Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T08:59:30.030680Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T08:59:30.030695Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1002 2024-11-21T08:59:30.031040Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T08:59:30.031046Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T08:59:30.031081Z node 141 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T08:59:30.031092Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T08:59:30.031095Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [141:451:2416] TestWaitNotification: OK eventTxId 1002 2024-11-21T08:59:30.031135Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:30.031164Z node 141 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 36us result status StatusSuccess 2024-11-21T08:59:30.031301Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 
3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LocalPartition::Basic [GOOD] >> LocalPartition::DescribeBadPartition >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:58.136653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:58.136673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:58.136678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:58.136682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:58.136695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:58.136698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:58.136707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T08:58:58.136790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:58.146966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:58.146982Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:58.148873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:58.148961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:58.148981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:58.151162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:58.151224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:58.151304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:58.151485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:58.152070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:58.152314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:58.152325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:58.152335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:58.152342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:58.152348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:58.152381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:58.153540Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:58.164074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:58.164136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.164177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:58.164230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2024-11-21T08:58:58.164235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.164875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:58.164892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:58.164915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.164923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:58.164925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:58.164928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:58.165203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.165209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:58.165211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:58.165422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.165426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.165429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:58.165434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:58.165849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:58.166153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:58.166184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:58.166312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:58.166329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T08:58:58.166333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:58.166364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:58.166368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:58.166383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:58.166390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:58.166697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:58.166705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:58.166725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:58.166730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:58.166771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:58.166775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:58.166781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:58.166783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:58.166787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:58.166789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:58.166792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:58.166795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:58.166801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:58.166804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:58.166807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 PrepareArriveTime: 94000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 21 } } 2024-11-21T08:59:30.239735Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#1004:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 PrepareArriveTime: 94000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 21 } } 2024-11-21T08:59:30.239739Z node 133 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T08:59:30.239751Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 1004:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.239755Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 3 -> 128 2024-11-21T08:59:30.240004Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.240022Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.240025Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:30.240031Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1004 ready parts: 1/1 2024-11-21T08:59:30.240049Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:30.240276Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T08:59:30.240292Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000006 2024-11-21T08:59:30.240332Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:30.240343Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 571230652517 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:30.240347Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 HandleReply TEvOperationPlan, operationId: 1004:0, stepId: 5000006, 
at schemeshard: 72057594046678944 2024-11-21T08:59:30.240381Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 129 2024-11-21T08:59:30.240392Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T08:59:30.241087Z node 133 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:30.241092Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:30.241117Z node 133 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:30.241122Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [133:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T08:59:30.241290Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.241297Z node 133 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:30.241394Z node 133 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:30.241403Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:30.241407Z node 133 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:30.241411Z node 133 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2024-11-21T08:59:30.241416Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:30.241430Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T08:59:30.241858Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 140 } } 2024-11-21T08:59:30.241867Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:30.241882Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 140 } } 2024-11-21T08:59:30.241892Z node 133 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 
72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 140 } } 2024-11-21T08:59:30.241967Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:30.241972Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T08:59:30.241982Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:30.241986Z node 133 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T08:59:30.241993Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T08:59:30.242000Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242004Z node 133 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242007Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242012Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:59:30.242124Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:30.242416Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242437Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242486Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242491Z node 133 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T08:59:30.242501Z node 133 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:30.242505Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:30.242510Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:30.242515Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:30.242519Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 
2024-11-21T08:59:30.242523Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:30.242539Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:30.242945Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:30.242950Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:30.242982Z node 133 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:30.242998Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:30.243002Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [133:545:2520] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateDroppedTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:52.480330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:52.480350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.480354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:52.480357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:52.480371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:52.480374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:52.480380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.480443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2024-11-21T08:58:52.488011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:52.488028Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.489664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:52.489732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:52.489753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:52.491627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:52.491705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:52.491789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.491959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.492647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.492881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.492891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.492902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:52.492909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.492915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:52.492950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:52.494147Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.511521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:52.511587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.511632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:52.511686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:52.511693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.512306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.512328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:52.512361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.512369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:52.512376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:52.512380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:52.512743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.512754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:52.512758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:52.513044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.513054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.513059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.513064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.513650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:52.513992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:52.514033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:52.514201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.514226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:52.514233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.514281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:52.514289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.514311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:52.514323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.514690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.514701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.514730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.514735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:52.514806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.514813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:52.514823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:52.514827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.514831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:52.514836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.514840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:52.514843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:52.514853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:52.514858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:52.514862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
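Note: the excerpt above walks a single schemeshard sub-operation (txId 1, ESchemeOpAlterSubDomain) through propose, create-parts, configure-parts, propose-to-coordinator, and done, printing the state codes 2 -> 3 -> 128 -> 240 along the way. The following is a minimal sketch that only models what the log prints; the enum names and values mirror the "Change state for txid 1:0" lines and are not YDB's actual definitions.

```cpp
#include <cstdio>

// Illustrative state codes copied from the "Change state for txid 1:0 X -> Y" lines.
enum ETxState {
    CreateParts    = 2,   // TCreateParts: no shards to create, move on
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // NSubDomainState::TPropose: wait for the coordinator's plan step
    Done           = 240, // TDone: publish to the scheme board and finish
};

// Advance one step, mirroring the progression the log records for txId 1.
static ETxState Next(ETxState s) {
    switch (s) {
        case CreateParts:    return ConfigureParts;
        case ConfigureParts: return Propose;
        case Propose:        return Done; // fired by TEvOperationPlan at step 5000001
        default:             return Done;
    }
}

int main() {
    for (ETxState s = CreateParts; s != Done; s = Next(s)) {
        std::printf("Change state for txid 1:0 %d -> %d\n", s, Next(s));
    }
    return 0;
}
```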
Id: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:30.079105Z node 155 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:30.079108Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:30.079111Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:30.079159Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:30.079164Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:30.079167Z node 155 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:30.079169Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:30.079171Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T08:59:30.079176Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:59:30.079652Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:30.079661Z node 155 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:30.079719Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T08:59:30.079735Z node 155 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:59:30.079738Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:30.079742Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T08:59:30.079744Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:30.079747Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:59:30.079750Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:59:30.079768Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:30.079894Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:30.079967Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:30.081294Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source 
{ RawX1: 584 RawX2: 665719933420 } TabletId: 72075186233409548 State: 4 2024-11-21T08:59:30.081308Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:30.081360Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 594 RawX2: 665719933428 } TabletId: 72075186233409549 State: 4 2024-11-21T08:59:30.081367Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:30.081397Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 601 RawX2: 665719933431 } TabletId: 72075186233409550 State: 4 2024-11-21T08:59:30.081402Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:30.081831Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:30.081950Z node 155 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T08:59:30.081994Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:30.082352Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T08:59:30.082404Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:30.082443Z node 155 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T08:59:30.082497Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:30.082524Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T08:59:30.082540Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409549 2024-11-21T08:59:30.082826Z node 155 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T08:59:30.083113Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T08:59:30.083147Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T08:59:30.083426Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths 
Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:30.083432Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:30.083442Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:30.083686Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T08:59:30.083693Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T08:59:30.083734Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T08:59:30.083737Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T08:59:30.083959Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T08:59:30.083966Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T08:59:30.083975Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:59:30.084019Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:59:30.084023Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:59:30.084059Z node 155 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:59:30.084070Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:59:30.084074Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [155:839:2772] TestWaitNotification: OK eventTxId 1005 2024-11-21T08:59:30.084116Z node 155 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:30.084139Z node 155 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 27us result status StatusPathDoesNotExist 2024-11-21T08:59:30.084166Z node 155 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
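Note: the drop above is reference-counted. Every "DecrementPathDbRefCount reason ... was N" line releases one holder of LocalPathId 4 (the table record, the tx state, then each freed shard), and only after the count reaches zero does TTxCleanDroppedPaths persist the removal, after which the describe call returns StatusPathDoesNotExist. Below is a minimal sketch of that bookkeeping, assuming nothing beyond what the log prints; the types and the starting value are illustrative, not YDB's.

```cpp
#include <cstdio>
#include <string>
#include <vector>

// One dropped path; Refs stands in for the counter behind the
// "DecrementPathDbRefCount reason ... was N" lines.
struct TDroppedPath {
    int Refs = 0;
    bool Removed = false;
};

// Release one reference; once the last holder lets go, the path can be
// physically removed (the "TTxCleanDroppedPaths: PersistRemovePath" step).
static void Decrement(TDroppedPath& path, const std::string& reason) {
    std::printf("DecrementPathDbRefCount reason %s was %d\n", reason.c_str(), path.Refs);
    if (--path.Refs == 0) {
        path.Removed = true;
        std::printf("TTxCleanDroppedPaths: PersistRemovePath\n");
    }
}

int main() {
    TDroppedPath path;
    path.Refs = 5; // "was 5" is the first value the log shows for LocalPathId 4
    const std::vector<std::string> reasons = {
        "remove table", "remove txstate target path",
        "shard deleted", "shard deleted", "shard deleted",
    };
    for (const auto& reason : reasons) {
        Decrement(path, reason);
    }
    return path.Removed ? 0 : 1;
}
```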
wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2024-11-21T08:59:30.084228Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:30.084236Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:30.084241Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:30.084246Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T08:59:30.084250Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T08:59:30.084256Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T08:59:30.084261Z node 155 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> KqpOlap::ManyColumnShards [GOOD] >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteOwnerTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShards [GOOD] Test command err: 2024-11-21T08:57:30.682050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T08:57:30.684185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:57:30.684285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:57:30.684314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:30.684561Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:57:30.684569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00486f/r3tmp/tmp0cMdLh/pdisk_1.dat 2024-11-21T08:57:30.758163Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:30.837820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:57:30.925179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.925224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.926409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.926433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.937660Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:57:30.937812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.937893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:31.335725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:57:31.560826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:31.560881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:31.560938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:31.560959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2024-11-21T08:57:31.560976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:31.560995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:31.561012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:31.561033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:31.561051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:31.561073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:31.561096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:31.561114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1494:2987];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:31.574772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:31.574807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:31.574855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:31.574877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:31.574897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:31.574916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
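Note: each column shard in this test prints the same normalizer chain on TTxInitSchema, in the same order (Granules, Chunks, TablesCleaner, ..., RestoreV2Chunks). The following is a minimal sketch of a registration loop that would emit such lines; the list is copied from the output above, and this is an illustrative model, not the real TTxInitSchema code.

```cpp
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// Order copied from the log; every column shard registers the same chain on init.
static const std::vector<std::string> kNormalizers = {
    "Granules", "Chunks", "TablesCleaner", "CleanGranuleId", "CleanInsertionDedup",
    "GCCountersNormalizer", "RestorePortionFromChunks", "SyncPortionFromChunks",
    "SyncMinSnapshotFromChunks", "RestoreV1Chunks_V2", "RestoreV2Chunks",
};

// Emit one "normalizer_register" line per entry, as seen in the log for each tablet.
static void RegisterNormalizers(uint64_t tabletId) {
    for (const auto& name : kNormalizers) {
        std::printf("tablet_id=%llu;event=normalizer_register;description=CLASS_NAME=%s;\n",
                    static_cast<unsigned long long>(tabletId), name.c_str());
    }
}

int main() {
    for (uint64_t tablet : {72075186224037889ULL, 72075186224037895ULL, 72075186224037919ULL}) {
        RegisterNormalizers(tablet);
    }
    return 0;
}
```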
2024-11-21T08:57:31.574934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:31.574953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:31.574974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:31.574990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:31.575012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:31.575031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1497:2989];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:31.588991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:31.589027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:31.589078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:31.589101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:31.589121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:31.589142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:31.589159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:31.589178Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:31.589197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:31.589216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:31.589234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:31.589252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:1499:2991];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normaliz ... - nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- 
nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- 
nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- 
nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 2024-11-21T08:59:26.637876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6z54np1ypqprm7qrjjxw25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgxNTUyMWQtOWQ2ZDIyNWEtMTgzYzU2ZDItYWRlYjBmOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:59:30.005367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6z54np1ypqprm7qrjjxw25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgxNTUyMWQtOWQ2ZDIyNWEtMTgzYzU2ZDItYWRlYjBmOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"type_id":4}}],"rows":[{"items":[{"uint64_value":100000}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T08:59:30.018060Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 8500, txId: 18446744073709551615] shutting down >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> Balancing::Balancing_OneTopic_TopicApi >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> THiveTest::TestSkipBadNode [GOOD] >> TScaleRecommenderTest::BasicTest >> TTablesWithReboots::SimpleDropTableWithReboots [GOOD] >> KqpOlapClickbench::ClickBenchSmoke [GOOD] >> TTablesWithReboots::CopyTableWithReboots [GOOD] >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionStealLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapClickbench::ClickBenchSmoke [GOOD] Test command err: Trying to start YDB, gRPC: 9842, MsgBus: 10687 2024-11-21T08:57:30.194745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654090816976398:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.194899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00486d/r3tmp/tmpwOtH5x/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9842, node 1 2024-11-21T08:57:30.242621Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:30.246487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.246499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.246501Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.246533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10687 TClient is connected to server localhost:10687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
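Note: the "WaitRootIsUp 'Root'... success" lines above come from the test harness polling the scheme root with TClient::Ls until the root path reports as created. A minimal sketch of such a poll loop follows; the TryLs function and TRootState struct are hypothetical stand-ins introduced only for illustration, not the real TClient API.

```cpp
#include <chrono>
#include <cstdio>
#include <thread>

// Hypothetical, simplified view of an Ls reply: only the bits the wait loop needs.
struct TRootState {
    bool Connected = false;      // could the client reach the scheme shard at all?
    bool CreateFinished = false; // is the root path ("Root") fully created?
};

// Stand-in for TClient::Ls("/Root"); in the real test this is an RPC.
static TRootState TryLs(int attempt) {
    // Pretend the root becomes visible on the third attempt.
    TRootState state;
    state.Connected = attempt >= 3;
    state.CreateFinished = attempt >= 3;
    return state;
}

// Poll until the root is up, mirroring "WaitRootIsUp 'Root'... success".
static bool WaitRootIsUp(const char* root, int maxAttempts) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        std::printf("WaitRootIsUp '%s'...\n", root);
        TRootState state = TryLs(attempt);
        if (state.Connected && state.CreateFinished) {
            std::printf("WaitRootIsUp '%s' success.\n", root);
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() {
    return WaitRootIsUp("Root", 10) ? 0 : 1;
}
```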
2024-11-21T08:57:30.285391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.289646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.296177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.296198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.297337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.304183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.304256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.304291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.304311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.304326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.304341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.304357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.304374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.304386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.304401Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.304417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.304435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654090816977056:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.306531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.306547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.306565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.306579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.306596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.306610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.306623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.306637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.306651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.306664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.306678Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.306691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654090816977057:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.308663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.308679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.308694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.308707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.308720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.308733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.308745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.308756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.308767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654090816977058 ... 
g;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T08:57:42.461594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=aac88e24-a7e611ef-aa9d0f52-a9a16d2e;fline=with_appended.cpp:80;portions=9,;task_id=aac88e24-a7e611ef-aa9d0f52-a9a16d2e; 2024-11-21T08:57:42.526485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=aad524cc-a7e611ef-9dbc545b-39ca255b;fline=with_appended.cpp:80;portions=9,;task_id=aad524cc-a7e611ef-9dbc545b-39ca255b; 2024-11-21T08:57:42.599557Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=aade5df8-a7e611ef-8ae5b1db-1341416f;fline=with_appended.cpp:80;portions=9,;task_id=aade5df8-a7e611ef-8ae5b1db-1341416f; 2024-11-21T08:57:42.682359Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=aaebe356-a7e611ef-88e2c133-19130047;fline=with_appended.cpp:80;portions=9,;task_id=aaebe356-a7e611ef-88e2c133-19130047; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T08:57:42.775925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=aafa5544-a7e611ef-9a858c5b-133395ad;fline=with_appended.cpp:80;portions=11,;task_id=aafa5544-a7e611ef-9a858c5b-133395ad; 2024-11-21T08:57:42.856462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ab043370-a7e611ef-b0ea2cb7-5113606;fline=with_appended.cpp:80;portions=11,;task_id=ab043370-a7e611ef-b0ea2cb7-5113606; 2024-11-21T08:57:42.937189Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ab0f54a8-a7e611ef-86b30ebd-f116e726;fline=with_appended.cpp:80;portions=11,;task_id=ab0f54a8-a7e611ef-86b30ebd-f116e726; 2024-11-21T08:57:43.007998Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=ab1bfaa0-a7e611ef-b965f1e8-2dcb4c54;fline=with_appended.cpp:80;portions=11,;task_id=ab1bfaa0-a7e611ef-b965f1e8-2dcb4c54; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T08:57:43.101973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ab2a46fa-a7e611ef-9fd309db-df2a6472;fline=with_appended.cpp:80;portions=13,;task_id=ab2a46fa-a7e611ef-9fd309db-df2a6472; 2024-11-21T08:57:43.190034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ab368f3c-a7e611ef-a1c5592f-6551a062;fline=with_appended.cpp:80;portions=13,;task_id=ab368f3c-a7e611ef-a1c5592f-6551a062; 2024-11-21T08:57:43.260726Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ab42df76-a7e611ef-b99a6575-d3a4158a;fline=with_appended.cpp:80;portions=13,;task_id=ab42df76-a7e611ef-b99a6575-d3a4158a; 2024-11-21T08:57:43.344589Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=ab4dab04-a7e611ef-ad92a6b9-55b206b4;fline=with_appended.cpp:80;portions=13,;task_id=ab4dab04-a7e611ef-ad92a6b9-55b206b4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T08:57:43.439418Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;task_id=ab5c0848-a7e611ef-80734905-f7c1d9bf;fline=with_appended.cpp:80;portions=15,;task_id=ab5c0848-a7e611ef-80734905-f7c1d9bf; 2024-11-21T08:57:43.508073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ab69767c-a7e611ef-85a0c199-b323b99a;fline=with_appended.cpp:80;portions=15,;task_id=ab69767c-a7e611ef-85a0c199-b323b99a; 2024-11-21T08:57:43.573421Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ab744b7e-a7e611ef-aeb272a6-830827fb;fline=with_appended.cpp:80;portions=15,;task_id=ab744b7e-a7e611ef-aeb272a6-830827fb; 2024-11-21T08:57:43.672732Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=ab810b7a-a7e611ef-b5a617c7-139da805;fline=with_appended.cpp:80;portions=15,;task_id=ab810b7a-a7e611ef-b5a617c7-139da805; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1181808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1181808;columns=105; 2024-11-21T08:57:43.800512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ab8f7d0e-a7e611ef-b3ad6c15-bb8e6cff;fline=with_appended.cpp:80;portions=17,;task_id=ab8f7d0e-a7e611ef-b3ad6c15-bb8e6cff; 2024-11-21T08:57:43.903240Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=ab99fd9c-a7e611ef-a806b614-b3d22adc;fline=with_appended.cpp:80;portions=17,;task_id=ab99fd9c-a7e611ef-a806b614-b3d22adc; 2024-11-21T08:57:43.977768Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=aba3f16c-a7e611ef-954f8263-c1986f41;fline=with_appended.cpp:80;portions=17,;task_id=aba3f16c-a7e611ef-954f8263-c1986f41; 2024-11-21T08:57:44.067381Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=abb31ff2-a7e611ef-bea9407d-4150ef02;fline=with_appended.cpp:80;portions=17,;task_id=abb31ff2-a7e611ef-bea9407d-4150ef02; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AdvEngineID, COUNT(*) as c FROM `/Root/benchTable` WHERE AdvEngineID != 0 GROUP BY AdvEngineID ORDER BY c DESC 2024-11-21T08:57:44.116080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2202:3323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.116118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2214:3328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.122643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:44.126613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T08:57:44.143286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=abc69b72-a7e611ef-bde1f74b-11260bb7;fline=with_appended.cpp:80;portions=19,;task_id=abc69b72-a7e611ef-bde1f74b-11260bb7; 2024-11-21T08:57:44.165330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=abd643d8-a7e611ef-a8e100fe-b5ae82e3;fline=with_appended.cpp:80;portions=19,;task_id=abd643d8-a7e611ef-a8e100fe-b5ae82e3; 2024-11-21T08:57:44.188122Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=abe1a3ea-a7e611ef-83b40de4-8dcf697c;fline=with_appended.cpp:80;portions=19,;task_id=abe1a3ea-a7e611ef-83b40de4-8dcf697c; 2024-11-21T08:57:44.220827Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=abef4efa-a7e611ef-bd368062-b33b2b6b;fline=with_appended.cpp:80;portions=19,;task_id=abef4efa-a7e611ef-bd368062-b33b2b6b; 2024-11-21T08:57:44.443195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2223:3334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T08:57:44.719363Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd6z23w6btpe8njy3bbdcjev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODg0YjE0Yi1lYmM0ZWFjOS0yYjA4YzcxYi03NzBjNjZmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, avg(ResolutionWidth), COUNT(DISTINCT UserID) FROM `/Root/benchTable` GROUP BY RegionID ORDER BY c DESC LIMIT 10 2024-11-21T08:57:56.815507Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd6z2g6n2xajc21wj4njb1m7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDJkMDI1Y2QtYzI2MTYzMWMtNDBhYjQ4N2UtYTJiYjE2Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T08:58:06.683595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:06.683628Z node 2 :IMPORT WARN: Table profiles were not loaded REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T08:58:13.457095Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd6z30ge3f1ssx84z78587nr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWRkYTAxYmEtNmE4MzRiZS0xYjIwYmMyNC1jNjIyNzA0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchEngineID, SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T08:58:28.986501Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd6z3fp3b2tgzdzkmftqm1bd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGU3YThkMWMtMjYwMmFmZWItYjg3YWIyYzctNGNkZjg0YjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, MIN(URL), MIN(Title), COUNT(*) AS c, COUNT(DISTINCT UserID) FROM `/Root/benchTable` WHERE Title LIKE '%Google%' AND URL NOT LIKE '%.google.%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T08:58:43.797250Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd6z3y32371acwrg4zvkb9zj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDU4NWRjNGYtNjQwYWUyNTgtOTFiMTNjNzItNzMxN2VhYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst, COUNT(*) AS PageViews FROM `/Root/benchTable` WHERE CounterID = 62 AND EventDate >= Date('2013-07-01') AND EventDate <= Date('2013-07-31') AND IsRefresh == 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, IF (SearchEngineID = 0 AND AdvEngineID = 0, Referer, '') AS Src, URL AS Dst ORDER BY PageViews DESC LIMIT 10; 2024-11-21T08:59:04.627424Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jd6z4je690bqhsk2qq97qpkk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzg5MzE5NWQtNTM2ZGI3N2MtNDQyMDE2NzUtZGI3MzAwYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:13.088474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:13.088490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:13.088493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:13.088496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:13.088509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:13.088511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:13.088518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:13.088579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:13.096100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:13.096112Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:13.097594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:13.097663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:13.097680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:13.099282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:13.099337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:13.099412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:59:13.099515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:13.100009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:13.100184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:13.100190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:13.100198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:13.100220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:13.100227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:13.100254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:13.101245Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:13.111865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:13.111923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.111967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:13.112009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:13.112014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.112517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:13.112535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:13.112567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.112576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:13.112579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2024-11-21T08:59:13.112582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:13.112862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.112869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:13.112871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:13.113083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.113088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.113092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:13.113096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:13.113517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:13.113810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:13.113845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:13.113967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:13.113986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:13.113991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:13.114024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:13.114029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:13.114047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:13.114056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:13.114370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:13.114379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:13.114410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:13.114415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:13.114491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:13.114499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:13.114509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:13.114513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:13.114518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:13.114523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:13.114528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:13.114532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:13.114542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:13.114547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:13.114551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031272Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031280Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:59:32.031284Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:59:32.031287Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T08:59:32.031293Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:59:32.031306Z node 79 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2024-11-21T08:59:32.031316Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:32.031324Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:32.031352Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031395Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 
72057594046678944 2024-11-21T08:59:32.031681Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031692Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:32.031711Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:32.031728Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031731Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [79:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T08:59:32.031734Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [79:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T08:59:32.031784Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031789Z node 79 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:32.031799Z node 79 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031802Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:32.031805Z node 79 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T08:59:32.031889Z node 79 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.031896Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.031899Z node 79 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:59:32.031902Z node 79 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:59:32.031904Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:32.032021Z node 79 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.032028Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.032031Z node 79 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T08:59:32.032037Z node 79 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:32.032039Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:32.032045Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T08:59:32.032598Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:32.032609Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:32.032650Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:32.032675Z node 79 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:59:32.032677Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:32.032681Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:59:32.032683Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:32.032686Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:59:32.032688Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:59:32.032697Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T08:59:32.032807Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.033007Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T08:59:32.033630Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 339302418699 } TabletId: 72075186233409546 State: 4 2024-11-21T08:59:32.033643Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:32.033845Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:32.033894Z node 79 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T08:59:32.034138Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:32.034175Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T08:59:32.034221Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:32.034224Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:59:32.034230Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:32.034597Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:32.034609Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:32.034645Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T08:59:32.034668Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:59:32.034671Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T08:59:32.034700Z node 79 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:59:32.034710Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:59:32.034713Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [79:474:2449] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted 2024-11-21T08:59:32.034742Z node 79 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2024-11-21T08:59:32.034778Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:32.034794Z node 79 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 24us result status StatusPathDoesNotExist 2024-11-21T08:59:32.034816Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_39 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:52.542849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:52.542869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.542874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:52.542878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:52.542890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:52.542894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:52.542902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.542965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:52.551028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:52.551043Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.552932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:52.553022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:52.553040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:52.554929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:52.555064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:52.555125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.555148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.555550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.555719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.555725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.555742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:52.555746Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.555751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:52.555762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:52.556649Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.568084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:52.568138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.568175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:52.568229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:52.568234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.568938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.568958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:52.568985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.568992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:52.568995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:52.568998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:52.569262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.569271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:52.569274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:52.569473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.569477Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.569481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.569485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.569895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:52.570202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:52.570235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:52.570349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.570366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:52.570370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.570401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:52.570405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.570426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:52.570434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:52.570723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.570728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.570784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.570788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T08:58:52.570836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.570841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T08:58:52.570850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:52.570853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.570856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:52.570859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.570862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:52.570865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:52.570873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:52.570877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:52.570879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... efCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:32.106453Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:32.106458Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:32.106503Z node 162 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106516Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:32.106518Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [162:834:2751] TestWaitNotification: OK eventTxId 1004 2024-11-21T08:59:32.106565Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106589Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot" took 34us result status StatusSuccess 2024-11-21T08:59:32.106657Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "NewTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "NewTable2" 
PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106707Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106725Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Table" took 19us result status StatusSuccess 2024-11-21T08:59:32.106790Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106827Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106836Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/NewTable1" took 11us result status StatusSuccess 2024-11-21T08:59:32.106867Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable1" PathDescription { Self { Name: "NewTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106892Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:32.106901Z node 162 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "MyRoot/NewTable2" took 10us result status StatusSuccess 2024-11-21T08:59:32.106929Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable2" PathDescription { Self { Name: "NewTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_40 >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TxUsage::WriteToTopic_Demo_23_RestartNo [GOOD] >> 
THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDeleteTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] Test command err: 2024-11-21T08:59:26.489311Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:26.489904Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:26.489967Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:26.490083Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:26.490242Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:26.490250Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:26.490371Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T08:59:26.490374Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:26.490399Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:26.490449Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:26.492054Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:26.492068Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:26.492306Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492333Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492350Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492368Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492385Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492401Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492420Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.492423Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# 
DSP03 2024-11-21T08:59:26.492431Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T08:59:26.492435Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T08:59:26.492439Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:26.492443Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:26.492524Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:26.495171Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:26.495189Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:26.495194Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:26.495453Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:26.495491Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:26.495524Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:26.495528Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:26.495534Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:26.495955Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:26.496019Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:26.496024Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:26.496388Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:26.496403Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:26.496406Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:26.496426Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T08:59:26.496437Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:26.496461Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T08:59:26.496465Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T08:59:26.496569Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:26.496609Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:26.496614Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:26.496616Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:26.496620Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:26.496643Z node 1 
:TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:26.496648Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:26.496666Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T08:59:26.496668Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T08:59:26.496671Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:26.496704Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:26.496726Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:26.496736Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:59:26.497080Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T08:59:26.497192Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:26.497206Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:26.497209Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T08:59:26.497894Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T08:59:26.497900Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T08:59:26.497913Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:26.498085Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:26.498142Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:26.498157Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T08:59:26.498160Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T08:59:26.498163Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:26.498200Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T08:59:26.498211Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 
0} 2024-11-21T08:59:26.498214Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T08:59:26.498222Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T08:59:26.498225Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:26.498232Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:26.498245Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:26.498253Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:26.498256Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T08:59:26.498262Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T08:59:26.498265Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T08:59:26.498273Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T08:59:26.498291Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 7] 2024-11-21T08:59:34.756972Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [20:678:2477] 2024-11-21T08:59:34.756974Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2024-11-21T08:59:34.756985Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T08:59:34.756996Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:34.757007Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:34.757024Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T08:59:34.757028Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T08:59:34.757031Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T08:59:34.757037Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T08:59:34.757049Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T08:59:34.757058Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [20:639:2448] CurrentLeaderTablet: 
[20:641:2449] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T08:59:34.757060Z node 20 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2024-11-21T08:59:34.757064Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [20:639:2448] 2024-11-21T08:59:34.757070Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [20:678:2477] 2024-11-21T08:59:34.757073Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2024-11-21T08:59:34.757079Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [21:682:2142] 2024-11-21T08:59:34.757082Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [21:682:2142] 2024-11-21T08:59:34.757089Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046678944 2024-11-21T08:59:34.757091Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/tenant1 to schemeshard 72057594046678944 2024-11-21T08:59:34.757100Z node 20 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [20:678:2477] 2024-11-21T08:59:34.757108Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [21:682:2142] 2024-11-21T08:59:34.757110Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [21:682:2142] 2024-11-21T08:59:34.757116Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 2024-11-21T08:59:34.757121Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [20:678:2477] 2024-11-21T08:59:34.757124Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [20:678:2477] 2024-11-21T08:59:34.757143Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [21:681:2142] EventType# 271122945 2024-11-21T08:59:34.757158Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2024-11-21T08:59:34.757161Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:34.757197Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:59:34.757201Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:34.757280Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046678944: Status: StatusSuccess Path: "/dc-1/tenant1" PathDescription { Self { Name: "tenant1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: false CreateTxId: 101 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 18446744073709551615 PathId: 18446744073709551615 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 2024-11-21T08:59:34.757293Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/tenant1 to hive 72057594037927937 (allocated resources: ) 2024-11-21T08:59:34.757340Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:34.757343Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:34.757351Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[21:688:2143] 2024-11-21T08:59:34.757386Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [21:688:2143] 2024-11-21T08:59:34.757389Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [21:688:2143] 2024-11-21T08:59:34.757401Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:34.757405Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2024-11-21T08:59:34.757433Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [21:688:2143] 2024-11-21T08:59:34.757450Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 20 [21:688:2143] 2024-11-21T08:59:34.757477Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [21:688:2143] 2024-11-21T08:59:34.757480Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T08:59:34.757532Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [21:688:2143] 2024-11-21T08:59:34.757603Z node 20 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([21:688:2143]) [20:695:2480] 2024-11-21T08:59:34.757644Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [21:688:2143] 2024-11-21T08:59:34.757647Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [21:688:2143] 2024-11-21T08:59:34.757649Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [21:688:2143] 2024-11-21T08:59:34.757657Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T08:59:34.757665Z node 21 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594037927937 Status=OK ClientId=[21:688:2143] 2024-11-21T08:59:34.757708Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [21:686:2143] EventType# 268959744 2024-11-21T08:59:34.757732Z node 20 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvRegisterNode from [21:686:2143] HiveId: 72057594037927937 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T08:59:34.757740Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T08:59:34.757745Z node 20 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:34.757748Z node 20 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxRegisterNode(21)::Execute 2024-11-21T08:59:34.757769Z node 20 :HIVE WARN: HIVE#72057594037927937 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:34.757775Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{24, redo 152b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T08:59:34.757779Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:34.757809Z node 20 :HIVE DEBUG: HIVE#72057594037927937 TEvInterconnect::TEvNodeInfo NodeId 21 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" 2024-11-21T08:59:34.757845Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [20:697:2482] 2024-11-21T08:59:34.757848Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [20:697:2482] 2024-11-21T08:59:34.757854Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:34.757859Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2024-11-21T08:59:34.757862Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [20:697:2482] 2024-11-21T08:59:34.757868Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [20:697:2482] 2024-11-21T08:59:34.757872Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [20:697:2482] 2024-11-21T08:59:34.757877Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [20:697:2482] 2024-11-21T08:59:34.757885Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [20:697:2482] 2024-11-21T08:59:34.757895Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [20:697:2482] 2024-11-21T08:59:34.757897Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [20:697:2482] 2024-11-21T08:59:34.757900Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [20:697:2482] 2024-11-21T08:59:34.757902Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [20:697:2482] 2024-11-21T08:59:34.757904Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [20:697:2482] 2024-11-21T08:59:34.757908Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [20:696:2481] EventType# 268697616 2024-11-21T08:59:34.757915Z node 20 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([20:697:2482]) [20:698:2483] >> TxUsage::WriteToTopic_Demo_29 [GOOD] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TxUsage::WriteToTopic_Demo_30 >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN 
stats-`.metadata/script_executions`] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestFollowers >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowerPromotion >> TxUsage::WriteToTopic_Demo_9 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> TxUsage::WriteToTopic_Demo_42 >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> TTablesWithReboots::DropCopyWithRebootsAtCommit [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 >> TxUsage::WriteToTopic_Demo_11 [GOOD] >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> TxUsage::WriteToTopic_Demo_12 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropCopyWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:52.409081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:52.409097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.409101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:52.409103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:52.409111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:52.409113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:52.409119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.409174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:52.417104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:52.417122Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.418673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:52.418741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:52.418763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:52.420310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:52.420353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:52.420422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.420553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.421092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.421322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.421330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.421338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:52.421343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.421347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:52.421373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T08:58:52.422300Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.436199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:52.436278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.436331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:52.436376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:52.436382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:52.437223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:52.437234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:52.437237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:52.437551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:52.437773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.437781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.437785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.438263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:52.438626Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:52.438677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:52.438869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.438897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:52.438905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.438959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:52.438966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.438991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:52.439004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.439442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.439455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.439486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.439492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:52.439578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.439587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:52.439598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:52.439602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.439607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:52.439612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.439616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:52.439620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:52.439631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:52.439637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:52.439641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 6678944, txId: 1005, path id: 1 2024-11-21T08:59:39.234692Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [194:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:59:39.234750Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:39.234756Z node 194 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:39.234766Z node 194 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:39.234769Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:39.234773Z node 194 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T08:59:39.234880Z node 194 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.234887Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.234889Z node 194 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:39.234892Z node 194 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:39.234899Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:39.234984Z node 194 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.234991Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.234994Z node 194 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:39.234996Z node 194 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:39.234998Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:39.235003Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:59:39.235405Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:39.235413Z node 194 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:39.235469Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:39.235489Z node 194 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:59:39.235492Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:39.235495Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T08:59:39.235498Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:39.235501Z node 194 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:59:39.235503Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:59:39.235517Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:59:39.235897Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.235953Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:39.236983Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 833223657828 } TabletId: 72075186233409547 State: 4 2024-11-21T08:59:39.237021Z node 194 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:39.237287Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:39.237343Z node 194 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:59:39.237716Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:59:39.237765Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:59:39.237845Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:39.237849Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:39.237856Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:39.238353Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:59:39.238362Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 
72075186233409547 2024-11-21T08:59:39.238426Z node 194 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T08:59:39.238455Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:39.238459Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T08:59:39.238467Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:59:39.238469Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:59:39.238514Z node 194 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:39.238525Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:39.238528Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [194:667:2631] 2024-11-21T08:59:39.238534Z node 194 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:59:39.238541Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:59:39.238543Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [194:667:2631] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T08:59:39.238585Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:39.238593Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:39.238599Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:39.238603Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T08:59:39.238650Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NewTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:39.238670Z node 194 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NewTable" took 27us result status StatusPathDoesNotExist 2024-11-21T08:59:39.238698Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/NewTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/NewTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 
LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T08:59:39.238742Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:39.238750Z node 194 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 23us result status StatusPathDoesNotExist 2024-11-21T08:59:39.238760Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> KqpOlapSparsed::SwitchingMultiColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] Test command err: 2024-11-21T08:59:30.621840Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:30.622502Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:30.622578Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:30.622731Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:30.622910Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:30.622920Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:30.623046Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe 
AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T08:59:30.623049Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:30.623082Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:30.623148Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:30.624915Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:30.624932Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:30.625212Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625246Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625274Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625303Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625330Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625356Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625386Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:30.625389Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:30.625398Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T08:59:30.625401Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T08:59:30.625406Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:30.625411Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:30.625514Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:30.628197Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:30.628231Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:30.628238Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:30.628493Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:30.628533Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:30.628566Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:30.628572Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:30.628578Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:30.629032Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:30.629109Z node 1 
:BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:30.629113Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:30.629479Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:30.629494Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:30.629497Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:30.629524Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T08:59:30.629549Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:30.629598Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T08:59:30.629603Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T08:59:30.629718Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:30.629764Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:30.629769Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:30.629772Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:30.629775Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:30.629803Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:30.629808Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:30.629827Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T08:59:30.629829Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T08:59:30.629832Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:30.629868Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:30.629894Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:30.629910Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:59:30.630265Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T08:59:30.630377Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 
2024-11-21T08:59:30.630394Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:30.630400Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T08:59:30.631192Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T08:59:30.631205Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T08:59:30.631222Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:30.631411Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:30.631488Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:30.631507Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T08:59:30.631511Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T08:59:30.631514Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:30.631553Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T08:59:30.631567Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T08:59:30.631570Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T08:59:30.631578Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T08:59:30.631581Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:30.631589Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:30.631605Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:30.631613Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:30.631617Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T08:59:30.631624Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T08:59:30.631627Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T08:59:30.631634Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T08:59:30.631653Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
4b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:39.747492Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute Owner: 72057594037927937 OwnerIdx: 0 TabletType: Dummy TabletBootMode: TABLET_BOOT_MODE_EXTERNAL BindedChannels { StoragePoolName: "def1" } BindedChannels { StoragePoolName: "def2" } BindedChannels { StoragePoolName: "def3" } 2024-11-21T08:59:39.747499Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 State: ReadyToWork 2024-11-21T08:59:39.747504Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Tablet(Dummy.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 18) 2024-11-21T08:59:39.747511Z node 18 :HIVE TRACE: HIVE#72057594037927937 Node(18, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T08:59:39.747523Z node 18 :HIVE TRACE: HIVE#72057594037927937 UpdateTotalResources: ObjectId (72057594037927937,0): {Memory: 1048576} -> {} 2024-11-21T08:59:39.747528Z node 18 :HIVE TRACE: HIVE#72057594037927937 UpdateTotalResources: Type Dummy: {Memory: 1048576} -> {} 2024-11-21T08:59:39.747534Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Sending TEvStopTablet(Dummy.72075186224037888.Leader.1 gen 1) to node 18 2024-11-21T08:59:39.747565Z node 18 :HIVE DEBUG: HIVE#72057594037927937 CreateTabletFollowers Tablet Dummy.72075186224037888.Leader.1 2024-11-21T08:59:39.747569Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute Existing tablet Dummy.72075186224037888.Leader.1 has been successfully updated 2024-11-21T08:59:39.747571Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T08:59:39.747580Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{10, redo 442b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T08:59:39.747584Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:39.757833Z node 18 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] bootstrap ActorId# [18:387:2349] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:8:0:0:230:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T08:59:39.757864Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Id# [72057594037927937:2:8:0:0:230:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:59:39.757868Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] restore Id# [72057594037927937:2:8:0:0:230:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T08:59:39.757874Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG33 2024-11-21T08:59:39.757877Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG32 2024-11-21T08:59:39.757897Z node 18 :BS_PROXY DEBUG: Send to queueActorId# [18:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:8:0:0:230:1] FDS# 230 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:59:39.758215Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] received {EvVPutResult Status# OK ID# [72057594037927937:2:8:0:0:230:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 22 } Cost# 81811 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 23 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:59:39.758229Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T08:59:39.758234Z node 18 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T08:59:39.758256Z node 18 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T08:59:39.758272Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2024-11-21T08:59:39.758285Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080003 [18:49:2090],0x10040207 [18:383:2345],0x10040201 [18:383:2345]} 2024-11-21T08:59:39.758312Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvStopTablet TabletId:(72075186224037888,0) Generation:1 2024-11-21T08:59:39.758322Z node 18 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [18:49:2090], reason = ReasonStop Marker# TSYS29 2024-11-21T08:59:39.758326Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2024-11-21T08:59:39.758360Z node 18 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2024-11-21T08:59:39.758362Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2024-11-21T08:59:39.758385Z node 18 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2024-11-21T08:59:39.758472Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvTablet::TEvTabletDead tabletId:72075186224037888 generation:1 reason:33 2024-11-21T08:59:39.758484Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [18:380:2343] 2024-11-21T08:59:39.758487Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [18:380:2343] 2024-11-21T08:59:39.758504Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [18:50:2090] 2024-11-21T08:59:39.758507Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [18:50:2090] 2024-11-21T08:59:39.758512Z node 18 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [18:49:2090] EventType# 268960257 2024-11-21T08:59:39.758530Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T08:59:39.758539Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2024-11-21T08:59:39.758543Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:39.758552Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Execute for tablet Dummy.72075186224037888.Leader.1 status 5 reason ReasonPill generation 1 follower 0 from local [18:49:2090] 2024-11-21T08:59:39.758558Z node 18 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 
2024-11-21T08:59:39.758560Z node 18 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T08:59:39.758568Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:59:39.758572Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:39.758577Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080004 [18:49:2090]} 2024-11-21T08:59:39.758585Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvDeadTabletAck TabletId:(72075186224037888,0) 2024-11-21T08:59:39.758606Z node 18 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T08:59:39.758611Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T08:59:39.758613Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:39.758616Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:59:39.758619Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T08:59:39.758623Z node 18 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:59:39.758626Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:59:39.758628Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:39.758631Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:59:39.758668Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [18:389:2351] 2024-11-21T08:59:39.758671Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [18:389:2351] 2024-11-21T08:59:39.758682Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:39.758688Z node 18 :TABLET_RESOLVER DEBUG: SelectForward node 18 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [18:312:2292] 2024-11-21T08:59:39.758694Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [18:389:2351] 2024-11-21T08:59:39.758697Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [18:389:2351] 2024-11-21T08:59:39.758705Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [18:389:2351] 2024-11-21T08:59:39.758708Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [18:389:2351] 2024-11-21T08:59:39.758713Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T08:59:39.758767Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:39.758781Z 
node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T08:59:39.758787Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T08:59:39.758791Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T08:59:39.758798Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T08:59:39.758804Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T08:59:39.758813Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2024-11-21T08:59:39.758825Z node 18 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 >> TxUsage::WriteToTopic_Demo_40 [GOOD] >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingMultiColumn [GOOD] Test command err: Trying to start YDB, gRPC: 2591, MsgBus: 24344 2024-11-21T08:57:30.702966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654092481211425:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:30.703156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00486b/r3tmp/tmp4F7bw5/pdisk_1.dat 2024-11-21T08:57:30.751502Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2591, node 1 2024-11-21T08:57:30.756541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:30.756551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:30.756552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:30.756580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24344 TClient is connected to server localhost:24344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:30.799144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:30.803971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:30.803993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:30.805112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:30.813416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:30.822591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.822654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.822687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:30.822703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:30.822717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:30.822731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:30.822749Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:30.822764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:30.822781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:30.822796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.822810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:30.822828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:30.823164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:30.823174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:30.823181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:30.823188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:30.823197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:30.823203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:30.823208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:30.823214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:30.823223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:30.823229Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:30.823232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:30.823236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:30.823277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:30.823284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:30.823295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:30.823301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:30.823309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:30.823316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:30.823325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:30.823331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:30.823338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:30.823341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:30.824975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:30.824990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:30.825006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];tablet_id=720751862240 ... 
tion;reason=disabled; 2024-11-21T08:59:32.413134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.413344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.413368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.428447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.428495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.477690Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179570716, txId: 18446744073709551615] shutting down 2024-11-21T08:59:32.513446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.513446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.513496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.513502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.528833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.528900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; Timing: Executing query took 2 seconds 2024-11-21T08:59:32.613792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.613792Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.613836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.613842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.629145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.629206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.714163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.714208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.714215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.714254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.729566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.729649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.814509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.814516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 
2024-11-21T08:59:32.814554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.814562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.829868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.829932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.914853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.914853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.914898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.914899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.930207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; 2024-11-21T08:59:32.930255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:785;event=skip_compaction;reason=disabled; Timing: checkTable took 3 seconds WAIT_COMPACTION: 9 2024-11-21T08:59:33.015588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654092481212116:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:33.015679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654092481212082:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:33.400115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654092481212083:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T08:59:33.452075Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;task_id=ecdf6058-a7e611ef-9be0394e-fdc2052c;fline=with_appended.cpp:80;portions=9,;task_id=ecdf6058-a7e611ef-9be0394e-fdc2052c; 2024-11-21T08:59:33.461452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=ecdf615c-a7e611ef-ba3f13f0-e7135dc4;fline=with_appended.cpp:80;portions=9,;task_id=ecdf615c-a7e611ef-ba3f13f0-e7135dc4; 2024-11-21T08:59:33.796346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=ed1a0e9c-a7e611ef-9166eead-f5678529;fline=with_appended.cpp:80;portions=9,;task_id=ed1a0e9c-a7e611ef-9166eead-f5678529; WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T08:59:39.115709Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179579000, txId: 18446744073709551615] shutting down count: 14000 2024-11-21T08:59:39.476069Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179579088, txId: 18446744073709551615] shutting down Timing: Executing query took 0 seconds Timing: checkTable took 1 seconds Timing: wait took 0 seconds Timing: Fill took 18 seconds >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] >> TxUsage::WriteToTopic_Demo_41 >> TxUsage::WriteToTopic_Demo_20_RestartNo >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] >> TxUsage::WriteToTopic_Demo_30 [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> TxUsage::WriteToTopic_Demo_31 >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:49.465096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:49.465113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.465116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:49.465119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:49.465129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:49.465131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:49.465136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:49.465192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:49.472466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:49.472482Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.474356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:49.474423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:49.474450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:49.476578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:49.476643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:49.476743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.476904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.477554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.477853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.477862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.477874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:49.477879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.477885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:49.477925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:49.479201Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:49.490403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:49.490464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.490512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:49.490552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:49.490557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:49.491133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:49.491142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:49.491145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:49.491390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:49.491601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.491609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.491613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.491974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:49.492180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:49.492235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:49.492371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:49.492387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:49.492392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.492440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:49.492444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:49.492466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:49.492476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:49.492904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:49.492912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:49.492944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:49.492947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:49.493004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:49.493009Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:49.493017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:49.493020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.493023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:49.493026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:49.493029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:49.493032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:49.493039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:49.493043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:49.493045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
5186233409549, at schemeshard: 72057594046678944 2024-11-21T08:59:41.049984Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:41.049987Z node 212 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T08:59:41.050347Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.050364Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.050371Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.050377Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.050422Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.050428Z node 212 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2024-11-21T08:59:41.050433Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:59:41.050436Z node 212 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T08:59:41.050442Z node 212 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T08:59:41.050444Z node 212 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T08:59:41.050992Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T08:59:41.051003Z node 212 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T08:59:41.051012Z node 212 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T08:59:41.051015Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:41.051018Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T08:59:41.051022Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T08:59:41.051025Z node 212 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T08:59:41.051027Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T08:59:41.051050Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:41.051054Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1003 2024-11-21T08:59:41.051418Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T08:59:41.051424Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 
2024-11-21T08:59:41.051459Z node 212 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T08:59:41.051470Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T08:59:41.051473Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [212:631:2572] TestWaitNotification: OK eventTxId 1003 2024-11-21T08:59:41.051516Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:41.051537Z node 212 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 28us result status StatusSuccess 2024-11-21T08:59:41.051611Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:41.051648Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NewTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:41.051668Z node 212 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NewTable" took 21us result status StatusSuccess 2024-11-21T08:59:41.051767Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/NewTable" PathDescription { Self { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TTablesWithReboots::ParallelCreateDrop [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> LocalPartition::DescribeBadPartition [GOOD] >> LocalPartition::DescribeHang >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:52.408673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:52.408690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.408693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:52.408696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:52.408706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:52.408709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:52.408715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:52.408791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:52.415966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:52.415979Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.417433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:52.417506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:52.417539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:52.419229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:52.419273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:52.419339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.419478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.419942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.420138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.420144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.420152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:52.420157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.420161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:52.420190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:52.421166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:52.431382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:52.431441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.431486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:52.431523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:52.431528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:52.432109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:52.432118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:52.432121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:52.432369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:52.432578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.432586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.432590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.432983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:52.433238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:52.433272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:52.433395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:52.433412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:52.433416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.433446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:52.433450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:52.433468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:52.433476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:52.433751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:52.433757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:52.433785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:52.433788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:52.433840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:52.433844Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:52.433851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:52.433854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.433857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:52.433860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:52.433863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:52.433866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:52.433871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:52.433875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:52.433878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T08:59:42.609407Z node 208 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609410Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [208:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T08:59:42.609426Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [208:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T08:59:42.609467Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609472Z node 208 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:42.609481Z node 208 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609484Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609486Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609489Z node 208 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T08:59:42.609589Z node 208 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.609596Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.609599Z node 208 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:42.609602Z node 208 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T08:59:42.609605Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:42.609676Z node 208 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.609682Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.609686Z node 208 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T08:59:42.609689Z node 208 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T08:59:42.609691Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T08:59:42.609696Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T08:59:42.609969Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T08:59:42.609976Z node 208 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:42.610017Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T08:59:42.610032Z node 208 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T08:59:42.610035Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:42.610038Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T08:59:42.610041Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T08:59:42.610044Z node 208 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T08:59:42.610046Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T08:59:42.610059Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T08:59:42.610321Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.610341Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T08:59:42.611171Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 349 RawX2: 893353199902 } TabletId: 72075186233409546 State: 4 2024-11-21T08:59:42.611184Z node 208 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:42.611219Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 356 RawX2: 893353199905 } TabletId: 72075186233409547 State: 4 2024-11-21T08:59:42.611222Z node 208 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:42.611585Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:42.611619Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:42.611651Z node 208 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T08:59:42.611687Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:42.611718Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted 
for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2024-11-21T08:59:42.612030Z node 208 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:59:42.612087Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:59:42.612110Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:59:42.612352Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:42.612359Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T08:59:42.612365Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:42.612700Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T08:59:42.612706Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T08:59:42.612864Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:59:42.612869Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:59:42.612890Z node 208 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T08:59:42.612919Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T08:59:42.612923Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T08:59:42.612958Z node 208 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T08:59:42.612969Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T08:59:42.612972Z node 208 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [208:543:2507] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T08:59:42.613008Z node 208 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:42.613016Z node 208 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:42.613020Z node 208 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:42.613025Z node 208 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 
72075186233409549 2024-11-21T08:59:42.613069Z node 208 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T08:59:42.613089Z node 208 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 27us result status StatusPathDoesNotExist 2024-11-21T08:59:42.613109Z node 208 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::CDC_Write |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TxUsage::WriteToTopic_Demo_42 [GOOD] >> TxUsage::WriteToTopic_Demo_43 >> TxUsage::WriteToTopic_Demo_12 [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> test_db_counters.py::TestKqpCounters::test_case >> TxUsage::WriteToTopic_Demo_13 >> TxUsage::WriteToTopic_Demo_41 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] >> TxUsage::WriteToTopic_Demo_31 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_41 [GOOD] Test command err: 2024-11-21T08:58:39.369792Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654385004063973:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.370304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.498203Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e4b/r3tmp/tmppA8Abu/pdisk_1.dat 2024-11-21T08:58:39.720801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.720833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.738245Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.765958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11151, node 1 2024-11-21T08:58:39.816337Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.816360Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.227681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e4b/r3tmp/yandexxgcWFW.tmp 2024-11-21T08:58:40.227700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e4b/r3tmp/yandexxgcWFW.tmp 2024-11-21T08:58:40.227752Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e4b/r3tmp/yandexxgcWFW.tmp 2024-11-21T08:58:40.227796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.396499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654389299031868:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.396537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.400434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654389299031880:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.471568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.528859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654389299031882:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.588743Z INFO: TTestServer started on Port 4270 GrpcPort 11151 TClient is connected to server localhost:4270 PQClient connected to localhost:11151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.837904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.848180Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.851139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T08:58:40.868056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:41.041999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:41.389633Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654389299031957:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.396585Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGExZTFjZTQtYWI4ZDViNjktMWZmMWMyOTQtNWE3MGNmMGQ=, ActorId: [1:7439654389299031857:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw8atazmsxxg99bjfja, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.401644Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.571448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.631556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654397888967020:2626] 2024-11-21T08:58:44.364652Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654385004063973:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.869865Z :WriteToTopic_Demo_4 INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900217Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960922Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960984Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.979461Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654419363803848:2814] connected; active server actors: 1 2024-11-21T08:58:47.979552Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987516Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987641Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988082Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988105Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.988200Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.996446Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996590Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996708Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005206Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005228Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005299Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005305Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014041Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654419363803847:2813], now have 1 active actors on pipe 2024-11-21T08:58:48.014064Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014069Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654419363803886:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.016616Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654385004064353 RawX2: 4294969469 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ... 4037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 83, partNo: 0, Offset: 82 is stored on disk 2024-11-21T08:59:46.472798Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.472800Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 83, partNo: 1, Offset: 82 is stored on disk 2024-11-21T08:59:46.472806Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.472808Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 84, partNo: 0, Offset: 83 is stored on disk 2024-11-21T08:59:46.472810Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.472811Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 84, partNo: 1, Offset: 83 is stored on disk 2024-11-21T08:59:46.473246Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:59:46.473257Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:59:46.473261Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:46.473265Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:46.473299Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 163 requestId: cookie: 82 2024-11-21T08:59:46.473309Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 166 requestId: cookie: 83 2024-11-21T08:59:46.473317Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 167 requestId: cookie: 84 2024-11-21T08:59:46.473320Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T08:59:46.473326Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T08:59:46.473335Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Write in transaction. 
Partition: 0, WriteId: { NodeId: 9 KeyId: 281474976715675 }, NeedSupportivePartition: 0 2024-11-21T08:59:46.473391Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client PART message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 85 partNo : 0 messageNo: 170 size: 511944 2024-11-21T08:59:46.473444Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client PART message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 85 partNo : 1 messageNo: 170 size: 488129 2024-11-21T08:59:46.473453Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 85 partNo : 1 messageNo: 170 size 488129 offset: -1 2024-11-21T08:59:46.473465Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T08:59:46.473466Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T08:59:46.473470Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Write in transaction. Partition: 0, WriteId: { NodeId: 9 KeyId: 281474976715675 }, NeedSupportivePartition: 0 2024-11-21T08:59:46.473494Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 85 partNo 0 2024-11-21T08:59:46.473502Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 85 partNo 1 2024-11-21T08:59:46.473539Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client PART message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 86 partNo : 0 messageNo: 171 size: 511944 2024-11-21T08:59:46.473610Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client PART message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 86 partNo : 1 messageNo: 171 size: 488129 2024-11-21T08:59:46.473617Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 86 partNo : 1 messageNo: 171 size 488129 offset: -1 2024-11-21T08:59:46.473622Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 85 partNo 1 FormedBlobsCount 0 NewHead: Offset 84 PartNo 0 PackedSize 1000253 count 1 nextOffset 85 batches 2 2024-11-21T08:59:46.473627Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:46.473637Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654675084106873:2475] destroyed 2024-11-21T08:59:46.473641Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:46.473646Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654675084106870:2475] destroyed 2024-11-21T08:59:46.473659Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T08:59:46.473741Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715675}, 100000} compactOffset 84,1 HeadOffset 80 endOffset 84 curOffset 85 D0000100000_00000000000000000084_00000_0000000001_00001| size 1000243 WTime 1732179586473 2024-11-21T08:59:46.473876Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:59:46.473877Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::DropOwner. 2024-11-21T08:59:46.474302Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 2024-11-21T08:59:46.474311Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.474319Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 85, partNo: 0, Offset: 84 is stored on disk 2024-11-21T08:59:46.474322Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.474327Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 85, partNo: 1, Offset: 84 is stored on disk 2024-11-21T08:59:46.474328Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 170 requestId: cookie: 85 2024-11-21T08:59:46.474447Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 86 partNo 0 2024-11-21T08:59:46.474456Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 86 partNo 1 2024-11-21T08:59:46.474550Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 86 partNo 1 FormedBlobsCount 0 NewHead: Offset 85 PartNo 0 PackedSize 1000253 count 1 nextOffset 86 batches 2 2024-11-21T08:59:46.474671Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715675}, 100000} compactOffset 85,1 HeadOffset 80 endOffset 85 curOffset 86 D0000100000_00000000000000000085_00000_0000000001_00001| size 1000243 WTime 1732179586474 2024-11-21T08:59:46.474805Z node 9 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T08:59:46.474826Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715675 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T08:59:46.474834Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {9, 281474976715675} 2024-11-21T08:59:46.474836Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.474850Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T08:59:46.475044Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 2024-11-21T08:59:46.475052Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.475057Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 86, partNo: 0, Offset: 85 is stored on disk 2024-11-21T08:59:46.475060Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T08:59:46.475063Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 86, partNo: 1, Offset: 85 is stored on disk 2024-11-21T08:59:46.475065Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 171 requestId: cookie: 86 >> TxUsage::WriteToTopic_Demo_32 >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestExternalBoot |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> TxUsage::WriteToTopic_Demo_20_RestartNo [GOOD] >> THiveTest::TestExternalBootWhenLocked [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2024-11-21T08:59:20.488830Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:20.489834Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 
ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.489902Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:20.490020Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:65:2071] ControllerId# 72057594037932033 2024-11-21T08:59:20.490022Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:20.490050Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:20.490114Z node 3 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:20.490206Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:20.490210Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:20.490404Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:71:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490422Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:72:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490437Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:73:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490463Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:74:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490477Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:75:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490491Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:76:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490520Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:77:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.490528Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:20.490534Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:65:2071] 2024-11-21T08:59:20.490537Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:65:2071] 2024-11-21T08:59:20.490541Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:20.490544Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:20.490598Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:20.490636Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:20.490964Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.490993Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:20.491090Z node 1 :BS_NODE DEBUG: 
{NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:20.491244Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:20.491252Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:20.491340Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:87:2075] ControllerId# 72057594037932033 2024-11-21T08:59:20.491342Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:20.491351Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:20.491377Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:20.491976Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:61:2065] 2024-11-21T08:59:20.491981Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:61:2065] 2024-11-21T08:59:20.491987Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:20.494157Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:20.494166Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:20.494406Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:94:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494435Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:95:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494460Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:96:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494488Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:97:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494517Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:98:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494549Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:99:2085] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494576Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:100:2086] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.494579Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:20.494586Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:87:2075] 2024-11-21T08:59:20.494589Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:87:2075] 2024-11-21T08:59:20.494593Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:20.494598Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:20.494701Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:20.494712Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:20.495560Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { 
NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.495580Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:20.495671Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:108:2072] ControllerId# 72057594037932033 2024-11-21T08:59:20.495675Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:20.495682Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:20.495710Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:20.495802Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:20.495823Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:20.495826Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:20.496013Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:114:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496029Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:115:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496045Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:116:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496063Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:117:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496078Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:118:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496099Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:119:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496113Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:120:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.496116Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:20.496121Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:108:2072] 2024-11-21T08:59:20.496123Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:108:2072] 2024-11-21T08:59:20.496127Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:20.496130Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:20.496197Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:20.496247Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:65:2071] 2024-11-21T08:59:20.496257Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.496261Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:20.496265Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:20.496291Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.496299Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue 
send [1:61:2065] 2024-11-21T08:59:20.499091Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:87:2075] 2024-11-21T08:59:20.499109Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.499113Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:20.499403Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:20.499432Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.499460Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:108:2072] 2024-11-21T08:59:20.499465Z node 2 :BS_NO ... Kikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:48.456883Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Execute for tablet Dummy.72075186224037888.Leader.1 status 5 reason ReasonPill generation 1 follower 0 from local [42:94:2091] 2024-11-21T08:59:48.456888Z node 42 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T08:59:48.456891Z node 42 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T08:59:48.456899Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:59:48.456904Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:48.456908Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080004 [42:94:2091]} 2024-11-21T08:59:48.456916Z node 42 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvDeadTabletAck TabletId:(72075186224037888,0) 2024-11-21T08:59:48.456926Z node 42 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T08:59:48.456929Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T08:59:48.456932Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:48.456934Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T08:59:48.456937Z node 42 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T08:59:48.456941Z node 42 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T08:59:48.456944Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T08:59:48.456946Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:48.456948Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2024-11-21T08:59:48.456993Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [42:446:2353] 2024-11-21T08:59:48.456996Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [42:446:2353] 
2024-11-21T08:59:48.457008Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:48.457014Z node 42 :TABLET_RESOLVER DEBUG: SelectForward node 42 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [42:363:2294] 2024-11-21T08:59:48.457021Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [42:446:2353] 2024-11-21T08:59:48.457025Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [42:446:2353] 2024-11-21T08:59:48.457032Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [42:446:2353] 2024-11-21T08:59:48.457037Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [42:446:2353] 2024-11-21T08:59:48.457041Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T08:59:48.457057Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:48.457070Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T08:59:48.457075Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T08:59:48.457079Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T08:59:48.457084Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T08:59:48.457090Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T08:59:48.457101Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T08:59:48.457114Z node 42 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T08:59:48.457144Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [43:448:2091] 2024-11-21T08:59:48.457148Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [43:448:2091] 2024-11-21T08:59:48.457155Z node 43 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:48.457160Z node 43 :TABLET_RESOLVER DEBUG: SelectForward node 43 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [42:314:2258] 2024-11-21T08:59:48.457164Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [43:448:2091] 2024-11-21T08:59:48.457169Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [43:448:2091] 2024-11-21T08:59:48.457173Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward 
result remote node 42 [43:448:2091] 2024-11-21T08:59:48.457182Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [43:448:2091] 2024-11-21T08:59:48.457192Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T08:59:48.457216Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [43:448:2091] 2024-11-21T08:59:48.457238Z node 42 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([43:448:2091]) [42:449:2354] 2024-11-21T08:59:48.457248Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [43:448:2091] 2024-11-21T08:59:48.457251Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [43:448:2091] 2024-11-21T08:59:48.457253Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [43:448:2091] 2024-11-21T08:59:48.457258Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T08:59:48.457261Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [43:448:2091] 2024-11-21T08:59:48.457263Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [43:448:2091] 2024-11-21T08:59:48.457293Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [43:436:2086] EventType# 268697624 2024-11-21T08:59:48.457310Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2024-11-21T08:59:48.457315Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T08:59:48.457321Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2024-11-21T08:59:48.457356Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Execute, Sending TEvBootTablet(Dummy.72075186224037888.Leader.2) to node 43 storage {Version# 1 TabletID# 72075186224037888 TabletType# Dummy Channels# {0:{Channel# 0 Type# none StoragePool# def1 History# {0:{FromGeneration# 0 GroupID# 2147483648 Timestamp# 1970-01-01T00:00:00.058024Z}}, 1:{Channel# 1 Type# none StoragePool# def2 History# {0:{FromGeneration# 0 GroupID# 2147483649 Timestamp# 1970-01-01T00:00:00.058024Z}}, 2:{Channel# 2 Type# none StoragePool# def3 History# {0:{FromGeneration# 0 GroupID# 2147483650 Timestamp# 1970-01-01T00:00:00.058024Z}}} Tenant: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:48.457371Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T08:59:48.457376Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T08:59:48.467620Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] bootstrap ActorId# [42:451:2356] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T08:59:48.467657Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T08:59:48.467662Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 
2024-11-21T08:59:48.467669Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2024-11-21T08:59:48.467672Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2024-11-21T08:59:48.467693Z node 42 :BS_PROXY DEBUG: Send to queueActorId# [42:53:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T08:59:48.467992Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T08:59:48.468011Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T08:59:48.468018Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T08:59:48.468046Z node 42 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T08:59:48.468070Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2024-11-21T08:59:48.468082Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [43:436:2086]} 2024-11-21T08:59:48.468119Z node 42 :HIVE TRACE: HIVE#72057594037927937 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 43 Cookie 0
>> Balancing::Balancing_OneTopic_PQv1 [GOOD]
>> Balancing::Balancing_ManyTopics_TopicApi
>> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD]
>> TopicAutoscaling::Simple_PQv1 [GOOD]
>> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad
>> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true]
>> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD]
>> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true]
>> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL]
>> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD]
>> LocalPartition::WithoutPartitionDeadNode
>> TxUsage::WriteToTopic_Demo_13 [GOOD]
>> TxUsage::WriteToTopic_Demo_14
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0]
sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:56.269813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:56.269828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:56.269831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:56.269834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:56.269843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:56.269845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:56.269851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:56.269898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:56.276647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:56.276658Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:56.277973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:56.278101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:56.278124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:56.279779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:56.279823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:56.279878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:56.279983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:56.280443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:56.280599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:56.280605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:56.280611Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:56.280615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:56.280621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:56.280642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:56.281403Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:56.291331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:56.291382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.291413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:56.291449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:56.291454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.291868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:56.291881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:56.291903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.291908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:56.291911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:56.291914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:56.292134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.292140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:56.292142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:56.292338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:58:56.292343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.292346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:56.292350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:56.292708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:56.292954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:56.292982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:56.293086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:56.293101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:56.293105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:56.293134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:56.293138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:56.293153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:56.293161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:56.293421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:56.293426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:56.293447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:56.293450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:56.293494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:56.293499Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:56.293506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:56.293508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:56.293511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:56.293514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:56.293517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:56.293519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:56.293537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:56.293540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:56.293543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 7594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } Origin: 72075186233409547 State: 5 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:59:50.995513Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409547, partId: 0 2024-11-21T08:59:50.995520Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } Origin: 72075186233409547 State: 5 TxId: 1005 Step: 0 Generation: 2 2024-11-21T08:59:50.995523Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T08:59:50.995773Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:50.995780Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T08:59:50.995784Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1005:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T08:59:50.995787Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1005, done: 0, blocked: 1 2024-11-21T08:59:50.995793Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1005 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T08:59:50.995807Z node 226 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 137 -> 129 2024-11-21T08:59:50.995818Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:50.995823Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:50.996069Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996087Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at 
schemeshard: 72057594046678944 2024-11-21T08:59:50.996247Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996254Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:50.996274Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T08:59:50.996290Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996293Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [226:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T08:59:50.996296Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [226:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T08:59:50.996338Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996342Z node 226 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T08:59:50.996350Z node 226 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996352Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T08:59:50.996355Z node 226 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T08:59:50.996453Z node 226 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.996462Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.996466Z node 226 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:50.996470Z node 226 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T08:59:50.996474Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T08:59:50.996710Z node 226 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.996719Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.996723Z node 226 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T08:59:50.996727Z node 226 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T08:59:50.996731Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T08:59:50.996741Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T08:59:50.997054Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T08:59:50.997062Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:50.997114Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T08:59:50.997134Z node 226 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T08:59:50.997138Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:50.997143Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T08:59:50.997147Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T08:59:50.997151Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T08:59:50.997155Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T08:59:50.997170Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T08:59:50.997347Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.997507Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T08:59:50.998248Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } TabletId: 72075186233409547 State: 4 2024-11-21T08:59:50.998260Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T08:59:50.998480Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T08:59:50.998533Z node 226 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T08:59:50.998817Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T08:59:50.998851Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T08:59:50.998970Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T08:59:50.998975Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T08:59:50.998984Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:50.999291Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T08:59:50.999299Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T08:59:50.999435Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T08:59:50.999471Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T08:59:50.999477Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T08:59:50.999526Z node 226 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T08:59:50.999537Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T08:59:50.999542Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [226:665:2629] TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T08:59:50.999581Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T08:59:50.999589Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T08:59:50.999594Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T08:59:50.999599Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549
>> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK
>> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK
>> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false]
>> TxUsage::WriteToTopic_Demo_32 [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID
72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:32.945554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:32.945575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.945578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:32.945582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:32.945586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:32.945588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:32.945595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:32.945664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:32.952760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:32.952777Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.954413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:32.954495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:32.954525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:32.956609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:32.956674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:32.956741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.956884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.957424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.957714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:32.957719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.957724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:32.957758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:32.958998Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:32.975506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:32.975586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.975650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:32.975695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:32.975704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.976528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.976554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:32.976615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.976626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:32.976630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:32.976636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:32.976998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.977008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:32.977013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:32.977299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.977307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.977312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.977319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.978007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:32.978393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:32.978440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:32.978639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:32.978662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:32.978668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.978717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:32.978724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:32.978755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:32.978767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:32.979122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:32.979132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:32.979174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:32.979181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:32.979261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:32.979267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:32.979277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:32.979281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.979287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:32.979292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:32.979297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:32.979301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:32.979311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:32.979317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:32.979322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... sult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:52.040617Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T08:59:52.040683Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 75us result status StatusSuccess 2024-11-21T08:59:52.040831Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_33 >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> TxUsage::WriteToTopic_Demo_43 [GOOD] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] >> TxUsage::WriteToTopic_Demo_44 >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD] >> TxUsage::WriteToTopic_Demo_24 >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> 
TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2024-11-21T08:59:31.885943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654612151438709:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:31.886098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003801/r3tmp/tmp9pHGc2/pdisk_1.dat 2024-11-21T08:59:31.908915Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created TServer::EnableGrpc on GrpcPort 17794, node 1 2024-11-21T08:59:31.937206Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:31.937503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003801/r3tmp/yandexk7kCw5.tmp 2024-11-21T08:59:31.937514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003801/r3tmp/yandexk7kCw5.tmp 2024-11-21T08:59:31.937571Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003801/r3tmp/yandexk7kCw5.tmp 2024-11-21T08:59:31.937605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:31.939844Z INFO: TTestServer started on Port 29337 GrpcPort 17794 TClient is connected to server localhost:29337 PQClient connected to localhost:17794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:59:31.986065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:31.986095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:31.987139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:59:32.009509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T08:59:32.018695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:59:32.110956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654616446406751:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.110980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654616446406755:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.110984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.111522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:32.112827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654616446406765:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:59:32.131081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.185218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.198199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.207843Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654616446407049:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:32.208045Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDg3OWU0ZDQtNWYxMjU0NWUtN2NkMzNhMzctMTAxZGY1N2Q=, ActorId: [1:7439654616446406748:2304], ActorState: ExecuteState, TraceId: 01jd6z5dcdfym25d7q23j94wnr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:32.208538Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654616446407114:2596] 2024-11-21T08:59:36.886285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654612151438709:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:36.886310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:37.340825Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:37.343719Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:37.344048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654637921243895:2765], Recipient [1:7439654612151439073:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.344059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.344061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:37.344066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654637921243891:2762], Recipient [1:7439654612151439073:2177]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T08:59:37.344068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:37.349083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:37.349172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:37.349234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:37.349250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:37.349260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:37.349268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:37.349275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T08:59:37.349298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T08:59:37.349406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T08:59:37.349425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T08:59:37.349454Z node 1 :FLAT_TX_SCHEM ... # 271187975, Sender [4:7439654711912176552:2438], Recipient [4:7439654711912176681:2456]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2024-11-21T08:59:56.496841Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2024-11-21T08:59:56.496842Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvStatus 2024-11-21T08:59:56.496848Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [4:7439654711912176552:2438], Recipient [4:7439654711912176681:2456]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T08:59:56.496854Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T08:59:56.496870Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [4:7439654711912176685:2457], Partition 1, Sender [4:7439654711912176685:2457], Recipient [4:7439654711912176759:2464], Cookie: 0 2024-11-21T08:59:56.496877Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [4:7439654711912176685:2457], Recipient [4:7439654711912176759:2464]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T08:59:56.496878Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T08:59:56.496903Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T08:59:56.496914Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [4:7439654711912176685:2457], Partition 1, Sender [4:7439654711912176685:2457], Recipient [4:7439654711912176759:2464], Cookie: 0 2024-11-21T08:59:56.496920Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [4:7439654711912176685:2457], Recipient [4:7439654711912176759:2464]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T08:59:56.496921Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T08:59:56.496924Z node 4 :PERSQUEUE TRACE: 
StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [4:7439654711912176681:2456], Partition 2, Sender [4:7439654711912176681:2456], Recipient [4:7439654711912176758:2463], Cookie: 0 2024-11-21T08:59:56.496926Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [4:7439654711912176681:2456], Recipient [4:7439654711912176758:2463]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T08:59:56.496926Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T08:59:56.496941Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T08:59:56.496952Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [4:7439654711912176681:2456], Partition 2, Sender [4:7439654711912176681:2456], Recipient [4:7439654711912176758:2463], Cookie: 0 2024-11-21T08:59:56.496960Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [4:7439654711912176681:2456], Recipient [4:7439654711912176758:2463]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T08:59:56.496961Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T08:59:56.497014Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [4:7439654711912176552:2438], Recipient [4:7439654686142371627:2136]: NKikimrSchemeOp.TDescribePath PathId: 14 SchemeshardId: 72057594046644480 2024-11-21T08:59:56.497023Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T08:59:56.497059Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] Send TEvPeriodicTopicStats PathId: 14 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T08:59:56.497112Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] ProcessPendingStats. 
PendingUpdates size 0 2024-11-21T08:59:56.497152Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [4:7439654711912176552:2438], Recipient [4:7439654686142371627:2136]: NKikimrPQ.TEvPeriodicTopicStats PathId: 14 Generation: 1 Round: 1 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2024-11-21T08:59:56.497158Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T08:59:56.497161Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 14] DataSize 0 UsedReserveSize 0 2024-11-21T08:59:56.497167Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099999s, queue# 1 2024-11-21T08:59:56.498441Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439654711912176550:2437], Partition 0, Sender [4:7439654711912176616:2443], Recipient [4:7439654711912176613:2441], Cookie: 0 2024-11-21T08:59:56.498452Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439654711912176616:2443], Recipient [4:7439654711912176613:2441]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.498455Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.501537Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439654711912176550:2437], Partition 0, Sender [0:0:0], Recipient [4:7439654711912176613:2441], Cookie: 0 2024-11-21T08:59:56.501564Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439654711912176613:2441]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.501567Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.501579Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:59:56.501596Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:59:56.501603Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:59:56.501607Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T08:59:56.507504Z node 4 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T08:59:56.507547Z node 4 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/dir/origin" 2024-11-21T08:59:56.507598Z node 4 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/dir/origin 2024-11-21T08:59:56.510234Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439654711912176681:2456], Partition 2, Sender [4:7439654711912176768:2468], Recipient [4:7439654711912176758:2463], Cookie: 0 2024-11-21T08:59:56.510251Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439654711912176768:2468], Recipient [4:7439654711912176758:2463]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.510255Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.510263Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439654711912176685:2457], Partition 1, Sender [4:7439654711912176771:2469], Recipient [4:7439654711912176759:2464], Cookie: 0 2024-11-21T08:59:56.510268Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439654711912176771:2469], Recipient [4:7439654711912176759:2464]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.510270Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T08:59:56.513298Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439654711912176681:2456], Partition 2, Sender [0:0:0], Recipient [4:7439654711912176758:2463], Cookie: 0 2024-11-21T08:59:56.513310Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439654711912176758:2463]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.513312Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.513314Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439654711912176685:2457], Partition 1, Sender [0:0:0], Recipient [4:7439654711912176759:2464], Cookie: 0 2024-11-21T08:59:56.513320Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:59:56.513322Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439654711912176759:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.513324Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T08:59:56.513331Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T08:59:56.513334Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:59:56.513342Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T08:59:56.513342Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 
2024-11-21T08:59:56.513344Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T08:59:56.513346Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T08:59:56.513346Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> TxUsage::WriteToTopic_Demo_14 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> TxUsage::WriteToTopic_Demo_15 >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_33 [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit >> test_storage_config.py::TestStorageConfig::test_cases[case_3] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2024-11-21T08:59:29.475711Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:29.476313Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:29.476379Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:29.476500Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:29.476650Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:29.476657Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:29.476778Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T08:59:29.476781Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:29.476805Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:29.476871Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:29.478498Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:29.478511Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 
2024-11-21T08:59:29.478702Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478725Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478741Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478758Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478774Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478789Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478807Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:29.478810Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:29.478817Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T08:59:29.478820Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T08:59:29.478824Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:29.478829Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:29.478906Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:29.481857Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:29.481875Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:29.481881Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:29.482109Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:29.482137Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:29.482166Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:29.482170Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:29.482175Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:29.482527Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:29.482586Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:29.482590Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:29.482923Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:29.482936Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:29.482938Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:29.482955Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T08:59:29.482962Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: 
{EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:29.482980Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T08:59:29.482983Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T08:59:29.483112Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:29.483153Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:29.483157Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:29.483160Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:29.483164Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:29.483185Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:29.483190Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:29.483206Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T08:59:29.483208Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T08:59:29.483211Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:29.483241Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:29.483262Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:29.483271Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:59:29.483594Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T08:59:29.483701Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:29.483713Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:29.483716Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T08:59:29.484380Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T08:59:29.484387Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 
2024-11-21T08:59:29.484400Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:29.484599Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:29.484683Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:29.484716Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T08:59:29.484720Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T08:59:29.484723Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:29.484764Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T08:59:29.484786Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T08:59:29.484789Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T08:59:29.484797Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T08:59:29.484801Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:29.484809Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:29.484823Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:29.484832Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:29.484835Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T08:59:29.484842Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T08:59:29.484845Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T08:59:29.484852Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T08:59:29.484871Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
63] CurrentLeaderTablet: [54:1936:2266] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T08:59:57.034382Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [54:1930:2263] CurrentLeaderTablet: [54:1936:2266] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2024-11-21T08:59:57.034387Z node 49 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2024-11-21T08:59:57.034392Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1930:2263] 2024-11-21T08:59:57.034411Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 54 [49:2059:2726] 2024-11-21T08:59:57.034430Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [49:2059:2726] 2024-11-21T08:59:57.034434Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [49:2059:2726] 2024-11-21T08:59:57.034471Z node 54 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [49:2059:2726] 2024-11-21T08:59:57.034540Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [49:2059:2726] 2024-11-21T08:59:57.034544Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [49:2059:2726] 2024-11-21T08:59:57.034641Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [49:2063:2728] 2024-11-21T08:59:57.034646Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [49:2063:2728] 2024-11-21T08:59:57.034653Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.034660Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1276:2096] 2024-11-21T08:59:57.034681Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 54 [49:2063:2728] 2024-11-21T08:59:57.034699Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [49:2063:2728] 2024-11-21T08:59:57.034704Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [49:2063:2728] 2024-11-21T08:59:57.034733Z node 54 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [49:2063:2728] 2024-11-21T08:59:57.034819Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [49:2063:2728] 2024-11-21T08:59:57.034824Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [49:2063:2728] 2024-11-21T08:59:57.034917Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [49:2066:2730] 2024-11-21T08:59:57.034922Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [49:2066:2730] 2024-11-21T08:59:57.034929Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.034934Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: 
[53:1280:2097] 2024-11-21T08:59:57.034949Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 53 [49:2066:2730] 2024-11-21T08:59:57.034968Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [49:2066:2730] 2024-11-21T08:59:57.034972Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [49:2066:2730] 2024-11-21T08:59:57.035043Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [49:2066:2730] 2024-11-21T08:59:57.035047Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [49:2066:2730] 2024-11-21T08:59:57.035051Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [49:2066:2730] 2024-11-21T08:59:57.035057Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2024-11-21T08:59:57.035080Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.035095Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:57.035114Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2024-11-21T08:59:57.035121Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2024-11-21T08:59:57.035126Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2024-11-21T08:59:57.035133Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [54:1931:2264] CurrentLeaderTablet: [54:1938:2267] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T08:59:57.035147Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [54:1931:2264] CurrentLeaderTablet: [54:1938:2267] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T08:59:57.035155Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [54:1931:2264] CurrentLeaderTablet: [54:1938:2267] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2024-11-21T08:59:57.035161Z node 49 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2024-11-21T08:59:57.035166Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1931:2264] 2024-11-21T08:59:57.035177Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 54 [49:2066:2730] 2024-11-21T08:59:57.035205Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [49:2066:2730] 2024-11-21T08:59:57.035209Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [49:2066:2730] 2024-11-21T08:59:57.035367Z node 54 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [49:2066:2730] 2024-11-21T08:59:57.035435Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [49:2066:2730] 2024-11-21T08:59:57.035440Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued 
[49:2066:2730] 2024-11-21T08:59:57.035539Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [49:2070:2732] 2024-11-21T08:59:57.035543Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [49:2070:2732] 2024-11-21T08:59:57.035551Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.035557Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1778:2191] 2024-11-21T08:59:57.035575Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 54 [49:2070:2732] 2024-11-21T08:59:57.035593Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [49:2070:2732] 2024-11-21T08:59:57.035597Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [49:2070:2732] 2024-11-21T08:59:57.035639Z node 54 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [49:2070:2732] 2024-11-21T08:59:57.035705Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [49:2070:2732] 2024-11-21T08:59:57.035709Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [49:2070:2732] 2024-11-21T08:59:57.035808Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [49:2073:2734] 2024-11-21T08:59:57.035812Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [49:2073:2734] 2024-11-21T08:59:57.035819Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.035825Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1781:2193] 2024-11-21T08:59:57.035843Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 54 [49:2073:2734] 2024-11-21T08:59:57.035865Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [49:2073:2734] 2024-11-21T08:59:57.035869Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [49:2073:2734] 2024-11-21T08:59:57.035926Z node 54 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [49:2073:2734] 2024-11-21T08:59:57.035997Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [49:2073:2734] 2024-11-21T08:59:57.036002Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [49:2073:2734] 2024-11-21T08:59:57.036101Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [49:2075:2735] 2024-11-21T08:59:57.036106Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [49:2075:2735] 2024-11-21T08:59:57.036113Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:57.036119Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [49:581:2269] 2024-11-21T08:59:57.036129Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [49:2075:2735] 2024-11-21T08:59:57.036145Z 
node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [49:2075:2735] 2024-11-21T08:59:57.036158Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [49:2075:2735] 2024-11-21T08:59:57.036162Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [49:2075:2735] 2024-11-21T08:59:57.036183Z node 49 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [49:2075:2735] 2024-11-21T08:59:57.036229Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [49:2075:2735] 2024-11-21T08:59:57.036235Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [49:2075:2735] 2024-11-21T08:59:57.036238Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [49:2075:2735] 2024-11-21T08:59:57.036244Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [49:2075:2735] 2024-11-21T08:59:57.036248Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [49:2075:2735] 2024-11-21T08:59:57.036255Z node 49 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [49:552:2264] EventType# 268697616 2024-11-21T08:59:57.036280Z node 49 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([49:2075:2735]) [49:2076:2736] >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_33 [GOOD] Test command err: 2024-11-21T08:58:39.370073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654385767739895:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.370654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.502285Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e60/r3tmp/tmph5Ou6e/pdisk_1.dat 2024-11-21T08:58:39.722127Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.743655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.743683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.784641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17115, node 1 2024-11-21T08:58:39.823071Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.824464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T08:58:39.824545Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.837973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439654385767740412:2282], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:58:40.227639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e60/r3tmp/yandexwb42Nl.tmp 2024-11-21T08:58:40.227654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e60/r3tmp/yandexwb42Nl.tmp 2024-11-21T08:58:40.227708Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e60/r3tmp/yandexwb42Nl.tmp 2024-11-21T08:58:40.227754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.393874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390062707780:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.393906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.394406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390062707792:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.483497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.532317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654390062707794:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.581687Z INFO: TTestServer started on Port 29281 GrpcPort 17115 TClient is connected to server localhost:29281 PQClient connected to localhost:17115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.835832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.840572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.868330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:58:41.042032Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T08:58:41.389620Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654390062707870:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.392855Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzBhMTQ5NDEtZjVjZDg2OS1kNWUzZTc1Mi05NGQzMThlNg==, ActorId: [1:7439654390062707769:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw9ep3pe63n1yedf77m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400438Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.621173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.638924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654398652642939:2626] 2024-11-21T08:58:44.364949Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654385767739895:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.861984Z :WriteToTopic_Demo_3 INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900253Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960249Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960369Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.979363Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654420127479780:2827] connected; active server actors: 1 2024-11-21T08:58:47.979458Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987565Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987661Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988091Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988115Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988121Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988124Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996507Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996618Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996784Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005358Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005374Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005491Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.005509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.014061Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420127479815:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014081Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014085Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420127479779:2826], now have 1 active actors on pipe 2024-11-21T08:58:48.015925Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654385767740269 RawX2: 4294969466 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSecond ... ic 'topic_A' partition {0, {9, 281474976715679}, 100001} part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 16546 count 1 nextOffset 1 batches 1 2024-11-21T08:59:58.510912Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715679}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000| size 16536 WTime 1732179598510 2024-11-21T08:59:58.510939Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:59:58.511221Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16479 2024-11-21T08:59:58.511232Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715679}, 100001} 2024-11-21T08:59:58.511242Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715679}, 100001}, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:59:58.511254Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 1 requestId: cookie: 2 2024-11-21T08:59:58.511264Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:59:58.511267Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:59:58.511273Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:59:58.511285Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] read cookie 0 Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:59:58.511292Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:59:58.511310Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T08:59:58.511317Z node 9 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:59:58.511329Z node 9 :PERSQUEUE DEBUG: Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer readTimeStamp done, result 1732179598510 queuesize 0 startOffset 0 2024-11-21T08:59:58.511472Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:59:58.511517Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 2 written_in_tx { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T08:59:58.511524Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] OnAck: seqNo=2, txId=01jd6z675caatnt0hbt00bk6g1, WriteCount=1, AckCount=1 2024-11-21T08:59:58.511697Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: acknoledged message 2 2024-11-21T08:59:58.512154Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654726953826178 RawX2: 4503638282078656 } TxId: 281474976715680 Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100001 } Op: Commit SendingShards: 72075186224037894 ReceivingShards: 72075186224037894 Immediate: true WriteId { NodeId: 9 KeyId: 281474976715679 } } 2024-11-21T08:59:58.512162Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {9, 281474976715679}, 100001} for WriteId {9, 281474976715679} 2024-11-21T08:59:58.512164Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715680 has WriteId {9, 281474976715679} 2024-11-21T08:59:58.512166Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2024-11-21T08:59:58.512189Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2024-11-21T08:59:58.512198Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2024-11-21T08:59:58.512202Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2024-11-21T08:59:58.512225Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Head=Offset 0 PartNo 0 PackedSize 16536 count 1 nextOffset 1 batches 1, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2024-11-21T08:59:58.512236Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 2 partNo 0 2024-11-21T08:59:58.512250Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 16546 count 1 nextOffset 2 batches 1 2024-11-21T08:59:58.512258Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2024-11-21T08:59:58.512286Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, 
Partition: 0, State: StateIdle] Add new write blob: topic 'topic_A' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 16536 WTime 1732179598512 2024-11-21T08:59:58.512312Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T08:59:58.512768Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T08:59:58.512808Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTransactionCompleted WriteId {9, 281474976715679} 2024-11-21T08:59:58.512816Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976715679}, 100001} 2024-11-21T08:59:58.512820Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T08:59:58.512897Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715679 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T08:59:58.512903Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 281474976715680 for WriteId {9, 281474976715679} 2024-11-21T08:59:58.512978Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {9, 281474976715679}, 100001} 2024-11-21T08:59:58.512988Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {9, 281474976715679} 2024-11-21T08:59:58.512992Z node 9 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 281474976715680 2024-11-21T08:59:58.513003Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T08:59:58.513069Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T08:59:58.513444Z :INFO: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T08:59:58.513447Z :INFO: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T08:59:58.513450Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T08:59:58.513608Z :INFO: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T08:59:58.513608Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T08:59:58.513624Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T08:59:58.513627Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2024-11-21T08:59:58.513640Z :DEBUG: [/Root] SessionId [test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T08:59:58.513684Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0 grpc read done: success: 0 data: 2024-11-21T08:59:58.513694Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0 grpc read failed 2024-11-21T08:59:58.513698Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0 grpc closed 2024-11-21T08:59:58.513701Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|bf236356-82b09f77-a3a74ee8-24e451c2_0 is DEAD 2024-11-21T08:59:58.513967Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:58.513981Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:58.513984Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:58.514016Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:58.514030Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654726953826084:2478] destroyed 2024-11-21T08:59:58.514037Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:58.514040Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654726953826156:2478] destroyed 2024-11-21T08:59:58.514042Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:58.514044Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654726953826087:2478] destroyed 2024-11-21T08:59:58.514046Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2024-11-21T08:58:39.369677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654388559103149:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.370242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.498761Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e95/r3tmp/tmpChAx1u/pdisk_1.dat 2024-11-21T08:58:39.780485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2008, node 1 2024-11-21T08:58:39.785407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.785432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.788559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:39.814122Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.814133Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.227127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e95/r3tmp/yandex27POpv.tmp 2024-11-21T08:58:40.227142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e95/r3tmp/yandex27POpv.tmp 2024-11-21T08:58:40.227193Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e95/r3tmp/yandex27POpv.tmp 2024-11-21T08:58:40.227230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.400664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392854071040:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.400695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.400838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392854071052:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.467608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.524893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654392854071054:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.579335Z INFO: TTestServer started on Port 25278 GrpcPort 2008 TClient is connected to server localhost:25278 PQClient connected to localhost:2008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T08:58:40.836680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T08:58:40.838929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:40.867256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:41.049580Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:41.393744Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654392854071130:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.396274Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjU0N2FiYzgtZDM3NGZjNDktNWE0N2VhODUtNDRmZjQ3MDU=, ActorId: [1:7439654392854071029:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw8frs0zm96bmc6g7kc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400887Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.572241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.625616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654401444006192:2627] 2024-11-21T08:58:44.364635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654388559103149:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.880267Z :WriteRead INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900271Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960296Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.969233Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978766Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654422918843044:2826] connected; active server actors: 1 2024-11-21T08:58:47.978858Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987563Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987661Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988079Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988090Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988095Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988098Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996484Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996523Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996778Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005248Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005265Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005350Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005350Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014032Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422918843043:2825], now have 1 active actors on pipe 2024-11-21T08:58:48.014054Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014059Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422918843057:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:58:48.016778Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439654388559103532:2175] txId 281474976710674 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { P ... 
4784Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T08:59:59.784785Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T08:59:59.784793Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [10:7439654729067881462:2451] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2024-11-21T08:59:59.784796Z node 10 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2024-11-21T08:59:59.784904Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 10, Generation: 1 2024-11-21T08:59:59.784914Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:59.784918Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [10:7439654729067881465:2451], now have 1 active actors on pipe 2024-11-21T08:59:59.784924Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T08:59:59.784927Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T08:59:59.784952Z node 10 :PERSQUEUE INFO: new Cookie test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2024-11-21T08:59:59.784974Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T08:59:59.784996Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:59:59.785031Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T08:59:59.785044Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T08:59:59.785076Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T08:59:59.785098Z node 10 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 2024-11-21T08:59:59.785310Z :DEBUG: [/Root] SessionId [test-message_group_id|26d30d84-34e460f-c5fd5a7-a1ec6230_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:59:59.785328Z :INFO: [/Root] SessionId [test-message_group_id|26d30d84-34e460f-c5fd5a7-a1ec6230_0] PartitionId [0] Generation [1] Write session established. 
Init response: session_id: "test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0" 2024-11-21T08:59:59.785332Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 2024-11-21T08:59:59.785339Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2024-11-21T08:59:59.785377Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2024-11-21T08:59:59.785402Z :INFO: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: close. Timeout 18446744073709.551615s 2024-11-21T08:59:59.785569Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T08:59:59.785581Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T08:59:59.785774Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T08:59:59.785853Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T08:59:59.785884Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T08:59:59.785893Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T08:59:59.785923Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T08:59:59.785947Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:59:59.785993Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T08:59:59.786002Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T08:59:59.786013Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T08:59:59.786061Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2024-11-21T08:59:59.786089Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2024-11-21T08:59:59.786142Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1732179599785 2024-11-21T08:59:59.786171Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T08:59:59.786513Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 2024-11-21T08:59:59.786524Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T08:59:59.786537Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T08:59:59.786547Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T08:59:59.786556Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T08:59:59.786558Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T08:59:59.786561Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T08:59:59.786590Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T08:59:59.786598Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T08:59:59.786604Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T08:59:59.786607Z node 10 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T08:59:59.786627Z node 10 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1732179599785 queuesize 0 startOffset 0 2024-11-21T08:59:59.786730Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T08:59:59.786775Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T08:59:59.786779Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 
2024-11-21T08:59:59.786782Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2024-11-21T08:59:59.885488Z :INFO: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T08:59:59.885512Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T08:59:59.885705Z :INFO: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T08:59:59.885759Z :DEBUG: [/Root] SessionId [test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T08:59:59.885911Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 grpc read done: success: 0 data: 2024-11-21T08:59:59.885927Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 grpc read failed 2024-11-21T08:59:59.885935Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 grpc closed 2024-11-21T08:59:59.885938Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6fec9c38-c2033305-d3e98be8-b378440c_0 is DEAD 2024-11-21T08:59:59.886216Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T08:59:59.886287Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T08:59:59.886302Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [10:7439654729067881465:2451] destroyed 2024-11-21T08:59:59.886314Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] >> test_system_views.py::TestPartitionStats::test_case >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> test_storage_config.py::TestStorageConfig::test_cases[case_11] >> TxUsage::WriteToTopic_Demo_24 [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> TxUsage::WriteToTopic_Demo_25 >> TopicAutoscaling::CDC_Write [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> TxUsage::WriteToTopic_Demo_15 [GOOD] >> TxUsage::WriteToTopic_Demo_16 >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> test_system_views.py::TestPartitionStats::test_case [GOOD] >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestGetStorageInfo >> test_storage_config.py::TestStorageConfig::test_cases[case_3] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_4] >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] >> 
THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] [GOOD] >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] Test command err: 2024-11-21T08:59:26.445279Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:26.446268Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:26.446324Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:26.446434Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:65:2071] ControllerId# 72057594037932033 2024-11-21T08:59:26.446437Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:26.446465Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:26.446515Z node 3 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:26.446599Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:26.446603Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:26.446790Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:71:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446809Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:72:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446825Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:73:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446850Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:74:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446865Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:75:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446879Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:76:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446895Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:77:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.446898Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:26.446905Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:65:2071] 2024-11-21T08:59:26.446908Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:65:2071] 2024-11-21T08:59:26.446911Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:26.446915Z node 3 :BS_NODE DEBUG: 
{NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:26.446965Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:26.446999Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:26.447316Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:26.447342Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:26.447434Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:26.447587Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:26.447595Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:26.447683Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:87:2075] ControllerId# 72057594037932033 2024-11-21T08:59:26.447686Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:26.447694Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:26.447719Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:26.448347Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:61:2065] 2024-11-21T08:59:26.448353Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:61:2065] 2024-11-21T08:59:26.448360Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:26.450317Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:26.450325Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:26.450514Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:94:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450540Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:95:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450565Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:96:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450590Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:97:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450618Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:98:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450666Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:99:2085] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.450692Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:100:2086] targetNodeId# 1 Marker# DSP01 
2024-11-21T08:59:26.450695Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:26.450703Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:87:2075] 2024-11-21T08:59:26.450705Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:87:2075] 2024-11-21T08:59:26.450709Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:26.450714Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:26.450811Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:26.450820Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:26.451622Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:26.451639Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:26.451728Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:108:2072] ControllerId# 72057594037932033 2024-11-21T08:59:26.451731Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:26.451738Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:26.451762Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:26.451832Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:26.451852Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:26.451854Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:26.452028Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:114:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452043Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:115:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452058Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:116:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452075Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:117:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452091Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:118:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452113Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:119:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452126Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:120:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:26.452128Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:26.452134Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:108:2072] 2024-11-21T08:59:26.452136Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:108:2072] 
2024-11-21T08:59:26.452139Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:26.452142Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:26.452231Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:26.452261Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:65:2071] 2024-11-21T08:59:26.452268Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:26.452271Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:26.452275Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:26.452297Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:26.452304Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:61:2065] 2024-11-21T08:59:26.455063Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:87:2075] 2024-11-21T08:59:26.455078Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:26.455081Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:26.455375Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:26.455398Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:26.455423Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:108:2072] 2024-11-21T08:59:26.455428Z node 2 :BS_NO ... 
09:00:06.552620Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [45:308:2288] 2024-11-21T09:00:06.552625Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [45:308:2288] 2024-11-21T09:00:06.552640Z node 45 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:06.552647Z node 45 :TABLET_RESOLVER DEBUG: SelectForward node 45 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [45:263:2256] 2024-11-21T09:00:06.552653Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [45:308:2288] 2024-11-21T09:00:06.552658Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [45:308:2288] 2024-11-21T09:00:06.552663Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [45:308:2288] 2024-11-21T09:00:06.552667Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [45:308:2288] 2024-11-21T09:00:06.552675Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [45:308:2288] 2024-11-21T09:00:06.552703Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [45:308:2288] 2024-11-21T09:00:06.552707Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [45:308:2288] 2024-11-21T09:00:06.552711Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [45:308:2288] 2024-11-21T09:00:06.552716Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [45:308:2288] 2024-11-21T09:00:06.552720Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [45:308:2288] 2024-11-21T09:00:06.552729Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [45:307:2287] EventType# 268697621 2024-11-21T09:00:06.552745Z node 45 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([45:308:2288]) [45:309:2289] 2024-11-21T09:00:06.552754Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle::TEvGetTabletStorageInfo TabletId=72075186224037888 2024-11-21T09:00:06.552781Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [45:311:2291] 2024-11-21T09:00:06.552783Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [45:311:2291] 2024-11-21T09:00:06.552787Z node 45 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:06.552791Z node 45 :TABLET_RESOLVER DEBUG: SelectForward node 45 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [45:263:2256] 2024-11-21T09:00:06.552794Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [45:311:2291] 2024-11-21T09:00:06.552798Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [45:311:2291] 2024-11-21T09:00:06.552801Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [45:311:2291] 2024-11-21T09:00:06.552803Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [45:311:2291] 2024-11-21T09:00:06.552809Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [45:311:2291] 2024-11-21T09:00:06.552816Z node 45 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] connected with status OK role: Leader [45:311:2291] 2024-11-21T09:00:06.552819Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [45:311:2291] 2024-11-21T09:00:06.552821Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [45:311:2291] 2024-11-21T09:00:06.552823Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [45:311:2291] 2024-11-21T09:00:06.552825Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [45:311:2291] 2024-11-21T09:00:06.552828Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [45:310:2290] EventType# 268697615 2024-11-21T09:00:06.552833Z node 45 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([45:311:2291]) [45:312:2292] 2024-11-21T09:00:06.552844Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} queued, type NKikimr::NHive::TTxDeleteTablet 2024-11-21T09:00:06.552849Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:06.552878Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 TxId_Deprecated: 0 2024-11-21T09:00:06.552884Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute Tablet 72075186224037888 2024-11-21T09:00:06.552912Z node 45 :HIVE DEBUG: HIVE#72057594037927937 Tablet(Dummy.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2024-11-21T09:00:06.552923Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037927937 TxId_Deprecated: 0 ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 2024-11-21T09:00:06.552936Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} hope 1 -> done Change{5, redo 102b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T09:00:06.552944Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:06.563228Z node 45 :BS_PROXY_PUT INFO: [9521640286a8eda0] bootstrap ActorId# [45:314:2294] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:00:06.563272Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Id# [72057594037927937:2:5:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:00:06.563279Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] restore Id# [72057594037927937:2:5:0:0:104:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:00:06.563286Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG33 2024-11-21T09:00:06.563290Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG32 2024-11-21T09:00:06.563325Z node 45 :BS_PROXY DEBUG: Send to queueActorId# [45:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:00:06.563760Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] received 
{EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:00:06.563782Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T09:00:06.563789Z node 45 :BS_PROXY_PUT INFO: [9521640286a8eda0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:00:06.563814Z node 45 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T09:00:06.563835Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2024-11-21T09:00:06.563847Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10050004 [45:270:2260],0x10040206 [45:310:2290]} 2024-11-21T09:00:06.563892Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} queued, type NKikimr::NHive::TTxDeleteTabletResult 2024-11-21T09:00:06.563897Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:06.563905Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2024-11-21T09:00:06.563965Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} hope 1 -> done Change{6, redo 106b alter 0b annex 0, ~{ 16, 1 } -{ }, 0 gb} 2024-11-21T09:00:06.563971Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:06.574309Z node 45 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] bootstrap ActorId# [45:316:2296] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:6:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:00:06.574367Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Id# [72057594037927937:2:6:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:00:06.574376Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] restore Id# [72057594037927937:2:6:0:0:104:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:00:06.574388Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG33 2024-11-21T09:00:06.574393Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG32 2024-11-21T09:00:06.574423Z node 45 :BS_PROXY DEBUG: Send to queueActorId# [45:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:6:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:00:06.574774Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] received 
{EvVPutResult Status# OK ID# [72057594037927937:2:6:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:00:06.574797Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Result# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T09:00:06.574804Z node 45 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:00:06.574828Z node 45 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T09:00:06.574849Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} commited cookie 1 for step 6 2024-11-21T09:00:06.574862Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {Notifications: 0x1004020B [45:307:2287]} >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:00.343480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:00.343494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:00.343497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:00.343499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:00.343511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T08:59:00.343513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:00.343518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:00.343566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:00.350382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:00.350394Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:00.351681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:00.351738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:00.351754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:00.353869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:00.353919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:00.353987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:00.354135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:00.354693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.354862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:00.354868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.354875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:00.354880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:00.354883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:00.354903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:00.355783Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:00.366101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:00.366151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.366190Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:00.366229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:00.366233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.366652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:00.366667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:00.366688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.366697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:00.366700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:00.366703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:00.367008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.367020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:00.367024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:00.367255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.367260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.367263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.367266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.367648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:00.367887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:00.367914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:00.368016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2024-11-21T08:59:00.368032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:00.368036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.368069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:00.368073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:00.368088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:00.368096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:00.368372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:00.368378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:00.368397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:00.368402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:00.368442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:00.368446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:00.368453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:00.368455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.368458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:00.368461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:00.368463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:00.368465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:00.368472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:00.368475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:00.368478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
102Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:00:07.126131Z node 235 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:00:07.126135Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:00:07.126141Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T09:00:07.126153Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [235:398:2373] message: TxId: 1004 2024-11-21T09:00:07.126157Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:00:07.126160Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:00:07.126162Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:00:07.126178Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:00:07.126243Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:00:07.126247Z node 235 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:00:07.126253Z node 235 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:00:07.126256Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:00:07.126259Z node 235 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T09:00:07.126281Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:00:07.126285Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:00:07.126288Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [235:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T09:00:07.126291Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [235:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T09:00:07.126434Z node 235 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.126444Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.126447Z node 235 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:00:07.126451Z node 235 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T09:00:07.126455Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:00:07.126509Z node 235 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.126514Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.126517Z node 235 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:00:07.126519Z node 235 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:00:07.126521Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:00:07.126526Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:00:07.126988Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:00:07.126996Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [235:533:2497] 2024-11-21T09:00:07.127556Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:00:07.127565Z node 235 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:00:07.127618Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:00:07.127638Z node 235 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:00:07.127641Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:00:07.127644Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T09:00:07.127652Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [235:398:2373] message: TxId: 1005 2024-11-21T09:00:07.127656Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:00:07.127659Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:00:07.127661Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:00:07.127674Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:00:07.127806Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.127818Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:00:07.128431Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:00:07.128440Z node 235 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [235:533:2497] 2024-11-21T09:00:07.128547Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 1009317316874 } TabletId: 72075186233409546 State: 4 2024-11-21T09:00:07.128557Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:07.129151Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:07.129188Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 430 RawX2: 1009317316962 } TabletId: 72075186233409547 State: 4 2024-11-21T09:00:07.129196Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:07.129279Z node 235 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:00:07.129370Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:00:07.129412Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:00:07.129482Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:00:07.129488Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:00:07.129496Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 Forgetting tablet 72075186233409546 2024-11-21T09:00:07.130392Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:07.130477Z node 235 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T09:00:07.130896Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:00:07.130943Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:00:07.131326Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:00:07.131336Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:00:07.131418Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T09:00:07.131427Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:00:07.131431Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:00:07.131440Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:00:07.131856Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:00:07.131863Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:00:07.131921Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T09:00:07.131981Z node 235 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:00:07.131989Z node 235 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit [GOOD] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] >> test_system_views.py::TestQueryMetrics::test_case >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> test_storage_config.py::TestStorageConfig::test_cases[case_4] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] >> test_storage_config.py::TestStorageConfig::test_cases[case_11] [GOOD] >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] >> test_storage_config.py::TestStorageConfig::test_cases[case_12] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> ExternalIndex::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> BasicUsage::TWriteSession_WriteEncoded [GOOD] Test command err: 2024-11-21T08:58:39.369629Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654386047987932:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.369944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.495125Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e69/r3tmp/tmpKzkuVG/pdisk_1.dat 2024-11-21T08:58:39.763740Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.764053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.764067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26264, node 1 2024-11-21T08:58:39.785110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:39.828349Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.225660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e69/r3tmp/yandexkPZUMt.tmp 2024-11-21T08:58:40.225672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e69/r3tmp/yandexkPZUMt.tmp 2024-11-21T08:58:40.225716Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e69/r3tmp/yandexkPZUMt.tmp 2024-11-21T08:58:40.225747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.396796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390342955823:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.396822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.406115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390342955835:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.471623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.526245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654390342955837:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.576628Z INFO: TTestServer started on Port 21369 GrpcPort 26264 TClient is connected to server localhost:21369 PQClient connected to localhost:26264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.840550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.843444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.868075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:41.048341Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:41.389633Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654390342955912:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.393909Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I5MDZjYzQtZDQxMzNkYS02ZGNjOGE1YS1lNmRlZTVmMg==, ActorId: [1:7439654390342955812:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw8dcyfb4s8djm164a8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400801Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.571845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.631563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654398932890975:2626] 2024-11-21T08:58:44.364069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654386047987932:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.868094Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900237Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960290Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960368Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978710Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654420407727833:2831] connected; active server actors: 1 2024-11-21T08:58:47.978823Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987550Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987641Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988057Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988066Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988070Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.988343Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.996501Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996612Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996709Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005225Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005259Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005309Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005330Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014013Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420407727832:2830], now have 1 active actors on pipe 2024-11-21T08:58:48.014037Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014042Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420407727846:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014047Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:58:48.016721Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439654386047988316:2178] txId 281474976710674 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } Explic ... 
er session test-consumer_12_1_17408014923084169412_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2024-11-21T09:00:10.666394Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 partition ready for read: partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1732179605661, sizeLag# 519 2024-11-21T09:00:10.666402Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1TEvPartitionReady. Aval parts: 1 2024-11-21T09:00:10.666423Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 performing read request: guid# 47e6848b-f6d8d067-7b52b968-31d62afc, from# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), count# 4, size# 622, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T09:00:10.666453Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 READ FROM TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1)maxCount 4 maxSize 622 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 47e6848b-f6d8d067-7b52b968-31d62afc 2024-11-21T09:00:10.666483Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T09:00:10.666492Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T09:00:10.666506Z node 12 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:00:10.666535Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 Topic 'test-topic' partition 0 user test-consumer offset 0 count 4 size 622 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T09:00:10.666544Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T09:00:10.666589Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2024-11-21T09:00:10.666598Z node 12 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:00:10.666642Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:00:10.666749Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732179605661 CreateTimestampMS: 1732179605660 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732179605661 CreateTimestampMS: 1732179605660 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1732179605661 CreateTimestampMS: 1732179605660 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "" SeqNo: 4 WriteTimestampMS: 1732179605661 CreateTimestampMS: 1732179605660 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 29 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T09:00:10.666789Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T09:00:10.666800Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 47e6848b-f6d8d067-7b52b968-31d62afc has messages 1 2024-11-21T09:00:10.666826Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 read done: guid# 47e6848b-f6d8d067-7b52b968-31d62afc, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 393 2024-11-21T09:00:10.666839Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 response to read: guid# 47e6848b-f6d8d067-7b52b968-31d62afc 2024-11-21T09:00:10.666920Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 Process answer. Aval parts: 0 2024-11-21T09:00:10.666992Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] Got ReadResponse, serverBytesSize = 393, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428407 2024-11-21T09:00:10.667019Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428407 2024-11-21T09:00:10.667110Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2024-11-21T09:00:10.667122Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] Returning serverBytesSize = 393 to budget 2024-11-21T09:00:10.667128Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] In ContinueReadingDataImpl, ReadSizeBudget = 393, ReadSizeServerDelta = 52428407 2024-11-21T09:00:10.667202Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T09:00:10.667245Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T09:00:10.667253Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 1} (1-1) 2024-11-21T09:00:10.667258Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 2} (2-2) 2024-11-21T09:00:10.667262Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (3-3) 2024-11-21T09:00:10.667270Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] The application data is transferred to the client. Number of messages 4, size 14 bytes 2024-11-21T09:00:10.667277Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:10.667270Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 grpc read done: success# 1, data# { read_request { bytes_size: 393 } } 2024-11-21T09:00:10.667297Z :INFO: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] Closing read session. 
Close timeout: 0.000000s 2024-11-21T09:00:10.667304Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:0 2024-11-21T09:00:10.667310Z :INFO: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:10.667317Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 got read request: guid# fdb9be6c-b813aa51-f5e773f8-b111f15b 2024-11-21T09:00:10.667329Z :NOTICE: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:10.667334Z :DEBUG: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] [] Abort session to cluster 2024-11-21T09:00:10.667441Z :NOTICE: [/Root] [/Root] [e1ef1350-5271dac-db642868-e8be8359] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:10.667465Z :INFO: [/Root] SessionId [4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T09:00:10.667468Z :INFO: [/Root] SessionId [4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:10.667473Z :DEBUG: [/Root] SessionId [4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:10.667492Z :INFO: [/Root] SessionId [4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:10.667495Z :DEBUG: [/Root] SessionId [4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:10.667512Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:10.667521Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 grpc read failed 2024-11-21T09:00:10.667523Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 grpc closed 2024-11-21T09:00:10.667530Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_17408014923084169412_v1 is DEAD 2024-11-21T09:00:10.667714Z node 12 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [12:7439654776691847981:2534] disconnected; active server actors: 1 2024-11-21T09:00:10.667716Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:10.667719Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_12_1_17408014923084169412_v1 2024-11-21T09:00:10.667723Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439654776691847984:2537] destroyed 2024-11-21T09:00:10.667725Z node 12 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [12:7439654776691847981:2534] client test-consumer disconnected session test-consumer_12_1_17408014923084169412_v1 2024-11-21T09:00:10.667730Z node 12 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_12_1_17408014923084169412_v1 2024-11-21T09:00:10.667739Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0 grpc read done: success: 0 data: 2024-11-21T09:00:10.667740Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0 grpc read failed 2024-11-21T09:00:10.667743Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0 grpc closed 2024-11-21T09:00:10.667745Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 4358148c-a824805c-b56ba2cf-ae8f74d9|8f33f294-5cbedd48-9773c9ff-c17a8db_0 is DEAD 2024-11-21T09:00:10.667927Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:10.667967Z node 12 :PERSQUEUE DEBUG: [PQ: 
72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:10.667983Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439654750922043884:2454] destroyed 2024-11-21T09:00:10.667992Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_6] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::ValidStartStream >> TxUsage::WriteToTopic_Demo_25 [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_12] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2024-11-21T08:56:54.637905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:91:2137], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:56:54.637935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:56:54.637948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2024-11-21T08:56:54.637956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ef1/r3tmp/tmpTZyXpJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19986, node 1 TClient is connected to server localhost:3743 2024-11-21T08:56:54.758462Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2024-11-21T08:56:54.758537Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2024-11-21T08:56:54.764403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.784223Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:54.785078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:56:54.785091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:56:54.785095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:56:54.785194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:56:54.826992Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T08:56:54.827198Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T08:56:54.827241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:56:54.827264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:56:54.837949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:56:54.942388Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvProposeTransaction 2024-11-21T08:56:54.942416Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T08:56:54.942459Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:643:2537] 2024-11-21T08:56:54.952266Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" 
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T08:56:54.952542Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T08:56:54.952557Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T08:56:54.952623Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T08:56:54.952664Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T08:56:54.952678Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T08:56:54.953622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:56:54.953822Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T08:56:54.953982Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T08:56:54.953993Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 SEND to# [1:642:2536] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:56:54.972012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:711:2597];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T08:56:54.973843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:711:2597];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T08:56:54.973884Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T08:56:54.974260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:56:54.974292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:56:54.974316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:56:54.974329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2024-11-21T08:56:54.974342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:56:54.974353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:56:54.974364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:56:54.974375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:56:54.974387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:56:54.974402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:56:54.974414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:56:54.974425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:56:54.977449Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T08:56:54.977509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T08:56:54.977516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T08:56:54.977548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:56:54.977573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:56:54.977585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:56:54.977591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T08:56:54.977600Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T08:56:54.977611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:56:54.977618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:56:54.977622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T08:56:54.977638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T08:56:54.977645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:56:54.977653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... oFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"b9aedf1b-1d2efa05-73ce5f62-cc85498d")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2024-11-21T09:00:10.732671Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.732 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '322) '('"_id" '"96a1892b-8acf0779-70d5c1ac-2242a2b4") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"b9aedf1b-1d2efa05-73ce5f62-cc85498d")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2024-11-21T09:00:10.733822Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.733 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:466: Register async execution for node #260 2024-11-21T09:00:10.733850Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: 
CompileActor 2024-11-21 09:00:10.733 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {3}, callable #269 2024-11-21T09:00:10.733867Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.733 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #269 finished execution 2024-11-21T09:00:10.733876Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.733 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:594: Node #269 created 0 trackable nodes: 2024-11-21T09:00:10.733884Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.733 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:87: Finish, output #272, status: Async 2024-11-21T09:00:10.734121Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #260 2024-11-21T09:00:10.734132Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #260 2024-11-21T09:00:10.734140Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:59: Begin, root #272 2024-11-21T09:00:10.734146Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #272, status: Ok 2024-11-21T09:00:10.734152Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {0}, callable #272 2024-11-21T09:00:10.734157Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {1}, callable #271 2024-11-21T09:00:10.734163Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {2}, callable #270 2024-11-21T09:00:10.734177Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {3}, callable #267 2024-11-21T09:00:10.734183Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {4}, callable #260 2024-11-21T09:00:10.734208Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, 
tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #260 finished execution 2024-11-21T09:00:10.734217Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:594: Node #260 created 0 trackable nodes: 2024-11-21T09:00:10.734223Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {3}, callable #267 2024-11-21T09:00:10.734228Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #267 finished execution 2024-11-21T09:00:10.734235Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {2}, callable #270 2024-11-21T09:00:10.734268Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #270 finished execution 2024-11-21T09:00:10.734274Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:594: Node #270 created 0 trackable nodes: 2024-11-21T09:00:10.734280Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {1}, callable #271 2024-11-21T09:00:10.734289Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #271 finished execution 2024-11-21T09:00:10.734295Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:594: Node #271 created 0 trackable nodes: 2024-11-21T09:00:10.734301Z node 1 :KQP_YQL TRACE: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 TRACE ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:387: {0}, callable #272 2024-11-21T09:00:10.734307Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:577: Node #272 finished execution 2024-11-21T09:00:10.734313Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:594: Node #272 created 0 trackable nodes: 2024-11-21T09:00:10.734318Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:87: Finish, output #272, status: Ok 
2024-11-21T09:00:10.734324Z node 1 :KQP_YQL INFO: TraceId: 01jd6z6k2r192zf8ehx1sc6z9c, SessionId: CompileActor 2024-11-21 09:00:10.734 INFO ydb-services-ext_index-ut(pid=714530, tid=0x00007F42340C2BC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #272 2024-11-21T09:00:10.746393Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2024-11-21T09:00:10.746423Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716246 ProcessProposeKqpTransaction 2024-11-21T09:00:10.748167Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2024-11-21T09:00:10.748181Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716247 ProcessProposeKqpTransaction 2024-11-21T09:00:10.938688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:711:2597];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:00:10.938774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:719:2599];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:00:10.938780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:722:2601];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:00:10.938785Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:725:2604];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:00:10.960156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:711:2597];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T09:00:10.960217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:719:2599];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T09:00:10.960226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:722:2601];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T09:00:10.960235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:725:2604];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_25 [GOOD] Test command err: 2024-11-21T08:58:39.370380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654388617070281:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.370874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.495119Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e9e/r3tmp/tmpWPo8pE/pdisk_1.dat 2024-11-21T08:58:39.765994Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.766378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.766391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20599, node 1 2024-11-21T08:58:39.776925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:39.824411Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.224463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e9e/r3tmp/yandexBPPVa8.tmp 2024-11-21T08:58:40.224485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e9e/r3tmp/yandexBPPVa8.tmp 2024-11-21T08:58:40.224555Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e9e/r3tmp/yandexBPPVa8.tmp 2024-11-21T08:58:40.224606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.394302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392912038177:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.394334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.394432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392912038189:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.482796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.528259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654392912038191:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.572806Z INFO: TTestServer started on Port 6194 GrpcPort 20599 TClient is connected to server localhost:6194 PQClient connected to localhost:20599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.840535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.844728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.858838Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T08:58:40.868868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:58:41.389671Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654392912038266:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.390152Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJiNmFiYWYtODIyMTk1YjgtMjNiMmRjMzgtYzQ5MGNmNWU=, ActorId: [1:7439654392912038166:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw8fzcyq0xa22jesn72, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400432Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.571673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.625619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654401501973329:2628] 2024-11-21T08:58:44.364782Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654388617070281:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.862387Z :WriteToTopic_Demo_21_RestartNo INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900236Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960508Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.970594Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978721Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654422976810163:2822] connected; active server actors: 1 2024-11-21T08:58:47.978814Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987548Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987661Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988037Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988139Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988149Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988152Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996508Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996539Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996712Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005342Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005359Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005446Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.005455Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.014051Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422976810199:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014069Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014073Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422976810162:2821], now have 1 active actors on pipe 2024-11-21T08:58:48.015797Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654388617070667 RawX2: 4294969473 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitCh ... 
0_2_947708513682628464_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 3 2024-11-21T09:00:10.334882Z :DEBUG: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 3 } } 2024-11-21T09:00:11.321309Z node 10 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:00:11.321634Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0 describe result for acl check 2024-11-21T09:00:11.331310Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T09:00:11.331335Z :INFO: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:11.332071Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 checking auth because of timeout 2024-11-21T09:00:11.332098Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 auth for : test-consumer 2024-11-21T09:00:11.332247Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 Handle describe topics response 2024-11-21T09:00:11.332284Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 auth is DEAD 2024-11-21T09:00:11.332305Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 auth ok: topics# 1, initDone# 1 2024-11-21T09:00:12.331386Z :INFO: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:12.331407Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T09:00:12.331417Z :INFO: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:12.331440Z :NOTICE: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:12.331448Z :DEBUG: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] [] Abort session to cluster 2024-11-21T09:00:12.331825Z :NOTICE: [/Root] [/Root] [810b16a7-4cb279e9-f5cc1814-1eab756b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:12.331956Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:12.331970Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 grpc read failed 2024-11-21T09:00:12.332058Z :INFO: [/Root] [/Root] [ec45aac8-5023b29d-3d1e5d01-31e54ee3] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:12.331976Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 grpc closed 2024-11-21T09:00:12.332003Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_947708513682628464_v1 is DEAD 2024-11-21T09:00:12.332088Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:2:0 2024-11-21T09:00:12.332105Z :INFO: [/Root] [/Root] [ec45aac8-5023b29d-3d1e5d01-31e54ee3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4015 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:12.332113Z :NOTICE: [/Root] [/Root] [ec45aac8-5023b29d-3d1e5d01-31e54ee3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:12.332118Z :DEBUG: [/Root] [/Root] [ec45aac8-5023b29d-3d1e5d01-31e54ee3] [] Abort session to cluster 2024-11-21T09:00:12.332105Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.332123Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_10_2_947708513682628464_v1 2024-11-21T09:00:12.332128Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439654778033985486:2570] destroyed 2024-11-21T09:00:12.332142Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_2_947708513682628464_v1 2024-11-21T09:00:12.332320Z :NOTICE: [/Root] [/Root] [ec45aac8-5023b29d-3d1e5d01-31e54ee3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:12.332331Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic_B] pipe [10:7439654778033985483:2567] disconnected; active server actors: 1 2024-11-21T09:00:12.332339Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic_B] pipe [10:7439654778033985483:2567] client test-consumer disconnected session test-consumer_10_2_947708513682628464_v1 2024-11-21T09:00:12.332455Z :INFO: [/Root] SessionId [test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T09:00:12.332463Z :INFO: [/Root] SessionId [test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:12.332472Z :DEBUG: [/Root] SessionId [test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:12.332714Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_909041062273258242_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:12.332722Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_909041062273258242_v1 grpc read failed 2024-11-21T09:00:12.332725Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_909041062273258242_v1 grpc closed 2024-11-21T09:00:12.332738Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_909041062273258242_v1 is DEAD 2024-11-21T09:00:12.332748Z :INFO: [/Root] SessionId [test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:12.332757Z :DEBUG: [/Root] SessionId [test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:12.332779Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.332789Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_909041062273258242_v1 2024-11-21T09:00:12.332791Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439654769444050653:2507] destroyed 2024-11-21T09:00:12.332796Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_909041062273258242_v1 2024-11-21T09:00:12.332816Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439654769444050650:2504] disconnected; active server actors: 1 2024-11-21T09:00:12.332824Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439654769444050650:2504] client test-consumer disconnected session test-consumer_10_1_909041062273258242_v1 2024-11-21T09:00:12.332947Z :INFO: [/Root] SessionId [test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T09:00:12.332950Z :INFO: [/Root] SessionId [test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:12.332953Z :DEBUG: [/Root] SessionId [test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:12.332948Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0 grpc read done: success: 0 data: 2024-11-21T09:00:12.332958Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0 grpc read failed 2024-11-21T09:00:12.332962Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0 grpc closed 2024-11-21T09:00:12.332964Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|5308a75e-b588a9c7-e7a57ce-d9207a7b_0 is DEAD 2024-11-21T09:00:12.333043Z :INFO: [/Root] SessionId [test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:12.333047Z :DEBUG: [/Root] SessionId [test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:12.333090Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:12.333105Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:12.333132Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0 grpc read done: success: 0 data: 2024-11-21T09:00:12.333133Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0 grpc read failed 2024-11-21T09:00:12.333139Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0 grpc closed 2024-11-21T09:00:12.333143Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|49214f21-de5556ac-c6f9b912-682034fb_0 is DEAD 2024-11-21T09:00:12.333191Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.333205Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439654778033985371:2541] destroyed 2024-11-21T09:00:12.333209Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.333213Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439654778033985374:2541] destroyed 2024-11-21T09:00:12.333227Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T09:00:12.333320Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:12.333348Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.333360Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439654769444050634:2498] destroyed 2024-11-21T09:00:12.333367Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2024-11-21T08:59:31.151968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654612441141424:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:31.151980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:59:31.173883Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00382d/r3tmp/tmpQghaTT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1076, node 1 2024-11-21T08:59:31.198335Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:31.202046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00382d/r3tmp/yandex3lMjfr.tmp 2024-11-21T08:59:31.202054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00382d/r3tmp/yandex3lMjfr.tmp 2024-11-21T08:59:31.202103Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00382d/r3tmp/yandex3lMjfr.tmp 2024-11-21T08:59:31.202131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:31.205295Z INFO: TTestServer started on Port 15534 GrpcPort 1076 TClient is connected to server localhost:15534 PQClient connected to localhost:1076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:59:31.252989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:31.253013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:31.254098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:59:31.276483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:59:31.283749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:59:31.381030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654612441142187:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.381046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654612441142178:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.381063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.381574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654612441142221:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.381590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.381680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:31.383133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654612441142192:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:59:31.400096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.439192Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654612441142328:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:31.439307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRlY2E3MmYtMWJjMzc4NzctMTkzMzIwMzEtNzVlMWE3Mw==, ActorId: [1:7439654612441142175:2304], ActorState: ExecuteState, TraceId: 01jd6z5cnj7khdmr0yznnft7y3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:31.439834Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:59:31.454683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.470409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654612441142533:2593] 2024-11-21T08:59:36.152174Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654612441141424:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:36.152226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:36.557537Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:36.560217Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:36.560550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654633915979308:2756], Recipient [1:7439654612441141828:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:36.560561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:36.560562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:36.560567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654633915979304:2753], Recipient [1:7439654612441141828:2194]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T08:59:36.560568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:36.565065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:36.565132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:36.565177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:36.565191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:36.565195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:36.565199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:36.565201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for p ... -11-21T09:00:12.886003Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [4:7439654787635863091:2434], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T09:00:12.886026Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:00:12.886032Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:00:12.886035Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715675:0 2024-11-21T09:00:12.886061Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:7439654766161025374:2146], Recipient [4:7439654766161025374:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:00:12.886068Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:00:12.886075Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:00:12.886077Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715675, path id: [OwnerId: 72057594046644480, LocalPathId: 13] 2024-11-21T09:00:12.886116Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [4:7439654787635862966:2434], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T09:00:12.886144Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:00:12.886151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:7439654766161025519:2227], at schemeshard: 72057594046644480, txId: 281474976715675, path id: 13 2024-11-21T09:00:12.886160Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [4:7439654787635862966:2434], Recipient [4:7439654787635862966:2434]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T09:00:12.886167Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T09:00:12.886167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:00:12.886169Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:00:12.886171Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715675:0 ProgressState 2024-11-21T09:00:12.886172Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715675, State EXECUTED 2024-11-21T09:00:12.886174Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T09:00:12.886176Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715675, NewState WAIT_RS_ACKS 2024-11-21T09:00:12.886178Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:00:12.886178Z node 4 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T09:00:12.886180Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, 
AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T09:00:12.886180Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715675:0 progress is 1/1 2024-11-21T09:00:12.886181Z node 4 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T09:00:12.886182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T09:00:12.886183Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715675 to the list for deletion 2024-11-21T09:00:12.886185Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715675, NewState DELETING 2024-11-21T09:00:12.886186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715675, ready parts: 1/1, is published: false 2024-11-21T09:00:12.886188Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715675 2024-11-21T09:00:12.886188Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T09:00:12.886191Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715675:0 2024-11-21T09:00:12.886192Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715675:0 2024-11-21T09:00:12.886195Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T09:00:12.886212Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2024-11-21T09:00:12.886215Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715675, publications: 1, subscribers: 1 2024-11-21T09:00:12.886216Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [4:7439654787635862966:2434], Recipient [4:7439654787635862966:2434]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715675" IncludeFrom: true To: "tx_00000281474976715675" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\205\252\315\357\2642\030\233\247\200\200\200\200@(\240\215\0060\205\252\315\357\26428\233\247\200\200\200\200@" } 2024-11-21T09:00:12.886217Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715675, [OwnerId: 72057594046644480, LocalPathId: 13], 3 2024-11-21T09:00:12.886245Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [4:7439654787635863094:2449], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T09:00:12.886310Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [4:7439654787635863095:2434], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T09:00:12.886337Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:00:12.886388Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [4:7439654766161025519:2227], Recipient [4:7439654766161025374:2146]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] Version: 3 } 2024-11-21T09:00:12.886396Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:00:12.886407Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715675 2024-11-21T09:00:12.886423Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715675 2024-11-21T09:00:12.886430Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715675 2024-11-21T09:00:12.886433Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715675, pathId: [OwnerId: 72057594046644480, LocalPathId: 13], version: 3 2024-11-21T09:00:12.886436Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T09:00:12.886440Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [4:7439654787635862966:2434], Recipient [4:7439654787635862966:2434]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T09:00:12.886441Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T09:00:12.886443Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:00:12.886445Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715675, State DELETING 2024-11-21T09:00:12.886447Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T09:00:12.886450Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715675 2024-11-21T09:00:12.886466Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715675, subscribers: 1 2024-11-21T09:00:12.886468Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [4:7439654787635862966:2434], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T09:00:12.886473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [4:7439654787635863050:2444] 2024-11-21T09:00:12.886477Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:00:12.886517Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715675 2024-11-21T09:00:12.886523Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:00:12.886533Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [4:7439654787635863050:2444] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715675 at schemeshard: 72057594046644480 2024-11-21T09:00:12.886565Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [4:7439654787635863099:2450], Recipient [4:7439654787635862966:2434]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T09:00:12.886746Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [4:7439654787635863065:2828], Recipient [4:7439654766161025374:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.886754Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:12.886756Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 
2024-11-21T09:00:12.887407Z node 4 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T09:00:12.887616Z node 4 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "autoscalit-topic" 2024-11-21T09:00:12.887651Z node 4 :PQ_READ_PROXY DEBUG: Describe topic actor for path autoscalit-topic 2024-11-21T09:00:12.972663Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439654787635862966:2434], Partition 0, Sender [0:0:0], Recipient [4:7439654787635863026:2438], Cookie: 0 2024-11-21T09:00:12.972691Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439654787635863026:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:12.972695Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:12.972710Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:12.972734Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:12.972737Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:12.972743Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> test_storage_config.py::TestStorageConfig::test_cases[case_7] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> test_storage_config.py::TestStorageConfig::test_cases[case_6] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD] >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TxUsage::WriteToTopic_Demo_16 [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::TestBasicRemote [GOOD] Test command err: 2024-11-21T08:59:31.849201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654609827570565:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:31.849313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037e9/r3tmp/tmpjOJzMI/pdisk_1.dat 2024-11-21T08:59:31.876677Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T08:59:31.891861Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24062, node 1 2024-11-21T08:59:31.903441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0037e9/r3tmp/yandex0QJdjl.tmp 
2024-11-21T08:59:31.903453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0037e9/r3tmp/yandex0QJdjl.tmp 2024-11-21T08:59:31.903494Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0037e9/r3tmp/yandex0QJdjl.tmp 2024-11-21T08:59:31.903517Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:31.906534Z INFO: TTestServer started on Port 1302 GrpcPort 24062 TClient is connected to server localhost:1302 PQClient connected to localhost:24062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:59:31.950498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:31.950523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:31.951477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:59:31.974784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:59:31.983707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:59:32.075026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654614122538604:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.075043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654614122538630:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.075047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.075621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:32.075990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654614122538661:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.076017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:32.076935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654614122538633:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:59:32.093058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.097786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.106203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T08:59:32.148563Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654614122538905:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:32.148633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY4Y2I1YWQtMjk3OGQ4ODMtYWZmNTdhYjYtNjEyMDdiYzc=, ActorId: [1:7439654614122538601:2304], ActorState: ExecuteState, TraceId: 01jd6z5dbaarasc5av6k463e3p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:32.149020Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654614122538973:2593] 2024-11-21T08:59:36.849657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654609827570565:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:36.849700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:37.278325Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:37.281360Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:37.281824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654635597375757:2765], Recipient [1:7439654609827570975:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.281836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.281838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:37.281843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654635597375753:2762], Recipient [1:7439654609827570975:2194]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T08:59:37.281844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:37.286275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:37.286351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:37.286404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:37.286419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:37.286427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:37.286431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:37.286437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 
3 2024-11-21T08:59:37.286439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPat ... session cookie 1 consumer shared/some_user session shared/some_user_5_1_1431696380362735616_v1 is DEAD 2024-11-21T09:00:14.414478Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/some_user session shared/some_user_5_2_12249653581853157700_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:14.414486Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_12249653581853157700_v1 grpc read failed 2024-11-21T09:00:14.414487Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_12249653581853157700_v1 grpc closed 2024-11-21T09:00:14.414496Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_12249653581853157700_v1 is DEAD 2024-11-21T09:00:14.414526Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439654791000149502:2502] disconnected; active server actors: 1 2024-11-21T09:00:14.414538Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439654791000149502:2502] client some_user disconnected session shared/some_user_5_1_1431696380362735616_v1 2024-11-21T09:00:14.414553Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user rebalancing was scheduled 2024-11-21T09:00:14.414575Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user balancing. Sessions=1, Families=2, UnradableFamilies=1 [2 (1), ], RequireBalancing=0 [] 2024-11-21T09:00:14.414589Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user balancing of the family=2 (Status=Free, Partitions=[1]) failed because there are no suitable reading sessions. 
2024-11-21T09:00:14.414603Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user balancing duration: 0.000021s 2024-11-21T09:00:14.414607Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439654791000149504:2503] disconnected; active server actors: 1 2024-11-21T09:00:14.414609Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439654791000149504:2503] client some_user disconnected session shared/some_user_5_2_12249653581853157700_v1 2024-11-21T09:00:14.414605Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:14.414626Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_1_1431696380362735616_v1 2024-11-21T09:00:14.414642Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654791000149507:2509] destroyed 2024-11-21T09:00:14.414648Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:14.414652Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_2_12249653581853157700_v1 2024-11-21T09:00:14.414654Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654791000149509:2510] destroyed 2024-11-21T09:00:14.414662Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/some_user_5_1_1431696380362735616_v1 2024-11-21T09:00:14.414668Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/some_user_5_2_12249653581853157700_v1 2024-11-21T09:00:14.414716Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T09:00:14.414782Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T09:00:14.414790Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T09:00:14.414813Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T09:00:14.414873Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:14.414886Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] [] Abort session to cluster 2024-11-21T09:00:14.414894Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] got next reader event: 1 2024-11-21T09:00:14.414919Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:14.414933Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:10:11 2024-11-21T09:00:14.414927Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T09:00:14.414935Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T09:00:14.414941Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Counters: { Errors: 1 CurrentSessionLifetimeMs: 952 BytesRead: 251 MessagesRead: 11 BytesReadCompressed: 251 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:14.414948Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:14.414951Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:10:11 2024-11-21T09:00:14.414952Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Counters: { Errors: 1 CurrentSessionLifetimeMs: 952 BytesRead: 251 MessagesRead: 11 BytesReadCompressed: 251 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:14.414954Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T09:00:14.414956Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T09:00:14.414960Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T09:00:14.414971Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [a374aa44-1a636cbb-9ffec530-38c57092] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:14.414999Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:14.415009Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] [] Abort session to cluster 2024-11-21T09:00:14.415015Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] got next reader event: 1 2024-11-21T09:00:14.415033Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:14.415046Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:16:17 2024-11-21T09:00:14.415054Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Counters: { Errors: 1 CurrentSessionLifetimeMs: 953 BytesRead: 534 MessagesRead: 17 BytesReadCompressed: 534 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:14.415059Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:14.415062Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:16:17 2024-11-21T09:00:14.415064Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Counters: { Errors: 1 CurrentSessionLifetimeMs: 953 BytesRead: 534 MessagesRead: 17 BytesReadCompressed: 534 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:14.415069Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [62301e23-da573254-2e4d5094-d3acf85d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:14.415240Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] schedule consumer creation 2024-11-21T09:00:14.415264Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T09:00:14.415675Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] schedule consumer creation >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> test_storage_config.py::TestStorageConfig::test_cases[case_1] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_2] >> TxUsage::WriteToTopic_Demo_17 >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:07.786359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:07.786374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:07.786377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:07.786379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:07.786388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:07.786391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:07.786397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:07.786458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:07.794915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:07.794930Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:07.796504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:07.796579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:07.796601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:07.799129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:07.799189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:07.799292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:07.799471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:07.800246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:07.800492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:07.800509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:07.800517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:07.800521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:07.800526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:07.800550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:07.801781Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:07.812769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:07.812822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.812859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:07.812898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:07.812903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:07.813371Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:07.813382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:07.813387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:07.813685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:07.813927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.813937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:07.813941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:07.814335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:07.814592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:07.814626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:07.814740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:07.814757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:07.814761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:07.814793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:07.814797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:07.814814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:07.814821Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:07.815096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:07.815103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:07.815130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:07.815134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:07.815191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:07.815196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:07.815203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:59:07.815205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:07.815209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:07.815212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:07.815215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:07.815217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:07.815223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:07.815227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:07.815229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
id#1009:0 progress is 1/1 2024-11-21T09:00:14.765832Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1009 ready parts: 1/1 2024-11-21T09:00:14.765838Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1009, ready parts: 1/1, is published: true 2024-11-21T09:00:14.765842Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1009 ready parts: 1/1 2024-11-21T09:00:14.765846Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1009:0 2024-11-21T09:00:14.765850Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1009:0 2024-11-21T09:00:14.765868Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:00:14.766019Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1009 2024-11-21T09:00:14.766265Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1009 2024-11-21T09:00:14.767903Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 563 RawX2: 695784704476 } TabletId: 72075186233409548 State: 4 2024-11-21T09:00:14.767916Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:14.767946Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 684 RawX2: 695784704586 } TabletId: 72075186233409549 State: 4 2024-11-21T09:00:14.767950Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:14.767973Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 695784704269 } TabletId: 72075186233409546 State: 4 2024-11-21T09:00:14.767977Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:14.768417Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:14.768583Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:14.768665Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T09:00:14.768991Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:00:14.769032Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409548 2024-11-21T09:00:14.769089Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:14.769170Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths 
in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:00:14.769174Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:00:14.769181Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:00:14.769217Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T09:00:14.769238Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409549 2024-11-21T09:00:14.769762Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:00:14.769795Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 Forgetting tablet 72075186233409546 2024-11-21T09:00:14.769901Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:00:14.769920Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:00:14.769971Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 436 RawX2: 695784704360 } TabletId: 72075186233409547 State: 4 2024-11-21T09:00:14.769977Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:00:14.770417Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:00:14.770427Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:00:14.770439Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2024-11-21T09:00:14.770455Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:00:14.770458Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:00:14.770469Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:00:14.770473Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:00:14.770476Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:00:14.770821Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 
2024-11-21T09:00:14.770831Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:00:14.770845Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:00:14.770850Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:00:14.770881Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:00:14.770944Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T09:00:14.771299Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:00:14.771335Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:00:14.771763Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T09:00:14.771805Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:00:14.771810Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:00:14.771820Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:00:14.772267Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:00:14.772277Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:00:14.772344Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1009, wait until txId: 1009 TestWaitNotification wait txId: 1009 2024-11-21T09:00:14.772403Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: send EvNotifyTxCompletion 2024-11-21T09:00:14.772408Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1009 2024-11-21T09:00:14.772462Z node 162 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1009, at schemeshard: 72057594046678944 2024-11-21T09:00:14.772478Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: got EvNotifyTxCompletionResult 2024-11-21T09:00:14.772483Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: satisfy waiter [162:1058:3000] TestWaitNotification: OK eventTxId 1009 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted 
2024-11-21T09:00:14.772557Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:00:14.772570Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:00:14.772576Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T09:00:14.772581Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T09:00:14.772589Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T09:00:14.772595Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T09:00:14.772603Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T09:00:14.772609Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T09:00:14.772616Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2024-11-21T08:59:23.155161Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:23.155992Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:23.156081Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:23.156250Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:23.156488Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:23.156502Z node 1 :BS_NODE DEBUG: 
{NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:23.156649Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:45:2073] ControllerId# 72057594037932033 2024-11-21T08:59:23.156657Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:23.156690Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:23.156766Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:23.159135Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:23.159149Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:23.159519Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:53:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159563Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:54:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159591Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:55:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159619Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:56:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159643Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:57:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159669Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:58:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159716Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:59:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.159722Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:23.159735Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:45:2073] 2024-11-21T08:59:23.159740Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:45:2073] 2024-11-21T08:59:23.159747Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:23.159754Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:23.159928Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:23.159942Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:23.160576Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:23.160617Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:23.160771Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2024-11-21T08:59:23.160778Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:23.160791Z node 2 :BS_NODE DEBUG: 
{NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:23.160826Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:23.160934Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:23.160970Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:23.160975Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:23.161263Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161299Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:74:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161325Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161354Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161378Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161414Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161445Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:23.161449Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:23.161460Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2024-11-21T08:59:23.161464Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2024-11-21T08:59:23.161489Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:23.161498Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:23.161575Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:23.161600Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:23.164473Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T08:59:23.164493Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:23.164499Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:23.164572Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2024-11-21T08:59:23.164585Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:23.164590Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:23.164623Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:23.164640Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T08:59:23.164645Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:23.164652Z node 1 :BS_NODE DEBUG: 
{NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:23.165251Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:23.165650Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:23.165669Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:23.165697Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:23.165718Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:23.166333Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:23.166356Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:23.166361Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:23.166389Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:89:2084] 2024-11-21T08:59:23.166400Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:49:2064] 2024-11-21T08:59:23.166406Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:49:2064] 2024-11-21T08:59:23.166440Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:23.166522Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:23.166572Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:23.166582Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:23.166588Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:23.166606Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:23.166626Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:23.166632Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:23.166687Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:23.166697Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:23.166715Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:23.166718Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:23.166728Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:95:2091] 2024-11-21T08:59:23.166770Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 
ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID ... 4037888] lookup [24:548:2090] 2024-11-21T09:00:16.346536Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:16.346541Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [23:316:2259] 2024-11-21T09:00:16.346545Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [24:547:2089] 2024-11-21T09:00:16.346547Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:548:2090] 2024-11-21T09:00:16.346556Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:16.346585Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 23 [24:547:2089] 2024-11-21T09:00:16.346606Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:00:16.346615Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [24:547:2089] 2024-11-21T09:00:16.346619Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [24:547:2089] 2024-11-21T09:00:16.346685Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:00:16.346710Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:00:16.346719Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:00:16.346739Z node 23 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [24:547:2089] 2024-11-21T09:00:16.346799Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T09:00:16.346818Z node 23 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([24:547:2089]) [23:556:2424] 2024-11-21T09:00:16.346825Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T09:00:16.346836Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:00:16.346839Z node 24 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2024-11-21T09:00:16.346844Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [23:451:2360] 2024-11-21T09:00:16.346852Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 23 [24:548:2090] 2024-11-21T09:00:16.346889Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] 
remote node connected [24:548:2090] 2024-11-21T09:00:16.346892Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:548:2090] 2024-11-21T09:00:16.346954Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [24:547:2089] 2024-11-21T09:00:16.346959Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [24:547:2089] 2024-11-21T09:00:16.346963Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [24:547:2089] 2024-11-21T09:00:16.346972Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [24:547:2089] 2024-11-21T09:00:16.346983Z node 24 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594037927937 Status=OK ClientId=[24:547:2089] 2024-11-21T09:00:16.347010Z node 23 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:548:2090] 2024-11-21T09:00:16.347034Z node 23 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [24:544:2089] EventType# 268959744 2024-11-21T09:00:16.347064Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvRegisterNode from [24:544:2089] HiveId: 72057594037927937 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T09:00:16.347072Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T09:00:16.347076Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:16.347080Z node 23 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxRegisterNode(24)::Execute 2024-11-21T09:00:16.347099Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessWaitQueue (0) 2024-11-21T09:00:16.347104Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T09:00:16.347108Z node 23 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T09:00:16.347112Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessWaitQueue (0) 2024-11-21T09:00:16.347115Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T09:00:16.347124Z node 23 :HIVE WARN: HIVE#72057594037927937 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:00:16.347136Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T09:00:16.347142Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:16.347178Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:548:2090] 2024-11-21T09:00:16.347182Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:548:2090] 2024-11-21T09:00:16.347186Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:548:2090] 2024-11-21T09:00:16.347192Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:548:2090] 2024-11-21T09:00:16.347199Z node 23 :HIVE TRACE: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([24:548:2090]) [23:557:2425] 2024-11-21T09:00:16.347205Z node 23 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T09:00:16.347211Z node 23 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T09:00:16.347215Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:16.347220Z node 23 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T09:00:16.347224Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T09:00:16.347228Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessWaitQueue (size: 0) 2024-11-21T09:00:16.347233Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T09:00:16.347238Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T09:00:16.347243Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:16.347253Z node 24 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72075186224037888 Status=OK ClientId=[24:548:2090] 2024-11-21T09:00:16.347271Z node 23 :HIVE DEBUG: HIVE#72057594037927937 TEvInterconnect::TEvNodeInfo NodeId 24 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" 2024-11-21T09:00:16.347289Z node 23 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [24:545:2090] EventType# 268959744 2024-11-21T09:00:16.347309Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle TEvLocal::TEvRegisterNode from [24:545:2090] HiveId: 72075186224037888 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T09:00:16.347315Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T09:00:16.347320Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:16.347325Z node 23 :HIVE DEBUG: HIVE#72075186224037888 THive::TTxRegisterNode(24)::Execute 2024-11-21T09:00:16.347338Z node 23 :HIVE WARN: HIVE#72075186224037888 Node(24, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:00:16.347342Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessWaitQueue (0) 2024-11-21T09:00:16.347346Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue (0) 2024-11-21T09:00:16.347349Z node 23 :HIVE TRACE: HIVE#72075186224037888 ProcessBootQueue - sending 2024-11-21T09:00:16.347352Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessWaitQueue (0) 2024-11-21T09:00:16.347357Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue (0) 2024-11-21T09:00:16.347364Z node 23 :HIVE WARN: HIVE#72075186224037888 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:00:16.347370Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T09:00:16.347376Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:16.347399Z node 23 :HIVE TRACE: HIVE#72075186224037888 ProcessBootQueue - executing 
2024-11-21T09:00:16.347404Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T09:00:16.347408Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:00:16.347411Z node 23 :HIVE DEBUG: HIVE#72075186224037888 THive::TTxProcessBootQueue()::Execute 2024-11-21T09:00:16.347415Z node 23 :HIVE DEBUG: HIVE#72075186224037888 0 nodes connected out of 0 2024-11-21T09:00:16.347418Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle ProcessBootQueue (size: 0) 2024-11-21T09:00:16.347421Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle ProcessWaitQueue (size: 0) 2024-11-21T09:00:16.347425Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T09:00:16.347429Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T09:00:16.347434Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:00:16.347448Z node 23 :HIVE DEBUG: HIVE#72075186224037888 TEvInterconnect::TEvNodeInfo NodeId 24 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2024-11-21T08:59:31.065663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654610550044576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:31.065906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:59:31.086463Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003835/r3tmp/tmp4OL2mi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8952, node 1 2024-11-21T08:59:31.111387Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:31.115186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003835/r3tmp/yandexezEbVu.tmp 2024-11-21T08:59:31.115199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003835/r3tmp/yandexezEbVu.tmp 2024-11-21T08:59:31.115239Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003835/r3tmp/yandexezEbVu.tmp 2024-11-21T08:59:31.115274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:31.118989Z INFO: TTestServer started on Port 16036 GrpcPort 8952 TClient is connected to server localhost:16036 PQClient connected to localhost:8952 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:59:31.141398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:59:31.150586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:59:31.167281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:31.167305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:31.168404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:59:31.297254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610550045339:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.297278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610550045350:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.297285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.297823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:31.297855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610550045382:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.297870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.298982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654610550045353:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:59:31.323843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.328597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.343658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.371875Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654610550045626:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:31.371971Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWJlODJhYWQtOTVhYmNlOGEtOTNkYmY5ODgtMWRmNWZmM2Y=, ActorId: [1:7439654610550045336:2304], ActorState: ExecuteState, TraceId: 01jd6z5cjyecwas6bvp8rd49n8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:31.372528Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654610550045697:2597] 2024-11-21T08:59:36.066022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654610550044576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:36.066059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:36.499728Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:36.502577Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:36.502909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654632024882473:2761], Recipient [1:7439654610550045001:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:36.502920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:36.502921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:36.502926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654632024882469:2758], Recipient [1:7439654610550045001:2197]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T08:59:36.502928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:36.508397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:36.508494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:36.508564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:36.508587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:36.508601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:36.508619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:36.508632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: ... letPipe::TEvServerDisconnected 2024-11-21T09:00:16.399181Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399192Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7439654800208491839:2679] destroyed 2024-11-21T09:00:16.399200Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654800208491840:3240], Recipient [5:7439654800208491716:2658]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399201Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399203Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399206Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399210Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7439654800208491838:2678] destroyed 2024-11-21T09:00:16.399216Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654795913524230:3083], Recipient [5:7439654795913523582:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399218Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399220Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.399223Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399226Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654795913524225:2612] destroyed 2024-11-21T09:00:16.399241Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399248Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399250Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399314Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439654795913524220:2609] disconnected; active server actors: 1 2024-11-21T09:00:16.399324Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439654795913524220:2609] client test-consumer disconnected session test-consumer_5_1_5162255308504072631_v1 2024-11-21T09:00:16.399424Z :INFO: [/Root] SessionId [producer-1|d5926434-13d191a0-a5c9d864-b1385538_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T09:00:16.399445Z :INFO: [/Root] SessionId [producer-1|d5926434-13d191a0-a5c9d864-b1385538_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:16.399450Z :DEBUG: [/Root] SessionId [producer-1|d5926434-13d191a0-a5c9d864-b1385538_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:16.399539Z :INFO: [/Root] SessionId [producer-1|d5926434-13d191a0-a5c9d864-b1385538_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:16.399543Z :DEBUG: [/Root] SessionId [producer-1|d5926434-13d191a0-a5c9d864-b1385538_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:16.399667Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|d5926434-13d191a0-a5c9d864-b1385538_0 grpc read done: success: 0 data: 2024-11-21T09:00:16.399679Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|d5926434-13d191a0-a5c9d864-b1385538_0 grpc read failed 2024-11-21T09:00:16.399685Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|d5926434-13d191a0-a5c9d864-b1385538_0 grpc closed 2024-11-21T09:00:16.399687Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|d5926434-13d191a0-a5c9d864-b1385538_0 is DEAD 2024-11-21T09:00:16.399923Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7439654795913524240:2616], Recipient [5:7439654795913524242:2616]: NActors::TEvents::TEvPoison 2024-11-21T09:00:16.399941Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:16.400009Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654795913524271:3098], Recipient [5:7439654795913523582:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.400017Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.400020Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:16.400039Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654795913524270:2616] destroyed 2024-11-21T09:00:16.400054Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7439654795913523582:2434], Partition 0, Sender [5:7439654795913523582:2434], Recipient [5:7439654795913523642:2438], Cookie: 0 2024-11-21T09:00:16.400066Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7439654795913523582:2434], Recipient [5:7439654795913523642:2438]: NKikimr::TEvPQ::TEvPipeDisconnected 2024-11-21T09:00:16.400073Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2024-11-21T09:00:16.400079Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:00:16.400088Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2024-11-21T09:00:16.400103Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:16.400119Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:16.400126Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:16.400130Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:16.478713Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654795913523582:2434], Partition 0, Sender [0:0:0], Recipient [5:7439654795913523642:2438], Cookie: 0 2024-11-21T09:00:16.478739Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439654795913523642:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.478743Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.478761Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:16.478782Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:16.478784Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:16.478789Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:16.497094Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654800208491716:2658], Partition 1, Sender [0:0:0], Recipient [5:7439654800208491796:2666], Cookie: 0 2024-11-21T09:00:16.497122Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439654800208491796:2666]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.497128Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.497142Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:16.497166Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:16.497170Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:16.497175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:16.497603Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654800208491713:2657], Partition 2, Sender [0:0:0], Recipient [5:7439654800208491799:2668], Cookie: 0 2024-11-21T09:00:16.497613Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439654800208491799:2668]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.497615Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.497619Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:16.497625Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:16.497628Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:16.497631Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:16.579009Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654795913523582:2434], Partition 0, Sender [0:0:0], Recipient [5:7439654795913523642:2438], Cookie: 0 2024-11-21T09:00:16.579033Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439654795913523642:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.579046Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:16.579059Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:16.579078Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:16.579080Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:16.579085Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:16.583094Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7439654770143718682:2135]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:16.583111Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:16.583121Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7439654770143718682:2135], Recipient [5:7439654770143718682:2135]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:00:16.583124Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] Test command err: 2024-11-21T08:58:39.369699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654388639285461:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.369725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.497345Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache 
created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e74/r3tmp/tmpY1HRPc/pdisk_1.dat 2024-11-21T08:58:39.774752Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.775910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.775929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3962, node 1 2024-11-21T08:58:39.788385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:39.822731Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.822751Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.228159Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e74/r3tmp/yandexk32kKL.tmp 2024-11-21T08:58:40.228172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e74/r3tmp/yandexk32kKL.tmp 2024-11-21T08:58:40.228234Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e74/r3tmp/yandexk32kKL.tmp 2024-11-21T08:58:40.228277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.393643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392934253351:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.393670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.393846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654392934253363:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.473284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.528291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654392934253365:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.588642Z INFO: TTestServer started on Port 10612 GrpcPort 3962 TClient is connected to server localhost:10612 PQClient connected to localhost:3962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.836658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:40.842265Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:58:40.868495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:58:41.040892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T08:58:41.389665Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654392934253441:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.394002Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNmNTBhOTAtNmQ1ZmRmMjctODQ5YTNmODUtZTYxYzIxMWQ=, ActorId: [1:7439654392934253340:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw93dakzcr8tj6sshd3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400984Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.572039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.625603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654401524188503:2627] 2024-11-21T08:58:44.364524Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654388639285461:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.868089Z :WriteToTopic_Demo_2 INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900226Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960268Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.969278Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978682Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654422999025332:2816] connected; active server actors: 1 2024-11-21T08:58:47.978805Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987574Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987661Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988147Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988165Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988169Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.988235Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.996523Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996603Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.996745Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005280Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005297Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005355Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005369Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014041Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422999025368:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.014077Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014082Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654422999025331:2815], now have 1 active actors on pipe 2024-11-21T08:58:48.015792Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654388639285847 RawX2: 4294969474 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { ... 2024-11-21T09:00:16.147637Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Returning serverBytesSize = 15001178 to budget 2024-11-21T09:00:16.147642Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] In ContinueReadingDataImpl, ReadSizeBudget = 15001178, ReadSizeServerDelta = 37427622 2024-11-21T09:00:16.147768Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T09:00:16.147791Z :DEBUG: [/Root] Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:00:16.147793Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:16.147798Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (2-7) 2024-11-21T09:00:16.147800Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:16.147807Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (8-11) 2024-11-21T09:00:16.147809Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:16.147857Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T09:00:16.147878Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T09:00:16.147885Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2024-11-21T09:00:16.147890Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2024-11-21T09:00:16.147900Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 2} (4-4) 2024-11-21T09:00:16.147905Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 3} (5-5) 2024-11-21T09:00:16.147909Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 4} (6-6) 2024-11-21T09:00:16.147914Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 5} (7-7) 2024-11-21T09:00:16.147922Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (8-8) 2024-11-21T09:00:16.147926Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 1} (9-9) 2024-11-21T09:00:16.147930Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 2} (10-10) 2024-11-21T09:00:16.147945Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 3} (11-11) 2024-11-21T09:00:16.147961Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] The application data is transferred to the client. Number of messages 12, size 15000000 bytes 2024-11-21T09:00:16.147974Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Returning serverBytesSize = 0 to budget 0 12 2024-11-21T09:00:16.148011Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Commit offsets [0, 12). 
Partition stream id: 1 2024-11-21T09:00:16.147988Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 grpc read done: success# 1, data# { read_request { bytes_size: 15001178 } } 2024-11-21T09:00:16.148080Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 got read request: guid# 3eb7541-eb4a35b5-8f12d0a8-7d5bc1ed 2024-11-21T09:00:16.148235Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 12 } } } } 2024-11-21T09:00:16.148316Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 12 prev 0 end 12 by cookie 2 2024-11-21T09:00:16.148366Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T09:00:16.148380Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T09:00:16.148429Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 12 (startOffset 0) session test-consumer_10_1_13059611750415319299_v1 2024-11-21T09:00:16.148475Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:00:16.148867Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 12 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:00:16.148889Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:16.148890Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T09:00:16.148932Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T09:00:16.148951Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 12 endOffset 12 with cookie 2 2024-11-21T09:00:16.148962Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 12 2024-11-21T09:00:16.149105Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 12 } } 2024-11-21T09:00:17.087386Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T09:00:17.087406Z :INFO: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:17.088767Z node 10 
:PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 checking auth because of timeout 2024-11-21T09:00:17.088799Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 auth for : test-consumer 2024-11-21T09:00:17.088967Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 Handle describe topics response 2024-11-21T09:00:17.088993Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 auth is DEAD 2024-11-21T09:00:17.089014Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 auth ok: topics# 1, initDone# 1 2024-11-21T09:00:18.089631Z :INFO: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:18.089651Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T09:00:18.089661Z :INFO: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2002 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:18.089684Z :NOTICE: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:18.089698Z :DEBUG: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] [] Abort session to cluster 2024-11-21T09:00:18.089732Z :NOTICE: [/Root] [/Root] [372ca757-780004d5-8951a067-ef8ff6d7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:18.089979Z :INFO: [/Root] SessionId [test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2024-11-21T09:00:18.089984Z :INFO: [/Root] SessionId [test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T09:00:18.089989Z :DEBUG: [/Root] SessionId [test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T09:00:18.090016Z :INFO: [/Root] SessionId [test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:18.090020Z :DEBUG: [/Root] SessionId [test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T09:00:18.090145Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:18.090161Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 grpc read failed 2024-11-21T09:00:18.090167Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 grpc closed 2024-11-21T09:00:18.090205Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13059611750415319299_v1 is DEAD 2024-11-21T09:00:18.090265Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:18.090277Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_13059611750415319299_v1 2024-11-21T09:00:18.090285Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439654804554660900:2533] destroyed 2024-11-21T09:00:18.090305Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_13059611750415319299_v1 2024-11-21T09:00:18.090368Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439654804554660897:2530] disconnected; active server actors: 1 2024-11-21T09:00:18.090382Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439654804554660897:2530] client test-consumer disconnected session test-consumer_10_1_13059611750415319299_v1 2024-11-21T09:00:18.090381Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0 grpc read done: success: 0 data: 2024-11-21T09:00:18.090387Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0 grpc read failed 2024-11-21T09:00:18.090392Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0 grpc closed 2024-11-21T09:00:18.090395Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|da795af0-8f708f9a-d9a579e-2d332888_0 is DEAD 2024-11-21T09:00:18.090634Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:18.090685Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:18.090700Z node 10 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] server disconnected, pipe [10:7439654795964726201:2499] destroyed 2024-11-21T09:00:18.090714Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] [XFAIL] >> TPersQueueMirrorer::ValidStartStream [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] |91.9%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TSettingsValidation::TestDifferentDedupParams >> test_storage_config.py::TestStorageConfig::test_cases[case_7] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_8] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2024-11-21T08:59:29.201618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654602877739811:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:29.201632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:59:29.221213Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00385a/r3tmp/tmpBJ2317/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26501, node 1 2024-11-21T08:59:29.244716Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:59:29.244729Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:59:29.246104Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:29.248674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00385a/r3tmp/yandexXA4EXB.tmp 2024-11-21T08:59:29.248681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00385a/r3tmp/yandexXA4EXB.tmp 2024-11-21T08:59:29.248722Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00385a/r3tmp/yandexXA4EXB.tmp 2024-11-21T08:59:29.248746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:29.252232Z INFO: TTestServer started on Port 17227 GrpcPort 26501 TClient is connected to server localhost:17227 PQClient connected to localhost:26501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T08:59:29.270095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:59:29.271721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:59:29.274365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:59:29.302459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:29.302480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:29.303597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:59:29.421984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654602877740572:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:29.421998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:29.422007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654602877740583:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:29.422611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654602877740615:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:29.422626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:29.422642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:29.424230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654602877740586:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:59:29.442091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:29.447518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:29.506188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:59:29.515820Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654602877740862:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:29.515916Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY4YjM1NTMtODQ4Y2NiMzEtOWQwNWM3LThlODQxMmRh, ActorId: [1:7439654602877740569:2304], ActorState: ExecuteState, TraceId: 01jd6z5arcb0y6rmqnbtfp9253, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:29.516391Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654602877740932:2597] 2024-11-21T08:59:34.202201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654602877739811:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:34.202238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:34.638264Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:34.641247Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:34.641618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654624352577708:2761], Recipient [1:7439654602877740276:2224]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:34.641631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:34.641633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:34.641638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654624352577704:2758], Recipient [1:7439654602877740276:2224]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T08:59:34.641640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:34.646559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:34.646635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:34.646696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:34.646712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:34.646717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: ... 
tabase: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 5 EndOffset: 10 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 SizeLag: 612 WriteTimestampEstimateMS: 1732179618745 } Cookie: 18446744073709551615 } 2024-11-21T09:00:18.748813Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 5 committedOffset 5 2024-11-21T09:00:18.748838Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 sending to client partition status 2024-11-21T09:00:18.749134Z :INFO: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2024-11-21T09:00:18.749305Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2024-11-21T09:00:18.749346Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2024-11-21T09:00:18.749365Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 5 committedOffset 5 clientCommitOffset (empty maybe) clientReadOffset 5 2024-11-21T09:00:18.749372Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2024-11-21T09:00:18.749384Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1732179618645, sizeLag# 612 2024-11-21T09:00:18.749391Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1TEvPartitionReady. 
Aval parts: 1 2024-11-21T09:00:18.749400Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 performing read request: guid# df859c58-dc3c1b54-64506438-6fa49898, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 734, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T09:00:18.749432Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 734 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 5 committedOffset 5 Guid df859c58-dc3c1b54-64506438-6fa49898 2024-11-21T09:00:18.749568Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:00:18.749578Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:00:18.749597Z node 6 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:00:18.749615Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 734 endOffset 10 max time lag 0ms effective offset 5 2024-11-21T09:00:18.749625Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5 2024-11-21T09:00:18.749671Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2024-11-21T09:00:18.749681Z node 6 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:00:18.749721Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2024-11-21T09:00:18.749914Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1732179618645 CreateTimestampMS: 1732179618644 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551523 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 5 } 2024-11-21T09:00:18.749966Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2024-11-21T09:00:18.749978Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid df859c58-dc3c1b54-64506438-6fa49898 has messages 1 2024-11-21T09:00:18.750017Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 read done: guid# df859c58-dc3c1b54-64506438-6fa49898, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 435 2024-11-21T09:00:18.750034Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 response to read: guid# df859c58-dc3c1b54-64506438-6fa49898 2024-11-21T09:00:18.750125Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 Process answer. Aval parts: 0 2024-11-21T09:00:18.750203Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] Got ReadResponse, serverBytesSize = 435, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428365 2024-11-21T09:00:18.750231Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428365 2024-11-21T09:00:18.750335Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2024-11-21T09:00:18.750344Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] Returning serverBytesSize = 435 to budget 2024-11-21T09:00:18.750348Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] In ContinueReadingDataImpl, ReadSizeBudget = 435, ReadSizeServerDelta = 52428365 2024-11-21T09:00:18.750406Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T09:00:18.750441Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2024-11-21T09:00:18.750451Z :DEBUG: [] Take Data. Partition 0. Read: {0, 1} (6-6) 2024-11-21T09:00:18.750466Z :DEBUG: [] Take Data. Partition 0. Read: {0, 2} (7-7) 2024-11-21T09:00:18.750469Z :DEBUG: [] Take Data. Partition 0. Read: {0, 3} (8-8) 2024-11-21T09:00:18.750473Z :DEBUG: [] Take Data. Partition 0. Read: {0, 4} (9-9) 2024-11-21T09:00:18.750482Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] The application data is transferred to the client. Number of messages 5, size 115 bytes 2024-11-21T09:00:18.750490Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:18.750501Z :INFO: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] Closing read session. 
Close timeout: 0.000000s 2024-11-21T09:00:18.750472Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 grpc read done: success# 1, data# { read_request { bytes_size: 435 } } 2024-11-21T09:00:18.750506Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:5 2024-11-21T09:00:18.750521Z :INFO: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:18.750529Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 got read request: guid# 46b71980-25f81d82-48e05653-9d25aa5e 2024-11-21T09:00:18.750562Z :NOTICE: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:18.750569Z :DEBUG: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] [] Abort session to cluster 2024-11-21T09:00:18.750680Z :NOTICE: [] [] [dbef181a-4a81c960-5388ebed-ba3dfb37] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:18.750796Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:18.750815Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 grpc read failed 2024-11-21T09:00:18.750821Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 grpc closed 2024-11-21T09:00:18.750838Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11588264149953227296_v1 is DEAD 2024-11-21T09:00:18.750887Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|810fc538-3d02bc73-ff6136e0-c01cea1c_0] Write session: destroy 2024-11-21T09:00:18.750991Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:18.751005Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_5_2_11588264149953227296_v1 2024-11-21T09:00:18.751016Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654813365761123:2490] destroyed 2024-11-21T09:00:18.751030Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_5_2_11588264149953227296_v1 2024-11-21T09:00:18.751085Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [5:7439654813365761120:2487] disconnected; active server actors: 1 2024-11-21T09:00:18.751097Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [5:7439654813365761120:2487] client user disconnected session shared/user_5_2_11588264149953227296_v1 >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> TxUsage::WriteToTopic_Demo_17 [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_6] [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_progress_when_tenant_tablets_run_on_dynamic_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_progress_when_tenant_tablets_run_on_dynamic_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true/cluster/node_1/logfile_3br0kkdo.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_progress_when_tenant_tablets_run_on_dynamic_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable 
tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004696/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_tenants.py.TestTenants.test_progress_when_tenant_tablets_run_on_dynamic_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2024-11-21T08:59:30.016669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654605834769935:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:30.016871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:59:30.040086Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003845/r3tmp/tmpuBrVgF/pdisk_1.dat 2024-11-21T08:59:30.068498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18679, node 1 2024-11-21T08:59:30.073784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003845/r3tmp/yandex5HOfes.tmp 2024-11-21T08:59:30.073795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003845/r3tmp/yandex5HOfes.tmp 2024-11-21T08:59:30.073840Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003845/r3tmp/yandex5HOfes.tmp 2024-11-21T08:59:30.073869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:30.077269Z INFO: TTestServer started on Port 6993 GrpcPort 18679 TClient is connected to server localhost:6993 PQClient connected to localhost:18679 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
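Note on the ResourceWarning lines above: they only report that the per-test cluster stdout/stderr writers were never closed; because tracemalloc was not enabled for this run, Python cannot show where those files were opened. The following is a minimal, hypothetical conftest.py sketch (not part of this log or of the YDB test harness; it assumes only standard pytest hooks and the stdlib tracemalloc module) that would make a future run attach the allocation traceback to these warnings:

# conftest.py -- illustrative only, not taken from the recorded run.
# With tracemalloc enabled, Python appends the allocation traceback to
# "ResourceWarning: unclosed file ..." messages, pointing at the code that
# opened the leaked stdout/stderr writer instead of printing the hint above.
import tracemalloc

def pytest_sessionstart(session):
    # Keep enough frames to reach the test/library code, not just io internals.
    tracemalloc.start(25)

def pytest_sessionfinish(session, exitstatus):
    tracemalloc.stop()

Setting PYTHONTRACEMALLOC=25 in the test runner's environment would have the same effect without editing conftest.py.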
2024-11-21T08:59:30.117610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:30.117643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:30.118601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:59:30.147873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:59:30.156872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:59:30.245019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654605834770699:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:30.245036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654605834770691:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:30.245050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:30.245725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:30.245949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654605834770735:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:30.245965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:30.247137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654605834770705:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T08:59:30.263801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:30.317880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:30.323817Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654605834770899:2328], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:30.323932Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmUzNWEzNzktZmY3MmMwNmUtMmUyNGRlNTMtOGFmODc5OGE=, ActorId: [1:7439654605834770688:2304], ActorState: ExecuteState, TraceId: 01jd6z5bj3543jtkjv1h5b3hxx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:30.324414Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:59:30.329496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654605834771047:2596] 2024-11-21T08:59:35.016740Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654605834769935:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:35.016768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:35.447199Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:35.449835Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:35.450199Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654627309607828:2765], Recipient [1:7439654605834770353:2199]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:35.450213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:35.450215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:35.450221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654627309607824:2762], Recipient [1:7439654605834770353:2199]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T08:59:35.450222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:35.455257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:35.455334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:35.455385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:35.455398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:35.455406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:35.455415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:35.455425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId ... 11-21T09:00:23.520011Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPersQueue::TEvHasDataInfo 2024-11-21T09:00:23.520022Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654828996484163:3269], Recipient [5:7439654828996484010:2663]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520024Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520026Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520030Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_7047892893730529233_v1 2024-11-21T09:00:23.520033Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7439654828996484161:2689] destroyed 2024-11-21T09:00:23.520037Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_5_2_2309526289774294786_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:2) wait for data done: readOffset 1 EndOffset 1 newEndOffset 1 commitOffset 1 clientCommitOffset 1 cookie 2 readingFinished 1 firstRead 0 2024-11-21T09:00:23.520043Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_7047892893730529233_v1 2024-11-21T09:00:23.520074Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439654824701516496:2603] disconnected; active server actors: 1 2024-11-21T09:00:23.520080Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654828996484164:3270], Recipient [5:7439654828996484007:2662]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520081Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520083Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439654824701516496:2603] client test-consumer disconnected session test-consumer_5_1_7047892893730529233_v1 2024-11-21T09:00:23.520083Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520086Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_7047892893730529233_v1 2024-11-21T09:00:23.520089Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7439654828996484162:2690] destroyed 2024-11-21T09:00:23.520093Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_7047892893730529233_v1 2024-11-21T09:00:23.520095Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer rebalancing was scheduled 2024-11-21T09:00:23.520104Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing. Sessions=1, Families=3, UnradableFamilies=2 [2 (1), 3 (2), ], RequireBalancing=0 [] >>>>> Session-2 Received TEndPartitionSessionEvent message 2024-11-21T09:00:23.520109Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=3 (Status=Free, Partitions=[2]) failed because there are no suitable reading sessions. 
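Aside on reading dumps like the trace above: the per-test verdicts and the captured "Test command err:" blocks are interleaved with TRACE/DEBUG noise, so it can help to filter the flat stream by the markers that actually appear in it. A small illustrative Python filter follows (not part of the YDB tooling; the regular expressions are assumptions written only against markers visible in this log):

# triage_ya_log.py -- illustrative helper, not part of the YDB repository.
# Pulls test verdicts and "Test command err:" headers out of a flat
# `ya make -A` log, matching only the markers seen in this output.
import re
import sys

VERDICT = re.compile(r">> (\S+) \[(GOOD|FAIL|TIMEOUT|SKIPPED)\]")
ERR_HEADER = re.compile(
    r"------- \[TM\] \{[^}]*\} (\S+) >> (\S+) \[(GOOD|FAIL)\] Test command err:"
)

def triage(path: str) -> None:
    text = open(path, encoding="utf-8", errors="replace").read()
    # Every ">> name [VERDICT]" occurrence, including those inside err headers.
    for name, verdict in VERDICT.findall(text):
        print(f"{verdict:8} {name}")
    print("--- suites with captured 'Test command err:' output ---")
    for suite, test, verdict in ERR_HEADER.findall(text):
        print(f"{verdict:8} {suite} :: {test}")

if __name__ == "__main__":
    triage(sys.argv[1])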
2024-11-21T09:00:23.520112Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=2 (Status=Free, Partitions=[1]) failed because there are no suitable reading sessions. 2024-11-21T09:00:23.520114Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing duration: 0.000008s EndPartitionSession { Partition session id: 2 Topic: "test-topic" Partition: 0 AdjacentPartitionIds: [] ChildPartitionIds: [1, 2] } >>>>> Session-2 Release() >>>>> Session-2 Closing reading session 2024-11-21T09:00:23.520533Z :INFO: [/Root] [/Root] [f060239-d1d4128d-969fec7e-9b7c3677] Closing read session. Close timeout: 5.000000s 2024-11-21T09:00:23.520547Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:2:0:1 2024-11-21T09:00:23.520554Z :INFO: [/Root] [/Root] [f060239-d1d4128d-969fec7e-9b7c3677] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2028 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 29 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:23.520681Z :INFO: [/Root] [/Root] [f060239-d1d4128d-969fec7e-9b7c3677] Closing read session. Close timeout: 0.000000s >>>>> 2024-11-21T09:00:23.520692Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:2:0:1 2024-11-21T09:00:23.520701Z :INFO: [/Root] [/Root] [f060239-d1d4128d-969fec7e-9b7c3677] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2028 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 29 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } Session-2 Received TSessionClosedEvent message 2024-11-21T09:00:23.520715Z :NOTICE: [/Root] [/Root] [f060239-d1d4128d-969fec7e-9b7c3677] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2024-11-21T09:00:23.520776Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_5_2_2309526289774294786_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:23.520785Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_2309526289774294786_v1 grpc read failed 2024-11-21T09:00:23.520787Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_2309526289774294786_v1 grpc closed 2024-11-21T09:00:23.520796Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_5_2_2309526289774294786_v1 is DEAD 2024-11-21T09:00:23.520839Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654833291451585:3328], Recipient [5:7439654824701515852:2429]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520848Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520852Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.520854Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_2_2309526289774294786_v1 2024-11-21T09:00:23.520856Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654833291451584:2722] destroyed 2024-11-21T09:00:23.520864Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_2_2309526289774294786_v1 2024-11-21T09:00:23.521011Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439654824701516507:2611] disconnected; active server actors: 1 2024-11-21T09:00:23.521019Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439654824701516507:2611] client test-consumer disconnected session test-consumer_5_2_2309526289774294786_v1 2024-11-21T09:00:23.521208Z :INFO: [/Root] SessionId [producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T09:00:23.521213Z :INFO: [/Root] SessionId [producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:23.521219Z :DEBUG: [/Root] SessionId [producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:23.521313Z :INFO: [/Root] SessionId [producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:23.521317Z :DEBUG: [/Root] SessionId [producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:23.521419Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0 grpc read done: success: 0 data: 2024-11-21T09:00:23.521427Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0 grpc read failed 2024-11-21T09:00:23.521432Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0 grpc closed 2024-11-21T09:00:23.521436Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|6e73dfba-9b1dc064-923b1a17-bb9c340d_0 is DEAD 2024-11-21T09:00:23.521689Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7439654824701516527:2618], Recipient [5:7439654824701516529:2618]: NActors::TEvents::TEvPoison 2024-11-21T09:00:23.521712Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:23.521751Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654824701516559:3109], Recipient [5:7439654824701515852:2429]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.521757Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.521760Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:23.521770Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654824701516558:2618] destroyed 2024-11-21T09:00:23.521796Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7439654824701515852:2429], Partition 0, Sender [5:7439654824701515852:2429], Recipient [5:7439654824701515910:2432], Cookie: 0 2024-11-21T09:00:23.521804Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7439654824701515852:2429], Recipient [5:7439654824701515910:2432]: NKikimr::TEvPQ::TEvPipeDisconnected 2024-11-21T09:00:23.521808Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2024-11-21T09:00:23.521813Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:00:23.521835Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2024-11-21T09:00:23.521846Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:23.521857Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:23.521864Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:23.521867Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:23.594326Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7439654798931710965:2138]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:23.594349Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:23.594359Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7439654798931710965:2138], Recipient [5:7439654798931710965:2138]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:00:23.594362Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_8] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_9] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_9] [GOOD] >> test_storage_config.py::TestStorageConfig::test_create_tablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2024-11-21T08:59:31.664867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654610312636998:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:31.665017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/003807/r3tmp/tmpRNtomy/pdisk_1.dat 2024-11-21T08:59:31.695239Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created TServer::EnableGrpc on GrpcPort 2300, node 1 2024-11-21T08:59:31.719823Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:59:31.719931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003807/r3tmp/yandexIcFPRD.tmp 2024-11-21T08:59:31.719938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003807/r3tmp/yandexIcFPRD.tmp 2024-11-21T08:59:31.720003Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003807/r3tmp/yandexIcFPRD.tmp 2024-11-21T08:59:31.720023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:59:31.721395Z INFO: TTestServer started on Port 11427 GrpcPort 2300 TClient is connected to server localhost:11427 PQClient connected to localhost:2300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:59:31.740278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:59:31.745688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:59:31.765597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:59:31.765635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:59:31.766986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:59:31.893369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610312637739:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.893385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610312637765:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.893390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.894017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:59:31.894561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654610312637797:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.894686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:59:31.895635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654610312637768:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:59:31.912164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.966731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:59:31.976056Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654610312637969:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:59:31.976107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGYwODIxNWUtMTFkYzNkMmItNmIxNDYyMWItMjM5NjIwNWU=, ActorId: [1:7439654610312637736:2304], ActorState: ExecuteState, TraceId: 01jd6z5d5nbqcebb6dv2k95bq5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:59:31.976533Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:59:31.980697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654614607605408:2596] 2024-11-21T08:59:36.665306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654610312636998:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:59:36.665333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T08:59:37.090113Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T08:59:37.092685Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:59:37.093050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439654636082442184:2760], Recipient [1:7439654610312637385:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.093059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T08:59:37.093061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T08:59:37.093066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439654636082442180:2757], Recipient [1:7439654610312637385:2184]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T08:59:37.093067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T08:59:37.097165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T08:59:37.097239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T08:59:37.097289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T08:59:37.097302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T08:59:37.097306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T08:59:37.097311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T08:59:37.097316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: ... ode 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvProxyResponse 2024-11-21T09:00:27.415747Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2024-11-21T09:00:27.415806Z node 5 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 1 EndOffset: 1 WriteTimestampMS: 1732179624290 CreateTimestampMS: 1732179624290 SizeLag: 0 WriteTimestampEstimateMS: 1732179624290 } Cookie: 18446744073709551615 } 2024-11-21T09:00:27.415820Z node 5 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 INIT DONE TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 1 committedOffset 1 2024-11-21T09:00:27.415840Z node 5 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 sending to client partition status >>>>> Session-0 Received TStartPartitionSessionEvent message StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 CommittedOffset: 1 EndOffset: 1 } >>>>> Session-0 Acquire() >>>>> Session-0 Partitions {{test-topic : [0]}, } received #2 >>>>> Session-0 Release() >>>>> Session-0 Release() >>>>> Session-0 Closing reading session >>>>> 2024-11-21T09:00:27.416186Z :INFO: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] Closing read session. Close timeout: 5.000000s Session-0 Acquired >>>>> Session-0 Start reading partition 0 without offset 2024-11-21T09:00:27.416201Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:1 2024-11-21T09:00:27.416226Z :INFO: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:27.416230Z :INFO: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "test-topic". Partition: 0. Read offset: (empty maybe) 2024-11-21T09:00:27.416382Z :INFO: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:27.416387Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:1 2024-11-21T09:00:27.416391Z :INFO: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:27.416406Z :NOTICE: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:27.416520Z node 5 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:27.416530Z node 5 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 grpc read failed 2024-11-21T09:00:27.416533Z node 5 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 grpc closed 2024-11-21T09:00:27.416545Z node 5 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_5_3_17478767482419203255_v1 is DEAD 2024-11-21T09:00:27.416628Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654850633287994:3403], Recipient [5:7439654833453417537:2435]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.416637Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.416641Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.416645Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Destroy direct read session test-consumer_5_3_17478767482419203255_v1 2024-11-21T09:00:27.416650Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [5:7439654850633287993:2764] destroyed 2024-11-21T09:00:27.416663Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_3_17478767482419203255_v1 2024-11-21T09:00:27.416923Z :DEBUG: [/Root] [/Root] [e55bef69-a0e6ee4e-e9cc047e-edd180d2] [] Skip partition stream create confirm. Partition stream id: 1 >>>>> Session-0 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2024-11-21T09:00:27.417647Z :INFO: [/Root] [/Root] [6228413b-47b70906-202d161f-bb29dadb] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:27.417664Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:1:1:0:0 2024-11-21T09:00:27.417671Z :INFO: [/Root] [/Root] [6228413b-47b70906-202d161f-bb29dadb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1013 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:27.417683Z :NOTICE: [/Root] [/Root] [6228413b-47b70906-202d161f-bb29dadb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:27.417693Z :DEBUG: [/Root] [/Root] [6228413b-47b70906-202d161f-bb29dadb] [] Abort session to cluster >>>>> Session-1 Received TSessionClosedEvent message 2024-11-21T09:00:27.417897Z :NOTICE: [/Root] [/Root] [6228413b-47b70906-202d161f-bb29dadb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:27.417928Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_3033050025041562414_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:27.417959Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_3033050025041562414_v1 grpc read failed 2024-11-21T09:00:27.417967Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_3033050025041562414_v1 grpc closed 2024-11-21T09:00:27.417974Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_3033050025041562414_v1 is DEAD 2024-11-21T09:00:27.418259Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][test-topic] pipe [5:7439654846338320568:2713] disconnected; active server actors: 1 2024-11-21T09:00:27.418270Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][test-topic] pipe [5:7439654846338320568:2713] client test-consumer disconnected session test-consumer_5_1_3033050025041562414_v1 2024-11-21T09:00:27.418273Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439654846338320572:3349], Recipient [5:7439654833453417536:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.418276Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.418279Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:27.418283Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_3033050025041562414_v1 2024-11-21T09:00:27.418284Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer rebalancing was scheduled 2024-11-21T09:00:27.418288Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439654846338320571:2719] destroyed 2024-11-21T09:00:27.418297Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_3033050025041562414_v1 2024-11-21T09:00:27.418302Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][test-topic] pipe [5:7439654850633287990:2761] disconnected; active server actors: 1 2024-11-21T09:00:27.418309Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][test-topic] pipe [5:7439654850633287990:2761] client test-consumer disconnected session test-consumer_5_3_17478767482419203255_v1 2024-11-21T09:00:27.418449Z :DEBUG: [/Root] SessionId [producer-1|c6888f1d-3a127fce-3e998113-49dfd210_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:00:27.444496Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654833453417536:2434], Partition 1, Sender [0:0:0], Recipient [5:7439654833453417628:2441], Cookie: 0 2024-11-21T09:00:27.444496Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439654833453417537:2435], Partition 0, Sender [0:0:0], Recipient [5:7439654833453417629:2442], Cookie: 0 2024-11-21T09:00:27.444512Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439654833453417629:2442]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:27.444519Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:27.444523Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient 
[5:7439654833453417628:2441]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:27.444527Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:00:27.444536Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:27.444539Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:00:27.444562Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:27.444564Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:27.444566Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:00:27.444568Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:00:27.444570Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:27.444573Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:00:27.470113Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7439654811978579931:2141]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:27.470136Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:00:27.470149Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7439654811978579931:2141], Recipient [5:7439654811978579931:2141]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:00:27.470152Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> test_storage_config.py::TestStorageConfig::test_create_tablet [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045c2/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_tenants.py.TestTenants.test_force_delete_tenant_when_table_has_been_deleted.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/tokens.py:3: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/tokens.py:3: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql_empty_database_header.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045bf/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_create_tablet [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> TSettingsValidation::TestDifferentDedupParams [GOOD] >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] Test command err: 2024-11-21T08:58:39.369746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654385590097691:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.370265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.496983Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e56/r3tmp/tmpdnVB9r/pdisk_1.dat 2024-11-21T08:58:39.719928Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.776439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.776469Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.787098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15383, node 1 2024-11-21T08:58:39.822912Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:39.823288Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.228462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e56/r3tmp/yandexd8rK4y.tmp 2024-11-21T08:58:40.228478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e56/r3tmp/yandexd8rK4y.tmp 2024-11-21T08:58:40.228520Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e56/r3tmp/yandexd8rK4y.tmp 2024-11-21T08:58:40.228556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.394252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654389885065584:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.394276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.396504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654389885065596:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.475839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.529215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654389885065598:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.592664Z INFO: TTestServer started on Port 1605 GrpcPort 15383 TClient is connected to server localhost:1605 PQClient connected to localhost:15383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.835800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:40.842723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:40.868532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:41.389657Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654389885065673:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.393688Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjlmNzIxZmItMzJhZTY0ODMtMTM2OWFlYWQtOWI1YjZjOGE=, ActorId: [1:7439654389885065573:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw90dp6c6eqcwaj2gn5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400600Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.567154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.573348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.625603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654398475000737:2627] 2024-11-21T08:58:44.364597Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654385590097691:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.364625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.870244Z :WriteToTopic_Demo_1 INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900256Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960311Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960405Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.979091Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654419949837563:2814] connected; active server actors: 1 2024-11-21T08:58:47.979196Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987515Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987641Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988064Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988137Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988162Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988168Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996510Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996524Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.998453Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005355Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005374Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005457Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005459Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014053Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654419949837562:2813], now have 1 active actors on pipe 2024-11-21T08:58:48.014073Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014076Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654419949837602:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.015799Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654385590098075 RawX2: 4294969473 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 Topi ... e Data. Partition 0. Read: {1, 2} (20-20) 2024-11-21T09:00:31.332235Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 3} (21-21) 2024-11-21T09:00:31.332244Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] The application data is transferred to the client. 
Number of messages 10, size 1000000 bytes 2024-11-21T09:00:31.332250Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] The application data is transferred to the client. Number of messages 6, size 14400000 bytes 2024-11-21T09:00:31.332254Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:31.332243Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T09:00:31.332267Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:00:31.332261Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 0 16 2024-11-21T09:00:31.332278Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1000800 } } 2024-11-21T09:00:31.332316Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Commit offsets [6, 22). Partition stream id: 1 2024-11-21T09:00:31.332306Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 6 (startOffset 0) session test-consumer_10_1_15428143707453492341_v1 2024-11-21T09:00:31.332340Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:00:31.332382Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 got read request: guid# 5a9ae337-2416710-7de13a51-d793deee 2024-11-21T09:00:31.332542Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 6 end: 22 } } } } 2024-11-21T09:00:31.332617Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 22 prev 0 end 22 by cookie 3 2024-11-21T09:00:31.332665Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T09:00:31.332679Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T09:00:31.332933Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T09:00:31.332937Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:31.332952Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 22 (startOffset 0) session test-consumer_10_1_15428143707453492341_v1 2024-11-21T09:00:31.332967Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T09:00:31.332969Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T09:00:31.332976Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 6 endOffset 22 with cookie 2 2024-11-21T09:00:31.332983Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 6 2024-11-21T09:00:31.333184Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 6 } } 2024-11-21T09:00:31.333311Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 22 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:00:31.333333Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:31.333339Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T09:00:31.333371Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T09:00:31.333390Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 22 endOffset 22 with cookie 3 2024-11-21T09:00:31.333404Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 22 2024-11-21T09:00:31.333557Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 22 } } 2024-11-21T09:00:32.271377Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T09:00:32.271417Z :INFO: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1001 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:32.272707Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 checking auth because of timeout 2024-11-21T09:00:32.272766Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 auth for : test-consumer 2024-11-21T09:00:32.273095Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 Handle describe topics response 2024-11-21T09:00:32.273135Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 auth is DEAD 2024-11-21T09:00:32.273152Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 auth ok: topics# 1, initDone# 1 2024-11-21T09:00:33.271841Z 
:INFO: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] Closing read session. Close timeout: 0.000000s 2024-11-21T09:00:33.272068Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T09:00:33.272087Z :INFO: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2001 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:00:33.272147Z :NOTICE: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:00:33.272167Z :DEBUG: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] [] Abort session to cluster 2024-11-21T09:00:33.272407Z :NOTICE: [/Root] [/Root] [86d46c33-b869d08c-392291f-3c29492c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:00:33.272772Z :INFO: [/Root] SessionId [test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2024-11-21T09:00:33.272777Z :INFO: [/Root] SessionId [test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T09:00:33.272781Z :DEBUG: [/Root] SessionId [test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T09:00:33.272906Z :INFO: [/Root] SessionId [test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T09:00:33.272912Z :DEBUG: [/Root] SessionId [test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T09:00:33.272987Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 grpc read done: success# 0, data# { } 2024-11-21T09:00:33.273045Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 grpc read failed 2024-11-21T09:00:33.273060Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 grpc closed 2024-11-21T09:00:33.273109Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_15428143707453492341_v1 is DEAD 2024-11-21T09:00:33.274407Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0 grpc read done: success: 0 data: 2024-11-21T09:00:33.274426Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0 grpc read failed 2024-11-21T09:00:33.274432Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0 grpc closed 2024-11-21T09:00:33.274436Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2f66a378-16501c3d-acf52cbf-4a80fb07_0 is DEAD 2024-11-21T09:00:33.274811Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:00:33.274851Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439654866705726477:2530] disconnected; active server actors: 1 2024-11-21T09:00:33.274854Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439654866705726477:2530] client test-consumer disconnected session test-consumer_10_1_15428143707453492341_v1 2024-11-21T09:00:33.274950Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:33.274965Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_15428143707453492341_v1 2024-11-21T09:00:33.274971Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439654866705726480:2533] destroyed 2024-11-21T09:00:33.274978Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:00:33.274991Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439654858115791774:2499] destroyed 2024-11-21T09:00:33.275040Z node 10 :PQ_READ_PROXY DEBUG: Direct read 
cache: server session deregistered: test-consumer_10_1_15428143707453492341_v1 2024-11-21T09:00:33.275070Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TSettingsValidation::TestDifferentDedupParams [GOOD] Test command err: 2024-11-21T08:58:50.591102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654432735549647:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:50.591148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:50.613387Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e38/r3tmp/tmpcjSnXq/pdisk_1.dat 2024-11-21T08:58:50.642136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26145, node 1 2024-11-21T08:58:50.646789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e38/r3tmp/yandexLSs6BA.tmp 2024-11-21T08:58:50.646799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e38/r3tmp/yandexLSs6BA.tmp 2024-11-21T08:58:50.646839Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e38/r3tmp/yandexLSs6BA.tmp 2024-11-21T08:58:50.646861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:50.651052Z INFO: TTestServer started on Port 21171 GrpcPort 26145 TClient is connected to server localhost:21171 PQClient connected to localhost:26145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:50.672318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:50.684047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:58:50.691556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:50.691580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:50.692712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T08:58:50.827269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654432735550394:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:50.827288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654432735550403:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:50.827293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:50.827827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T08:58:50.828605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654432735550437:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:50.828709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:50.829137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654432735550408:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T08:58:50.854728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T08:58:50.903838Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654432735550544:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:50.903895Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzJhOWI4MzUtOWM0MmQ1MWItMzE4NWVlNDYtNjY5MTAwNDg=, ActorId: [1:7439654432735550391:2304], ActorState: ExecuteState, TraceId: 01jd6z452a2gd6bycgvm8k9dx7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:50.904303Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:50.908516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:50.918157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654432735550750:2594] 2024-11-21T08:58:55.591361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654432735549647:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:55.591405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:56.034183Z :Restarts INFO: TTopicSdkTestSetup started 2024-11-21T08:58:56.036910Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:56.040367Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654458505354840:2769] connected; active server actors: 1 2024-11-21T08:58:56.040459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T08:58:56.040549Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:56.040593Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:56.040644Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:56.041144Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:56.041203Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:56.041311Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:56.041337Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:56.041346Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:56.041348Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:56.041350Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:56.041353Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:56.041361Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:56.041362Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:56.041425Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:56.041434Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:56.041438Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654458505354857:2427], now have 1 active actors on pipe 2024-11-21T08:58:56.089633Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:56.089650Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654458505354839:2768], now have 1 active actors on pipe 2024-11-21T08:58:56.089656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T08:58:56.091340Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439654432735550085:2213] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitCha ... tics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523065Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=2, txId=? 
2024-11-21T09:00:33.523067Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 2 2024-11-21T09:00:33.523115Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523123Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 3 written { offset: 1366 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523125Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=3, txId=? 2024-11-21T09:00:33.523127Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 3 2024-11-21T09:00:33.523146Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523153Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 4 written { offset: 1367 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523156Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=4, txId=? 2024-11-21T09:00:33.523157Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 4 2024-11-21T09:00:33.523174Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523182Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 5 written { offset: 1368 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523186Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=5, txId=? 
2024-11-21T09:00:33.523188Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 5 2024-11-21T09:00:33.523254Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523288Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 6 written { offset: 1369 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523292Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=6, txId=? 2024-11-21T09:00:33.523294Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 6 2024-11-21T09:00:33.523321Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523328Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 7 written { offset: 1370 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523329Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=7, txId=? 2024-11-21T09:00:33.523330Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 7 2024-11-21T09:00:33.523347Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523355Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 8 written { offset: 1371 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523357Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=8, txId=? 
2024-11-21T09:00:33.523359Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 8 2024-11-21T09:00:33.523377Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523385Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 9 written { offset: 1372 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523391Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=9, txId=? 2024-11-21T09:00:33.523393Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 9 2024-11-21T09:00:33.523409Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523418Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 10 written { offset: 1373 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523420Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=10, txId=? 2024-11-21T09:00:33.523422Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 10 2024-11-21T09:00:33.523568Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523612Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 11 written { offset: 1374 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523616Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=11, txId=? 
2024-11-21T09:00:33.523619Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 11 2024-11-21T09:00:33.523646Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write 1 messages with Id from 55 to 55 2024-11-21T09:00:33.523656Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T09:00:33.523658Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 55 2024-11-21T09:00:33.523732Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T09:00:33.523736Z :INFO: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T09:00:33.523749Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 12 written { offset: 1375 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T09:00:33.523755Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] OnAck: seqNo=12, txId=? 
2024-11-21T09:00:33.523756Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: acknoledged message 12 2024-11-21T09:00:33.523762Z :INFO: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:00:33.523768Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:00:33.523902Z :WARNING: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T09:00:33.524321Z :DEBUG: [/Root] SessionId [5fc8c4fa-2649262-89be6e1b-8ac52f4d|a9c7e70a-8cfeab30-df6b4f57-ab80dd95_0] PartitionId [0] Generation [1] Write session: destroy === === END TEST (supposed ok)=== === >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045a0/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] [GOOD] >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2024-11-21T08:53:19.742274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:19.742321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:19.742330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0038c0/r3tmp/tmpnR76LK/pdisk_1.dat 2024-11-21T08:53:19.821568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22788, node 1 2024-11-21T08:53:19.921199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:19.921221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:19.921226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:19.921329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:19.928540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:20.014013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:20.014051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:20.025696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4695 2024-11-21T08:53:20.436098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:21.277562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.277586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.310780Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:21.311474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.367844Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:21.378230Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:21.378260Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:21.386089Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:21.386201Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:21.386215Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:21.386219Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:21.386222Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:21.386227Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:21.386231Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:21.386235Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:21.386318Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:21.561670Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.561700Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:21.562601Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:21.563909Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:21.563982Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:21.564619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:21.569200Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:21.569221Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:21.569232Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:21.571265Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:21.571295Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:21.572732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:21.574519Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:21.574569Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:21.581553Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:21.593726Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:21.616375Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:21.741034Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:21.897841Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:22.654383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.654426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.658138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:22.847701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.847759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.848463Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:53:22.848558Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T08:53:22.848578Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T08:53:22.848591Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T08:53:22.848802Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T08:53:22.848927Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T08:53:22.848995Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T08:53:22.849019Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T08:53:22.849102Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:53:22.849122Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T08:53:22.852370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.852409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.852570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:22.854433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T08:53:22.987168Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T08:53:22.987197Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T08:53:23.082051Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T08:53:23.339853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T08:53:23.408698Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2590:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T08:53:23.408751Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T08:53:23.408758Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2590:3177], StatRequests.size() = 1 2024-11-21T08:53:23.418534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd6yt4rx7xt754z8zzcrxegh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDI0YTdjMGYtNjM4M2M2ZDYtMzQyZDA1MmQtMjJiNTJjN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for TEvPeriodicTableStats 2024-11-21T08:53:25.534044Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count ... 1T08:59:29.768681Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:29.936026Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T08:59:29.936068Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:59:29.936072Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T08:59:29.936075Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T08:59:31.259766Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:59:31.259900Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:59:31.259967Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:32.706188Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:32.706215Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:35.273305Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:35.304290Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:35.304328Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:37.867714Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:59:37.867849Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:59:37.867919Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:37.888531Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:37.888558Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:40.432550Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:40.432578Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:41.654801Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:43.011797Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:43.011824Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T08:59:44.433653Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:59:44.433714Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:44.433800Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:59:45.825235Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:45.825266Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:48.306523Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:48.348480Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:48.348508Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:51.064141Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:59:51.064317Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:59:51.064411Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:51.095688Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:51.095723Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:53.839716Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:53.839747Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:55.079939Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:56.389020Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:56.389049Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:57.671105Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T08:59:57.671175Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:57.671278Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T08:59:58.992653Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:58.992687Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:01.461857Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:01.514421Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:01.514457Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:04.009035Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:04.009177Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:04.009279Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:04.051370Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:04.051400Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:06.624035Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:06.624065Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:00:07.873383Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:09.222533Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:09.222566Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:09.408785Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T09:00:09.408812Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:09.408817Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:09.408821Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T09:00:10.698215Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:10.698359Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:10.698429Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:12.157260Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:12.157294Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:14.538529Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:14.579734Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:14.579761Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:17.043997Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:17.044157Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:17.044241Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:17.106307Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:17.106341Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:19.659917Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:19.659946Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:20.862065Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:22.240934Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:22.240967Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:23.474931Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:23.475044Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:23.475111Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:24.484259Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:24.484295Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:26.476435Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:26.538469Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:26.538498Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
No force traversals. 2024-11-21T09:00:29.164227Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:29.164320Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:29.164450Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:29.216015Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:29.216049Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:29.237971Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T09:00:29.238007Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 187.000000s, at schemeshard: 72075186224037889 2024-11-21T09:00:29.238185Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2024-11-21T09:00:29.249797Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:00:31.937729Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:31.937759Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:33.150162Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:34.343731Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:34.343767Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:35.565813Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:00:35.565881Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done) 2024-11-21T09:00:35.566014Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13898:7624]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:00:35.567025Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:00:35.567710Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:00:35.567722Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13898:7624], StatRequests.size() = 1 >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] Test command err: 2024-11-21T08:59:27.951474Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:27.952115Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:27.952183Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:27.952332Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 
1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:27.952497Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:27.952506Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:27.952634Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:45:2073] ControllerId# 72057594037932033 2024-11-21T08:59:27.952637Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:27.952658Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:27.952720Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:27.954215Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:27.954223Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:27.954495Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:53:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954521Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:54:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954536Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:55:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954554Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:56:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954568Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:57:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954582Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:58:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954602Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:59:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.954605Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:27.954613Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:45:2073] 2024-11-21T08:59:27.954616Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:45:2073] 2024-11-21T08:59:27.954621Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:27.954625Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:27.954758Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:27.954771Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:27.955613Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:27.955634Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:27.955727Z node 2 
:BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2024-11-21T08:59:27.955730Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:27.955738Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:27.955760Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:27.955831Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:27.955850Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:27.955852Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:27.956027Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956043Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:74:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956058Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956073Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956089Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956111Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956126Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:27.956128Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:27.956133Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2024-11-21T08:59:27.956135Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2024-11-21T08:59:27.956150Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:27.956155Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:27.956192Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:27.956234Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:27.958796Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T08:59:27.958807Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:27.958812Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:27.958849Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2024-11-21T08:59:27.958856Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:27.958861Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:27.958874Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:27.958885Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] 
queue send [1:45:2073] 2024-11-21T08:59:27.958888Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:27.958893Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:27.959348Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:27.959587Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:27.959594Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:27.959610Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:27.959622Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:27.959670Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:27.959675Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:27.959677Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:27.959696Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:89:2084] 2024-11-21T08:59:27.959701Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:49:2064] 2024-11-21T08:59:27.959704Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:49:2064] 2024-11-21T08:59:27.959725Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:27.959787Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:27.959804Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:27.959809Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:27.959812Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:27.959818Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:27.959829Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:27.959831Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:27.959874Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:27.959878Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:27.959886Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:27.959888Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:27.959893Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:95:2091] 2024-11-21T08:59:27.959920Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# 
{PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID ... etID: 72075186224037888} 2024-11-21T09:00:33.930622Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.930626Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T09:00:33.930629Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T09:00:33.930633Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T09:00:33.930636Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [8:404:2362] 2024-11-21T09:00:33.930638Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T09:00:33.930643Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:33.930650Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:00:33.930656Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:00:33.930659Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:00:33.930661Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:00:33.930664Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.930668Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.930672Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.930677Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T09:00:33.930681Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T09:00:33.930685Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T09:00:33.930688Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [8:404:2362] 2024-11-21T09:00:33.951255Z node 8 :BS_PROXY_PUT INFO: [65bcfa2ad0971069] bootstrap ActorId# [8:408:2364] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:00:33.951329Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Id# [72057594037927937:2:9:0:0:199:0] 
restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:00:33.951338Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] restore Id# [72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:00:33.951349Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2024-11-21T09:00:33.951354Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2024-11-21T09:00:33.951389Z node 8 :BS_PROXY DEBUG: Send to queueActorId# [8:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:00:33.951904Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:00:33.951951Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T09:00:33.951961Z node 8 :BS_PROXY_PUT INFO: [65bcfa2ad0971069] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:00:33.952004Z node 8 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T09:00:33.952035Z node 8 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2024-11-21T09:00:33.952047Z node 8 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2024-11-21T09:00:33.962244Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [8:404:2362] 2024-11-21T09:00:33.962297Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T09:00:33.962331Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:33.962372Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:00:33.962401Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:00:33.962411Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:00:33.962420Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:00:33.962427Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.962437Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.962442Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup 
ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.962460Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T09:00:33.962466Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T09:00:33.962479Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T09:00:33.962485Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [8:404:2362] 2024-11-21T09:00:33.982836Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [8:404:2362] 2024-11-21T09:00:33.982861Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T09:00:33.982888Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:33.982935Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:00:33.982965Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:00:33.982974Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:00:33.982980Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:00:33.982987Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.982995Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.983002Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:00:33.983022Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T09:00:33.983027Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T09:00:33.983041Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T09:00:33.983048Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed, check aliveness [8:404:2362] 2024-11-21T09:00:34.003466Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [8:411:2365] 2024-11-21T09:00:34.003493Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [8:411:2365] 2024-11-21T09:00:34.003524Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:00:34.003537Z node 8 :TABLET_RESOLVER DEBUG: SelectForward node 8 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [8:264:2257] 2024-11-21T09:00:34.003551Z node 8 
:PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [8:411:2365] 2024-11-21T09:00:34.003557Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [8:411:2365] 2024-11-21T09:00:34.003571Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [8:411:2365] 2024-11-21T09:00:34.003587Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [8:411:2365] 2024-11-21T09:00:34.003622Z node 8 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([8:411:2365]) [8:412:2366] 2024-11-21T09:00:34.003636Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [8:411:2365] 2024-11-21T09:00:34.003642Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [8:411:2365] 2024-11-21T09:00:34.003646Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [8:411:2365] 2024-11-21T09:00:34.003654Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [8:404:2362] EventType# 268697616 2024-11-21T09:00:34.003672Z node 8 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2024-11-21T09:00:34.003685Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [8:411:2365] 2024-11-21T09:00:34.003691Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [8:411:2365] 2024-11-21T09:00:34.003698Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [8:411:2365] 2024-11-21T09:00:34.003717Z node 8 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerDisconnected([8:411:2365]) [8:412:2366] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] [GOOD] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_with_cpu.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation 
traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_complete.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00464b/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table3.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table3.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table3.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table3.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table3.enable_alter_database_create_hive_first--true/cluster/node_1/logfile_jw6f8wg5.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00458f/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> 
test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/00455c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk5/testing_out_stuff/test_serverless.py.test_discovery.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database_wait.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database_wait.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database_wait.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_remove_database_wait.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_tables.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_tables.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_tables.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004569/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_tenants.py.TestTenants.test_create_tables.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_register_tenant_and_force_drop_with_table.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0045ab/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] 
[GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stdout'> ResourceWarning: Enable tracemalloc to get the object 
allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004579/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk6/testing_out_stuff/test_serverless.py.test_discovery_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00451c/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk7/testing_out_stuff/test_serverless.py.test_fixtures.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] [XFAIL] >>
ColumnShardTiers::DSConfigs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2024-11-21T08:58:33.928826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:58:33.929535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:58:33.929577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00477d/r3tmp/tmprNvSAP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7722, node 1 TClient is connected to server localhost:9405 2024-11-21T08:58:35.357712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:58:35.408692Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:35.409375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:35.409386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:35.409389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:35.409448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:35.468417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:35.468460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:35.479215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:58:35.641596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:35.691147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:35.691195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:35.691233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:35.691245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:35.691257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:35.691270Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:35.691281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:35.691310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:35.691348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:35.691360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:35.691371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:35.691382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:35.694541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:35.694561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:35.694571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:35.694576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:35.694588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:35.694592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:35.694601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:35.694608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:35.694614Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:35.694618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:35.694623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:58:35.694627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:58:35.694673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:35.694678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:35.694690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:35.694693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:35.694701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:35.694706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:35.694718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:35.694722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:35.694730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:35.694733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:35.696617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:35.696638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:35.696685Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:35.696698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:35.696709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:35.696718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:35.696728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:35.696738Z node 1 :TX_COLUMNSHARD ... stamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: 
{"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} REQUEST=CREATE OBJECT tiering2 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" } ] }` );RESULT=;EXPECTATION=1 REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tier2 (TYPE TIER);RESULT=
: Error: Execution, code: 1060
:1:25: Error: Executing DROP OBJECT TIER
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=
: Error: Execution, code: 1060
:1:25: Error: Executing DROP OBJECT TIER
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);RESULT=
: Error: Execution, code: 1060
:1:28: Error: Executing DROP OBJECT TIERING_RULE
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2024-11-21T09:00:23.233795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715821:0, at schemeshard: 72057594046644480 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=0 incorrect snapshot SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TieringsCount incorrect: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}};expectation=0 SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: 
{"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);RESULT=;EXPECTATION=1 2024-11-21T09:00:34.399578Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:34.399620Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:34.399650Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier2 (TYPE TIER);RESULT=;EXPECTATION=1 2024-11-21T09:00:45.440371Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:45.440399Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:45.440420Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=1;WAITING=0 incorrect snapshot SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} 2024-11-21T09:00:56.514934Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:56.514966Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:00:56.514989Z node 1 :TX_COLUMNSHARD ERROR: 
tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2024-11-21T08:53:32.279885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T08:53:32.279926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T08:53:32.279934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003841/r3tmp/tmpa4xi0S/pdisk_1.dat 2024-11-21T08:53:32.359118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10666, node 1 2024-11-21T08:53:32.473664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:53:32.473688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:53:32.473694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:53:32.473805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:53:32.479862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T08:53:32.555519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:32.555551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:32.567128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7038 2024-11-21T08:53:32.977494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T08:53:33.821952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:33.821987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:33.861286Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T08:53:33.862707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:33.928994Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:53:33.947168Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T08:53:33.947200Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T08:53:33.958994Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T08:53:33.959206Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T08:53:33.959239Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T08:53:33.959246Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T08:53:33.959252Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T08:53:33.959260Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T08:53:33.959265Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T08:53:33.959273Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T08:53:33.959495Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T08:53:34.153007Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.153032Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T08:53:34.154424Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T08:53:34.156405Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T08:53:34.156504Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T08:53:34.157393Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T08:53:34.162955Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T08:53:34.162977Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T08:53:34.162989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T08:53:34.164921Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:53:34.164955Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:53:34.166429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T08:53:34.167989Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T08:53:34.168017Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T08:53:34.170981Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T08:53:34.183131Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:53:34.205026Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T08:53:34.323656Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T08:53:34.480748Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T08:53:35.228559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.228592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:53:35.231427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T08:53:35.266468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:35.266520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:35.266576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:35.266599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:35.266621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:35.266646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:53:35.266671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:53:35.266694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:53:35.266715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:53:35.266740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:53:35.266782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:53:35.266805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2244:2819];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:53:35.275111Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:53:35.275146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:53:35.275179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:53:35.275201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:53:35.275222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:53:35.275244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2251:2824];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... raversal 2024-11-21T08:59:40.855155Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:43.579411Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:59:43.579488Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:43.589774Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:43.589802Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:46.211252Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:46.211281Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:47.525241Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:48.931963Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:48.931998Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:50.487062Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:59:50.487142Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:51.897924Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:51.897952Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:54.445008Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T08:59:54.465758Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:54.465786Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T08:59:57.042547Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T08:59:57.042620Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T08:59:57.063601Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:57.063637Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T08:59:59.638475Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T08:59:59.638513Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:00.932396Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:02.301230Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:02.301264Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:03.755714Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:03.755806Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:05.145341Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:05.145375Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:07.622544Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:07.643261Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:07.643290Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:10.313004Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:10.313084Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:10.323427Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:10.323457Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:13.046125Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:13.046173Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:14.375236Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:15.809668Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:15.809700Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:00:16.020350Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T09:00:16.020390Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:16.020395Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:16.020400Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T09:00:17.350121Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:17.350196Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:18.817853Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:18.817883Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:21.487742Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:21.508460Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:21.508488Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:23.871022Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:23.871101Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:23.912660Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:23.912691Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:26.481441Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:26.481478Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:27.628868Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:28.970858Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:28.970905Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:30.493489Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:30.493592Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:31.920687Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:31.920729Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:34.577132Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:34.598030Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:34.598068Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:37.491827Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:37.491911Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:37.502348Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:37.502387Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:00:40.304357Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:40.304391Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:41.623143Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:42.990435Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:42.990476Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:44.460495Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:44.460582Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:45.866164Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:45.866200Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:48.577341Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:48.587729Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:48.587763Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:51.288356Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:51.288457Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:00:51.309240Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:51.309277Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:53.997884Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:53.997918Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:55.358683Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:00:56.585659Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:00:56.585693Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:00:56.658537Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T09:00:56.658571Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 197.000000s, at schemeshard: 72075186224037889 2024-11-21T09:00:56.658650Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... 
waiting for TEvPropagateStatistics 2024-11-21T09:00:56.680320Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:00:56.838154Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T09:00:56.838183Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:56.838186Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:00:56.838189Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T09:00:58.159558Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:00:58.159638Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... waiting for TEvPropagateStatistics (done) 2024-11-21T09:00:58.159722Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:17229:10816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:00:58.160433Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T09:00:58.160443Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:17229:10816], StatRequests.size() = 1 >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] >> test_storage_config.py::TestStorageConfig::test_cases[case_0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> TxUsage::WriteToTopic_Demo_44 [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_44 [GOOD] Test command err: 2024-11-21T08:58:39.369754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654386542412744:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:39.369773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T08:58:39.497172Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003e45/r3tmp/tmpPM3xqE/pdisk_1.dat 2024-11-21T08:58:39.772459Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:58:39.772797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:39.772811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:58:39.785553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21808, node 1 2024-11-21T08:58:39.822507Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T08:58:40.228029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003e45/r3tmp/yandexIXHAbt.tmp 2024-11-21T08:58:40.228049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003e45/r3tmp/yandexIXHAbt.tmp 2024-11-21T08:58:40.228093Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003e45/r3tmp/yandexIXHAbt.tmp 2024-11-21T08:58:40.228132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:58:40.390706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390837380636:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.390752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.391113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439654390837380648:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:58:40.480658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T08:58:40.528823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439654390837380650:2297], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T08:58:40.584600Z INFO: TTestServer started on Port 5913 GrpcPort 21808 TClient is connected to server localhost:5913 PQClient connected to localhost:21808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1732179520561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:40.837339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T08:58:40.841098Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T08:58:40.868331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T08:58:41.038776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T08:58:41.389711Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439654390837380717:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T08:58:41.390263Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRjYjcyNjMtOWExYTNiYzYtMjc4OTUyMTYtZTFiMWE0NzY=, ActorId: [1:7439654390837380625:2291], ActorState: ExecuteState, TraceId: 01jd6z3tw60bpgt5cnm5ad74h4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T08:58:41.400437Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T08:58:41.565760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.571703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T08:58:41.625621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439654399427315788:2627] 2024-11-21T08:58:44.364969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654386542412744:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:44.365051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T08:58:47.881112Z :WriteToTopic_Two_WriteSession INFO: TTopicSdkTestSetup started 2024-11-21T08:58:47.900249Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T08:58:47.960273Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T08:58:47.960369Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T08:58:47.978697Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439654420902152617:2816] connected; active server actors: 1 2024-11-21T08:58:47.978813Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T08:58:47.987532Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T08:58:47.987641Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T08:58:47.988036Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T08:58:47.988156Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T08:58:47.988172Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T08:58:47.988175Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T08:58:47.996524Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T08:58:47.996569Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T08:58:47.998441Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T08:58:48.005293Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T08:58:48.005310Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T08:58:48.005405Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T08:58:48.005424Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014054Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420902152616:2815], now have 1 active actors on pipe 2024-11-21T08:58:48.014076Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T08:58:48.014079Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439654420902152653:2446], now have 1 active actors on pipe 2024-11-21T08:58:48.016441Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439654386542413124 RawX2: 4294969471 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitCha ... 
c: 'topic_A' requestId: 2024-11-21T09:00:59.780619Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T09:00:59.780641Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:59.780664Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 95 (startOffset 0) session test-consumer_9_1_3447894107855470698_v1 2024-11-21T09:00:59.780696Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 95 end: 100 } } } } 2024-11-21T09:00:59.780746Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 10 2024-11-21T09:00:59.780760Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:00:59.780778Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 100 prev 91 end 100 by cookie 12 2024-11-21T09:00:59.780797Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 10 } 2024-11-21T09:00:59.780803Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 92 endOffset 100 with cookie 10 2024-11-21T09:00:59.780827Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T09:00:59.780839Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T09:00:59.780914Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 replying for commits: assignId# 1, from# 10, to# 10, offset# 92 2024-11-21T09:00:59.781042Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:59.781057Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 100 (startOffset 0) session test-consumer_9_1_3447894107855470698_v1 2024-11-21T09:00:59.781060Z :DEBUG: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 92 } } 2024-11-21T09:00:59.781076Z node 9 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T09:00:59.781076Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 11 2024-11-21T09:00:59.781097Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 11 } 2024-11-21T09:00:59.781106Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 95 endOffset 100 with cookie 11 2024-11-21T09:00:59.781111Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 replying for commits: assignId# 1, from# 11, to# 11, offset# 95 2024-11-21T09:00:59.781187Z :DEBUG: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 95 } } 2024-11-21T09:00:59.781335Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 100 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:00:59.781356Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:00:59.781359Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 12 2024-11-21T09:00:59.781371Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 12 } 2024-11-21T09:00:59.781382Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 100 endOffset 100 with cookie 12 2024-11-21T09:00:59.781388Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 replying for commits: assignId# 1, from# 12, to# 12, offset# 100 2024-11-21T09:00:59.781540Z :DEBUG: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 100 } } 2024-11-21T09:01:00.450487Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 checking auth because of timeout 2024-11-21T09:01:00.450538Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 auth for : test-consumer 2024-11-21T09:01:00.450788Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 Handle describe topics response 2024-11-21T09:01:00.450816Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 auth is DEAD 2024-11-21T09:01:00.450831Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 auth ok: topics# 1, initDone# 1 2024-11-21T09:01:00.482364Z :INFO: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 
61060 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:01:01.447949Z :INFO: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] Closing read session. Close timeout: 0.000000s 2024-11-21T09:01:01.447970Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:99:100 2024-11-21T09:01:01.447978Z :INFO: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 62025 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:01:01.448007Z :NOTICE: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:01:01.448016Z :DEBUG: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] [] Abort session to cluster 2024-11-21T09:01:01.448125Z :NOTICE: [/Root] [/Root] [d724e245-63b7a0ea-d3e15266-9a74eab1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:01:01.448339Z :INFO: [/Root] SessionId [test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T09:01:01.448347Z :INFO: [/Root] SessionId [test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T09:01:01.448354Z :DEBUG: [/Root] SessionId [test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T09:01:01.448477Z :INFO: [/Root] SessionId [test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T09:01:01.448481Z :DEBUG: [/Root] SessionId [test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T09:01:01.448610Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 grpc read done: success# 0, data# { } 2024-11-21T09:01:01.448623Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 grpc read failed 2024-11-21T09:01:01.448628Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 closed 2024-11-21T09:01:01.448767Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3447894107855470698_v1 is DEAD 2024-11-21T09:01:01.448849Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:01:01.448861Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_9_1_3447894107855470698_v1 2024-11-21T09:01:01.448884Z node 9 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [9:7439654732306944182:2498] disconnected; active server actors: 1 2024-11-21T09:01:01.448884Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654732306944185:2501] destroyed 2024-11-21T09:01:01.448891Z node 9 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [9:7439654732306944182:2498] client test-consumer disconnected session test-consumer_9_1_3447894107855470698_v1 2024-11-21T09:01:01.448900Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_9_1_3447894107855470698_v1 2024-11-21T09:01:01.449039Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0 grpc read done: success: 0 data: 2024-11-21T09:01:01.449045Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0 grpc read failed 2024-11-21T09:01:01.449117Z node 9 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0 2024-11-21T09:01:01.449123Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|981fbee2-b782199e-3600433a-f1142a78_0 is DEAD 2024-11-21T09:01:01.449506Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:01:01.449516Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:01:01.449539Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] 
Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:01:01.449547Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654732306944054:2478] destroyed 2024-11-21T09:01:01.449549Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:01:01.449551Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439654732306944057:2478] destroyed 2024-11-21T09:01:01.449570Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateExternalTablet >> THiveTest::TestCreateExternalTablet [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=945126) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ...
contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] |92.0%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] |92.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ...
results_accumulator.log} >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] [XFAIL] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_create_table.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_create_table.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_create_table.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_create_table.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004583/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_tenants.py.TestTenants.test_create_drop_create_table2.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2024-11-21T08:59:20.457582Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:20.459397Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.459935Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:20.460155Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:20.460375Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:20.460387Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:20.460528Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 
2024-11-21T08:59:20.460531Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:20.461313Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:20.463497Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:20.465701Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:20.465716Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:20.465926Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.465947Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.465965Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.465981Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.466005Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.466020Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.466039Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.466144Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:20.466155Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T08:59:20.466158Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T08:59:20.466164Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:20.466169Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:20.466952Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:20.470542Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.470560Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.470565Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:20.474171Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:20.474247Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.474297Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.474309Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.474317Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:20.474854Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:20.475548Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor 
bootstrap Drives# [] Record# {} 2024-11-21T08:59:20.475561Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:20.475962Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:20.475974Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:20.475977Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:20.475996Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T08:59:20.476009Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.476049Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T08:59:20.476052Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T08:59:20.476177Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.477393Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:20.477412Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:20.477417Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:20.477424Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.477462Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.477476Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:20.477515Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T08:59:20.477520Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T08:59:20.477526Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.477602Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.477643Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.477665Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:59:20.479980Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T08:59:20.481143Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.481181Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 
72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:20.481187Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T08:59:20.482847Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T08:59:20.482861Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T08:59:20.482888Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:20.483214Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.483335Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.483368Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T08:59:20.483375Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T08:59:20.483380Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.483441Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T08:59:20.483460Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T08:59:20.483465Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T08:59:20.483479Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T08:59:20.483485Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.483498Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.483515Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.483531Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:20.483537Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T08:59:20.483548Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T08:59:20.483552Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T08:59:20.483565Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T08:59:20.483596Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
4-11-21T09:01:02.711417Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T09:01:02.711453Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "def1" } } GroupParameters { StoragePoolSpecifier { Name: "def2" } } GroupParameters { StoragePoolSpecifier { Name: "def3" } } ReturnAllMatchingGroups: true 2024-11-21T09:01:02.711481Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1145b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2024-11-21T09:01:02.711491Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:01:02.711531Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [147:303:2283] 2024-11-21T09:01:02.711536Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [147:303:2283] 2024-11-21T09:01:02.711546Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:01:02.711551Z node 147 :TABLET_RESOLVER DEBUG: SelectForward node 147 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [147:92:2120] 2024-11-21T09:01:02.711558Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [147:303:2283] 2024-11-21T09:01:02.711564Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [147:303:2283] 2024-11-21T09:01:02.711568Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [147:303:2283] 2024-11-21T09:01:02.711588Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [147:303:2283] 2024-11-21T09:01:02.711601Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [147:303:2283] 2024-11-21T09:01:02.711605Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [147:303:2283] 2024-11-21T09:01:02.711609Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [147:303:2283] 2024-11-21T09:01:02.711614Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [147:270:2260] EventType# 268637702 2024-11-21T09:01:02.711634Z node 147 :HIVE DEBUG: HIVE#72057594037927937 Connected to tablet 72057594037932033 from tablet 72057594037927937 2024-11-21T09:01:02.711655Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2024-11-21T09:01:02.711660Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:01:02.711692Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T09:01:02.711697Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 
2024-11-21T09:01:02.711748Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483648 StoragePoolName: "def1" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483649 StoragePoolName: "def2" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483650 StoragePoolName: "def3" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T09:01:02.711773Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2024-11-21T09:01:02.711778Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:01:02.711789Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{76144397215552}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T09:01:02.711804Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{76144397215552}: tablet 72075186224037888 channel 0 assigned to group 2147483648 2024-11-21T09:01:02.711843Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{76144397215552}: tablet 72075186224037888 channel 1 assigned to group 2147483649 2024-11-21T09:01:02.711858Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{76144397215552}: tablet 72075186224037888 channel 2 assigned to group 2147483650 2024-11-21T09:01:02.711881Z node 147 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{76144397215552}(72075186224037888)::Execute - TryToBoot was not successfull 2024-11-21T09:01:02.711896Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2024-11-21T09:01:02.711901Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:01:02.722352Z node 147 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] bootstrap ActorId# [147:306:2286] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:703:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:01:02.722416Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Id# [72057594037927937:2:4:0:0:703:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:01:02.722426Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] restore Id# [72057594037927937:2:4:0:0:703:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:01:02.722438Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG33 2024-11-21T09:01:02.722445Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG32 2024-11-21T09:01:02.722483Z node 147 :BS_PROXY DEBUG: Send to queueActorId# [147:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:703:1] FDS# 703 HandleClass# 
TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:01:02.722983Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:703:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85535 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:01:02.723018Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T09:01:02.723028Z node 147 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:01:02.723068Z node 147 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T09:01:02.723101Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2024-11-21T09:01:02.723121Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040201 [147:259:2254]} 2024-11-21T09:01:02.723154Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{76144397215552}(72075186224037888)::Complete SideEffects: {Notifications: 0x10040207 [147:259:2254] Callbacks: 1 Actions: NKikimr::TTabletKillRequest} 2024-11-21T09:01:02.723208Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:01:02.723233Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:01:02.723242Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:01:02.723246Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:01:02.723254Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723261Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723266Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723326Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [147:310:2289] 2024-11-21T09:01:02.723337Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [147:310:2289] 2024-11-21T09:01:02.723358Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:01:02.723373Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T09:01:02.723383Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T09:01:02.723388Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T09:01:02.723392Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T09:01:02.723396Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723402Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723407Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T09:01:02.723420Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T09:01:02.723426Z node 147 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T09:01:02.723440Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [147:310:2289] 2024-11-21T09:01:02.723445Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [147:310:2289] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] >> test_storage_config.py::TestStorageConfig::test_cases[case_0] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_10] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/grpcio/py3/grpc/_channel.py:1407: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/typing.py:1601: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> KqpOlapScheme::InsertAddInsertDrop [GOOD] >> KqpOlapScheme::InsertDropAddColumn >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2024-11-21T08:56:27.120939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:56:27.120959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.120963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:56:27.120966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:56:27.120975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:56:27.120978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:56:27.120985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:56:27.121043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:56:27.123083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:56:27.123096Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:56:27.124284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:56:27.124408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:56:27.124420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T08:56:27.124996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:56:27.125063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:56:27.125106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, 
LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.125545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.125725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:56:27.125729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.125739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:56:27.125749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.160735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T08:56:27.160788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.160823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T08:56:27.160848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T08:56:27.160853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T08:56:27.161338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T08:56:27.161346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:56:27.161349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:56:27.161628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:56:27.161912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.161922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.161933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.162276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:56:27.162822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:56:27.162877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T08:56:27.163009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.163013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T08:56:27.163016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.327367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T08:56:27.327403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T08:56:27.327410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.327463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:56:27.327469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T08:56:27.327488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.327494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T08:56:27.327890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T08:56:27.327897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T08:56:27.327927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T08:56:27.327930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T08:56:27.327985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T08:56:27.328001Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T08:56:27.328017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:56:27.328020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.328024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:56:27.328027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:56:27.328029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:56:27.328032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:56:27.328037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T08:56:27.328040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T08:56:27.328043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T08:56:27.328302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.328313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T08:56:27.328316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T08:56:27.328319Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T08:56:27.328322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T08:56:27.328331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T08:56:27.328334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T0 ... 
stered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T09:01:10.303184Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T09:01:10.303235Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 229 RawX2: 120259086509 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T09:01:10.303245Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T09:01:10.303302Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:01:10.303309Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T09:01:10.303340Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T09:01:10.303348Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T09:01:10.303971Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T09:01:10.303991Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T09:01:10.304057Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T09:01:10.304065Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:242:2232], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T09:01:10.304162Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:01:10.304176Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T09:01:10.304198Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:01:10.304203Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:01:10.304246Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:01:10.304255Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:01:10.304264Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:01:10.304270Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:01:10.304288Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T09:01:10.304296Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T09:01:10.304302Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T09:01:10.304576Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046578944, cookie: 1 2024-11-21T09:01:10.304594Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T09:01:10.304598Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T09:01:10.304603Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T09:01:10.304608Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T09:01:10.304626Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T09:01:10.304630Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [28:95:2130] 2024-11-21T09:01:10.305485Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T09:01:10.305597Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] Bootstrap 2024-11-21T09:01:10.306651Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] Become StateWork (SchemeCache [28:296:2277]) 2024-11-21T09:01:10.306854Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:01:10.307446Z node 28 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:01:10.308866Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:01:10.309667Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:01:10.309904Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:01:10.310043Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:01:10.310750Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:01:10.310765Z node 28 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:01:10.310820Z node 28 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:01:10.312564Z node 28 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:01:10.312724Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:01:10.312757Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:01:10.312782Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:01:10.312798Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:01:10.312827Z node 28 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:01:10.334338Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:01:10.334391Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:01:10.345313Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:01:10.345366Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:01:10.345379Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:01:10.345388Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:01:10.345422Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:01:10.345432Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:01:10.345438Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:01:10.345447Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:01:10.356474Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:01:10.356547Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:01:10.356750Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:01:10.356758Z node 28 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:01:10.356808Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:01:10.356958Z node 28 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/003126/r3tmp/tmpeyCZRr/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T09:01:10.357025Z node 28 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 28:1 Path# /home/runner/.ya/build/build_root/jptk/003126/r3tmp/tmpeyCZRr/pdisk_1.dat 2024-11-21T09:01:10.368237Z node 28 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:01:10.368322Z node 28 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:01:10.368330Z node 28 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:01:10.368362Z node 28 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:01:10.368378Z node 28 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:01:10.368462Z node 28 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:01:10.368470Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:01:10.368474Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:01:10.368490Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[28:385:2341] 2024-11-21T09:01:10.368910Z node 28 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:01:10.368918Z node 28 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [28:380:2338] 2024-11-21T09:01:10.369044Z node 28 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[28:385:2341] 2024-11-21T09:01:10.369064Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:01:10.369070Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:01:10.369073Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] |92.0%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] |92.0%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_10] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] [GOOD] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] >> KqpOlapScheme::InsertDropAddColumn [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::InsertDropAddColumn [GOOD] Test command err: Trying to start YDB, gRPC: 17305, MsgBus: 22928 2024-11-21T08:57:07.861646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653991626912392:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:07.861660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040cf/r3tmp/tmpr3HajK/pdisk_1.dat 2024-11-21T08:57:07.919237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17305, node 1 2024-11-21T08:57:07.938647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:07.938658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:07.938660Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:07.938690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T08:57:07.963494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:07.963529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:07.964046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22928 TClient is connected to server localhost:22928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T08:57:08.012103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1, TTL = Interval("PT1H") ON created_at); 2024-11-21T08:57:08.200172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653995921880296:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.200201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.241597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T08:57:08.249216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:08.249267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:08.249310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:08.249329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:08.249346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:08.249363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:08.249384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:08.249404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:08.249424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:08.249442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:08.249459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:08.249474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439653995921880372:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T08:57:08.254669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:08.254688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:08.254701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:08.254711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:08.254735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:08.254740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:08.254750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:08.254756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:08.254768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:08.254772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:08.254778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:08.254782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:08.254899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:08.254908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:08.254927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:08.254938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T08:57:08.254950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:08.254956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:08.254978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:08.254984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:08.255000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:08.255003Z node 1 :TX_COLUMNSHARD WARN: tab ... , int_column Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT =1); 2024-11-21T09:01:09.633439Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7439655030781271606:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.633469Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.635659Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:01:09.642300Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:01:09.642325Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:01:09.642358Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:01:09.642375Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:01:09.642394Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:01:09.642413Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:01:09.642429Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:01:09.642456Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:01:09.642480Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:01:09.642493Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:01:09.642506Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:01:09.642522Z node 13 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[13:7439655030781271652:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:01:09.642929Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:01:09.642942Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:01:09.642950Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:01:09.642953Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:01:09.642963Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:01:09.642971Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:01:09.642978Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:01:09.642981Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:01:09.642988Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:01:09.642994Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:01:09.642998Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:01:09.643005Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:01:09.643049Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:01:09.643060Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:01:09.643078Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:01:09.643082Z node 
13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:01:09.643091Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:01:09.643095Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:01:09.643112Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:01:09.643121Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:01:09.643132Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:01:09.643139Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:01:09.644496Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:01:09.690416Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1392;columns=2; 2024-11-21T09:01:09.703527Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7439655030781271743:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.703556Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.706019Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:01:09.709059Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:01:09.711365Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7439655030781271775:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.711389Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:01:09.713923Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:01:09.717180Z node 13 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:01:14.369250Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7439655030781271007:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:01:14.369288Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] [GOOD] >> 
test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_10] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004562/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_dynamic_tenants.py.test_drop_tenant_without_nodes_could_continue.enable_alter_database_create_hive_first--true/cluster/node_1/logfile_336hizda.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] >>
test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >>
test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.0%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] >> TTablesWithReboots::LostBorrowAckWithReboots [GOOD] >>
test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::LostBorrowAckWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:58:50.696070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:58:50.696088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:50.696091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:58:50.696095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:58:50.696104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:58:50.696107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:58:50.696113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:58:50.696169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:58:50.703406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:58:50.703421Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:58:50.705057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:58:50.705124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:58:50.705145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:58:50.706855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:58:50.706909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:58:50.706990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:50.707153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:50.707656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:50.707845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:50.707851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:50.707858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:58:50.707862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:50.707866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:58:50.707891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:58:50.708811Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:58:50.719153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:58:50.719226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.719273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:58:50.719321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:58:50.719327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.719952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:50.719969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:58:50.720003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.720010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:58:50.720013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:58:50.720016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:58:50.720288Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.720296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:58:50.720299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:58:50.720500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.720505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.720508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:50.720513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:58:50.721050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:58:50.721425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:58:50.721466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:58:50.721621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:58:50.721641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:58:50.721646Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:50.721682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:58:50.721686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:58:50.721712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:58:50.721721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:58:50.722059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:58:50.722065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:58:50.722099Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:58:50.722102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:58:50.722162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:58:50.722166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:58:50.722175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:58:50.722177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:50.722181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:58:50.722184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:58:50.722187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:58:50.722189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:58:50.722196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:58:50.722200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:58:50.722203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 057594046678944, txId: 1005 2024-11-21T09:01:27.477359Z node 122 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:01:27.477364Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:01:27.477376Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:01:27.477914Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 523986012430 } TabletId: 72075186233409546 State: 4 2024-11-21T09:01:27.477937Z node 122 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:01:27.477986Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:01:27.477993Z node 122 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:01:27.478073Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:01:27.478103Z node 122 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:01:27.478107Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:01:27.478113Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T09:01:27.478116Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:01:27.478121Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:01:27.478125Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:01:27.478145Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:01:27.478652Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:01:27.478707Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:01:27.479161Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:01:27.479302Z node 122 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:01:27.479358Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:01:27.479416Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2024-11-21T09:01:27.480068Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:01:27.480079Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:01:27.480096Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:659:2058] recipient: [122:15:2062] 2024-11-21T09:01:27.481019Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:01:27.481031Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:01:27.481118Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T09:01:27.481174Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:01:27.481184Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:01:27.481239Z node 122 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:01:27.481254Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:01:27.481258Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [122:662:2625] TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T09:01:27.481309Z node 122 
:HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:01:27.481319Z node 122 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:667:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:668:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:670:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:671:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:673:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:674:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:677:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:678:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:682:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:683:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:691:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:692:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:694:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:695:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:696:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:697:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:699:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:700:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:701:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:702:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:703:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:704:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:705:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:706:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:708:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:709:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:710:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:711:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:712:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:713:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:714:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:715:2058] recipient: 
[122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:716:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:717:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:718:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:719:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:720:2058] recipient: [122:15:2062] Leader for TabletID 72075186233409546 is [122:330:2318] sender: [122:721:2058] recipient: [122:15:2062] 2024-11-21T09:01:28.443585Z node 122 :HIVE INFO: [72057594037968897] TEvRequestHiveInfo, msg: TabletID: 72075186233409546 ReturnFollowers: false 2024-11-21T09:01:28.445127Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 523986012518 } TabletId: 72075186233409547 State: 4 2024-11-21T09:01:28.445154Z node 122 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:01:28.445480Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:01:28.445547Z node 122 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:01:28.445911Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:01:28.445975Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:01:28.446111Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:01:28.446116Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:01:28.446126Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409547 2024-11-21T09:01:28.446718Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:01:28.446730Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:01:28.446785Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409547 >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason 
'{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/public/api/grpc/ydb_table_v1__intpy3___pb2_grpc.py:11: ResourceWarning: unclosed ResourceWarning: Enable
tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004573/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff/test_serverless.py.test_create_table_with_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter >> test_ping.py::TestPing::test_ping >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] [GOOD] >>
test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> test_queues_managing.py::TestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget >> test_ping.py::TestPing::test_ping [GOOD] >> test_system_views.py::TestQueryMetrics::test_case [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >>
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for
TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:59:29.341935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:59:29.341955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:29.341959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:59:29.341964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:59:29.341969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:59:29.341973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:59:29.341981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:59:29.342056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:59:29.350263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:59:29.350274Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:59:29.351508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:59:29.351562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:59:29.351588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:59:29.353243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:59:29.353300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:59:29.353356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:29.353506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:29.353984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:29.354163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:29.354169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:29.354178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:59:29.354182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable 
to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:29.354186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:59:29.354216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T08:59:29.355004Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:59:29.364845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:59:29.364894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.364934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:59:29.364966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:59:29.364971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:59:29.365427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:59:29.365436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:59:29.365440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:59:29.365685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:59:29.365900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.365906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T08:59:29.365909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:29.365914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:59:29.366401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:59:29.366734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:59:29.366783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:59:29.366919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:59:29.366935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:59:29.366939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:29.366971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:59:29.366975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:59:29.366993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:59:29.367001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:59:29.367278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:59:29.367284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:59:29.367312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:59:29.367315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:59:29.367372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:59:29.367376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:59:29.367383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#1:0 progress is 1/1 2024-11-21T08:59:29.367386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:29.367390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:59:29.367393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:59:29.367396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:59:29.367398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:59:29.367405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:59:29.367408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:59:29.367411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ctionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:01:42.242998Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:01:42.243047Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 58us result status StatusSuccess 2024-11-21T09:01:42.243182Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: 
"indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:01:42.263653Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:01:42.263686Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:992:2817] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T09:01:42.263721Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179702238267 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179702238267 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732179702238267 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:01:42.264437Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T09:01:42.264460Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:992:2817] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] Test command err: 
contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=966184) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback 
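The repeated "ResourceWarning: unclosed ..." messages above are emitted when a socket or HTTP connection opened inside the send_message(...) helper is reclaimed by the garbage collector while still open, and the accompanying "Enable tracemalloc to get the object allocation traceback" hint only means the interpreter was started without tracemalloc, so Python cannot show where the leaked object was created. The following is a minimal, hypothetical sketch, not the actual ydb/tests/library/sqs client: it assumes a helper that opens one http.client.HTTPConnection per call, and shows how enabling tracemalloc plus a deterministic close() would both locate and silence this kind of warning.

# Hypothetical illustration; the real SQS test helper may be structured differently.
import http.client
import tracemalloc
import warnings

# Record allocation tracebacks so ResourceWarnings point at the leaking call site
# (equivalent to running the tests with PYTHONTRACEMALLOC=1 or -X tracemalloc).
tracemalloc.start(25)
warnings.simplefilter("always", ResourceWarning)

def send_message(server, port, path, body):
    # Close the connection deterministically instead of relying on the GC,
    # whose cleanup is what triggers "ResourceWarning: unclosed <socket ...>".
    conn = http.client.HTTPConnection(server, port, timeout=30)
    try:
        conn.request("POST", path, body=body.encode())
        return conn.getresponse().read().decode()
    finally:
        conn.close()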
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable 
tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/co ... et the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed 
delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, 
sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004548/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk1/testing_out_stuff/test_serverless.py.test_create_table_using_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T08:57:29.512538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T08:57:29.512556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:29.512559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T08:57:29.512562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T08:57:29.512566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T08:57:29.512568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T08:57:29.512574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T08:57:29.512635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T08:57:29.519579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T08:57:29.519592Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T08:57:29.521301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T08:57:29.521467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T08:57:29.521535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T08:57:29.524321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T08:57:29.524407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T08:57:29.524501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.524759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:29.525453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.525724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:29.525735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.525746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T08:57:29.525752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:29.525759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T08:57:29.525796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] 
recipient: [1:207:2209] 2024-11-21T08:57:29.527167Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T08:57:29.542853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T08:57:29.542925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.542980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T08:57:29.543023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T08:57:29.543030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.543602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.543629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T08:57:29.543683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.543692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T08:57:29.543697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T08:57:29.543701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T08:57:29.544054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.544066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T08:57:29.544070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T08:57:29.544393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.544403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.544409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.544416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.544953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T08:57:29.545328Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T08:57:29.545370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T08:57:29.545563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T08:57:29.545589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T08:57:29.545596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.545646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T08:57:29.545652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T08:57:29.545678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T08:57:29.545689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T08:57:29.546051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T08:57:29.546062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T08:57:29.546093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T08:57:29.546098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T08:57:29.546167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T08:57:29.546172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T08:57:29.546183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T08:57:29.546186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.546192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T08:57:29.546198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T08:57:29.546203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T08:57:29.546206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T08:57:29.546217Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T08:57:29.546221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T08:57:29.546225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:01:48.811284Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:01:48.811327Z node 202 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 52us result status StatusSuccess 2024-11-21T09:01:48.811444Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 
ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:01:48.821815Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1083:2853] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:01:48.821861Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1029:2853] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 72075186233409551 } 2024-11-21T09:01:48.821896Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1083:2853] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732179708809142 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732179708809142 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732179708809142 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:01:48.822817Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1083:2853] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T09:01:48.822837Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1029:2853] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cp_ic.py::TestCpIc::test_discovery >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_dispatch.py::TestMapping::test_mapping |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
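The pytest stash.py ResourceWarnings above report node stdout/stderr BufferedWriter objects that were never closed, and the interpreter's hint suggests tracemalloc for locating where each one was opened. Below is a minimal, self-contained Python sketch of that diagnostic loop, not taken from the YDB test harness: the temporary path and the leak_writer/closed_writer helpers are invented for illustration. With tracemalloc tracing, the warning gains an "Object allocated at ..." traceback instead of the generic hint, and closing the writer via a with-statement removes the warning altogether.

import os
import tempfile
import tracemalloc
import warnings

tracemalloc.start(25)                              # keep up to 25 frames per allocation
warnings.simplefilter("always", ResourceWarning)   # ResourceWarning is silenced by default

def leak_writer(path):
    f = open(path, "wb")                           # BufferedWriter that is never closed
    f.write(b"cluster node output\n")
    # falling off the end without f.close() triggers, at finalization:
    # "ResourceWarning: unclosed file <_io.BufferedWriter ...>"

def closed_writer(path):
    with open(path, "wb") as f:                    # closed deterministically, no warning
        f.write(b"cluster node output\n")

tmp = os.path.join(tempfile.gettempdir(), "resourcewarning_demo.log")
leak_writer(tmp)     # warning now carries the allocation traceback thanks to tracemalloc
closed_writer(tmp)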
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004535/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk0/testing_out_stuff/test_serverless.py.test_create_table.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004599/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk2/testing_out_stuff/test_serverless.py.test_create_table_with_alter_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=975916) is multi-threaded, use of fork() may lead to deadlocks in the child. 
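The DeprecationWarning that closes this chunk is raised by CPython's multiprocessing/popen_fork.py: forking a process that already runs threads can deadlock the child. A minimal sketch of the usual remedy, with no assumptions about how the YDB test suite actually spawns its subprocesses (the _square worker is a placeholder): request the "spawn" start method so every child begins as a fresh interpreter instead of a fork of the multi-threaded parent. Calling multiprocessing.set_start_method("spawn") once at program start has the same effect process-wide.

import multiprocessing as mp

def _square(n: int) -> int:
    return n * n

if __name__ == "__main__":
    # A context bound to "spawn" avoids fork() entirely for this pool.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=2) as pool:
        print(pool.map(_square, range(4)))         # prints [0, 1, 4, 9]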
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable 
tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, 
body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation tra ... e object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, 
queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) 
ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_ping.py::TestPing::test_error_on_cgi_parameters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view [GOOD] >> test_ping.py::TestPing::test_error_on_cgi_parameters [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_ping.py::TestPing::test_error_on_non_ping_path >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.2%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=998534) is multi-threaded, use of fork() may lead to deadlocks in the child. 
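Several of the Test command err sections above point at ydb/tests/library/sqs/requests_client.py:140 for using the deprecated Logger.warn alias. A small sketch of the rename the warning asks for; the logger name and the code/reason/text values here are placeholders, and passing lazy %-style arguments instead of pre-formatting the string is just one reasonable variant of the same call.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("sqs.requests_client.example")

code, reason, text = 400, "ThrottlingException", "Too many requests"
# Deprecated spelling the warning points at:
#   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
# Current spelling, warning() with arguments formatted lazily by the logging module:
logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
               code, reason, text)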
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable 
tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, 
body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation tra ... 22643, 0, 0)> send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) 
ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, 
queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00431a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scheme/testing_out_stuff/test_ydb_scheme.py.TestSchemeDescribe.test_describe_view/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00431a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scheme/testing_out_stuff/test_ydb_scheme.py.TestSchemeDescribe.test_describe_view/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TStorageBalanceTest::TestScenario3 [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2024-11-21T08:59:20.948429Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T08:59:20.949100Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.949167Z node 1 :BS_NODE DEBUG: 
{NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T08:59:20.949291Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T08:59:20.949444Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T08:59:20.949452Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T08:59:20.949621Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T08:59:20.949627Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T08:59:20.949660Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T08:59:20.949733Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T08:59:20.951547Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T08:59:20.951563Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T08:59:20.951813Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951845Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951872Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951899Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951924Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951946Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951965Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T08:59:20.951968Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T08:59:20.951976Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T08:59:20.951979Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T08:59:20.951984Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T08:59:20.951988Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T08:59:20.952083Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T08:59:20.954759Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.954772Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.954778Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T08:59:20.954997Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T08:59:20.955030Z node 1 
:TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.955058Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.955063Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.955067Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T08:59:20.955457Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T08:59:20.955532Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T08:59:20.955539Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T08:59:20.955915Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T08:59:20.955930Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T08:59:20.955933Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T08:59:20.955955Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T08:59:20.955963Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.955984Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T08:59:20.955987Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T08:59:20.956073Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T08:59:20.956113Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T08:59:20.956119Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T08:59:20.956123Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T08:59:20.956128Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.956157Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.956163Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:20.956183Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T08:59:20.956186Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T08:59:20.956191Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.956242Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { 
FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T08:59:20.956268Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T08:59:20.956283Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T08:59:20.956678Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T08:59:20.956792Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T08:59:20.956805Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T08:59:20.956809Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T08:59:20.957482Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T08:59:20.957491Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T08:59:20.957502Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T08:59:20.957692Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T08:59:20.957751Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.957767Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T08:59:20.957772Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T08:59:20.957777Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T08:59:20.957822Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T08:59:20.957836Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T08:59:20.957839Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T08:59:20.957846Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T08:59:20.957850Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.957856Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.957873Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T08:59:20.957880Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 
2024-11-21T08:59:20.957883Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T08:59:20.957889Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T08:59:20.957892Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T08:59:20.957898Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T08:59:20.957915Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037954 skipped channel 1 2024-11-21T09:02:11.506292Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037954 skipped channel 2 2024-11-21T09:02:11.506312Z node 25 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{4876773792160}(72075186224037954)::Execute - TryToBoot was not successfull 2024-11-21T09:02:11.506334Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1501, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{997, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2024-11-21T09:02:11.506343Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1501, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:02:11.517713Z node 25 :BS_PROXY_PUT INFO: [d8c78320900fe758] bootstrap ActorId# [25:11714:6240] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:493:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:02:11.517772Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Id# [72057594037927937:2:493:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:02:11.517781Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] restore Id# [72057594037927937:2:493:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:02:11.517792Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:493:0:0:246:1] Marker# BPG33 2024-11-21T09:02:11.517798Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:493:0:0:246:1] Marker# BPG32 2024-11-21T09:02:11.517834Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:352:2087] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:493:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:02:11.518577Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] received {EvVPutResult Status# OK ID# [72057594037927937:2:493:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 508 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 509 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:02:11.518610Z node 25 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Result# TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T09:02:11.518621Z node 25 :BS_PROXY_PUT INFO: [d8c78320900fe758] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { 
Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:02:11.518714Z node 25 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:493:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T09:02:11.518793Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} commited cookie 1 for step 493 2024-11-21T09:02:11.518809Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037954)::Complete 2024-11-21T09:02:11.518823Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{4876773792160}(72075186224037954)::Complete SideEffects: {Notifications: 0x7FF0000F [25:5471:2599]} 2024-11-21T09:02:11.518918Z node 25 :HIVE DEBUG: HIVE#72057594037927937 StorageBalancer received RestartCancelled for tablet (72075186224037954,0) 2024-11-21T09:02:11.518930Z node 25 :HIVE DEBUG: HIVE#72057594037927937 StorageBalancer initiating reassign for tablet 72075186224037962 2024-11-21T09:02:11.519087Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TEvReassignTablet TabletID: 72075186224037962 Channels: 1 Channels: 2 Channels: 0 ReassignReason: HIVE_REASSIGN_REASON_BALANCE 2024-11-21T09:02:11.519102Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1502, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2024-11-21T09:02:11.519109Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1502, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:02:11.519119Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037962,[0,1,2])::Execute 2024-11-21T09:02:11.519202Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037962 GroupParameters { StoragePoolSpecifier { Name: "def1" } } ReturnAllMatchingGroups: true 2024-11-21T09:02:11.519226Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1502, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{998, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2024-11-21T09:02:11.519235Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1502, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T09:02:11.519268Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [25:1273:2637] 2024-11-21T09:02:11.519273Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [25:1273:2637] 2024-11-21T09:02:11.519283Z node 25 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [25:1216:2599] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.054) 
*****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.056) 2024-11-21T09:02:11.620735Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK MatchingGroups { Groups { GroupID: 2147483649 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483650 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2900000000 Occupancy: 0.058 } AllocatedSize: 2900000000 } Groups { GroupID: 2147483651 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2900000000 Occupancy: 0.058 } AllocatedSize: 2900000000 } Groups { GroupID: 2147483652 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483653 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483654 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3200000000 Occupancy: 0.064 } AllocatedSize: 3200000000 } Groups { GroupID: 2147483655 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2700000000 Occupancy: 0.054 } AllocatedSize: 2700000000 } Groups { GroupID: 2147483656 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2500000000 Occupancy: 0.05 } AllocatedSize: 2500000000 } Groups { GroupID: 2147483657 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3100000000 Occupancy: 0.062 } AllocatedSize: 3100000000 } Groups { GroupID: 2147483658 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483659 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } } 2024-11-21T09:02:11.620823Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1503, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2024-11-21T09:02:11.620833Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1503, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T09:02:11.620851Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}(72075186224037962,HIVE_REASSIGN_REASON_BALANCE,[]) 2024-11-21T09:02:11.620875Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 channel 0 assigned to group 2147483656 2024-11-21T09:02:11.620881Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: 
tablet 72075186224037962 skipped reassign of channel 0 2024-11-21T09:02:11.620887Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 channel 1 assigned to group 2147483649 2024-11-21T09:02:11.620890Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 skipped reassign of channel 1 2024-11-21T09:02:11.620896Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 channel 2 assigned to group 2147483650 2024-11-21T09:02:11.620900Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 skipped reassign of channel 2 2024-11-21T09:02:11.620904Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 wasn't changed 2024-11-21T09:02:11.620908Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 skipped channel 0 2024-11-21T09:02:11.620945Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 skipped channel 1 2024-11-21T09:02:11.620969Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4876773792160}: tablet 72075186224037962 skipped channel 2 2024-11-21T09:02:11.620990Z node 25 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{4876773792160}(72075186224037962)::Execute - TryToBoot was not successfull 2024-11-21T09:02:11.621010Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1503, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{999, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2024-11-21T09:02:11.621020Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1503, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |92.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_db_counters.py::TestKqpCounters::test_case [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> 
test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1018372) is multi-threaded, use of fork() may lead to deadlocks in the child. 
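The DeprecationWarning above is raised because multiprocessing still uses fork() in a process that already has running threads. A minimal sketch of the usual mitigation, assuming a plain worker function rather than the actual YDB test fixture, is to select the spawn start method before creating any pools:

    # Hypothetical example, not part of ydb/tests: prefer "spawn" over "fork"
    # when the parent process is multi-threaded, which avoids the warning from
    # multiprocessing/popen_fork.py.
    import multiprocessing as mp

    def _square(x: int) -> int:
        return x * x

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")          # keeps the choice local to this pool
        with ctx.Pool(processes=2) as pool:
            print(pool.map(_square, range(4)))

Calling multiprocessing.set_start_method("spawn") once at interpreter start is the process-global equivalent of the per-pool context shown here.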
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_table.py::TestExecuteQueryWithParams::test_list [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] [GOOD] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] >> 
test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] >> 
test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |92.3%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
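Two Python warnings recur throughout this stretch of the log: ResourceWarning for file objects that are never closed (pytest prints the hint to enable tracemalloc for an allocation traceback) and the DeprecationWarning for logger.warn in ydb/tests/library/sqs/requests_client.py. The following is an illustrative sketch of the conventional fixes only; the logger name, file path, and message values are hypothetical, not taken from the test library.

    # Illustrative sketch only; logger name, path and message values are hypothetical.
    import logging
    import tracemalloc

    logger = logging.getLogger("sqs.requests_client")

    # logging.Logger.warn is a deprecated alias; warning() is the supported call.
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                   503, "Throttled", "...")

    # Closing writers deterministically avoids "unclosed file <_io.BufferedWriter ...>"
    # being reported by ResourceWarning when the object is garbage-collected.
    with open("/tmp/slot_1_stdout.log", "wb") as stdout_sink:
        stdout_sink.write(b"captured test output\n")

    # To see the allocation traceback the warning refers to, start tracemalloc early
    # (equivalent to running python -X tracemalloc or setting PYTHONTRACEMALLOC=1).
    tracemalloc.start()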
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004534/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk8/testing_out_stuff/test_serverless.py.test_seamless_migration_to_exclusive_nodes.enable_alter_database_create_hive_first--false/cluster/slot_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> 
test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_resolve_nodes.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable 
tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: 
unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004636/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk17/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false/cluster/node_1/logfile_evfyt4ms.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] >> 
test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/slot_2/logfile_fi0dyotg.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true/cluster/node_1/logfile_ub3mq9p4.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_no_cpu.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_no_cpu.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_no_cpu.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_no_cpu.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004655/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_dynamic_tenants.py.test_create_tenant_then_exec_yql.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> 
test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004595/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk18/testing_out_stuff/test_tenants.py.TestTenants.test_when_deactivate_fat_tenant_creation_another_tenant_is_ok.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv >> 
test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] |92.5%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test [GOOD] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test >> 
test_actorsystem.py::TestWithComputeNodeWith17Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith17Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] >> 
test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test [GOOD] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test [GOOD] >> test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_format_parquet[row] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_format_parquet[column] [SKIPPED] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] >> test_retry.py::TestRetry::test_low_rate[kikimr0] >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] >> test_actorsystem.py::TestWithComputeNodeWith39Cpu::test >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err:
contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead.
contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead.
yielded = self.gen.throw(*exc_info)
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter
name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00453d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk4/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith34Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith39Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] >> 
test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith34Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_impex.py::TestImpex::test_format_parquet[column] [SKIPPED] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] [GOOD] 
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] >> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] [GOOD] >> KqpOlapAggregations::AggregationCountPushdown [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationCountPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26792, MsgBus: 1927 2024-11-21T08:58:03.621761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654232633066567:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:03.621961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478f/r3tmp/tmpTA8zrJ/pdisk_1.dat 2024-11-21T08:58:03.675387Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26792, node 1 2024-11-21T08:58:03.687470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:58:03.687490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:58:03.687492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:58:03.687535Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1927 TClient is connected to server localhost:1927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T08:58:03.723221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:58:03.723264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T08:58:03.724280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:58:03.753308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:58:03.764605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:58:03.776559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.776631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.776678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.776714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.776737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.776764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.776790Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.776816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.776845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.776873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.776896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.776921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.780718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:58:03.780748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:58:03.780789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:58:03.780807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:58:03.780836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:58:03.780861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:58:03.780888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:58:03.780913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:58:03.780945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:58:03.780975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.780997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:58:03.781022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654232633067230:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:58:03.781552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:58:03.781570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:58:03.781582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:58:03.781587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:58:03.781606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:58:03.781610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:58:03.781620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:58:03.781626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:58:03.781642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:58:03.781648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:58:03.781655Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891; ... rePortionFromChunks; 2024-11-21T08:58:03.790373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:58:03.790383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:58:03.790398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:58:03.790406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:58:03.790417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:58:03.790426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:58:03.790440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:58:03.790449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:58:03.790459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:58:03.790467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:58:03.823801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T08:58:08.621910Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439654232633066567:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:58:08.621949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T08:58:18.671387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T08:58:18.671410Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:04:03.793751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439654232633067229:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T09:04:03.794285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439654232633067234:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T09:04:03.794366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654232633067227:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T09:04:03.868695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=8e44f7fa-a7e711ef-8e9439ed-716aecd6;fline=with_appended.cpp:80;portions=8,;task_id=8e44f7fa-a7e711ef-8e9439ed-716aecd6; 2024-11-21T09:04:03.869424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=8e44f3c2-a7e711ef-a63b7111-fbfb3ef8;fline=with_appended.cpp:80;portions=8,;task_id=8e44f3c2-a7e711ef-a63b7111-fbfb3ef8; 2024-11-21T09:04:03.899410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=8e44d9fa-a7e711ef-99ae9904-dde65eb8;fline=with_appended.cpp:80;portions=8,;task_id=8e44d9fa-a7e711ef-99ae9904-dde65eb8; 2024-11-21T09:04:04.163751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439655783116266937:4938], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:04:04.163776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439655783116266947:4941], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:04:04.163785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:04:04.164960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:04:04.168241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T09:04:04.168322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439655783116266951:4942], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:04:04.553603Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732179844220, txId: 18446744073709551615] shutting down !!! Pushdown query execution time: 402 !!! JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"columns":["level"],"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":4}]},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '505) '('"_id" '"138b3be5-385c984a-bdbc032a-ec769f79") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) 
(lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1053) '('"_id" '"b12d8b0f-a3e905a0-1a8d6e87-7373d877")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1156) '('"_id" '"81356dc6-d19a9120-6f1a930c-e2f88a30") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> 
test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] >> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] >> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table [GOOD] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParams.test_uint32/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParams.test_uint32/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParams.test_uint32/cluster/node_1/logfile_0wmm8w0q.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromJson.test_script_from_file/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromJson.test_script_from_file/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromJson.test_script_from_file/cluster/node_1/logfile_9aozygj8.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromStdin.test_simple_json.sql/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromStdin.test_simple_json.sql/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004230/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_sql/testing_out_stuff/test_ydb_sql.py.TestExecuteSqlWithParamsFromStdin.test_simple_json.sql/cluster/node_1/logfile_ocqg_vcw.log'> (key, 
value) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] 
[GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] Test command err: contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/slot_1/logfile_uk0sk6b3.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file 
<_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_10/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_10/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_9/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_9/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_10/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_10/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_9/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_9/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_8/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_8/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_7/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_7/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_6/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_6/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_5/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_5/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_4/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_4/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_tenants.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004626/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] |92.5%| [TA] $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... results_accumulator.log} >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParams.test_uint32/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParams.test_uint32/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParams.test_uint32/cluster/node_1/logfile_ipxfpezc.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithFormats.test_data_query_pretty/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithFormats.test_data_query_pretty/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object 
allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithFormats.test_data_query_pretty/cluster/node_1/logfile_kp6t8bre.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParamsFromJson.test_uint32.data/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParamsFromJson.test_uint32.data/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004234/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_table/testing_out_stuff/test_ydb_table.py.TestExecuteQueryWithParamsFromJson.test_uint32.data/cluster/node_1/logfile_150544rx.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.5%| [TA] {RESULT} $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] >> test_empty.py::TestS3::test_empty[v1-client0] >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test >> test_dispatch.py::TestMapping::test_idle >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test [GOOD] >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] >> 
test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] [GOOD] >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore [GOOD] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] [GOOD] >> 
test_actorsystem.py::TestWithHybridNodeWith9Cpu::test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> 
test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] >> 
test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root [GOOD] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] Test command err: ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParams.test_uint32/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParams.test_uint32/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParams.test_uint32/cluster/node_1/logfile_7gv6568p.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithFormats.test_yql_script_pretty/cluster/node_1/stdout'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithFormats.test_yql_script_pretty/cluster/node_1/stderr'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:76: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithFormats.test_yql_script_pretty/cluster/node_1/logfile__l5qpep7.log'> for key, value in sorted(labels.items(), key=lambda x: x[0]) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromJson.test_uint32/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromJson.test_uint32/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom 
name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromJson.test_uint32/cluster/node_1/logfile_3s1ycf22.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromStdin.test_simple_json.scripting/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromStdin.test_simple_json.scripting/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/00423a/ydb/tests/functional/ydb_cli/test-results/py3test/testing_out_stuff/test_ydb_scripting/testing_out_stuff/test_ydb_scripting.py.TestExecuteScriptWithParamsFromStdin.test_simple_json.scripting/cluster/node_1/logfile_a0z_e4ve.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1019862) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043b7/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry_high_rate/testing_out_stuff/test_retry_high_rate.py.TestRetry.test_high_rate.kikimr0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043b7/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry_high_rate/testing_out_stuff/test_retry_high_rate.py.TestRetry.test_high_rate.kikimr0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> 
test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsAfterWait Test command err: Trying to start YDB, gRPC: 65381, MsgBus: 26270 2024-11-21T08:57:20.580521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654046910271919:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:20.580760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c7/r3tmp/tmpmNnkcG/pdisk_1.dat 2024-11-21T08:57:20.631309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65381, node 1 2024-11-21T08:57:20.643706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:20.643721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:20.643723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:20.643766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26270 TClient is connected to server localhost:26270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:20.681987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:20.682014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:20.683115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:20.687708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:20.797016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:21.518314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.518366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.518406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.518425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.518441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.518460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.518501Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.518521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.518538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.518540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.518557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.518570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.518575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.518597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038900;self_id=[1:7439654051205241466:2296];tablet_id=72075186224038900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.518610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.518627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.518642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.518651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.518659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.518668Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.518678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:21.518687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:21.518696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:21.518704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654051205241475:2300];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:21.532740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:21.532768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:21.532806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:21.532824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:21.532842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:21.532865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:21.532885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:21.532904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439654051205241485:2310];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:21.532922Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037951;self_id=[1:74396540512052414 ... SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736180:0, at schemeshard: 72057594046644480 2024-11-21T09:04:31.849400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736181:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:32.729583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736182:0, at schemeshard: 72057594046644480 2024-11-21T09:04:32.747988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736183:0, at schemeshard: 72057594046644480 2024-11-21T09:04:32.771317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736184:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:33.565682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736185:0, at schemeshard: 72057594046644480 2024-11-21T09:04:33.613109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736186:0, at schemeshard: 72057594046644480 2024-11-21T09:04:33.638913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736187:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:34.353219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736188:0, at schemeshard: 72057594046644480 2024-11-21T09:04:34.374607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736189:0, at schemeshard: 72057594046644480 2024-11-21T09:04:34.394493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736190:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:35.106972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736191:0, at schemeshard: 72057594046644480 2024-11-21T09:04:35.130242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736192:0, at schemeshard: 72057594046644480 2024-11-21T09:04:35.156797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736193:0, at schemeshard: 72057594046644480 2024-11-21T09:04:35.919837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736194:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:35.941342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736195:0, at schemeshard: 72057594046644480 2024-11-21T09:04:35.958397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736196:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:37.610531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736197:0, at schemeshard: 72057594046644480 2024-11-21T09:04:37.625511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736198:0, at schemeshard: 72057594046644480 2024-11-21T09:04:37.641309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736199:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:37.935989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736200:0, at schemeshard: 72057594046644480 2024-11-21T09:04:37.952890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736201:0, at schemeshard: 72057594046644480 2024-11-21T09:04:37.972194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736202:0, at schemeshard: 72057594046644480 2024-11-21T09:04:38.598019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736203:0, at schemeshard: 72057594046644480 2024-11-21T09:04:38.617684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736204:0, at schemeshard: 72057594046644480 2024-11-21T09:04:38.635811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736205:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:39.812766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736206:0, at schemeshard: 72057594046644480 2024-11-21T09:04:39.830175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736207:0, at schemeshard: 72057594046644480 2024-11-21T09:04:39.851258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736208:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:40.570483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736209:0, at schemeshard: 72057594046644480 2024-11-21T09:04:40.593078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736210:0, at schemeshard: 72057594046644480 2024-11-21T09:04:40.609351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736211:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:41.176853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736212:0, at schemeshard: 72057594046644480 2024-11-21T09:04:41.199066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736213:0, at schemeshard: 72057594046644480 2024-11-21T09:04:41.223206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736214:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:42.710844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736215:0, at schemeshard: 72057594046644480 2024-11-21T09:04:42.737138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736216:0, at schemeshard: 72057594046644480 2024-11-21T09:04:42.758186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736217:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:43.606315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736218:0, at schemeshard: 72057594046644480 2024-11-21T09:04:43.627811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736219:0, at schemeshard: 72057594046644480 2024-11-21T09:04:43.651734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736220:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () VERIFY failed (2024-11-21T09:04:43.945932Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2024-11-21T09:04:44.726757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736221:0, at schemeshard: 72057594046644480 2024-11-21T09:04:44.773941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736222:0, at schemeshard: 72057594046644480 2024-11-21T09:04:44.800654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736223:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. 
/-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 2024-11-21T09:04:45.361459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736224:0, at schemeshard: 72057594046644480 2024-11-21T09:04:45.381444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736225:0, at schemeshard: 72057594046644480 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x124DA4F6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:628: Execute_ @ 0x124DDBD2 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F5204C07D8F 11. ??:0: ?? @ 0x7F5204C07E3F 12. ??:0: ?? @ 0x11815028 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] [GOOD] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> 
test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] [GOOD] >> 
test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] [GOOD] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> 
test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.disable_separate_quotas/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.disable_separate_quotas/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.disable_separate_quotas/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.disable_separate_quotas/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.enable_separate_quotas/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.enable_separate_quotas/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.enable_separate_quotas/cluster/node_1/stdout'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.enable_separate_quotas/cluster/node_1/stderr'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_db_counters.py.TestStorageCounters.test_storage_counters.enable_separate_quotas/cluster/node_1/logfile_tk4wir5v.log'> (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--false/cluster/slot_1/logfile_jnl2lm2y.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/004670/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_dynamic_tenants.py.test_check_access.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEncodedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedTextExistingBuffer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanner] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayValueBackend] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] >> docker_wrapper_test.py::test_pg_generated[TestArrayValueBackend] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestArrayValuer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSliceToInt] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSlicetoUUID] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestBindError] >> docker_wrapper_test.py::test_pg_generated[TestBindError] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanBytes] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteSliceToText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormatEncoding] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormatEncoding] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormats] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCloseBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommit] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransaction] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransactionWithCancelContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnExecDeadlock] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnListen] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnPing] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPing] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.Background] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout_exceeded] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlisten] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlistenAll] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNoticeHandler_Simple] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNoticeHandler_Simple] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNotificationHandler_Simple] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelBegin] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelExec] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyFromError] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInBinaryError] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInMultipleValues] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInRaiseStmtTrigger] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInSchemaStmt] >> 
docker_wrapper_test.py::test_pg_generated[TestCopyInSchemaStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmtAffectedRows] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInTypes] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyInWrongType] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopyOutsideOfTxnError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestCopySyntaxError] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestDataType] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeLength] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeName] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypePrecisionScale] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeBool] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBinaryError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEmptyQuery] >> docker_wrapper_test.py::test_pg_generated[TestEmptyQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestEmptyResultSetColumns] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeAndParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeDecode] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorClass] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartup] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartup] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartupClosesConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnExec] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQueryRowSimpleQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestErrorSQLState] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestExec] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestExec] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanNil] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatAndParseTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTsBackend] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestFullParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanDelimiter] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanUnsupported] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestHasCorrectRootGroupPermissions] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestIPv6LoopbackParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInfinityTimestamp] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanNil] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInvalidProtocolParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIsUTF8] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue1046] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestIssue1062] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestIssue186] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestIssue196] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestIssue282] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue494] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue617] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerConnCloseWhileQueryIsExecuting] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerFailedQuery] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerFailedQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestListenerListen] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestListenerPing] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestListenerReconnect] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlisten] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlisten] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlistenAll] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestMinimalURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleEmptyResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Connect] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Connect] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Driver] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_WorksWithOpenDB] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestNewListenerConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNoData] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNotifyExtra] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestNullAfterNonNull] >> docker_wrapper_test.py::test_pg_generated[TestNullAfterNonNull] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestOpenURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParameterCountMismatch] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArray] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArrayError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseComplete] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseEnviron] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseErrorInExtendedQuery] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestParseOpts] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTsErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPgpass] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPing] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelRace] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelledReused] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestQueryRowBugWorkaround] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestQuickClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteIdentifier] [GOOD] >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteLiteral] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReadFloatPrecision] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReconnect] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestReturning] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestRowsCloseBeforeDone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsColumnTypes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsResultTag] [FAIL] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] >> docker_wrapper_test.py::test_pg_generated[TestRuntimeParameters] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_passed_when_disabled] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_set_for_IPv4] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_passed_when_asked_for] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_set_by_default] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSSLClientCertificates] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLConnection] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLRequireWithRootCert] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyCA] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyFull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestScanNilTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestScanTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStatment] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.Background] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout_exceeded] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout_exceeded] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.Background] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsThenMerges Test command err: Trying to start YDB, gRPC: 21985, MsgBus: 13004 2024-11-21T08:57:44.759046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654149103494900:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:44.759061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004800/r3tmp/tmp5NBTwG/pdisk_1.dat 
2024-11-21T08:57:44.814784Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21985, node 1 2024-11-21T08:57:44.820063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:44.820076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:44.820077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:44.820101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13004 TClient is connected to server localhost:13004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T08:57:44.859574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:44.859610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:44.860721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:44.890058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:44.963982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:45.546407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.546436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.546456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:45.546472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:45.546486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:45.546511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:45.546528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:45.546539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:45.546551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:45.546565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.546579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:45.546594Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439654153398465650:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:45.547555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:45.547566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:45.547574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:45.547576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:45.547586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:45.547592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:45.547598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:45.547601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:45.547610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:45.547612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:45.547620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T08:57:45.547624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T08:57:45.547658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T08:57:45.547671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T08:57:45.547681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T08:57:45.547684Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T08:57:45.547691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T08:57:45.547694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T08:57:45.547704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T08:57:45.547712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T08:57:45.547722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T08:57:45.547730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T08:57:45.549660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153398465663:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:45.549677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153398465663:2298];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:45.549705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439654153398465663:2298];tablet_id=7207518622 ... type: ESchemeOpAlterColumnTable, opId: 281474976736175:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:50.895195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736176:0, at schemeshard: 72057594046644480 2024-11-21T09:04:50.913015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736177:0, at schemeshard: 72057594046644480 2024-11-21T09:04:50.934314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736178:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:52.421984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736179:0, at schemeshard: 72057594046644480 2024-11-21T09:04:52.438207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736180:0, at schemeshard: 72057594046644480 2024-11-21T09:04:52.457901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736181:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:53.904095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736182:0, at schemeshard: 72057594046644480 2024-11-21T09:04:53.926406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736183:0, at schemeshard: 72057594046644480 2024-11-21T09:04:53.945128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736184:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:55.603217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736185:0, at schemeshard: 72057594046644480 2024-11-21T09:04:55.621440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736186:0, at schemeshard: 72057594046644480 2024-11-21T09:04:55.636527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736187:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:56.937766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736188:0, at schemeshard: 72057594046644480 2024-11-21T09:04:56.956383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736189:0, at schemeshard: 72057594046644480 2024-11-21T09:04:56.972747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736190:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:58.430773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736191:0, at schemeshard: 72057594046644480 2024-11-21T09:04:58.448288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736192:0, at schemeshard: 72057594046644480 2024-11-21T09:04:58.463723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736193:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:58.989109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736194:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.004283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736195:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.018457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736196:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.607874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736197:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.621705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736198:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.634969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736199:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:00.949078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736200:0, at schemeshard: 72057594046644480 2024-11-21T09:05:00.965107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736201:0, at schemeshard: 72057594046644480 2024-11-21T09:05:00.979961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736202:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:01.956250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736203:0, at schemeshard: 72057594046644480 2024-11-21T09:05:01.973178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736204:0, at schemeshard: 72057594046644480 2024-11-21T09:05:01.992955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736205:0, at schemeshard: 72057594046644480 2024-11-21T09:05:02.686971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736206:0, at schemeshard: 72057594046644480 2024-11-21T09:05:02.706357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736207:0, at schemeshard: 72057594046644480 2024-11-21T09:05:02.723858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736208:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:03.662290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736209:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.682845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736210:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.698885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736211:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:04.954386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736212:0, at schemeshard: 72057594046644480 2024-11-21T09:05:04.972472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736213:0, at schemeshard: 72057594046644480 2024-11-21T09:05:04.987064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736214:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() VERIFY failed (2024-11-21T09:05:05.806223Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2024-11-21T09:05:06.459091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736215:0, at schemeshard: 72057594046644480 2024-11-21T09:05:06.474837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736216:0, at schemeshard: 72057594046644480 2024-11-21T09:05:06.495787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736217:0, at schemeshard: 72057594046644480 2024-11-21T09:05:07.011084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736218:0, at schemeshard: 72057594046644480 2024-11-21T09:05:07.025601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736219:0, at schemeshard: 72057594046644480 2024-11-21T09:05:07.040382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736220:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x124DA4F6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:607: Execute_ @ 0x124DD682 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F79B6C42D8F 11. ??:0: ?? @ 0x7F79B6C42E3F 12. ??:0: ?? 
@ 0x11815028 >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout_exceeded] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToBytea] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringWithNul] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] >> docker_wrapper_test.py::test_pg_generated[TestStringWithNul] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToInt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextDecodeIntoString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+00:00_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:00_=>_0000-01-01T11:59:59+04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:01:02_=>_0000-01-01T11:59:59+04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59-04:01:02_=>_0000-01-01T11:59:59-04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00-04:00_=>_0000-01-02T00:00:00-04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.0+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.000000+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00Z_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/11:59:59_=>_0000-01-01T11:59:59Z] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/11:59:59_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.000000_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.0_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00_=>_0000-01-02T00:00:00Z] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestTimestampWithOutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithTimeZone] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestTxOptions] [FAIL] >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] [GOOD] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplits Test command err: Trying to start YDB, gRPC: 28970, MsgBus: 20027 2024-11-21T08:57:37.002773Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654122488867431:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:37.002883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004845/r3tmp/tmp51or9K/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28970, node 1 2024-11-21T08:57:37.056560Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T08:57:37.058422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:37.058431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:37.058432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:37.058465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20027 TClient is connected to server localhost:20027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:37.099250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:37.104113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:37.104137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:37.105220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:37.188950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:37.827737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.827768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.827794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.827805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.827820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.827830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.827847Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.827859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.827877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.827893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.827907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.827922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;self_id=[1:7439654122488870862:2296];tablet_id=72075186224038899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.827937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:37.827979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:37.828017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:37.828038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:37.828058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:37.828072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:37.828095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:37.828114Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:37.828132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:37.828149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:37.828168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:37.828187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038896;self_id=[1:7439654122488870863:2297];tablet_id=72075186224038896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:37.828264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:37.828276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:37.828284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:37.828287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:37.828297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:37.828299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:37.828305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:37.828308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:37.828313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:37.828320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:37.828324Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240388 ... RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:51.081872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736137:0, at schemeshard: 72057594046644480 2024-11-21T09:04:51.106428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736138:0, at schemeshard: 72057594046644480 2024-11-21T09:04:51.125796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736139:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:52.680563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736140:0, at schemeshard: 72057594046644480 2024-11-21T09:04:52.694404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736141:0, at schemeshard: 72057594046644480 2024-11-21T09:04:52.711728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736142:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:54.290472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736143:0, at schemeshard: 72057594046644480 2024-11-21T09:04:54.310682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736144:0, at schemeshard: 72057594046644480 2024-11-21T09:04:54.334571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736145:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:56.289049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736146:0, at schemeshard: 72057594046644480 2024-11-21T09:04:56.313450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736147:0, at schemeshard: 72057594046644480 2024-11-21T09:04:56.342620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736148:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:04:57.282655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736149:0, at schemeshard: 72057594046644480 2024-11-21T09:04:57.306267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736150:0, at schemeshard: 72057594046644480 2024-11-21T09:04:57.329024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736151:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:58.591523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736152:0, at schemeshard: 72057594046644480 2024-11-21T09:04:58.606375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736153:0, at schemeshard: 72057594046644480 2024-11-21T09:04:58.619065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736154:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:04:59.600289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736155:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.620748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736156:0, at schemeshard: 72057594046644480 2024-11-21T09:04:59.639736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736157:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:01.299379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736158:0, at schemeshard: 72057594046644480 2024-11-21T09:05:01.325046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736159:0, at schemeshard: 72057594046644480 2024-11-21T09:05:01.349778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736160:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:03.063488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736161:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.088197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736162:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.113547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736163:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.314614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736164:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.336598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736165:0, at schemeshard: 72057594046644480 2024-11-21T09:05:03.350746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736166:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:04.600618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736167:0, at schemeshard: 72057594046644480 2024-11-21T09:05:04.617252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736168:0, at schemeshard: 72057594046644480 2024-11-21T09:05:04.636878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736169:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:05.306484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736170:0, at schemeshard: 72057594046644480 2024-11-21T09:05:05.321008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736171:0, at schemeshard: 72057594046644480 2024-11-21T09:05:05.339125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736172:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:06.733229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736173:0, at schemeshard: 72057594046644480 2024-11-21T09:05:06.750017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736174:0, at schemeshard: 72057594046644480 2024-11-21T09:05:06.766804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736175:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:07.538378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736176:0, at schemeshard: 72057594046644480 2024-11-21T09:05:07.554655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736177:0, at schemeshard: 72057594046644480 2024-11-21T09:05:07.571107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736178:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () VERIFY failed (2024-11-21T09:05:07.881409Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2024-11-21T09:05:08.313543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736179:0, at schemeshard: 72057594046644480 2024-11-21T09:05:08.332804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736180:0, at schemeshard: 72057594046644480 2024-11-21T09:05:08.350001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736181:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x124DA4F6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:591: Execute_ @ 0x124DD24E 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7FF099E73D8F 11. ??:0: ?? @ 0x7FF099E73E3F 12. ??:0: ?? 
@ 0x11815028 >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] [GOOD] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] [GOOD] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] Test command err: ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed 
_run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint |92.6%| [TA] $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] [GOOD] >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] >> TStateStorageTest::ShouldDeleteNoCheckpoints >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] [GOOD] >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldGetMultipleStates >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] >> test_dispatch.py::TestMapping::test_idle [GOOD] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] [GOOD] >> test_insert.py::TestS3::test_append[v2-client0] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2024-11-21T09:05:13.758137Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:35:2082] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2024-11-21T09:05:13.810825Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2024-11-21T09:05:13.898111Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2024-11-21T09:05:15.409785Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:35:2082] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2024-11-21T09:05:15.445462Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2024-11-21T09:05:15.445490Z node 2 
:STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] [GOOD] >> test_s3_0.py::TestS3::test_inference[v2-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] [GOOD] >> test_s3_0.py::TestS3::test_inference[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] >> test_insert.py::TestS3::test_append[v2-client0] [GOOD] >> TCheckpointStorageTest::ShouldRegisterCoordinator >> test_insert.py::TestS3::test_append[v1-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year 
String NOT NULL-True] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> test_empty.py::TestS3::test_empty[v1-client0] [GOOD] >> test_empty.py::TestS3::test_empty[v2-client0] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait Test command err: Trying to start YDB, gRPC: 2831, MsgBus: 23863 2024-11-21T08:57:51.004711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439654181573545479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:51.004856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047c1/r3tmp/tmpi6nvx5/pdisk_1.dat 2024-11-21T08:57:51.051053Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2831, node 1 2024-11-21T08:57:51.062815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:51.062829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:51.062831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T08:57:51.062867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23863 TClient is connected to server localhost:23863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:51.105697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:51.105727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:51.106825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:51.133736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:51.225623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T08:57:51.788482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:51.788519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:51.788549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:51.788576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:51.788586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T08:57:51.788593Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:51.788609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:51.788612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T08:57:51.788654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:51.788664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T08:57:51.788674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:51.788676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T08:57:51.788687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:51.788688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T08:57:51.788701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T08:57:51.788702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.788713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T08:57:51.788716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:51.788724Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T08:57:51.788729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038876;self_id=[1:7439654181573549913:2317];tablet_id=72075186224038876;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:51.788742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T08:57:51.788752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T08:57:51.788767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T08:57:51.788780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;self_id=[1:7439654181573550055:2369];tablet_id=72075186224038847;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T08:57:51.789081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T08:57:51.789095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T08:57:51.789105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T08:57:51.789120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T08:57:51.789145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T08:57:51.789157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T08:57:51.789167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T08:57:51.789173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T08:57:51.789180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T08:57:51.789183Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038847;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T08:57:51.789187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038847 ... lf is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736166:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:08.773066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736167:0, at schemeshard: 72057594046644480 2024-11-21T09:05:08.794196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736168:0, at schemeshard: 72057594046644480 2024-11-21T09:05:08.812924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736169:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:10.618515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736170:0, at schemeshard: 72057594046644480 2024-11-21T09:05:10.638320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736171:0, at schemeshard: 72057594046644480 2024-11-21T09:05:10.657329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736172:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:11.161459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736173:0, at schemeshard: 72057594046644480 2024-11-21T09:05:11.182325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736174:0, at schemeshard: 72057594046644480 2024-11-21T09:05:11.205286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736175:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:12.426470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736176:0, at schemeshard: 72057594046644480 2024-11-21T09:05:12.448610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736177:0, at schemeshard: 72057594046644480 2024-11-21T09:05:12.466303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736178:0, at schemeshard: 72057594046644480 2024-11-21T09:05:12.997271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736179:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:13.017486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736180:0, at schemeshard: 72057594046644480 2024-11-21T09:05:13.035149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736181:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:14.169455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736182:0, at schemeshard: 72057594046644480 2024-11-21T09:05:14.190303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736183:0, at schemeshard: 72057594046644480 2024-11-21T09:05:14.212091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736184:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:15.149259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736185:0, at schemeshard: 72057594046644480 2024-11-21T09:05:15.173670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736186:0, at schemeshard: 72057594046644480 2024-11-21T09:05:15.190838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736187:0, at schemeshard: 72057594046644480 2024-11-21T09:05:15.935841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736188:0, at schemeshard: 72057594046644480 2024-11-21T09:05:15.954214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736189:0, at schemeshard: 72057594046644480 2024-11-21T09:05:15.971320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736190:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:17.199663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736191:0, at schemeshard: 72057594046644480 2024-11-21T09:05:17.222712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736192:0, at schemeshard: 72057594046644480 2024-11-21T09:05:17.239356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736193:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:18.463871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736194:0, at schemeshard: 72057594046644480 2024-11-21T09:05:18.484664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736195:0, at schemeshard: 72057594046644480 2024-11-21T09:05:18.502861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736196:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T09:05:19.191462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736197:0, at schemeshard: 72057594046644480 2024-11-21T09:05:19.217248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736198:0, at schemeshard: 72057594046644480 2024-11-21T09:05:19.235228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736199:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:20.058904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736200:0, at schemeshard: 72057594046644480 2024-11-21T09:05:20.076655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736201:0, at schemeshard: 72057594046644480 2024-11-21T09:05:20.097020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736202:0, at schemeshard: 72057594046644480 2024-11-21T09:05:20.593487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736203:0, at schemeshard: 72057594046644480 2024-11-21T09:05:20.613404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736204:0, at schemeshard: 72057594046644480 2024-11-21T09:05:20.632050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736205:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T09:05:21.935800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736206:0, at schemeshard: 72057594046644480 2024-11-21T09:05:21.955732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736207:0, at schemeshard: 72057594046644480 2024-11-21T09:05:21.974244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736208:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () VERIFY failed (2024-11-21T09:05:22.033998Z): verification=clean;fline=blobs_sharing_ut.cpp:294; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed 2024-11-21T09:05:23.212130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736209:0, at schemeshard: 72057594046644480 2024-11-21T09:05:23.232458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736210:0, at schemeshard: 72057594046644480 0. 
/-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x126E163A 1. /-S/util/system/yassert.cpp:55: Panic @ 0x126D8DA6 2024-11-21T09:05:23.249029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736211:0, at schemeshard: 72057594046644480 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x136F6AE3 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x124DA4F6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:644: Execute_ @ 0x124DE01E 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x124E2206 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1283BBAD 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x124E1BC9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1283C322 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1284F53C 10. ??:0: ?? @ 0x7F5A0B63CD8F 11. ??:0: ?? @ 0x7F5A0B63CE3F 12. ??:0: ?? @ 0x11815028 >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] [GOOD] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] >> test_insert.py::TestS3::test_append[v1-client0] [GOOD] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_insert.py::TestS3::test_part_split[v2-client0] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] [GOOD] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] >> test_insert.py::TestS3::test_part_split[v2-client0] [GOOD] >> test_insert.py::TestS3::test_part_split[v1-client0] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2024-11-21T09:05:26.429341Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439656133472499478:2048] with connection to localhost:24255:local 2024-11-21T09:05:26.429423Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:26.584949Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:26.584979Z node 1 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:26.585149Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:26.607921Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130
2024-11-21T09:05:26.607942Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:26.877028Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439656135893735107:2048] with connection to localhost:24255:local
2024-11-21T09:05:26.877070Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:26.955104Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130
2024-11-21T09:05:26.955125Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:27.304345Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439656141335492796:2048] with connection to localhost:24255:local
2024-11-21T09:05:27.304476Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:27.337578Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:27.337597Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:27.337809Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:27.490607Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2024-11-21T09:05:27.490627Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:27.490838Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:27.593593Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Conflict with existing key., code: 2012
2024-11-21T09:05:27.593623Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:27.753834Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439656138696239386:2048] with connection to localhost:24255:local
2024-11-21T09:05:27.753883Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:27.785648Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:27.785669Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:27.785844Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2024-11-21T09:05:27.821393Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2024-11-21T09:05:27.821411Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2024-11-21T09:05:28.024831Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439656141279594445:2048] with connection to localhost:24255:local
2024-11-21T09:05:28.024885Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:28.060011Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:28.060029Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:28.060311Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:28.214127Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2024-11-21T09:05:28.214146Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:28.214329Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:28.246088Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2024-11-21T09:05:28.246111Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:28.246315Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2024-11-21T09:05:28.265103Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T09:05:28.265125Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> TCheckpointStorageTest::ShouldCreateCheckpoint >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] [GOOD] >> test_insert.py::TestS3::test_part_split[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] >> test_insert.py::TestS3::test_part_merge[v2-client0] >> TStorageServiceTest::ShouldRegister >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] [GOOD] >> TStorageServiceTest::ShouldGetState [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2024-11-21T09:05:30.174324Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439656151344676362:2048] with connection to localhost:11115:local 2024-11-21T09:05:30.174384Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:30.348435Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:30.348460Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:30.348637Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.497381Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:30.497417Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:30.497581Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:30.528789Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T09:05:30.528819Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:30.528982Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.549372Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2024-11-21T09:05:30.549394Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2024-11-21T09:05:30.836781Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439656154085255065:2048] with connection to localhost:11115:local
2024-11-21T09:05:30.836839Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:30.870212Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:30.870252Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:30.870461Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2024-11-21T09:05:30.910087Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2024-11-21T09:05:30.910105Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2024-11-21T09:05:31.243617Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439656157676735057:2048] with connection to localhost:11115:local
2024-11-21T09:05:31.243710Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:31.274268Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:31.274287Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:31.274405Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2024-11-21T09:05:31.309233Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080
2024-11-21T09:05:31.309249Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2024-11-21T09:05:31.496520Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439656156490898949:2048] with connection to localhost:11115:local
2024-11-21T09:05:31.496588Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:31.527147Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:31.527184Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:31.527388Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:31.680928Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2024-11-21T09:05:31.680951Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:31.681110Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2024-11-21T09:05:31.719168Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080
2024-11-21T09:05:31.719192Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2024-11-21T09:05:31.945516Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439656156185407739:2048] with connection to localhost:11115:local
2024-11-21T09:05:31.945583Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:31.975936Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2024-11-21T09:05:31.975955Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:31.976101Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2024-11-21T09:05:32.129674Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2024-11-21T09:05:32.129691Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2024-11-21T09:05:32.129819Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2024-11-21T09:05:32.183061Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2024-11-21T09:05:32.183086Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2024-11-21T09:05:32.183298Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2024-11-21T09:05:32.211503Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2024-11-21T09:05:32.211540Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2024-11-21T09:05:32.211714Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2024-11-21T09:05:32.228251Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T09:05:32.228274Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse >> test_insert.py::TestS3::test_part_merge[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2024-11-21T09:05:30.067689Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439656148698213693:2048] with connection to localhost:32342:local 2024-11-21T09:05:30.067825Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:30.103923Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:30.103965Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:30.104178Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.249255Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:30.249273Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:30.524080Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439656151914858308:2048] with connection to localhost:32342:local 2024-11-21T09:05:30.524123Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:30.556056Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:30.556078Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:30.556266Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.716731Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:30.716753Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:30.716868Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.825818Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T09:05:30.825841Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T09:05:30.826012Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2024-11-21T09:05:30.931406Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2024-11-21T09:05:30.931429Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2024-11-21T09:05:30.931554Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:30.961075Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:31.176829Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439656154693313814:2048] with connection to localhost:32342:local 2024-11-21T09:05:31.176894Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: 
[graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:31.207138Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:31.207158Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:31.207310Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:31.357908Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:31.357947Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:31.358115Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:31.415701Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T09:05:31.415720Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:31.415865Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T09:05:31.521553Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T09:05:31.521574Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T09:05:31.521722Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:31.574560Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2024-11-21T09:05:31.574579Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:31.574728Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T09:05:31.627123Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T09:05:31.627141Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T09:05:31.627280Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2024-11-21T09:05:31.682808Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2024-11-21T09:05:31.682831Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2024-11-21T09:05:31.683006Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2024-11-21T09:05:31.742801Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2024-11-21T09:05:31.742825Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2024-11-21T09:05:31.742979Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:31.776392Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:32.043553Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439656158252477057:2048] with connection to localhost:32342:local 2024-11-21T09:05:32.043600Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:32.078319Z node 4 :STREAMS_STORAGE_SERVICE INFO: 
[graph_graphich.17] Graph registered 2024-11-21T09:05:32.078341Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.078465Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:32.225980Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:32.226019Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:32.226173Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2024-11-21T09:05:32.245154Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2024-11-21T09:05:32.245200Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2024-11-21T09:05:32.245350Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2024-11-21T09:05:32.245382Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2024-11-21T09:05:32.311618Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2024-11-21T09:05:32.311668Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2024-11-21T09:05:32.311678Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2024-11-21T09:05:32.313295Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2024-11-21T09:05:32.364277Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2024-11-21T09:05:32.364319Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2024-11-21T09:05:32.365734Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> test_insert.py::TestS3::test_part_merge[v1-client0] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> TStorageServiceTest::ShouldUseGc [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] |92.7%| [TA] $(B)/ydb/core/kqp/ut/olap/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> TStateStorageTest::ShouldSaveGetOldBigState >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] [GOOD] >> test_insert.py::TestS3::test_part_merge[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] [GOOD] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] [SKIPPED] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2024-11-21T09:05:31.756961Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped 
TStorageProxy [1:7439656157619344277:2048] with connection to localhost:24108:local 2024-11-21T09:05:31.757012Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:31.903471Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:31.903491Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.163912Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439656161951024496:2048] with connection to localhost:24108:local 2024-11-21T09:05:32.163963Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:32.191814Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:32.191832Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.191948Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:32.219212Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T09:05:32.219258Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.219389Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:32.235030Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2024-11-21T09:05:32.235049Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.410442Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439656160982641988:2048] with connection to localhost:24108:local 2024-11-21T09:05:32.410512Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:32.445046Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:32.445109Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:32.445326Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:32.612607Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:32.612642Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:32.612857Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:32.666638Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T09:05:32.666662Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:32.666822Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T09:05:32.776270Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T09:05:32.776292Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T09:05:32.776452Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:32.835217Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2024-11-21T09:05:32.835236Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:32.836538Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T09:05:32.894921Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T09:05:32.894943Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T09:05:32.895090Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:32.925841Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:33.309194Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439656165054354434:2048] with connection to localhost:24108:local 2024-11-21T09:05:33.309254Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:33.340778Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:33.340800Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2024-11-21T09:05:33.340952Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:33.490283Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:33.490299Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:33.490579Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2024-11-21T09:05:33.510706Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2024-11-21T09:05:33.510725Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2024-11-21T09:05:33.740363Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439656164052028774:2048] with connection to localhost:24108:local 2024-11-21T09:05:33.740381Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7439656164052028873:2128] 2024-11-21T09:05:33.740395Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T09:05:33.770976Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T09:05:33.770998Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T09:05:33.771173Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T09:05:33.933004Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T09:05:33.933027Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T09:05:33.933188Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:33.989137Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T09:05:33.989153Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:33.989276Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2024-11-21T09:05:34.040868Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2024-11-21T09:05:34.040888Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2024-11-21T09:05:34.040905Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2024-11-21T09:05:34.040940Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2024-11-21T09:05:34.041076Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T09:05:34.118474Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2024-11-21T09:05:34.146223Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T09:05:34.146238Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T09:05:34.146351Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:34.200667Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 
'PendingCommit' 2024-11-21T09:05:34.200687Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:34.200856Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T09:05:34.251820Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T09:05:34.251845Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2024-11-21T09:05:34.251861Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T09:05:34.251904Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2024-11-21T09:05:34.252077Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2024-11-21T09:05:34.353198Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2024-11-21T09:05:34.415462Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2024-11-21T09:05:34.415481Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2024-11-21T09:05:34.416405Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T09:05:34.481345Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2024-11-21T09:05:34.481365Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T09:05:34.481553Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2024-11-21T09:05:34.544126Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2024-11-21T09:05:34.544146Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2024-11-21T09:05:34.544169Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2024-11-21T09:05:34.544202Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2024-11-21T09:05:34.545786Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:34.593923Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:34.610200Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2024-11-21T09:05:34.694327Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:34.697885Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:34.798300Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:34.801606Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T09:05:34.901969Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T09:05:34.905018Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT 
NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] [GOOD] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] [GOOD] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f8c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f8c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1112214) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1113599 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |92.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/olap/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] [SKIPPED] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] |92.7%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] |92.7%| [TA] $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] [GOOD] >> test.py::test_run_benchmark[scan-column] >> test.py::test_run_benchmark[generic-row] >> test.py::test_run_benchmark[generic-column] >> test.py::test_run_determentistic[row] >> test.py::test_plans[column] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |92.7%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] |92.7%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] [GOOD] |92.7%| [TA] {RESULT} $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] >> test.py::test_run_benchmark[scan-row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT 
NULL-True] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |92.7%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] |92.7%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_s3_0.py::TestS3::test_raw[v2-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test.py::test_plans[column] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] >> test.py::test_plans[row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] >> test.py::test_run_determentistic[column] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
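The TStorageServiceTest and GC traces above repeat one checkpoint lifecycle: a coordinator registers under a generation (stale generations are rejected with code 400130), a checkpoint is created, its status moves to 'PendingCommit' and then 'Completed' (or it is aborted), and after TEvNewCheckpointSucceeded the GC actor deletes checkpoints up to the acknowledged bound. The following is a minimal sketch of that state machine reconstructed only from these log lines; the class and method names are illustrative and are not YDB's actual implementation.

from enum import Enum, auto


class CheckpointStatus(Enum):
    CREATED = auto()
    PENDING_COMMIT = auto()
    COMPLETED = auto()
    ABORTED = auto()


class GraphCheckpoints:
    """Toy model of the checkpoint lifecycle traced in the log above."""

    def __init__(self, graph: str, generation: int):
        self.graph = graph
        self.generation = generation   # e.g. graph_graphich, generation 17
        self.checkpoints = {}          # (generation, seq) -> CheckpointStatus

    def create(self, seq: int) -> None:
        # "Got TEvCreateCheckpointRequest" -> "Checkpoint created"
        self.checkpoints[(self.generation, seq)] = CheckpointStatus.CREATED

    def set_pending_commit(self, seq: int) -> None:
        # "Status updated to 'PendingCommit'"
        self.checkpoints[(self.generation, seq)] = CheckpointStatus.PENDING_COMMIT

    def complete(self, seq: int) -> None:
        # "Status updated to 'Completed'"; in the ShouldUseGc trace this also
        # triggers TEvNewCheckpointSucceeded, handing the GC actor an upper bound.
        self.checkpoints[(self.generation, seq)] = CheckpointStatus.COMPLETED

    def abort(self, seq: int) -> None:
        # "Checkpoint aborted"
        self.checkpoints[(self.generation, seq)] = CheckpointStatus.ABORTED

    def gc_up_to(self, upper_seq: int) -> None:
        # "GC deleted checkpoints of graph '...' up to <gen>:<seq>". The log does
        # not show whether the bound itself is removed; here only older entries go.
        self.checkpoints = {
            key: status
            for key, status in self.checkpoints.items()
            if key >= (self.generation, upper_seq)
        }


# Replaying the ShouldUseGc trace: each checkpoint 17:1..17:3 is created,
# set to PendingCommit, completed, and then the GC trims older checkpoints.
graph = GraphCheckpoints("graph_graphich", 17)
for seq in (1, 2, 3):
    graph.create(seq)
    graph.set_pending_commit(seq)
    graph.complete(seq)
    graph.gc_up_to(seq)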
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: 
ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/00454d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] [SKIPPED] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f6c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/jptk/003f6c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1129244) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1130406 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] [GOOD] >> test.py::test_plans[row] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] >> test_insert.py::TestS3::test_error[v1-client0-parquet] >> test_insert.py::TestS3::test_error[v1-client0-parquet] [SKIPPED] >> test_insert.py::TestS3::test_insert_empty_object[v2] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] [GOOD] >> test_formats.py::TestS3Formats::test_btc[v1] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_insert.py::TestS3::test_insert_empty_object[v2] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_insert.py::TestS3::test_insert_empty_object[v1] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] >> test_formats.py::TestS3Formats::test_btc[v1] [GOOD] >> test_formats.py::TestS3Formats::test_btc[v2] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] |92.8%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] >> test_insert.py::TestS3::test_insert_empty_object[v1] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] >> test.py::test_run_benchmark[generic-row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_btc[v2] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] >> test.py::test_run_determentistic[row] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] >> test.py::test_run_benchmark[scan-row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] [GOOD] |92.8%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] [GOOD] >> test_empty.py::TestS3::test_empty[v2-client0] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f58/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f58/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1135652) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1136734 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1019222) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/alpha/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/alpha/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/beta/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/beta/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/alpha/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/alpha/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043c4/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_dispatch/testing_out_stuff/test_dispatch.py.TestMapping.test_mapping/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> 
test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f62/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f62/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1134426) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1135606 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] |92.8%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] [GOOD] >> test_stop.py::TestStop::test_stop_query[v1-streaming] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] 
[GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional >> test_recovery.py::TestRecovery::test_delete >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] [GOOD] >> test.py::test_run_benchmark[scan-column] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] [GOOD] >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] [SKIPPED] >> 
test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f4c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f4c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1145250) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1146025 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] >> test_drain.py::TestHive::test_drain_on_stop >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] [GOOD] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_statistics.py::TestS3::test_precompute[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] >> 
test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] >> test_recovery.py::TestRecovery::test_delete [GOOD] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_statistics.py::TestS3::test_precompute[v1-client0] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option >> test_statistics.py::TestS3::test_precompute[v2-client0] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f97/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f97/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1109235) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. 
Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1111857 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] >> test.py::test_run_determentistic[column] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] >> test_statistics.py::TestS3::test_precompute[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f65/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f65/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1133100) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1134473 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_sum[v1-client0] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1190110) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/tokens.py:3: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c42/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_mem_alloc.py.TestMemAlloc.test_hop_alloc.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/tokens.py:3: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c42/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/test_mem_alloc.py.TestMemAlloc.test_hop_alloc.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] >> test_select_1.py::TestSelect1::test_select_pg[v1] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] 
[GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] [GOOD] |92.8%| [TA] $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] |92.8%| [TA] {RESULT} $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] [GOOD] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] [SKIPPED] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] [GOOD] |92.8%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] [SKIPPED] |92.8%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test_yds_bindings.py::TestBindings::test_yds_insert[v1] [SKIPPED] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] >> test_select_1.py::TestSelect1::test_select_1[v1] >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] [GOOD] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> test_statistics.py::TestS3::test_sum[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_sum[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1194438) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:289: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c35/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_2_selects_limit.py.TestSelectLimit.test_select_same.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:289: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c35/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/test_2_selects_limit.py.TestSelectLimit.test_select_same.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data [GOOD] >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] >> test_select_1.py::TestSelect1::test_compile_error[v1] >> test_select_1.py::TestSelect1::test_select_pg[v1] [GOOD] >> 
test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] >> test_stop.py::TestStop::test_stop_query[v1-streaming] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterAsyncReplication 2024-11-21 09:07:07,337 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2024-11-21 09:07:07,454 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid rss ref pdirt
726084 75.4M 75.1M 21.3M test_tool run_ut @/home/runner/.ya/build/build_root/jptk/0040c5/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args
726856 400M 396M 175M └─ ydb-core-kqp-ut-scheme --trace-path-append /home/runner/.ya/build/build_root/jptk/0040c5/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk14/ytest.rep
Test command err:
Trying to start YDB, gRPC: 29981, MsgBus: 22568
2024-11-21T08:57:08.068682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439653997138467942:2115];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c5/r3tmp/tmpjc7GXz/pdisk_1.dat
2024-11-21T08:57:08.113633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2024-11-21T08:57:08.143976Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29981, node 1
2024-11-21T08:57:08.168412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2024-11-21T08:57:08.168430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2024-11-21T08:57:08.168433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2024-11-21T08:57:08.168475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22568
2024-11-21T08:57:08.208521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2024-11-21T08:57:08.208554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TClient is connected to server localhost:22568
2024-11-21T08:57:08.212533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED)
WaitRootIsUp 'Root' success.
2024-11-21T08:57:08.227313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2024-11-21T08:57:08.230745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2024-11-21T08:57:08.234327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2024-11-21T08:57:08.257830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2024-11-21T08:57:08.287580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2024-11-21T08:57:08.300587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2024-11-21T08:57:08.516530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653997138469430:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T08:57:08.516573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T08:57:08.547210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.553128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.560857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.615848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.624064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.638951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2024-11-21T08:57:08.660754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653997138469948:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T08:57:08.660781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T08:57:08.660901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439653997138469953:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T08:57:08.661520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T08:57:08.665473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439653997138469955:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T08:57:08.876808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26271, MsgBus: 32535 2024-11-21T08:57:09.132941Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439654000334315730:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T08:57:09.133022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040c5/r3tmp/tmpUBBGtG/pdisk_1.dat 2024-11-21T08:57:09.146259Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26271, node 2 2024-11-21T08:57:09.156724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T08:57:09.156737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T08:57:09.156739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T08:57:09.156779Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32535 TClient is connected to server localhost:32535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T08:57:09.233683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T08:57:09.233710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T08:57:09.234775Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T08:57:09.235399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.237034Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T08:57:09.243139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T08:57:09.256271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.275773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.285950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T08:57:09.435714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439654000334317269:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... ER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:51.132652Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:52.134507Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:52.134541Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:52.134571Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:52.311442Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::NReplication::NController::TEvPrivate::TEvUpdateTenantNodes { Tenant: /Root } 2024-11-21T09:06:52.311470Z node 5 :REPLICATION_CONTROLLER INFO: [controller 72075186224037920] Discover tenant nodes: tenant# /Root 2024-11-21T09:06:52.311795Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::TEvDiscovery::TEvError 2024-11-21T09:06:53.136995Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:53.137048Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:53.137071Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:54.141194Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:54.141242Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:54.141264Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:55.143465Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:55.143504Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:55.143529Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:56.146219Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:56.146267Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: 
NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:56.146293Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:57.150050Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:57.150093Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:57.150111Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:58.152146Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:58.152195Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:58.152239Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:06:59.161692Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:59.161728Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:06:59.161767Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:00.165908Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:00.165946Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:00.165968Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:01.168546Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:01.168582Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:01.168617Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:02.170671Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:02.170709Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: 
NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:02.170736Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:02.311858Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::NReplication::NController::TEvPrivate::TEvUpdateTenantNodes { Tenant: /Root } 2024-11-21T09:07:02.311900Z node 5 :REPLICATION_CONTROLLER INFO: [controller 72075186224037920] Discover tenant nodes: tenant# /Root 2024-11-21T09:07:02.312348Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimr::TEvDiscovery::TEvError 2024-11-21T09:07:03.173927Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:03.173967Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:03.173990Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:04.181534Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:04.181588Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:04.181612Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:05.183657Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:05.183726Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:05.183750Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:06.185860Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:06.185898Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:06.185930Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete 2024-11-21T09:07:07.187783Z node 5 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037920] Handle NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:07.187821Z node 5 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Execute: NKikimrReplication.TEvDescribeReplication PathId { OwnerId: 72057594046644480 LocalId: 17 } IncludeStats: false 2024-11-21T09:07:07.187844Z node 5 
:REPLICATION_CONTROLLER DEBUG: [controller 72075186224037920][TxDescribeReplication] Complete Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/jptk/0040c5/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/jptk/0040c5/ydb/core/kqp/ut/scheme/test-results/unittest/testing_out_stuff/chunk14/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1182902) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] |92.9%| [TA] $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... results_accumulator.log} >> test_select_1.py::TestSelect1::test_select_1[v1] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [GOOD] |92.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start |92.9%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] |92.9%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_statistics.py::TestS3::test_sum[v2-client0] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [GOOD] >> 
test_statistics.py::TestS3::test_aborted_by_user[v2-client0] >> test_recovery.py::TestRecovery::test_program_state_recovery >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] [GOOD] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] |92.9%| [TA] $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1183953) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ca4/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ca4/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:06:37] send response localhost:24124/?database=local ::1 - - [21/Nov/2024 09:06:37] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |92.9%| [TA] {RESULT} $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] [GOOD] >> test_select_1.py::TestSelect1::test_compile_error[v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] [GOOD] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1188085) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c5c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_select_limit_db_id.py.TestSelectLimitWithDbId.test_select_same_with_id.v1-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/scanner.py:286: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c5c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk38/testing_out_stuff/test_select_limit_db_id.py.TestSelectLimitWithDbId.test_select_same_with_id.v1-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_big_state.py::TestBigState::test_gt_8mb[v1] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] [GOOD] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] >> 
test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> 
test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f83/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f83/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1113364) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f83/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v2-client0-json_list-dataset/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f83/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v2-client0-json_list-dataset/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1116017 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1022229) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_fail_first.kikimr0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_fail_first.kikimr0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_fail_first.kikimr0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_fail_first.kikimr0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_low_rate.kikimr0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/0043a9/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff/test_retry.py.TestRetry.test_low_rate.kikimr0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_empty.py::TestS3::test_empty[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f8a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_empty/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> 
process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f8a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_empty/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1112333) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1113651 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] |92.9%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] |92.9%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] >> test_select_1.py::TestSelect1::test_select_pg[v2] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] [GOOD] >> 
test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] [GOOD] >> test_recovery.py::TestRecovery::test_recovery >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1196693) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] >> test_select_1.py::TestSelect1::test_select_1[v2] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] 
>> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1197067) is multi-threaded, use of fork() may lead to deadlocks in the child. 
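The DeprecationWarning from contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66 that keeps appearing in these "Test command err" blocks means the test process already has worker threads when multiprocessing fork()s a child, which can deadlock the child. A minimal sketch of the usual mitigation, assuming the code that creates the pool is free to choose its start method (run_in_workers is an illustrative name, not part of the ya harness or these tests):

    import multiprocessing as mp

    def run_in_workers(task, items):
        # "spawn" starts a fresh interpreter instead of fork()ing the current
        # multi-threaded process, which is the hazard the warning flags;
        # on Linux, "forkserver" is a cheaper alternative with the same property.
        ctx = mp.get_context("spawn")
        with ctx.Pool(processes=4) as pool:
            return pool.map(task, items)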
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bce/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_3_selects.py.TestSelects.test_3_selects.v1-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bce/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/test_3_selects.py.TestSelects.test_3_selects.v1-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] [GOOD] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] [GOOD] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_select_1.py::TestSelect1::test_compile_error[v2] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1135259) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. 
Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1136240 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f5d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/test_bindings_1.py.TestBindings.test_s3_insert.v2-kikimr_settings0-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] [GOOD] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This 
process (pid=1183901) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ca8/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ca8/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_recovery.py::TestRecovery::test_program_state_recovery [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] [GOOD] 
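Every ResourceWarning in these blocks is followed by the hint "Enable tracemalloc to get the object allocation traceback". A short sketch of how that hint is normally acted on so the warning also shows where the leaked object was created; the snippet is illustrative and not taken from this repository:

    # From the environment, before launching the tests:
    #   PYTHONTRACEMALLOC=25 python -m pytest ...   (25 = stack frames kept per allocation)
    # or programmatically, e.g. at the top of a conftest.py:
    import tracemalloc

    if not tracemalloc.is_tracing():
        tracemalloc.start(25)  # later ResourceWarnings include the allocation traceback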
>> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1185243) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c7e/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_select_1.py.TestSelect1.test_select_10_p_19_plus_1.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c7e/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk33/testing_out_stuff/test_select_1.py.TestSelect1.test_select_10_p_19_plus_1.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_select_1.py::TestSelect1::test_unwrap_null[v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1182917) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003cc1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_match_recognize_sink.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003cc1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk46/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_match_recognize_sink.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1112613) is multi-threaded, use of fork() may lead to deadlocks in the child. 
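The library/recipes/common/__init__.py:29 warnings above show the recurring pattern behind most of the unclosed-file reports: moto_server.out.log and moto_server.err.log are opened, handed to subprocess.Popen as stdout/stderr, and never closed, so the interpreter complains when the TextIOWrapper objects are garbage-collected. A minimal sketch of a variant that closes them explicitly, assuming the caller owns both the files and the child process (start_logged/stop_logged are hypothetical names, not the recipe's real API):

    import subprocess

    def start_logged(cmd, out_path, err_path):
        # Popen only duplicates the file descriptors; it does not take
        # ownership of the Python file objects, so keep them to close later.
        out = open(out_path, "w", encoding="utf-8")
        err = open(err_path, "w", encoding="utf-8")
        return subprocess.Popen(cmd, stdout=out, stderr=err), out, err

    def stop_logged(proc, out, err):
        proc.terminate()
        proc.wait()
        out.close()
        err.close()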
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1114780 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f89/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/test_formats.py.TestS3Formats.test_format.v1-test.csv-csv_with_names-kikimr_settings0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get 
the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1189530) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c5a/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c5a/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] [GOOD] >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1184986) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:06:46] send response localhost:2398/?database=local ::1 - - [21/Nov/2024 09:06:46] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c84/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_restart_query.py.TestRestartQuery.test_restart_runtime_errors.v1-mvp_external_ydb_endpoint0-analytics/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c84/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk21/testing_out_stuff/test_restart_query.py.TestRestartQuery.test_restart_runtime_errors.v1-mvp_external_ydb_endpoint0-analytics/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:07:45] send response localhost:2398/?database=local ::1 - - [21/Nov/2024 09:07:45] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1187280) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1189560) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c56/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_public_metrics.py.TestPublicMetrics.test_select_limit.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c56/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk15/testing_out_stuff/test_public_metrics.py.TestPublicMetrics.test_select_limit.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1193336) is multi-threaded, 
use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c3b/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_pg.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c3b/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_pg.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] 
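Several err blocks earlier in this section also report contrib/tools/python3/Lib/subprocess.py:1127: "ResourceWarning: subprocess NNN is still running", which Python emits when a Popen object is garbage-collected while its child is still alive. A sketch of the teardown pattern that avoids it, assuming the fixture keeps the Popen handle (the helper name is illustrative):

    import subprocess

    def shutdown(proc: subprocess.Popen, timeout: float = 10.0) -> int:
        # Terminate the child and reap it with wait() before the Popen
        # object goes out of scope; otherwise its finalizer emits the
        # "subprocess ... is still running" ResourceWarning.
        proc.terminate()
        try:
            return proc.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            proc.kill()
            return proc.wait()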
>> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f90/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f90/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1109501) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
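The botocore DeprecationWarning above (contrib/python/botocore/py3/botocore/auth.py:419) already names the replacement: use timezone-aware UTC timestamps via datetime.datetime.now(datetime.UTC) instead of datetime.datetime.utcnow(). A minimal illustration of the migration (variable names are illustrative; datetime.UTC itself requires Python 3.11+):

    from datetime import datetime, timezone, UTC

    # Deprecated: datetime.utcnow() returns a naive datetime with tzinfo=None.
    now_utc = datetime.now(UTC)                   # replacement named in the warning (3.11+)
    now_utc_compat = datetime.now(timezone.utc)   # equivalent, works on older Python 3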
ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f90/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v1-client0-json_list/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f90/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v1-client0-json_list/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1112093 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f71/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f71/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1128675) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. 
Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1130223 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1196836) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003c12/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v1/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c12/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c12/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003c12/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk34/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v2/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] [GOOD] >> 
test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1185996) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:03] send response localhost:2772/?database=local ::1 - - [21/Nov/2024 09:08:03] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c78/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c78/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/003c78/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk41/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/logfile_qnfx1fw6.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] [GOOD] >> 
test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system >> test_stop.py::TestStop::test_stop_query[v1-analytics] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1199416) is multi-threaded, use of fork() may lead to deadlocks in the child. 
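The popen_fork.py DeprecationWarning above is CPython's generic notice that fork()-ing an already multi-threaded process can deadlock the child. The sketch below is a generic illustration of the "spawn" start method that avoids fork(); it is not the YDB test harness's actual configuration, and every name in it is hypothetical.

    import multiprocessing as mp

    def square(n: int) -> int:
        return n * n

    if __name__ == "__main__":
        # "spawn" starts each child in a fresh interpreter instead of fork()-ing
        # the (possibly multi-threaded) parent, which is what the warning flags.
        ctx = mp.get_context("spawn")
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(4)))

Spawned children do not inherit the parent's threads or locks, at the cost of re-importing the worker module in each child.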
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bc1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_select_1.py.TestSelect1.test_compile_error.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bc1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk32/testing_out_stuff/test_select_1.py.TestSelect1.test_compile_error.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] [GOOD] >> test.py::test[action-eval_column--Analyze] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Debug] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[action-eval_column--Analyze] [GOOD] >> test.py::test[action-eval_column--Debug] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] [GOOD] >> 
test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] [GOOD] >> test.py::test[action-eval_column--Debug] [GOOD] >> test.py::test[action-eval_column--ForceBlocks] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[sampling-bind_default-default.txt-Debug] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Debug] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test.py::test[aggregate-agg_full_table_list-default.txt-ForceBlocks] >> test.py::test[action-eval_column--ForceBlocks] [GOOD] >> test.py::test[action-eval_column--Plan] [GOOD] >> test.py::test[action-eval_column--Results] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> test.py::test[aggregate-group_by_tablerow_column--Debug] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Plan] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Results] >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] >> test_recovery.py::TestRecovery::test_recovery [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> 
test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[schema-select_all-row_spec_diff_sort-Analyze] >> test.py::test[aggregate-group_by_tablerow_column--Results] [GOOD] >> test.py::test[bigdate-bitcast_interval64-default.txt-Debug] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] >> test.py::test[aggregate-agg_full_table_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Plan] [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort-Analyze] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Debug] >> test.py::test[bigdate-bitcast_interval64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-bitcast_interval64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-bitcast_interval64-default.txt-Results] >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> test.py::test[bigdate-bitcast_interval64-default.txt-Results] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_diff_sort-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-evaluate_queries--Analyze] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Debug] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Debug] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Plan] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[action-evaluate_queries--Analyze] [GOOD] >> test.py::test[action-evaluate_queries--Debug] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Analyze] >> test.py::test[bigdate-const_date32-default.txt-Results] [GOOD] >> 
test.py::test[bigdate-table_yt_native-wo_compat-Debug] [SKIPPED] >> test.py::test[bigdate-table_yt_native-wo_compat-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_fill--Debug] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] >> test.py::test[action-evaluate_queries--Debug] [GOOD] >> test.py::test[action-evaluate_queries--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] >> test.py::test[schema-select_with_map-sorted_desc-Analyze] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Debug] >> test.py::test[bigdate-tz_table_fill--Debug] [GOOD] >> test.py::test[bigdate-tz_table_fill--Plan] [GOOD] >> test.py::test[bigdate-tz_table_fill--Results] >> test.py::test[action-evaluate_queries--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_queries--Plan] [GOOD] >> test.py::test[action-evaluate_queries--Results] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test.py::test[action-evaluate_queries--Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Debug] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] >> test.py::test[agg_apply-avg_decimal-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test.py::test[binding-named_callable-default.txt-Debug] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1204602) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b9a/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_invalid_consumer.py.TestConsumer.test_invalid.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b9a/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_invalid_consumer.py.TestConsumer.test_invalid.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:30] send response localhost:26095/?database=local ::1 - - [21/Nov/2024 09:08:30] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003b9a/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/test_kill_pq_bill.py.TestKillPqBill.test_do_not_bill_pq.v1-mvp_external_ydb_endpoint0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] >> test.py::test[agg_apply-avg_decimal-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-avg_decimal-default.txt-Debug] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Plan] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> test.py::test[binding-named_callable-default.txt-Debug] [GOOD] >> test.py::test[binding-named_callable-default.txt-Plan] [GOOD] >> test.py::test[binding-named_callable-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[binding-named_callable-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Debug] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[select-create_structures-default.txt-Analyze] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] >> test.py::test[agg_apply-avg_decimal-default.txt-Debug] [GOOD] >> 
test.py::test[agg_apply-avg_decimal-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Debug] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Plan] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Debug] >> test_big_state.py::TestBigState::test_gt_8mb[v1] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test.py::test[select-create_structures-default.txt-Analyze] [GOOD] >> test.py::test[select-create_structures-default.txt-Debug] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[agg_apply-avg_decimal-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-avg_decimal-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-avg_decimal-default.txt-Results] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[bitcast_implicit-mul_bitcast-default.txt-Debug] >> test.py::test[select-create_structures-default.txt-Debug] [GOOD] >> test.py::test[select-create_structures-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] [GOOD] >> test.py::test[agg_apply-avg_decimal-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Analyze] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] >> test.py::test[bitcast_implicit-mul_bitcast-default.txt-Debug] [GOOD] >> test.py::test[bitcast_implicit-mul_bitcast-default.txt-Plan] [GOOD] >> test.py::test[bitcast_implicit-mul_bitcast-default.txt-Results] >> test.py::test[select-create_structures-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-create_structures-default.txt-Plan] [GOOD] >> test.py::test[select-create_structures-default.txt-Results] >> test.py::test[bitcast_implicit-mul_bitcast-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Debug] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> 
test.py::test[select-dot_in_alias-default.txt-Analyze] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter >> test.py::test[select-dot_in_alias-default.txt-Analyze] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Debug] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] >> test.py::test[blocks-combine_all_avg_filter_opt--Debug] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Plan] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] >> test.py::test[agg_apply-avg_numeric-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[select-dot_in_alias-default.txt-Debug] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_count--Debug] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Analyze] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Plan] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] >> test.py::test[agg_apply-sum_interval-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1211211) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b87/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_select_1.py.TestSelect1.test_select_z_x_y.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b87/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk36/testing_out_stuff/test_select_1.py.TestSelect1.test_select_z_x_y.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[blocks-combine_all_count--Debug] [GOOD] >> test.py::test[blocks-combine_all_count--Plan] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] >> test.py::test[select-dot_in_alias-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--Analyze] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Debug] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--ForceBlocks] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Plan] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-null_check-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test.py::test[agg_apply-sum_interval-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Analyze] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Debug] >> test_drain.py::TestHive::test_drain_tablets >> test.py::test[select-null_check-default.txt-Analyze] [GOOD] >> test.py::test[select-null_check-default.txt-Debug] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] >> test.py::test[agg_apply-sum_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Results] >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> 
test.py::test[aggregate-aggregation_and_order-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Debug] >> test.py::test[select-null_check-default.txt-Debug] [GOOD] >> test.py::test[select-null_check-default.txt-ForceBlocks] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> test.py::test[agg_apply-sum_interval-default.txt-Results] [GOOD] >> test.py::test[agg_apply-table--Analyze] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] >> test.py::test[select-null_check-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-null_check-default.txt-Plan] [GOOD] >> test.py::test[select-null_check-default.txt-Results] >> test.py::test[aggregate-aggregation_and_order-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] >> test.py::test[agg_apply-table--Analyze] [GOOD] >> test.py::test[agg_apply-table--Debug] >> test.py::test[select-null_check-default.txt-Results] [GOOD] >> test.py::test[select-substring-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] >> test.py::test[agg_apply-table--Debug] [GOOD] >> test.py::test[agg_apply-table--ForceBlocks] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] >> test.py::test[select-substring-default.txt-Analyze] [GOOD] >> test.py::test[select-substring-default.txt-Debug] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Analyze] >> test.py::test[select-substring-default.txt-Debug] [GOOD] >> test.py::test[select-substring-default.txt-ForceBlocks] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] >> test.py::test[blocks-date_greater_or_equal--Debug] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Plan] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> test.py::test[agg_apply-table--ForceBlocks] [GOOD] >> test.py::test[agg_apply-table--Plan] [GOOD] >> test.py::test[agg_apply-table--Results] >> 
TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Analyze] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Debug] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[select-substring-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-substring-default.txt-Plan] [GOOD] >> test.py::test[select-substring-default.txt-Results] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] [SKIPPED] >> test.py::test[agg_phases-count_all_opt-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:09:08.699154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:09:08.699175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:09:08.699179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:09:08.699183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:09:08.699187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:09:08.699190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:09:08.699196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:09:08.699273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:09:08.710038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:09:08.710064Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 
2024-11-21T09:09:08.712859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:09:08.713016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:09:08.713067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:09:08.716509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:09:08.716611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:09:08.716711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:09:08.716987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:09:08.717724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:09:08.718018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:09:08.718027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:09:08.718041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:09:08.718047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:09:08.718054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:09:08.718098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:09:08.719522Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:09:08.735426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:09:08.735531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.735620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:09:08.735678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:09:08.735691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.736731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, 
at schemeshard: 72057594046678944 2024-11-21T09:09:08.736765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:09:08.736845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.736857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:09:08.736863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:09:08.736868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:09:08.737438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.737453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:09:08.737459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:09:08.737910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.737923Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.737929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:09:08.737937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:09:08.738608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:09:08.739072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:09:08.739130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:09:08.739340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:09:08.739369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:09:08.739376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:09:08.739431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:09:08.739438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, 
at tablet 72057594046678944 2024-11-21T09:09:08.739471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:09:08.739484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:09:08.740087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:09:08.740102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:09:08.740153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:09:08.740159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:09:08.740284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:09:08.740293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:09:08.740306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:09:08.740311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:09:08.740317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:09:08.740323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:09:08.740327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:09:08.740332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:09:08.740346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:09:08.740354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:09:08.740360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:09:30.689184Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:09:30.689284Z 
node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 115us result status StatusSuccess 2024-11-21T09:09:30.689508Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1122501) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1123822 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params0-false/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7c/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/test_size_limit.py.TestS3.test_size_limit.v1-client0-5-kikimr_params1-false/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] >> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Analyze] >> test.py::test[aggregate-group_by_gs_alt_duo--Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] >> test.py::test[agg_phases-count_all_opt-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Debug] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[select-table_content_from_double_opt-default.txt-Analyze] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Debug] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] [GOOD] >> 
test.py::test[aggregate-group_by_gs_alt_duo--Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Debug] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Analyze] >> test.py::test[select-table_content_from_double_opt-default.txt-Debug] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Analyze] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Debug] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Plan] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] >> test.py::test[agg_phases-count_all_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-ForceBlocks] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--ForceBlocks] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] >> test.py::test[select-values-default.txt-Analyze] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] [SKIPPED] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] >> test.py::test[select-values-default.txt-Analyze] [GOOD] >> test.py::test[select-values-default.txt-Debug] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field >> test.py::test[blocks-date_not_equals_scalar--Debug] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Plan] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] >> 
test.py::test[aggregate-group_by_gs_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> test.py::test[select-values-default.txt-Debug] [GOOD] >> test.py::test[select-values-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_all_opt-default.txt-Results] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] >> test.py::test[select-values-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-values-default.txt-Plan] [GOOD] >> test.py::test[select-values-default.txt-Results] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Analyze] >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] [GOOD] >> test.py::test[select-values-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Analyze] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f79/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f79/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1123581) is multi-threaded, use of fork() may lead to deadlocks in the child. 
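The `multiprocessing/popen_fork.py:66` DeprecationWarning repeated in these failure blocks flags `fork()` being used from an already multi-threaded parent, which can deadlock the child if another thread holds a lock at fork time. The sketch below shows the stock `spawn` start method as one way around that; `run_worker` and the pool size are illustrative, since the harness code that actually triggers the warning is not part of this log.

```python
# Hedged sketch only: the code that triggers the warning is not shown in this
# log, so run_worker and the pool size below are illustrative.
import multiprocessing as mp

def run_worker(task_id: int) -> int:
    # Placeholder for whatever the child process would really do.
    return task_id * 2

if __name__ == "__main__":
    # "spawn" starts a fresh interpreter instead of fork()ing a parent whose
    # other threads may hold locks -- the condition the warning is about.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=2) as pool:
        print(pool.map(run_worker, range(4)))  # [0, 2, 4, 6]
```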
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1124403 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Debug] |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Debug] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Debug] |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1232769) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test.py::test[blocks-div_uint64_opt2--Debug] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Plan] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] >> test.py::test[agg_phases-count_all_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-Analyze] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> test.py::test[blocks-filter_direct_col--Debug] >> test.py::test[agg_phases-count_opt-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-Debug] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[blocks-filter_direct_col--Debug] [GOOD] >> test.py::test[blocks-filter_direct_col--Plan] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Analyze] >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[blocks-interval_div_scalar--Debug] >> 
TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Analyze] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_s3_1.py::TestS3::test_missed[v2-false-client0] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Debug] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Analyze] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Debug] >> test.py::test[blocks-interval_div_scalar--Debug] [GOOD] >> test.py::test[blocks-interval_div_scalar--Plan] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-ForceBlocks] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1217255) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b5c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_select_1.py.TestSelect1.test_ast_in_failed_query_runtime.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b5c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk31/testing_out_stuff/test_select_1.py.TestSelect1.test_ast_in_failed_query_runtime.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] >> test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1214993) is multi-threaded, use of fork() may lead to deadlocks in the child. 
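Nearly every ResourceWarning in these blocks ends with "Enable tracemalloc to get the object allocation traceback". How the ya/pytest wrappers would wire that up is not visible in this log; the sketch below only shows the stock CPython mechanisms (the `-X tracemalloc` / `PYTHONTRACEMALLOC` switches, or starting tracemalloc early in the process) that make the warning print where the leaked object was allocated.

```python
# Sketch of the "Enable tracemalloc" hint from the ResourceWarnings above.
# Command-line equivalents (pick one):
#   python -X tracemalloc=25 -m pytest ...
#   PYTHONTRACEMALLOC=25 python -m pytest ...
import tracemalloc
import warnings

tracemalloc.start(25)                         # keep 25 frames per allocation
warnings.simplefilter("always", ResourceWarning)

def leak_a_handle(path: str) -> None:
    open(path, "w")                           # deliberately never closed

leak_a_handle("/tmp/example.log")             # illustrative path, not from this log
# When the abandoned file object is finalized, the ResourceWarning now carries
# an "Object allocated at (most recent call last)" traceback instead of the
# bare "Enable tracemalloc" hint seen in the log above.
```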
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b6d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_select_1.py.TestSelect1.test_unwrap_null.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b6d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk37/testing_out_stuff/test_select_1.py.TestSelect1.test_unwrap_null.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |93.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv] >> test.py::test[aggregate-group_by_ru_join_simple--Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Plan] [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test_s3_1.py::TestS3::test_missed[v2-true-client0] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Plan] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata >> test.py::test[aggregate-group_by_ru_join_simple--Plan] [GOOD] >> 
test.py::test[aggregate-group_by_ru_join_simple--Results] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[agg_phases-count_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-Plan] >> test.py::test[agg_phases-count_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_opt-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_big_state.py::TestBigState::test_gt_8mb[v1] [FAIL] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1187811) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c62/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_type_as_column.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c62/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_type_as_column.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Debug] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Plan] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Analyze] |93.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Debug] |93.1%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
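The `library/recipes/common/__init__.py:29` warnings repeated in the failure blocks above report `_io.TextIOWrapper` log handles (moto_server.out.log / moto_server.err.log) that were opened, handed to `subprocess.Popen`, and never closed in the parent. The sketch below shows one common pattern that avoids the warning; `start_logged_process`, the command, and the paths are illustrative, not the actual recipe code.

```python
# Hedged sketch: the real library/recipes/common helper is not shown in this
# log, so the function name, command and paths below are illustrative only.
import subprocess

def start_logged_process(cmd: list[str], out_path: str, err_path: str) -> subprocess.Popen:
    # Open the log files, hand them to the child, then let the with-block close
    # the parent's copies. Popen duplicates the descriptors into the child, so
    # the child keeps writing while the parent no longer leaks TextIOWrapper
    # objects (the source of the ResourceWarnings above).
    with open(out_path, "w", encoding="utf-8") as out, \
         open(err_path, "w", encoding="utf-8") as err:
        return subprocess.Popen(cmd, stdout=out, stderr=err)

proc = start_logged_process(
    ["sleep", "1"],              # stand-in for the moto_server command line
    "/tmp/moto_server.out.log",  # illustrative paths, not the build root above
    "/tmp/moto_server.err.log",
)
proc.wait()
```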
results_accumulator.log} >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] >> test_auditlog.py::test_single_dml_query_logged[replace] >> test_auditlog.py::test_single_dml_query_logged[update] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Debug] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1203362) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ba1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_select_timings.py.TestSelectTimings.test_select_timings.v1-analytics-finished/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003ba1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk39/testing_out_stuff/test_select_timings.py.TestSelectTimings.test_select_timings.v1-analytics-finished/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[aggregate-group_by_ru_join_simple--Results] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] >> test.py::test[blocks-mul_uint64_opt2--Debug] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Plan] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test.py::test[agg_phases-count_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Analyze] >> 
test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[solomon-BadDownsamplingFill--Analyze] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--Debug] >> test.py::test[bigdate-const_date32-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Debug] >> test.py::test[solomon-BadDownsamplingFill--Debug] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--ForceBlocks] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--Plan] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--Results] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval--Analyze] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval--Debug] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval--ForceBlocks] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval--Plan] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval--Results] [SKIPPED] >> test.py::test[type_v3-float--Analyze] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-string_with--Debug] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test.py::test[agg_phases-sum-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Debug] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] >> test.py::test[bigdate-const_date32-default.txt-Debug] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-ForceBlocks] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[type_v3-float--Analyze] [GOOD] >> test.py::test[type_v3-float--Debug] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-string_with--Debug] [GOOD] >> test.py::test[blocks-string_with--Plan] [GOOD] >> test.py::test[blocks-string_with--Results] >> test.py::test[bigdate-const_date32-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Plan] [GOOD] >> test.py::test[bigdate-const_date32-default.txt-Results] >> test.py::test[type_v3-float--Debug] [GOOD] >> test.py::test[type_v3-float--ForceBlocks] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] >> test.py::test[bigdate-const_date32-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Analyze] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] [GOOD] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Debug] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv] >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> 
test_auditlog.py::test_single_dml_query_logged[upsert] >> test.py::test[type_v3-float--ForceBlocks] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged >> test.py::test[bigdate-table_arithmetic-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Debug] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test.py::test[agg_phases-sum-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_one_desc--Debug] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Plan] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] |93.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[type_v3-float--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] [GOOD] >> test.py::test[blocks-tuple_type--Debug] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Debug] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-tuple_type--Debug] [GOOD] >> test.py::test[blocks-tuple_type--Plan] [GOOD] >> test.py::test[blocks-tuple_type--Results] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] >> test_auditlog.py::test_dynconfig >> test_auditlog.py::test_dynconfig [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test.py::test[blocks-tuple_type--Results] [GOOD] >> test.py::test[column_group-hint_append_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_append_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_append_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-many_inserts--Debug] [SKIPPED] >> test.py::test[agg_phases-sum-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Plan] [GOOD] >> 
test.py::test[agg_phases-sum-default.txt-Results] >> test.py::test[column_group-many_inserts--Plan] [SKIPPED] >> test.py::test[column_group-many_inserts--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-Debug] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Plan] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1198058) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bc4/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_select_timings.py.TestSelectTimings.test_select_timings.v1-streaming-finished/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bc4/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk40/testing_out_stuff/test_select_timings.py.TestSelectTimings.test_select_timings.v1-streaming-finished/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> test.py::test[column_order-insert_tmp-default.txt-Debug] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt-Plan] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt-Results] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] [GOOD] >> 
test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-on-Analyze] >> test.py::test[bigdate-table_yt_key_filter-on-Analyze] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-Debug] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-Results] [SKIPPED] >> test.py::test[bigdate-tznumliterals-default.txt-Analyze] >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[agg_phases-sum-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Analyze] >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Debug] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Plan] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [SKIPPED] >> test.py::test[bigdate-tznumliterals-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-tznumliterals-default.txt-Debug] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/0030cc/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2024-11-21T09:10:01.505170Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:01.505153Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2024-11-21T09:10:01.482180Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:01.639810Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:01.639798Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2024-11-21T09:10:01.620391Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/jptk/0030af/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2024-11-21T09:10:01.738135Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:01.738121Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2024-11-21T09:10:01.714928Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:01.866627Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:01.866613Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2024-11-21T09:10:01.851664Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null >> test.py::test[agg_phases_agg_apply-max-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Debug] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[bigdate-tznumliterals-default.txt-Debug] [GOOD] >> test.py::test[bigdate-tznumliterals-default.txt-ForceBlocks] |93.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [SKIPPED] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] >> test.py::test[bigdate-tznumliterals-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-tznumliterals-default.txt-Plan] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] [GOOD] >> test.py::test[bigdate-tznumliterals-default.txt-Plan] [GOOD] >> test.py::test[bigdate-tznumliterals-default.txt-Results] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> 
test.py::test[bigdate-tznumliterals-default.txt-Results] [GOOD] >> test.py::test[binding-bind_cast-default.txt-Analyze] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1221740) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:46] send response localhost:15399/?database=local ::1 - - [21/Nov/2024 09:08:46] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b35/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_pq_read_write.py.TestPqReadWrite.test_pq_read_write.v1-with_checkpoints-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b35/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk14/testing_out_stuff/test_pq_read_write.py.TestPqReadWrite.test_pq_read_write.v1-with_checkpoints-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:09:32] send response localhost:15399/?database=local ::1 - - [21/Nov/2024 09:09:32] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[binding-bind_cast-default.txt-Analyze] [GOOD] >> test.py::test[binding-bind_cast-default.txt-Debug] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/003063/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2024-11-21T09:10:05.086087Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.086071Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) 
values (100, 100), (101, 101)","start_time":"2024-11-21T09:10:05.082241Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.201204Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.201192Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2024-11-21T09:10:05.194291Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.313143Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.313130Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2024-11-21T09:10:05.309178Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.423821Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.423809Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2024-11-21T09:10:05.420364Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.534332Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.534320Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2024-11-21T09:10:05.530742Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.644002Z: {"database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.643990Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2024-11-21T09:10:05.640615Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.793014Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] >> test.py::test[agg_phases_agg_apply-max-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/003086/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt 2024-11-21T09:10:05.488922Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.488881Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2024-11-21T09:10:05.450533Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.620386Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.620371Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2024-11-21T09:10:05.594239Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.742813Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.742802Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2024-11-21T09:10:05.725901Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.870491Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.870478Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2024-11-21T09:10:05.848476Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:05.990530Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:05.990515Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2024-11-21T09:10:05.975321Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:06.115198Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:06.115185Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2024-11-21T09:10:06.096636Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test.py::test[binding-bind_cast-default.txt-Debug] [GOOD] >> test.py::test[binding-bind_cast-default.txt-ForceBlocks] >> test_auditlog.py::test_single_dml_query_logged[delete] |93.1%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] [GOOD] >> test.py::test[binding-bind_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-bind_cast-default.txt-Plan] [GOOD] >> test.py::test[binding-bind_cast-default.txt-Results] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] >> test.py::test[binding-bind_cast-default.txt-Results] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/003045/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2024-11-21T09:10:08.157405Z: {"tx_id":"01jd6zrtgx42ncjks5twe1ckzz","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:08.157393Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2024-11-21T09:10:08.157042Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2024-11-21T09:10:08.181617Z: {"tx_id":"01jd6zrtgx42ncjks5twe1ckzz","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:08.181606Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2024-11-21T09:10:08.160893Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:08.186433Z: {"tx_id":"01jd6zrtgx42ncjks5twe1ckzz","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:08.186422Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2024-11-21T09:10:08.184934Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1222238) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b30/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_yq_streaming.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b30/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk47/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_yq_streaming.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/00303d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2024-11-21T09:10:08.577597Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} 2024-11-21T09:10:08.723284Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2024-11-21T09:10:08.840795Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"SUCCESS","subject":"other-user@builtin","operation":"REPLACE 
DYNCONFIG","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] >> test.py::test[binding-table_concat_strict_binding-default.txt-Analyze] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Debug] >> test.py::test[agg_phases_agg_apply-max-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Results] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Debug] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/00301a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2024-11-21T09:10:11.809853Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2024-11-21T09:10:11.925407Z: 
{"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} 2024-11-21T09:10:12.037825Z: {"old_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-block_input_various_types-v3-Analyze] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Debug] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Plan] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_hashed_min--Analyze] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1223812) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:53] send response localhost:24452/?database=local ::1 - - [21/Nov/2024 09:08:53] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b2c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_watermarks.py.TestWatermarks.test_pq_watermarks.v1-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b2c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_watermarks.py.TestWatermarks.test_pq_watermarks.v1-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test.py::test[agg_phases_agg_apply-max-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Analyze] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types >> test.py::test[blocks-combine_hashed_min--Analyze] [GOOD] >> test.py::test[blocks-combine_hashed_min--Debug] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1221733) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:45] send response localhost:31356/?database=local ::1 - - [21/Nov/2024 09:08:45] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b39/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_continue_mode.py.TestContinueMode.test_deny_state_load_mode_from_checkpoint_in_modify_query.v1-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b39/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/test_continue_mode.py.TestContinueMode.test_deny_state_load_mode_from_checkpoint_in_modify_query.v1-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:09:41] send response localhost:31356/?database=local ::1 - - [21/Nov/2024 09:09:41] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test.py::test[aggr_factory-bottom_by-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Debug] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[blocks-combine_hashed_min--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] [GOOD] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_min--Plan] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_min--Results] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1213101) is multi-threaded, use of fork() may lead to deadlocks in the child. 
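Almost every ydb/tests/fq/yds chunk in this run repeats the same two diagnostics: a DeprecationWarning from multiprocessing/popen_fork.py (the pytest process is multi-threaded, so fork() may deadlock in the child) and truncated ResourceWarning lines about unclosed files that end with "Enable tracemalloc to get the object allocation traceback". A minimal sketch of the standard-library switches those messages point at; this is an illustration of the suggested diagnostics, not a change that exists in the YDB test harness:

```python
# Illustration of the diagnostics the warnings above suggest; not code from the
# YDB test harness, just the standard-library switches they refer to.
import multiprocessing as mp
import tracemalloc
import warnings

def enable_leak_diagnostics() -> None:
    # With tracemalloc tracing, CPython can append "Object allocated at ..."
    # tracebacks to ResourceWarning reports about unclosed files.
    tracemalloc.start(25)
    # Show every ResourceWarning instead of deduplicating by location.
    warnings.simplefilter("always", ResourceWarning)

def avoid_fork_in_threaded_process() -> None:
    # popen_fork.py warns because fork() in a multi-threaded parent can deadlock
    # the child; the "spawn" start method sidesteps that by starting a fresh
    # interpreter instead of forking.
    mp.set_start_method("spawn", force=True)

if __name__ == "__main__":
    enable_leak_diagnostics()
    avoid_fork_in_threaded_process()
    print("multiprocessing start method:", mp.get_start_method())
```

With tracemalloc enabled, each ResourceWarning can carry the allocation traceback of the leaked writer, which makes unclosed stdout/stderr BufferedWriter reports like the ones above easier to attribute.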
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b74/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_read_rules_deletion.py.TestReadRulesDeletion.test_delete_read_rules.v1-simple/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b74/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk16/testing_out_stuff/test_read_rules_deletion.py.TestReadRulesDeletion.test_delete_read_rules.v1-simple/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1225544) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b28/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_early_finish_case2.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b28/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk44/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_early_finish_case2.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/002f5d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2024-11-21T09:10:19.604957Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2024-11-21T09:10:19.604941Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id 
= 1","start_time":"2024-11-21T09:10:19.581322Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} 2024-11-21T09:10:20.003446Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2024-11-21T09:10:20.003431Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2024-11-21T09:10:19.975720Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} >> test.py::test[blocks-combine_hashed_min--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Analyze] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/jptk/002f43/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2024-11-21T09:10:20.165691Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} 2024-11-21T09:10:20.324309Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[blocks-combine_hashed_minmax_double--Analyze] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Debug] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/jptk/002f1e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2024-11-21T09:10:22.545665Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:22.545652Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2024-11-21T09:10:22.518765Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2024-11-21T09:10:22.697666Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2024-11-21T09:10:22.697649Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2024-11-21T09:10:22.659838Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1231813) is multi-threaded, use of fork() may lead to deadlocks in the child. 
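The test_auditlog.py chunks above print the captured audit.txt files verbatim: each record is an ISO-8601 timestamp followed by a one-line JSON object with fields such as operation, status, query_text, subject and sanitized_token. A small parsing sketch for such a dump; this is my own helper for reading these files, not code from the YDB test suite, and "audit.txt" stands for a hypothetical local copy of one of the files named above:

```python
# Sketch of a reader for the audit.txt dumps shown above (illustration only).
# Each line has the form "<ISO timestamp>: <JSON record>".
import json
from collections import Counter
from pathlib import Path

def iter_audit_records(path: str):
    """Yield (timestamp, record_dict) pairs from an audit.txt file."""
    for line in Path(path).read_text().splitlines():
        line = line.strip()
        brace = line.find("{")          # the JSON payload starts at the first '{'
        if not line or brace == -1:
            continue
        timestamp = line[:brace].rstrip(" :")
        yield timestamp, json.loads(line[brace:])

if __name__ == "__main__":
    # Tally records the way the tests above assert on them: by operation and status.
    per_op = Counter(
        (rec.get("operation"), rec.get("status"))
        for _, rec in iter_audit_records("audit.txt")
    )
    for (operation, status), count in per_op.items():
        print(operation, status, count)
```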
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:09:01] send response localhost:17022/?database=local ::1 - - [21/Nov/2024 09:09:01] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b02/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b02/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[blocks-combine_hashed_minmax_double--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1227672) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:59] send response localhost:12639/?database=local ::1 - - [21/Nov/2024 09:08:59] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b1e/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_pq_read_write.py.TestPqReadWrite.test_pq_read_schema_metadata.v1-mvp_external_ydb_endpoint0-with_checkpoints/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b1e/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/test_pq_read_write.py.TestPqReadWrite.test_pq_read_schema_metadata.v1-mvp_external_ydb_endpoint0-with_checkpoints/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:09:54] send response localhost:12639/?database=local ::1 - - [21/Nov/2024 09:09:54] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[aggr_factory-stddev-default.txt-Analyze] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Plan] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[aggr_factory-stddev-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Debug] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1187059) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c71/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk29/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_simple_optional/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] [GOOD] >> 
test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1183965) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c89/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk22/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_3_sessions/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggr_factory-stddev-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> 
test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1218478) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:08:25] send response localhost:17291/?database=local ::1 - - [21/Nov/2024 09:08:25] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b53/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_continue_mode.py.TestContinueMode.test_continue_from_offsets.v1-mvp_external_ydb_endpoint0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b53/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_continue_mode.py.TestContinueMode.test_continue_from_offsets.v1-mvp_external_ydb_endpoint0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/003b53/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/test_continue_mode.py.TestContinueMode.test_continue_from_offsets.v1-mvp_external_ydb_endpoint0/default/node_1/logfile_ki6_q5gh.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Results] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/jptk/003fa1/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003fa1/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1108106) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1111562 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1229984) is multi-threaded, use of fork() may lead to deadlocks in the child. 
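The popen_fork.py DeprecationWarning that recurs throughout these blocks is CPython warning that calling fork() in an already multi-threaded process can deadlock the child. A minimal standalone sketch of the mitigation the warning points toward, using the "spawn" start method; the worker function and pool size are illustrative and not taken from the YDB test harness:

    import multiprocessing as mp

    def square(n: int) -> int:
        # With "spawn", the child starts a fresh interpreter, so it cannot
        # inherit locks held by other threads of the parent at fork() time.
        return n * n

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")   # "forkserver" is another fork()-free option on Linux
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(8)))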
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b10/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_early_finish_case4.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b10/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk45/testing_out_stuff/test_yq_streaming.py.TestYqStreaming.test_early_finish_case4.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] |93.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[blocks-combine_hashed_minmax_double--Plan] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Analyze] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This 
process (pid=1197068) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk26/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_nested_types_without_predicate/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggr_factory-top-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Debug] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |93.3%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Plan] |93.3%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[aggr_factory-top-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1199941) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bba/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_metrics_cleanup.py.TestCleanup.test_cleanup.v1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bba/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/test_metrics_cleanup.py.TestCleanup.test_cleanup.v1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1208380) is multi-threaded, use of fork() may lead to deadlocks in the child. 
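The recurring pair "ResourceWarning: unclosed file <_io.BufferedWriter ...>" / "Enable tracemalloc to get the object allocation traceback" means a writer was finalized without being closed, and that starting tracemalloc would make the warning report where the file was opened. A minimal standalone sketch of both halves, assuming a throwaway path rather than the ya/pytest harness itself:

    import tracemalloc
    import warnings

    # Equivalent to running with PYTHONTRACEMALLOC=10 or python -X tracemalloc=10:
    tracemalloc.start(10)                      # keep up to 10 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    def leaky():
        open("/tmp/node_1_stdout.log", "wb")   # never closed -> ResourceWarning at finalization,
                                               # now annotated with the allocation traceback

    leaky()

    # The actual fix is deterministic closing, e.g. via a context manager:
    with open("/tmp/node_1_stdout.log", "wb") as out:
        out.write(b"payload\n")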
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b94/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk23/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filter_use_unsupported_predicate/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Analyze] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] >> 
test.py::test[aggr_factory-transform_input-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Debug] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] >> test.py::test[aggr_factory-transform_input-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-ForceBlocks] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1119226) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
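The botocore/auth.py warning that closes this block states its own remedy: replace naive datetime.datetime.utcnow() with a timezone-aware call. A minimal sketch of that substitution (not botocore's actual code):

    import datetime

    # Deprecated: returns a naive datetime with tzinfo=None
    # stamp = datetime.datetime.utcnow()

    # Suggested replacement: an aware datetime in UTC
    stamp = datetime.datetime.now(datetime.timezone.utc)
    # On Python 3.11+ the module-level alias spells the same thing:
    # stamp = datetime.datetime.now(datetime.UTC)

    print(stamp.isoformat())                   # e.g. 2024-11-21T09:08:25.123456+00:00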
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1120992 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings0-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings0-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_binding_operations.v1-kikimr_settings1-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f7d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/test_bindings_0.py.TestBindings.test_modify_connection_with_a_lot_of_bindings.v1-kikimr_settings1-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] [GOOD] >> test_eval.py::TestEval::test_eval_2_2[v1] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Results] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Analyze] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1201024) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003bb5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk30/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_stop_start/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggr_factory-udaf-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Debug] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/suite_tests/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] >> test.py::test[aggr_factory-udaf-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_eval.py::TestEval::test_eval_2_2[v1] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Analyze] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] >> test.py::test[aggregate-agg_phases_table2-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Debug] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> 
test.py::test[aggregate-agg_phases_table2-default.txt-Debug] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] |93.3%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] |93.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[aggregate-agg_phases_table2-default.txt-Debug] [GOOD] |93.3%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1219503) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b45/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk28/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_sensors/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: 
DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1144969) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
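The requests_client.py:140 warning earlier in this block is the standard-library notice that Logger.warn is a deprecated alias of Logger.warning. A minimal sketch of the rename, with placeholder values rather than the real SQS client response:

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("sqs.requests_client")

    code, reason, text = 400, "Bad Request", "malformed parameters"   # placeholder values

    # Deprecated alias that triggers the DeprecationWarning:
    # logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # Preferred method name, using lazy %-style arguments instead of str.format:
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)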
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1146002 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/default/node_1/stdout'> ResourceWarning: 
Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_data_inflight.v1-client0-kikimr_params0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/compute/node_1/stdout'> ResourceWarning: Enable 
tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f4e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/test_inflight.py.TestS3.test_inflight.v1-client0-kikimr_params1/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1220553) is multi-threaded, use of fork() may lead to deadlocks in the child. 
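
Almost every ResourceWarning in the blocks above is followed by the hint "Enable tracemalloc to get the object allocation traceback". That hint refers to a stock CPython facility, not to anything specific to this suite; a minimal sketch of acting on it in a local reproduction (the file path below is made up for illustration):

    # Sketch: make ResourceWarnings like the ones above print where the leaked object was created.
    # The CI-side equivalent would be exporting PYTHONTRACEMALLOC=25 for the test process.
    import gc
    import tracemalloc
    import warnings

    tracemalloc.start(25)                      # record allocation tracebacks, 25 frames deep
    warnings.simplefilter("always", ResourceWarning)

    def leak_a_handle(path="/tmp/tracemalloc_demo.txt"):   # hypothetical path
        f = open(path, "w")                    # intentionally never closed
        f.write("demo")

    leak_a_handle()
    gc.collect()                               # finalizer emits "ResourceWarning: unclosed file ...",
                                               # now followed by "Object allocated at" plus this stack

With tracemalloc tracing, the interpreter appends the allocation traceback to warnings that carry a source object, which is exactly what the repeated hint above is asking for.
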
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/compute/node_2/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/compute/node_2/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/compute/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/compute/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/cp/node_1/stdout'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b3c/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk25/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_many_partitions/cp/node_1/stderr'>
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test]
>> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_eval.py::TestEval::test_eval_2_2[v1] [GOOD]
Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1231234) is multi-threaded, use of fork() may lead to deadlocks in the child.
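
The popen_fork.py:66 DeprecationWarning just above ("This process ... is multi-threaded, use of fork() may lead to deadlocks in the child") is newer CPython objecting to fork()-based multiprocessing from an already-threaded parent. A generic sketch of the usual remedy, not taken from this repository's runners: select the "spawn" start method so children begin as fresh interpreters.

    # Sketch (assumes generic multiprocessing usage, not this suite's actual drivers):
    # avoid fork() from a multi-threaded parent by using the "spawn" start method.
    import multiprocessing as mp

    def square(n: int) -> int:
        return n * n

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")          # children do not inherit threads or locks
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(5)))  # [0, 1, 4, 9, 16]
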
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_delete_read_rules_after_abort_by_system.py.TestDeleteReadRulesAfterAbortBySystem.test_delete_read_rules_after_abort_by_system/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b0d/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_delete_read_rules_after_abort_by_system.py.TestDeleteReadRulesAfterAbortBySystem.test_delete_read_rules_after_abort_by_system/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1142982) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
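
The contrib/python/botocore/py3/botocore/auth.py:419 warning above comes from vendored botocore rather than from test code, but the replacement it names is plain standard library. A small sketch of the aware form (datetime.UTC, mentioned in the warning, is simply an alias for timezone.utc on Python 3.11+):

    # Sketch: timezone-aware replacement for the deprecated datetime.utcnow().
    from datetime import datetime, timezone

    # Deprecated (returns a naive datetime, tzinfo is None):
    #   stamp = datetime.utcnow()
    # Preferred, as the warning itself suggests:
    stamp = datetime.now(timezone.utc)
    print(stamp.isoformat())                   # e.g. 2024-11-21T09:10:30.123456+00:00
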
ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1143606 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v1-false-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_limit.v1-false-kikimr_params0-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [21/Nov/2024 09:10:30] send response localhost:12723/?database=local ::1 - - [21/Nov/2024 09:10:30] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f54/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_bad_format.v1-false-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1202249) is multi-threaded, use of fork() may lead to deadlocks in the child. 
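
The ydb/tests/tools/fq_runner/kikimr_runner.py:177 ResourceWarnings earlier in this log quote the pattern `meterings_loaded = sum(1 for _ in open(bill_fname))`, which leaves the metering.bill handle for the garbage collector to close. A hedged sketch of the context-managed equivalent (the helper name is invented; only the quoted expression comes from the log):

    # Sketch: count lines without leaking the file handle.
    def count_meterings(bill_fname: str) -> int:
        with open(bill_fname, "r", encoding="utf-8") as bill:
            return sum(1 for _ in bill)        # handle is closed when the with-block exits

    # Hypothetical usage:
    # meterings_loaded = count_meterings("node_1/metering.bill")
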
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1215992) is multi-threaded, use of fork() may lead to deadlocks in the child. 
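
The ydb/tests/library/sqs/requests_client.py:140 DeprecationWarning repeated in the blocks above quotes the offending call: logger.warn(...) with an eager str.format. A sketch of the non-deprecated spelling using logging's lazy %-style arguments (logger name and values are illustrative only):

    # Sketch: Logger.warn is a deprecated alias; use warning() and let logging format lazily.
    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("sqs.requests_client")               # illustrative name

    code, reason, text = 500, "InternalFailure", "try again later"  # illustrative values

    # Deprecated alias plus eager .format():
    #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
    # Preferred: arguments are interpolated only if the record is actually emitted.
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)
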
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b60/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_recovery_match_recognize.py.TestRecoveryMatchRecognize.test_time_order_recoverer.v1-kikimr0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b60/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_recovery_match_recognize.py.TestRecoveryMatchRecognize.test_time_order_recoverer.v1-kikimr0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b60/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_recovery_match_recognize.py.TestRecoveryMatchRecognize.test_time_order_recoverer.v1-kikimr0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b60/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk20/testing_out_stuff/test_recovery_match_recognize.py.TestRecoveryMatchRecognize.test_time_order_recoverer.v1-kikimr0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1208699) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/cp/node_1/logfile_e52ogt40.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: 
ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/jptk/003b91/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk27/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_restart_compute_node/compute/node_1/logfile_ecpczv0n.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1212255) is multi-threaded, use of fork() may lead to deadlocks in the child. 
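
The sys:1 ResourceWarnings above report per-node stdout/stderr writers (and a BufferedRandom logfile) that were still open when the spawned node processes were torn down, and other blocks in this log add "subprocess ... is still running". A generic sketch, not the fixture these tests actually use, of keeping such handles on an ExitStack so teardown both reaps the child and closes its log files deterministically:

    # Sketch (assumption: a generic node-launching helper, not the real ya/pytest fixture).
    import contextlib
    import subprocess

    def run_node(cmd, out_path, err_path):
        with contextlib.ExitStack() as stack:
            stdout = stack.enter_context(open(out_path, "wb"))
            stderr = stack.enter_context(open(err_path, "wb"))
            proc = stack.enter_context(subprocess.Popen(cmd, stdout=stdout, stderr=stderr))
            proc.wait()                        # Popen.__exit__ would also wait; being explicit
        # On exit: child reaped and both writers closed, so neither the "unclosed file"
        # nor the "subprocess ... is still running" warning can fire.

    # Hypothetical usage:
    # run_node(["echo", "hello"], "/tmp/node_1.stdout", "/tmp/node_1.stderr")
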
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/PyHamcrest/py3/hamcrest/core/base_description.py:43: DeprecationWarning: Call append_description_of instead of append_value >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] |93.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
contrib/tools/python3/Lib/copy.py:71: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/copy.py:71: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] >> TYdbControlPlaneStorageGetResult::ShouldSuccess >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission >> TYdbControlPlaneStorageCreateQuery::ShouldValidate |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageCreateQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist >> TYdbControlPlaneStorageGetResult::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldEmpty >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess |93.4%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> TYdbControlPlaneStorageGetResult::ShouldEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess [GOOD] >> 
TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageListQueries::ShouldSuccess >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess >> TYdbControlPlaneStorageListQueries::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldPageToken >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> TYdbControlPlaneStorageListBindings::ShouldSuccess >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections >> TYdbControlPlaneStorageListBindings::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess >> CreateQueryRequest::ShouldCreateSimpleQuery >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] >> CreateQueryRequest::ShouldCreateSimpleQuery [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] 
>> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldValidate >> TYdbControlPlaneStorageListConnections::ShouldSuccess >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas [GOOD] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::OverrideQuotas >> TYdbControlPlaneStorageListQueries::ShouldValidate >> TYdbControlPlaneStorageModifyQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageQuotas::OverrideQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::GetStaleUsage >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess >> TYdbControlPlaneStorageListBindings::ShouldFilterByName [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldPageToken >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun >> TYdbControlPlaneStorageListConnections::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldPageToken >> TYdbControlPlaneStorageListQueries::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterName >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> 
test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageQuotas::GetStaleUsage [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist >> TYdbControlPlaneStorageQuotas::PushUsageUpdate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageControlQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldValidate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource >> TYdbControlPlaneStorageListQueries::ShouldFilterName [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageControlQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist [GOOD] >> 
TYdbControlPlaneStorageDescribeBinding::ShouldValidate >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterType >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: 
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterType [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterMode >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource [GOOD] >> TYdbControlPlaneStorageTest::ShouldCreateTable >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListConnections::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldValidate >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldValidate >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> TYdbControlPlaneStorageTest::ShouldCreateTable [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields >> TYdbControlPlaneStorageListConnections::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageCreateConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldValidate >> 
TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldValidate >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListQueries::ShouldFilterMode [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility >> TYdbControlPlaneStorageGetTask::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks >> TYdbControlPlaneStorageModifyConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess >> TYdbControlPlaneStorageListBindings::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldValidate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey >> 
TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListBindings::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? 
I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... tSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2024-11-21T09:13:54.423588Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:54.423590Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:54.423679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:13:54.423686Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:13:54.423688Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:13:54.423709Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2024-11-21T09:13:54.423718Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:13:54.423720Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:13:54.423800Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:13:54.423808Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:13:54.423810Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:13:54.424136Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2024-11-21T09:13:54.424143Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:54.424144Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:54.424155Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets". 
Create session OK 2024-11-21T09:13:54.424158Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:13:54.424160Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:13:54.424281Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:13:54.424287Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:54.424288Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:54.454435Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:13:54.454453Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:13:54.515661Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:13:54.515681Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:13:54.537009Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:54.537028Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:13:54.537276Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:13:54.537289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:13:54.537353Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:54.537363Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:13:54.537477Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:13:54.537479Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:13:54.537575Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:13:54.537577Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:13:54.537658Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:54.537661Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:13:54.537675Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:13:54.537678Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:13:54.537743Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:13:54.537745Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:13:54.540876Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:13:54.540893Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:13:54.543180Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:13:54.543194Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:13:54.543481Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: 
Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:13:54.543491Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:13:54.543615Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:13:54.543624Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:13:54.544521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:13:54.544531Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic [GOOD] >> 
TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist >> TYdbControlPlaneStoragePingTask::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName >> 
test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1187355) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/compute/node_2/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003c64/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk24/testing_out_stuff/test_row_dispatcher.py.TestPqRowDispatcher.test_filters_non_optional_field/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1133464) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1134953 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_3/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_3/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_2/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_2/stderr'> 
ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter 
name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/compute/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/compute/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/cp/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/cp/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:177: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f64/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName [GOOD] >> 
TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... ate session OK 2024-11-21T09:13:59.315602Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:13:59.315603Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:13:59.315670Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2024-11-21T09:13:59.315679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:59.315680Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:59.315756Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:13:59.315765Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:59.315767Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:59.315819Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2024-11-21T09:13:59.315827Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:59.315828Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:59.315882Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:13:59.315890Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:13:59.315891Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:13:59.315945Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2024-11-21T09:13:59.315952Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:13:59.315953Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:13:59.352429Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:13:59.352446Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:13:59.400628Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:13:59.400645Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:13:59.416375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:13:59.416393Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:13:59.429009Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:13:59.429036Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:13:59.432348Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:13:59.432366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:13:59.432620Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:13:59.432624Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:13:59.432744Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:13:59.432746Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:13:59.436308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:13:59.436331Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:13:59.436627Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:13:59.436630Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:13:59.444708Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:13:59.444728Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for 
create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:13:59.445051Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:13:59.445057Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:13:59.448653Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:13:59.448666Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:13:59.448952Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:13:59.448958Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:13:59.456330Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:13:59.456345Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:13:59.456561Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:13:59.456564Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings": >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility >> TYdbControlPlaneStoragePipeline::ShouldCheckAst [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear >> TVectorIndexTests::CreateTableWithError >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] >> TVectorIndexTests::CreateTableWithError [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:14:04.808901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:14:04.808927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:04.808931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:14:04.808936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:14:04.808942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:14:04.808946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:14:04.808955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:04.809038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:14:04.819937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:14:04.819959Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:14:04.824830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:14:04.825610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:14:04.825664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:14:04.827415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:14:04.827621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:14:04.827728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:04.827841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:14:04.828833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:04.829128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:04.829142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:04.829200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:14:04.829208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:04.829215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:14:04.829230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2024-11-21T09:14:04.830604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:14:04.849476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:14:04.849567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.849631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:14:04.849678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:14:04.849687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.850554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:04.850583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:14:04.850650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.850662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:14:04.850666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:14:04.850671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:14:04.851108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.851121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:14:04.851126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:14:04.851499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.851509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.851514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:04.851522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:14:04.852143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:14:04.852551Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:14:04.852610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:14:04.852795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:04.852821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:04.852830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:04.852883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:14:04.852890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:04.852920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:14:04.852932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:14:04.853361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:04.853370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:04.853418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:04.853424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:14:04.853539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:04.853548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:14:04.853560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:14:04.853565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:04.853571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:14:04.853576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:04.853581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:14:04.853585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:14:04.853596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 2 2024-11-21T09:14:04.853602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:14:04.853606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:14:04.853898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:14:04.853914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:14:04.853919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:14:04.853924Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:14:04.853930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:14:04.853944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:14:04.854539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:14:04.854656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:14:04.856386Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:14:04.857269Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:14:04.857765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:14:04.857824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T09:14:04.857850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T09:14:04.857855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T09:14:04.859313Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:14:04.859850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name: __ydb_parent" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:04.859877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2024-11-21T09:14:04.859940Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T09:14:04.860474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:14:04.860526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T09:14:04.860546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T09:14:04.860550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T09:14:04.860984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:04.861003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |93.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |93.4%| [TA] $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... results_accumulator.log} |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist |93.5%| [TA] $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? 
S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... -11-21T09:14:03.282366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.282637Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vukpee4scbu] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:03.393636Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.396471Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vrgcr35j0t3] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:03.510851Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.511044Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vnvnvka56gj] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:03.627977Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.628193Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vkhg1q336ue] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:03.757727Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.757941Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vgv12rqh9m9] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:03.792327Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: BAD_SESSION, Issues: [ {
: Error: Exceeded maximum allowed number of active transactions, code: 2014 } {
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:780: Too many transactions, current active: 10 MaxTxPerSession: 10 } ], Query: --!syntax_v1 -- Query name: Unknown query name PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateConnection::TTestCaseShouldCheckCommitTransactionReadWrite::Execute_(NUnitTest::TTestContext&)"); DECLARE $idempotency_key as String; DECLARE $scope as String; SELECT `response` FROM `idempotency_keys` WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key; 2024-11-21T09:14:03.899155Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:03.899317Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7vcvt5p36j2l] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.011320Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.012385Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7v8kut7jc1i2] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.130459Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.130694Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7v57mgd2gma8] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.263167Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.263366Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7v1k2ecnd8f4] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.388294Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.388521Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7uthvioutqcr] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.485698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.488387Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7upoh6haps1m] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.579349Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.579527Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7ummvbs59meb] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.701440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.701669Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7ujtlvlcbu6a] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.862449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.862639Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7ug4m43t96f1] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2024-11-21T09:14:04.961119Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2024-11-21T09:14:04.961350Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcueoit7ub9fe5b7n3u] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.5%| [TA] {RESULT} $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... results_accumulator.log} >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] |93.5%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldValidate >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? 
I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... AGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:05.402625Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:05.402795Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2024-11-21T09:14:05.402797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:05.402797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:05.402841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:05.402845Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:05.402846Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:05.402856Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:14:05.402858Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:05.402859Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:05.402961Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:05.402963Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.402964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.403053Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/result_sets". 
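The BAD_SESSION entry earlier in this output quotes the idempotency lookup that runs before a CreateConnection request is applied: SELECT `response` FROM `idempotency_keys` WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key. Below is a minimal sketch of that pattern using an in-memory stand-in for the idempotency_keys table; IdempotencyStore and create_once are illustrative names, not the actual control plane storage implementation.

from dataclasses import dataclass, field
from typing import Callable, Dict, Optional, Tuple


@dataclass
class IdempotencyStore:
    """In-memory stand-in for the `idempotency_keys` table queried in the log."""
    responses: Dict[Tuple[str, str], str] = field(default_factory=dict)

    def get(self, scope: str, key: str) -> Optional[str]:
        # Mirrors: SELECT `response` FROM `idempotency_keys`
        #          WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key;
        return self.responses.get((scope, key))

    def put(self, scope: str, key: str, response: str) -> None:
        self.responses[(scope, key)] = response


def create_once(store: IdempotencyStore, scope: str, key: str,
                do_create: Callable[[], str]) -> str:
    """Run do_create at most once per (scope, idempotency_key) pair."""
    cached = store.get(scope, key)
    if cached is not None:
        return cached          # replay the stored response instead of re-creating
    response = do_create()     # e.g. the CreateConnectionRequest seen in the log
    store.put(scope, key, response)
    return response


if __name__ == "__main__":
    store = IdempotencyStore()
    scope = "yandexcloud://test_folder_id_1"
    first = create_once(store, scope, "aba", lambda: "connection-id-1")
    second = create_once(store, scope, "aba", lambda: "should-not-run")
    assert first == second == "connection-id-1"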
Create session OK 2024-11-21T09:14:05.403056Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:05.403058Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:05.403130Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:14:05.403131Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:05.403133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:05.429452Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:05.429469Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:05.460481Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:05.460500Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:05.461108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:05.461118Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:05.461301Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:05.461306Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:05.471531Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:05.471551Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:05.472756Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:05.472776Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:05.472895Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:05.472898Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:05.473320Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:05.473326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:05.473472Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:05.473475Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:05.473535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:05.473537Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:05.473587Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:05.473588Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:05.473629Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:05.473631Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:05.473665Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:05.473666Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:05.473707Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.473708Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:05.473753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:05.473753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckSuperUser::Execute_(NUnitTest::TTestContext&)/tenant_acks": >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? 
I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... ePublic::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:14:04.050024Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:04.050025Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:04.050076Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:14:04.050077Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:04.050078Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:04.050125Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:04.050126Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:04.050128Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:04.050176Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:04.050177Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:04.050179Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:04.050234Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections". 
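The DB error quoted above ("Exceeded maximum allowed number of active transactions ... Too many transactions, current active: 10 MaxTxPerSession: 10") is the server refusing an eleventh concurrent transaction on a single session. One client-side way to stay under that ceiling is to gate transaction starts with a counting semaphore; the sketch below assumes the limit of 10 quoted in the error and uses a placeholder body where the real begin/execute/commit would go.

import threading
from contextlib import contextmanager

MAX_TX_PER_SESSION = 10  # the limit quoted in the BAD_SESSION error above


class TxSlotLimiter:
    """Blocks callers once `limit` transactions are in flight on one session."""

    def __init__(self, limit: int = MAX_TX_PER_SESSION):
        self._slots = threading.BoundedSemaphore(limit)

    @contextmanager
    def slot(self):
        self._slots.acquire()   # wait for a free slot instead of exceeding the cap
        try:
            yield
        finally:
            self._slots.release()


if __name__ == "__main__":
    limiter = TxSlotLimiter()

    def run_one_transaction() -> None:
        with limiter.slot():
            pass  # placeholder: begin/execute/commit of the real transaction goes here

    threads = [threading.Thread(target=run_one_transaction) for _ in range(50)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()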
Create session OK 2024-11-21T09:14:04.050235Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:04.050237Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:04.104315Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:04.104334Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:04.171625Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:04.171642Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:04.188562Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:04.188582Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:04.216546Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:04.216561Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:04.216586Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:04.216594Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:04.216752Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:04.216756Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:04.216783Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:04.216786Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:04.216824Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:04.216825Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:04.216867Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:04.216869Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:04.216890Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:04.216892Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:04.216911Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:04.216912Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:04.216953Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:04.216955Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:04.216961Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:04.216963Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:04.217004Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:04.217007Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:04.217012Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:04.217013Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? 
S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... TORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:05.290462Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:05.290556Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:05.290558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.290559Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.290645Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2024-11-21T09:14:05.290647Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:05.290648Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:05.291595Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2024-11-21T09:14:05.291612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:05.291613Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:05.291821Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2024-11-21T09:14:05.291834Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:05.291836Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:05.324272Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:05.324295Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:05.348294Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:05.348320Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:05.388546Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:05.388566Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:05.389344Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:05.389353Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:05.389449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:05.389459Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:05.389525Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:05.389528Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:05.389602Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:05.389604Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:05.389669Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:05.389672Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:05.389987Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:05.389994Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:05.390123Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:05.390125Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:05.390364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:05.390369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:05.390449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:05.390451Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:05.390523Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:05.390525Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:05.390588Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:05.390589Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:05.397858Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:05.397874Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? 
S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... tCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:07.613667Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries". 
Create session OK 2024-11-21T09:14:07.613674Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:07.613675Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:07.618069Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2024-11-21T09:14:07.618083Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:07.618085Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:07.618838Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2024-11-21T09:14:07.618845Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:07.618847Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:07.619033Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2024-11-21T09:14:07.619036Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:07.619038Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:07.619138Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys". 
Create session OK 2024-11-21T09:14:07.619140Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:07.619142Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:07.619169Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2024-11-21T09:14:07.619173Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:07.619175Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:07.648597Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:07.648614Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:07.704766Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:07.704786Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:07.726871Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:07.726891Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:07.727933Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:07.727944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:07.728179Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:07.728187Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for 
create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:07.732697Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:07.732712Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:07.732965Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:07.732976Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:07.733058Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:07.733064Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:07.733578Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:07.733586Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:07.733724Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:07.733727Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:07.733796Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:07.733797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:07.733897Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:07.733899Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:07.733954Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:07.733956Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:07.734009Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:07.734010Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:07.734066Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:07.734067Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead 
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... GE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2024-11-21T09:14:08.665400Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:08.665401Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:08.665530Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants". 
Create session OK 2024-11-21T09:14:08.665539Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:08.665540Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:08.665554Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:08.665570Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:08.665572Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:08.665637Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:14:08.665664Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:08.665670Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:08.665692Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:14:08.665703Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:08.665705Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:08.665780Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2024-11-21T09:14:08.665789Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:08.665790Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:08.701468Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:08.701490Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:08.761061Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:08.761087Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:08.765197Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:08.765214Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:08.765653Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:08.765657Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:08.765808Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:08.765811Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:08.766483Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:08.766488Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 
2024-11-21T09:14:08.800298Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:08.800315Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:08.801718Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:08.801728Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:08.801938Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:08.801941Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:08.803911Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:08.803923Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:08.804126Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:08.804129Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:08.804260Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:08.804262Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:08.804356Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:08.804358Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 
2024-11-21T09:14:08.804420Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks"
2024-11-21T09:14:08.804421Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks":
2024-11-21T09:14:08.804467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas"
2024-11-21T09:14:08.804468Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas":
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed [GOOD]
>> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std]
>> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldRetryQuery
>> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate [GOOD]
>> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic
>> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [GOOD]
>> test_disk.py::TestSafeDiskBreak::test_erase_method
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD]
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission
>>
TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: 2024-11-21T09:13:44.572749Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: CreateQueryRequest 2024-11-21T09:13:44.574294Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: CreateQueryRequest: content { type: ANALYTICS name: "my_query_1" acl { visibility: SCOPE } text: "SELECT 1;" } 2024-11-21T09:13:44.574343Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: CreateQueryRequest, success: content { type: ANALYTICS name: "my_query_1" acl { visibility: SCOPE } text: "SELECT 1;" } query_id: b163e036-1b5e4444-25abeae6-50863e38 Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? 
I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? 
S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root ... table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:11.983911Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2024-11-21T09:14:11.983920Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:11.983921Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:11.984021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2024-11-21T09:14:11.984029Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:11.984030Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:11.984102Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:11.984109Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:11.984110Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:11.984183Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2024-11-21T09:14:11.984190Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:11.984191Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:11.984288Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings". 
Create session OK 2024-11-21T09:14:11.984295Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:11.984296Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:11.985269Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:11.985281Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:11.985282Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:12.040599Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:12.040617Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:12.095502Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:12.095519Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:12.127687Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:12.127705Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:12.128056Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:12.128059Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:12.132251Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:12.132283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create 
table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:12.133523Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:12.133535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:12.133828Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:12.133833Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:12.134616Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:12.134624Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:12.134776Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:12.134779Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:12.134882Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:12.134884Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:12.134973Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:12.134975Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:12.135057Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:12.135058Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:12.135131Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:12.135133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:12.135197Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:12.135199Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:12.135303Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:12.135305Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? 
S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? 
I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:10.184813Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:10.184814Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:10.184816Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:10.184876Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2024-11-21T09:14:10.184879Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:10.184880Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:10.184964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2024-11-21T09:14:10.184966Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:10.184967Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:10.185026Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2024-11-21T09:14:10.185028Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:10.185029Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:10.185101Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:10.185103Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:10.185104Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:10.185215Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs". 
Create session OK 2024-11-21T09:14:10.185217Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:10.185219Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:10.204260Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:10.204280Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:10.259610Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:10.259638Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:10.273170Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:10.273193Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:10.273558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:10.273560Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:10.273687Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:10.273689Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:10.284825Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:10.284843Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:10.285349Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created 
table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:10.285355Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:10.296632Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:10.296648Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:10.297049Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:10.297055Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:10.297222Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:10.297228Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:10.297622Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:10.297629Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:10.297746Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:10.297748Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:10.297826Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:10.297827Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:10.297898Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:10.297899Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:10.297973Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:10.297975Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckChangeAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes": >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-std] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? 
S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? 
S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... viousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:14.516716Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:14.516940Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings". 
Create session OK 2024-11-21T09:14:14.516944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:14.516946Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:14.517452Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:14.517460Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:14.517461Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:14.520071Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2024-11-21T09:14:14.520084Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:14.520086Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:14.520371Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2024-11-21T09:14:14.520374Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:14.520375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:14.521701Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2024-11-21T09:14:14.521714Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:14.521716Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:14.541645Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:14.541667Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:14.647934Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:14.647950Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:14.654095Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:14.654114Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:14.654122Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:14.654138Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:14.671005Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:14.671029Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:14.671600Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:14.671613Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:14.671784Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:14.671789Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:14.671903Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:14.671904Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:14.671955Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:14.671957Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:14.671963Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:14.671967Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:14.672003Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:14.672004Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:14.672057Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:14.672059Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:14.672077Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:14.672079Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:14.676999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:14.677021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:14.677180Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:14.677191Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionSuccess::Execute_(NUnitTest::TTestContext&)/mappings": >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = 
self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? 
I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... 
:14:15.698422Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:15.698424Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:15.698427Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:15.698432Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:15.698434Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:15.698584Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:14:15.698590Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:15.698591Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:15.698609Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2024-11-21T09:14:15.698611Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:15.698613Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:15.701766Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small". 
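The DeprecationWarning repeated in the SQS test output above comes from ydb/tests/library/sqs/requests_client.py:140, where logger.warn(...) is used; the warning itself names the fix ("use 'warning' instead"). Assuming logger there is a standard logging.Logger (the argument names below are assumed for illustration, since the log truncates the call), the change is a one-liner:

    # before (emits DeprecationWarning on Python 3):
    logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # after:
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)

The %s form also defers string formatting until the record is actually emitted; keeping .format() and only renaming the method would silence the warning just as well.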
Create session OK 2024-11-21T09:14:15.701778Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:15.701779Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:15.709063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:15.709080Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:15.776547Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:15.776568Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:15.784838Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:15.784857Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:15.792141Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:15.792160Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:15.799786Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:15.799803Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:15.817380Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:15.817400Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:15.818367Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:15.818379Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:15.819345Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:15.819352Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:15.820379Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:15.820388Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:15.820573Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:15.820575Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:15.820859Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:15.820864Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:15.820998Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:15.821001Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create 
table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:15.821302Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:15.821307Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:15.821378Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:15.821379Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:15.821440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:15.821445Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] >> TFileStoreWithReboots::AlterAssignDrop >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> 
TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = 
self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test.py::test[insert_monotonic-from_empty--Plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? 
S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... ROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:25.913620Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:14:25.913624Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:25.913626Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:25.913792Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:25.913794Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:25.913795Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:25.914147Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2024-11-21T09:14:25.914149Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:25.914150Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:25.914265Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:25.914267Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:25.914269Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:25.914390Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2024-11-21T09:14:25.914392Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:25.914393Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:25.953236Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:25.953254Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:26.039866Z node 16 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:26.039886Z node 16 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:26.084111Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:26.084129Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:26.084541Z node 16 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:26.084548Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:26.085198Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:26.085203Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:26.085355Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:26.085357Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:26.085429Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:26.085431Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:26.085531Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:26.085533Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:26.085616Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:26.085618Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:26.086303Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:26.086311Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:26.086477Z node 16 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:26.086479Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:26.086568Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:26.086571Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:26.086647Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:26.086650Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:26.086732Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:26.086734Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:26.086801Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:26.086803Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:27.840007Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2024-11-21T09:14:27.839970Z 0.000000s 2024-11-21T09:14:28.229313Z node 16 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2024-11-21T09:14:28.229289Z 0.000000s 2024-11-21T09:14:28.473402Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: Validation: (NYql::TCodeLineException) :0: Error parsing proto message for query. 
Please contact internal support >> test.py::test[insert_monotonic-from_empty--Plan] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[join-convert_key--Analyze] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test.py::test[join-convert_key--Analyze] [GOOD] >> test.py::test[join-convert_key--Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? 
...
_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:28.303060Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:28.303061Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:28.303128Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:28.303129Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:28.303131Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:28.303176Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:14:28.303179Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:28.303181Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:28.309078Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:14:28.309101Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:28.309105Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:28.312625Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2024-11-21T09:14:28.312651Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:28.312654Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:28.313153Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2024-11-21T09:14:28.313165Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:28.313167Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:28.359623Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:28.359641Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:28.416481Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:28.416500Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:28.425986Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:28.425986Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:28.426004Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:28.426009Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:28.428337Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:28.428352Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: 
Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:28.428605Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:28.428608Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:28.440391Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:28.440425Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:28.441185Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:28.441196Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:28.441477Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:28.441481Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:28.444326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:28.444343Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:28.444643Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:28.444647Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:28.444771Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:28.444774Z node 17 :YQ_CONTROL_PLANE_STORAGE 
DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:28.445619Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:28.445634Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:28.446369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:28.446380Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:28.446579Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:28.446582Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test.py::test[join-convert_key--Debug] [GOOD] >> test.py::test[join-convert_key--ForceBlocks] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> TFileStoreWithReboots::AlterAssignDrop [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::AlterAssignDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:14:22.160223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:14:22.160256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:22.160260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:14:22.160266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:14:22.160276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:14:22.160280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:14:22.160292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:22.160384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:14:22.173819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:14:22.173849Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:14:22.176903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:14:22.177052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:14:22.177105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:14:22.180562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:14:22.180673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:14:22.180818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:22.181031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:14:22.183084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:22.184248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:22.184276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:22.184296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:14:22.184309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:22.184316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:14:22.184377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:14:22.188283Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:14:22.219486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:14:22.219590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.219667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:14:22.224699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:14:22.224755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.229828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:22.229884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:14:22.229982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.229999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:14:22.230004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:14:22.230010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:14:22.231117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.231139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:14:22.231146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:14:22.232426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.232444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.232450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:22.232459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:14:22.233219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:14:22.234095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:14:22.234162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 
at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:14:22.234394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:22.234431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:22.234454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:22.234521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:14:22.234529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:22.234569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:14:22.234583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:14:22.235045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:22.235059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:22.235111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:22.235118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:14:22.235208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:22.235216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:14:22.235230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:14:22.235234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:22.235240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:14:22.235245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:22.235250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:14:22.235254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:14:22.235267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:14:22.235274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:14:22.235278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
rdLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:14:36.511189Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:36.511248Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409546 2024-11-21T09:14:36.511599Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.511929Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T09:14:36.511961Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000005 2024-11-21T09:14:36.514641Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:36.514687Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 180388628576 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:36.514709Z node 42 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1005:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T09:14:36.514758Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:14:36.514798Z node 42 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:14:36.514805Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:14:36.514820Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:14:36.514831Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:14:36.514839Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T09:14:36.514850Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:14:36.514855Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:14:36.514859Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:14:36.514884Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:14:36.514892Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T09:14:36.514896Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 
9 2024-11-21T09:14:36.514899Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T09:14:36.515273Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:14:36.515289Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:14:36.515663Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:14:36.515712Z node 42 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:36.515719Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:36.515771Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:14:36.515798Z node 42 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:36.515804Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T09:14:36.515810Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 3 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T09:14:36.515944Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.515956Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.515966Z node 42 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:14:36.515971Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:14:36.515976Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:14:36.516049Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:14:36.516055Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:14:36.516066Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:14:36.516125Z node 42 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:14:36.516165Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 
72057594046678944, cookie: 1005 2024-11-21T09:14:36.516174Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.516177Z node 42 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:14:36.516181Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:14:36.516185Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:14:36.516195Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T09:14:36.516265Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:36.516868Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.516979Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:14:36.516990Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:14:36.517204Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T09:14:36.517292Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:14:36.517299Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:14:36.517377Z node 42 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:14:36.517397Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:14:36.517420Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [42:448:2429] TestWaitNotification: OK eventTxId 1005 2024-11-21T09:14:36.517491Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/FS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:14:36.517529Z node 42 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/FS" took 53us result status StatusPathDoesNotExist 2024-11-21T09:14:36.517568Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/FS\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/FS" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" 
LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T09:14:36.517636Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:14:36.517646Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:14:36.517655Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T09:14:36.517662Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T09:14:36.517671Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] >> test.py::test[join-convert_key--ForceBlocks] [GOOD] >> test.py::test[join-convert_key--Plan] [GOOD] >> test.py::test[join-convert_key--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test.py::test[join-convert_key--Results] [GOOD] >> test.py::test[join-convert_key-off-Analyze] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? 
S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? 
S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... 24-11-21T09:14:36.459696Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:36.459760Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small". 
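The "Netstat: sh: 1: netstat: not found" line at the top of this command output shows the harness's network diagnostic step failing because the test image ships without net-tools, after which it falls back to the process-stat dump seen above. A minimal sketch of how such a diagnostic could degrade gracefully, assuming only the Python standard library and the iproute2 `ss` tool; none of this is taken from the test code itself:

    # Sketch only: prefer netstat when present, otherwise fall back to ss (iproute2).
    import shutil
    import subprocess

    tool = shutil.which("netstat") or shutil.which("ss")
    if tool:
        # -t TCP sockets, -a all states, -n numeric addresses, -p owning process
        snapshot = subprocess.run([tool, "-tanp"], capture_output=True, text=True)
        print(snapshot.stdout)
    else:
        print("neither netstat nor ss is installed")
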
Create session OK 2024-11-21T09:14:36.459765Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:36.459767Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:36.459795Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:14:36.459797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:36.459798Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:36.459858Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2024-11-21T09:14:36.459860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:36.459860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:36.460069Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections". 
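Near the end of this test's output, the CreateBindingRequest that the case submits is rejected with `Column "a" from projection does not support Bool type, code: 400010`, which is consistent with the test name ShouldCheckObjectStorageProjectionByTypes and its [GOOD] result: the rejection is the behaviour being exercised. The sketch below rewrites the same binding content with the partitioning column declared as STRING, purely to illustrate the request shape; whether any particular scalar type passes the projection validator is an assumption here, since this log only proves that BOOL is refused.

    # Hypothetical textproto, modelled on the request quoted later in this log; only the
    # column type differs (STRING instead of BOOL). The field layout is copied from the log;
    # STRING being accepted by the projection validator is an assumption, not shown here.
    BINDING_CONTENT = """
    name: "test_binding_name_1"
    connection_id: "utcueoit701s5i6hh24m"
    setting {
      object_storage {
        subset {
          path_pattern: "/root/"
          schema { column { name: "a" type { type_id: STRING } } }
          partitioned_by: "a"
        }
      }
    }
    acl { visibility: PRIVATE }
    """
    # google.protobuf.text_format.Parse(BINDING_CONTENT, <generated BindingContent class>())
    # would build the message; the generated class name is deliberately left unspecified here.
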
Create session OK 2024-11-21T09:14:36.460073Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:36.460075Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:36.472289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:36.472317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:36.548399Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:36.548425Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:36.564499Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:36.564518Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:36.564907Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:36.564913Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:36.565072Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:36.565075Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:36.569326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:36.569356Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:36.578459Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:36.578489Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:36.579716Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:36.579733Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:36.579971Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:36.579975Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:36.580091Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:36.580093Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:36.580194Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:36.580196Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:36.580278Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:36.580289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:36.580322Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:36.580325Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:36.580427Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:36.580430Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:36.585746Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:36.585764Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:37.448950Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: [yandexcloud://test_folder_id_1, test_user@staff, utbueoit6v8ris9r7s8r] CreateBindingRequest, validation failed: **** (D7BA8005) content { name: "test_binding_name_1" connection_id: "utcueoit701s5i6hh24m" setting { object_storage { subset { path_pattern: "/root/" schema { column { name: "a" type { type_id: BOOL } } } partitioned_by: "a" } } } acl { visibility: PRIVATE } } error:
: Error: Column "a" from projection does not support Bool type, code: 400010 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? 
I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? 
I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... TROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:35.927121Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:35.927643Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2024-11-21T09:14:35.927648Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:35.927651Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:35.929384Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2024-11-21T09:14:35.929388Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:35.929392Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:35.930436Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2024-11-21T09:14:35.930440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:35.930443Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:35.930676Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2024-11-21T09:14:35.930678Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:35.930680Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:35.930847Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:14:35.930848Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:35.930849Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:35.930962Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings". 
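The DEBUG records immediately before and after this point show the control-plane storage fixture bootstrapping its state for the test case: a per-case directory, a `_rate_limiter/alpha` coordination node, and one table per entity (queries, connections, bindings, pending_small, idempotency_keys, quotas, tenants, and so on). A small sketch of inspecting such a layout with the public ydb Python SDK follows; the endpoint, database, and directory path are placeholders rather than values from this run, and the `scheme_client.list_directory` call is assumed to be available as in current SDK releases.

    # Minimal sketch, assuming a locally reachable YDB endpoint; names are placeholders.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/local")
    driver.wait(timeout=5)
    # The fixture creates one directory per test case; the real name embeds the full
    # test-case identifier and is abbreviated here on purpose.
    base_path = "/local/virtualvoidNFq::NTestSuite...ShouldCombineFilters..."
    for entry in driver.scheme_client.list_directory(base_path).children:
        print(entry.name)  # expected: queries, connections, bindings, pending_small, quotas, ...
    driver.stop()
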
Create session OK 2024-11-21T09:14:35.930964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:35.930965Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:35.959996Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:35.960020Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:36.044589Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:36.044612Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:36.056912Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:36.056934Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:36.057673Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:36.057683Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:36.057998Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:36.058003Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:36.059733Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:36.059751Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:36.060147Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:36.060152Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:36.060374Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:36.060376Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:36.060499Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:36.060501Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:36.067328Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:36.067346Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:36.068570Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:36.068580Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:36.068802Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:36.068806Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:36.068883Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:36.068884Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:36.069222Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:36.069229Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:36.069382Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:36.069385Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test.py::test[join-convert_key-off-Analyze] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test.py::test[join-convert_key-off-Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:1050: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[join-convert_key-off-Debug] [GOOD] >> test.py::test[join-convert_key-off-ForceBlocks] [SKIPPED] >> test.py::test[join-convert_key-off-Plan] [GOOD] >> test.py::test[join-convert_key-off-Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> 
TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-short_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Analyze] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Debug] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is 
deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Debug] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Plan] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> 
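Note: the Flask/Werkzeug warnings above already name their replacements (ast.Constant instead of ast.Str, the `value` attribute instead of `s`, and importlib.util.find_spec instead of pkgutil.find_loader). A minimal sketch of the non-deprecated patterns, assuming hypothetical helper names `string_constant` and `module_exists` purely for illustration:

```python
import ast
import importlib.util

# Old pattern flagged in werkzeug/routing.py:
#   isinstance(node, ast.Str) ... node.s
def string_constant(node):
    """Return the str held by an AST constant node, or None."""
    if isinstance(node, ast.Constant) and isinstance(node.value, str):
        return node.value
    return None

# Old pattern flagged in flask/scaffold.py:
#   pkgutil.find_loader(import_name)
def module_exists(import_name: str) -> bool:
    # find_spec returns a ModuleSpec, or None when the module cannot be found.
    return importlib.util.find_spec(import_name) is not None
```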
test.py::test[join-join_without_correlation_and_dict_access--Analyze] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Debug] [GOOD] >> 
test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Plan] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? 
S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? 
S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:47.109943Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:47.109997Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2024-11-21T09:14:47.109998Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:47.109999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:47.110060Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2024-11-21T09:14:47.110061Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:47.110063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:47.110115Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". 
Create session OK 2024-11-21T09:14:47.110116Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:47.110118Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:47.110182Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2024-11-21T09:14:47.110184Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:47.110186Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:47.110238Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2024-11-21T09:14:47.110240Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:47.110241Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:47.110289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys". 
Create session OK 2024-11-21T09:14:47.110291Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:47.110292Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:47.130605Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:47.130626Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:47.174901Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:47.174921Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:47.181475Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:47.181496Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:47.196105Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:47.196122Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:47.196127Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:47.196133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:47.200810Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:47.200830Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:47.201130Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:47.201134Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:47.201242Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:47.201243Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:47.201766Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:47.201774Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:47.201908Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:47.201911Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2024-11-21T09:14:47.202012Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:47.202014Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:47.202118Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:47.202120Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:47.202185Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:47.202186Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:47.202242Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:47.202244Z node 
17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:47.202310Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:47.202311Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 10668 ? Ss 07:59 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S 07:59 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 07:59 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 07:59 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 07:59 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/0:0H-events_highpri] root 9 1.5 0.0 0 0 ? I 07:59 1:08 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 07:59 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 07:59 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/0] root 15 0.2 0.0 0 0 ? 
I 07:59 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 07:59 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 07:59 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 07:59 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 07:59 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/3:0H-kblockd] root 38 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 07:59 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 07:59 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/5:0H-kblockd] root 50 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 07:59 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 07:59 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 07:59 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 07:59 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 07:59 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/10:0H-kblockd] root 80 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 07:59 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 07:59 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 07:59 0:03 [migration/13] root 95 0.0 0.0 0 0 ? 
S 07:59 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/13:0H-kblockd] root 98 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 07:59 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 07:59 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 07:59 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 07:59 0:00 [kworker/16:0-events] root 115 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 07:59 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 07:59 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 07:59 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 07:59 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 07:59 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 07:59 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 07:59 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 07:59 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 07:59 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 07:59 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 07:59 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 07:59 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 07:59 0:03 [migration/25] root 167 0.0 0.0 ... 
StorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:49.143880Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:49.144058Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2024-11-21T09:14:49.144061Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:49.144063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:49.144286Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2024-11-21T09:14:49.144288Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:49.144290Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:49.144405Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2024-11-21T09:14:49.144407Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:49.144408Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:49.144521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings". 
Create session OK 2024-11-21T09:14:49.144524Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:49.144525Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:49.144636Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2024-11-21T09:14:49.144639Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:49.144640Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:49.149354Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)" 2024-11-21T09:14:49.149374Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)": 2024-11-21T09:14:49.203776Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2024-11-21T09:14:49.203792Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2024-11-21T09:14:49.220896Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2024-11-21T09:14:49.220919Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2024-11-21T09:14:49.225354Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2024-11-21T09:14:49.225372Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets": 2024-11-21T09:14:49.225472Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes" 2024-11-21T09:14:49.225482Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes": 2024-11-21T09:14:49.225830Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs" 2024-11-21T09:14:49.225842Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs": 2024-11-21T09:14:49.225891Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections" 2024-11-21T09:14:49.225896Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections": 2024-11-21T09:14:49.225922Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2024-11-21T09:14:49.225924Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings": 2024-11-21T09:14:49.225983Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2024-11-21T09:14:49.225987Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries": 2024-11-21T09:14:49.225994Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings" 2024-11-21T09:14:49.225995Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings": 2024-11-21T09:14:49.228187Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2024-11-21T09:14:49.228201Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases": 2024-11-21T09:14:49.228484Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas" 2024-11-21T09:14:49.228488Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas": 2024-11-21T09:14:49.228616Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants" 2024-11-21T09:14:49.228618Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants": 2024-11-21T09:14:49.228710Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2024-11-21T09:14:49.228713Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small": 2024-11-21T09:14:49.229408Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2024-11-21T09:14:49.229417Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks": >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test.py::test[join-join_without_correlation_and_dict_access-off-Analyze] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] |93.6%| [TA] $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Debug] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access-off-Plan] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [GOOD] >> test.py::test[join-late_mergejoin_on_empty--Analyze] [SKIPPED] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test.py::test[join-late_mergejoin_on_empty--Debug] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--ForceBlocks] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--Plan] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[join-left_join_null_column-off-Analyze] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] |93.6%| [TA] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test.py::test[join-left_join_null_column-off-Analyze] [GOOD] >> test.py::test[join-left_join_null_column-off-Debug] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> 
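Note: the unclosed-resource warnings above repeat the hint "Enable tracemalloc to get the object allocation traceback". A minimal sketch of how tracing is typically switched on; placing it in a conftest.py is an assumption, any early import works.

```python
# Either set the environment variable before launching the tests:
#   PYTHONTRACEMALLOC=10 python -m pytest ...
# or start tracing programmatically, e.g. from a conftest.py:
import tracemalloc

if not tracemalloc.is_tracing():
    tracemalloc.start(10)  # keep up to 10 frames per allocation traceback
```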
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test.py::test[join-left_join_null_column-off-Debug] [GOOD] >> test.py::test[join-left_join_null_column-off-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test.py::test[join-left_join_null_column-off-ForceBlocks] [SKIPPED] >> test.py::test[join-left_join_null_column-off-Plan] [GOOD] >> test.py::test[join-left_join_null_column-off-Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in 
Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test.py::test[join-lookupjoin_bug7646_subst-off-Analyze] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test.py::test[join-lookupjoin_bug7646_subst-off-Debug] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test.py::test[join-lookupjoin_inner_1o2o--Analyze] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test.py::test[join-lookupjoin_inner_1o2o--Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] 
[GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] 2024-11-21 09:14:58,923 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2024-11-21 09:14:59,173 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1141069 247M 244M 113M ydb-tests-fq-s3 --basetemp /home/runner/.ya/build/build_root/jptk/003f56/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules --ya-t 1143080 99.5M 97.1M 58.8M ├─ moto_server s3 --host ::1 --port 7485 1143610 106M 11.8M 60.2M ├─ ydb-tests-fq-s3 --basetemp /home/runner/.ya/build/build_root/jptk/003f56/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 1143614 522M 513M 300M └─ ydbd server --suppress-version-check --node=1 --tenant=/default --node-kind=yq --log-file-name=/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-result Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1141069) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
contrib/tools/python3/Lib/subprocess.py:1127: ResourceWarning: subprocess 1143080 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 182, in pytest_runtest_teardown item.session._setupstate.teardown_exact(nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 526, in teardown_exact fin() File 
"contrib/python/pytest/py3/_pytest/fixtures.py", line 701, in subrequest.node.addfinalizer(lambda: fixturedef.finish(request=subrequest)) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024, in finish func() File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911, in _teardown_yield_fixture next(it) File "ydb/tests/fq/s3/conftest.py", line 115, in kikimr_yqv1 with start_kikimr(kikimr_params, kikimr_extensions) as kikimr: File "contrib/tools/python3/Lib/contextlib.py", line 144, in __exit__ next(self.gen) File "ydb/tests/tools/fq_runner/kikimr_utils.py", line 406, in start_kikimr kikimr.stop() File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 727, in stop tenant.stop() File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 55, in stop self.kikimr_cluster.stop(kill=False) File "ydb/tests/library/harness/kikimr_runner.py", line 442, in stop exception = self.__stop_node(node, kill) File "ydb/tests/library/harness/kikimr_runner.py", line 423, in __stop_node node.stop() File "ydb/tests/library/harness/kikimr_runner.py", line 197, in stop super(KiKiMRNode, self).stop() File "ydb/tests/library/harness/daemon.py", line 171, in stop wait_for(lambda: not self.is_alive(), self.__timeout) File "ydb/tests/library/common/wait_for.py", line 19, in wait_for time.sleep(step_seconds) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...out has expired: Command '['/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/ydb-tests-fq-s3', '--basetemp', '/home/runner/.ya/build/build_root/jptk/003f56/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/jptk/003f56', '--source-root', '/home/runner/.ya/build/build_root/jptk/003f56/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/s3', '--test-tool-bin', '/home/runner/.ya/tools/v4/7480276291/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/fq/s3', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--test-file-filter', 'test_format_setting.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File 
"library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...out has expired: Command '['/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/ydb-tests-fq-s3', '--basetemp', '/home/runner/.ya/build/build_root/jptk/003f56/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/jptk/003f56', '--source-root', '/home/runner/.ya/build/build_root/jptk/003f56/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/jptk/003f56/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/s3', '--test-tool-bin', '/home/runner/.ya/tools/v4/7480276291/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/fq/s3', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--test-file-filter', 'test_format_setting.py']' stopped by 600 seconds timeout",), {}) >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test.py::test[join-lookupjoin_inner_1o2o--Debug] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Plan] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-Analyze] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test.py::test[join-mapjoin_early_rewrite-off-Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test.py::test[join-mapjoin_early_rewrite-off-Debug] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Plan] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test.py::test[join-mergejoin_with_different_key_names-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> test.py::test[join-mergejoin_with_different_key_names-off-Debug] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names-off-Plan] >> test.py::test[join-mergejoin_with_different_key_names-off-Plan] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-Analyze] |93.6%| [TA] $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test.py::test[join-mergejoin_with_table_range-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-Debug] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test.py::test[join-mergejoin_with_table_range-off-Debug] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off-Plan] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-Results] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Analyze] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> TAsyncIndexTests::Decimal |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> TAsyncIndexTests::Decimal [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Analyze] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Debug] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:15:21.886149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:21.886182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:21.886187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:21.886193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:21.886199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:21.886203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:21.886213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:21.886313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:21.898728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:21.898755Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:15:21.902087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:21.902996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:21.903057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:21.904785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:21.904991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:21.905117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:21.905230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:21.906266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:21.906577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:21.906591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:21.906634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:21.906643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:21.906649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:21.906669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.908176Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] 
sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:15:21.936103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:21.936202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.936399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:21.936461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:21.936471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.937618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:21.937653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:21.937724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.937737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:21.937742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:21.937748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:21.938351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.938365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:21.938371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:21.938898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.938911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.938917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:21.938925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:21.939645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:21.940156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2024-11-21T09:15:21.940240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:21.940450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:21.940484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:21.940494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:21.940554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:21.940563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:21.940610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:21.940624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:21.941378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:21.941392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:21.941443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:21.941449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:15:21.941551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:21.941559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:21.941573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:21.941578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:21.941585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:21.941591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:21.941596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:21.941601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:21.941615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:21.941621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
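The schemeshard trace above walks a single ESchemeOpAlterSubDomain operation through its sub-states: TCreateParts (2 -> 3), NSubDomainState::TConfigureParts (3 -> 128) and NSubDomainState::TPropose (128 -> 240), with the coordinator plan step and scheme-board publication in between. When reading long traces like this, a small helper that pulls out only the "Change state for txid" records makes the progression easier to follow. The sketch below is purely illustrative tooling for this log format, not part of the YDB test harness; the regex is derived from the lines shown here and works even when several records are concatenated on one physical line.

# Illustrative sketch: extract schemeshard state transitions
# ("Change state for txid X:Y A -> B") from a captured test log.
import re
import sys
from collections import defaultdict

TRANSITION = re.compile(
    r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)"
)

def state_transitions(log_text: str) -> dict:
    """Group observed state transitions by operation id (txid:part)."""
    transitions = defaultdict(list)
    for match in TRANSITION.finditer(log_text):
        transitions[match.group("op")].append(
            (int(match.group("src")), int(match.group("dst")))
        )
    return transitions

if __name__ == "__main__":
    text = sys.stdin.read()
    for op, steps in state_transitions(text).items():
        chain = [str(steps[0][0])] + [str(dst) for _, dst in steps]
        print(f"{op}: {' -> '.join(chain)}")

Fed the trace above on stdin, this would print "1:0: 2 -> 3 -> 128 -> 240" for the AlterSubDomain operation and "129 -> 240" for the later 101:0 / 101:2 parts.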
2024-11-21T09:15:21.941625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:15:21.942028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:15:21.942048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:15:21.942054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:15:21.942060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:15:21.942065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:21.942096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 077457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:15:22.077472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077478Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T09:15:22.077843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:15:22.077860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:15:22.077881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:15:22.077889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:15:22.077899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:15:22.077907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, 
datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077911Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:15:22.077920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:15:22.084876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:15:22.084923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:15:22.084938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:15:22.091449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:15:22.091581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:15:22.091653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:15:22.091684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:15:22.091809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:15:22.091822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T09:15:22.091841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T09:15:22.091847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T09:15:22.091854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T09:15:22.091949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:15:22.092139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:15:22.092152Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:15:22.092162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T09:15:22.092165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:15:22.092171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T09:15:22.092198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T09:15:22.092227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:15:22.092235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:15:22.092241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:15:22.092324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:15:22.092331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T09:15:22.092334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T09:15:22.092340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:15:22.092344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T09:15:22.092347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T09:15:22.092355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:15:22.101054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:15:22.101114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T09:15:22.101318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:15:22.101432Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 138us result status StatusSuccess 2024-11-21T09:15:22.101755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { 
PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last 
request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test.py::test[join-nopushdown_filter_with_depends_on--Debug] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] |93.6%| [TA] {RESULT} $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... results_accumulator.log} |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Plan] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] [GOOD] >> test.py::test[join-premap_map_semi--Analyze] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:299: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[join-premap_map_semi--Analyze] [GOOD] >> test.py::test[join-premap_map_semi--Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test.py::test[join-premap_map_semi--Debug] [GOOD] >> test.py::test[join-premap_map_semi--ForceBlocks] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_public_api.py::TestExplain::test_explain_data_query >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test.py::test[join-premap_map_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi--Plan] >> test.py::test[join-premap_map_semi--Plan] [GOOD] >> test.py::test[join-premap_map_semi--Results] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test.py::test[join-premap_map_semi--Results] [GOOD] >> test.py::test[join-pullup_context_dep--Analyze] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test.py::test[join-pullup_context_dep--Analyze] [GOOD] >> test.py::test[join-pullup_context_dep--Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test.py::test[join-pullup_context_dep--Debug] [GOOD] >> test.py::test[join-pullup_context_dep--ForceBlocks] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> 
test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test.py::test[join-pullup_context_dep--ForceBlocks] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test.py::test[join-pullup_context_dep--Plan] [GOOD] >> test.py::test[join-pullup_context_dep--Results] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> 
test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[join-simple_columns_partial-off-Analyze] >> TS3WrapperTests::MultipartUpload >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> TS3WrapperTests::MultipartUpload [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2024-11-21T09:15:39.542221Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 015493CD-01EE-4E5C-AFF5-AD99BDFC23D0, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:27644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 84EC0560-DDBC-4EFE-A5EA-0C0FA9110C6C amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T09:15:39.544712Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 015493CD-01EE-4E5C-AFF5-AD99BDFC23D0, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T09:15:39.544866Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B33C4D91-850B-4994-937A-7C0CB88335C0, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3A4FEE7B-27B6-4244-B72C-34B7D57022A4 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2024-11-21T09:15:39.545674Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B33C4D91-850B-4994-937A-7C0CB88335C0, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T09:15:39.545769Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 41FE347A-42B5-4E75-B98D-204FC6570E03, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 999C9E8D-8B86-4FE7-9C65-D098FB59CD00 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2024-11-21T09:15:39.546591Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 41FE347A-42B5-4E75-B98D-204FC6570E03, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T09:15:39.546684Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 228D5FE4-A062-49F8-881F-245DFD244168, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:27644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AD94486F-0A11-466A-BB34-484AE74A1535 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T09:15:39.548263Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 228D5FE4-A062-49F8-881F-245DFD244168, response# GetObjectResult { } >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test.py::test[join-simple_columns_partial-off-Analyze] [GOOD] >> test.py::test[join-simple_columns_partial-off-Debug] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::HeadUnknownObject [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test.py::test[join-simple_columns_partial-off-Debug] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> 
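Several of the py3test "Test command err" blocks above repeat the same DeprecationWarning for ydb/tests/library/sqs/requests_client.py:140: Logger.warn() is a deprecated alias, and the message itself names the replacement, warning(). A minimal sketch of that change follows; the wrapping function name and variables are assumptions reconstructed from the logged call, not a copy of the real file.

# Sketch of the change the repeated DeprecationWarning asks for:
# logging.Logger.warn() is a deprecated alias of warning().
import logging

logger = logging.getLogger(__name__)

def report_failed_request(code, reason, text):
    # Before (triggers the DeprecationWarning seen in the test output):
    #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(
    #       code, reason, text))
    # After: use warning(); lazy %-formatting also avoids building the
    # string when this log level is disabled.
    logger.warning(
        "Last request failed with code %s, reason '%s' and text '%s'",
        code, reason, text,
    )

The pkgutil.find_loader and ast.Str warnings emitted by the vendored Flask/Werkzeug copies point at the replacements named in their own messages (importlib.util.find_spec() and ast.Constant) and would be fixed upstream rather than in this repository.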
test.py::test[join-simple_columns_partial-off-ForceBlocks] [SKIPPED] >> test.py::test[join-simple_columns_partial-off-Plan] [GOOD] >> test.py::test[join-simple_columns_partial-off-Results] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2024-11-21T09:15:41.672853Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 509061D2-B293-44A0-9AF0-33E1BE622B07, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:18377 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4F9B5EF1-4847-4F1D-B0AC-FFDE813F2C7E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T09:15:41.674548Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 509061D2-B293-44A0-9AF0-33E1BE622B07, response# No response body. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] |93.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[join-simple_columns_partial-off-Results] [GOOD] |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart >> TS3WrapperTests::GetObject |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::GetObject [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2024-11-21T09:15:44.005370Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 39DA1858-0487-4A14-B153-F748EEE3DCFD, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:10599 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B0A053D4-7252-4B70-BF60-8799CDCD7FE4 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2024-11-21T09:15:44.007061Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 
39DA1858-0487-4A14-B153-F748EEE3DCFD, response# ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2024-11-21T09:15:44.024992Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D48E3336-648F-40DF-878B-8A08BDAF5110, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:22188 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D6FB2EFC-9F93-4134-A59B-D5BAC25E055F amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T09:15:44.026898Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D48E3336-648F-40DF-878B-8A08BDAF5110, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T09:15:44.027061Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 011E7C25-D8F5-4D37-B765-88DEE495A920, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:22188 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 21505003-690C-49F7-9E79-D53750C614B2 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T09:15:44.027878Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 011E7C25-D8F5-4D37-B765-88DEE495A920, response# GetObjectResult { } >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> 
TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> TS3WrapperTests::AbortUnknownUpload >> TS3WrapperTests::AbortMultipartUpload >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2024-11-21T09:15:45.553164Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 03A34332-EFFB-40B7-84CA-53B1F6B62653, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:29752 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6A7D0B55-7A1E-4E28-90FF-D52D09FADDF2 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2024-11-21T09:15:45.557241Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 03A34332-EFFB-40B7-84CA-53B1F6B62653, response# |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2024-11-21T09:15:46.128630Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8608D7AE-705F-4A83-B879-A966ADCA4702, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:11124 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0CA06D0A-B928-49C3-88DC-3C5068C2DD69 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2024-11-21T09:15:46.130234Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8608D7AE-705F-4A83-B879-A966ADCA4702, response# |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2024-11-21T09:15:46.217165Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 32EA7A24-14AD-43F5-9CCB-A2C2B918F91A, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:1964 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BF9F3729-6E8F-4AD4-BF75-38E669F0791F amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T09:15:46.218888Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 32EA7A24-14AD-43F5-9CCB-A2C2B918F91A, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T09:15:46.219048Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 185BA774-EC05-4E6F-B2CF-2378160F151D, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1964 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E9C18840-56EF-44A2-8D67-C486DECE43A8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2024-11-21T09:15:46.219904Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 185BA774-EC05-4E6F-B2CF-2378160F151D, response# AbortMultipartUploadResult { } 2024-11-21T09:15:46.220138Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 99EAFED5-54EB-4333-BB9C-52DEBB01508B, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:1964 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0B0A1E1A-00DA-4E87-9EE7-C8ECF9B040F8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T09:15:46.221071Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 99EAFED5-54EB-4333-BB9C-52DEBB01508B, response# No response body. 
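
Note on the TS3WrapperTests traces above: they show the aws-sdk-cpp based S3 wrapper driving an in-process S3 mock through PutObject / ranged GetObject, CreateMultipartUpload / AbortMultipartUpload, and a final HeadObject that returns no body for the aborted upload. The sketch below replays the same call sequence against an S3-compatible endpoint purely for illustration; it is not part of the test suite, it uses boto3 instead of the C++ SDK wrapper, and the endpoint URL, credentials, and bucket-creation step are placeholder assumptions (the mock's port changes per run, as the `Host:` headers above show).

```python
# Minimal sketch (not from the test suite): replay the request sequence seen in
# the TS3WrapperTests traces against an S3-compatible endpoint.
# Assumptions: boto3 is installed; ENDPOINT and the credentials are placeholders.
import boto3
from botocore.exceptions import ClientError

ENDPOINT = "http://localhost:22188"  # placeholder; the mock listens on a random port

s3 = boto3.client(
    "s3",
    endpoint_url=ENDPOINT,
    aws_access_key_id="test",        # placeholder credentials
    aws_secret_access_key="test",
)

s3.create_bucket(Bucket="TEST")      # may be unnecessary, depending on the mock

# PutObject followed by a ranged GetObject, as in TS3WrapperTests::GetObject
# (the traced object is 4 bytes, hence Range: bytes=0-3).
s3.put_object(Bucket="TEST", Key="key", Body=b"data")
body = s3.get_object(Bucket="TEST", Key="key", Range="bytes=0-3")["Body"].read()

# CreateMultipartUpload, AbortMultipartUpload, then HeadObject,
# as in TS3WrapperTests::AbortMultipartUpload.
upload_id = s3.create_multipart_upload(Bucket="TEST", Key="key2")["UploadId"]
s3.abort_multipart_upload(Bucket="TEST", Key="key2", UploadId=upload_id)
try:
    s3.head_object(Bucket="TEST", Key="key2")
except ClientError:
    # Expected: the upload was aborted, so no object exists ("No response body"
    # in the trace above corresponds to this 404-style HEAD result).
    pass
```

Separately, the recurring DeprecationWarning in the py3test traces (`logger.warn(...)` at ydb/tests/library/sqs/requests_client.py:140) is only a Python logging warning; the message itself names the fix, replacing the call with `logger.warning(...)`.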
|93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> TS3WrapperTests::HeadObject >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2024-11-21T09:15:47.868805Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6FB8C5B5-D9AF-47AC-8EC2-05F5755F4E7D, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:26722 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 927B8C43-89C7-44F7-A33B-0314769E2AAE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T09:15:47.870488Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6FB8C5B5-D9AF-47AC-8EC2-05F5755F4E7D, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T09:15:47.870680Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 43A9353E-B888-4ED4-8A35-EB5334E8D739, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:26722 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 076708FD-D570-48A2-A1E2-EC7EBBE5796B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T09:15:47.871779Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 43A9353E-B888-4ED4-8A35-EB5334E8D739, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } >> TS3WrapperTests::CopyPartUpload >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] |93.7%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropWithData >> TS3WrapperTests::CopyPartUpload [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2024-11-21T09:15:49.112832Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BA13BFE1-424E-4740-BA7D-CA5C72C64B85, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:65129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 44EF1723-2660-4E19-B770-16D8CB4D6118 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T09:15:49.114276Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BA13BFE1-424E-4740-BA7D-CA5C72C64B85, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T09:15:49.114426Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B501D3F8-D802-4F8A-9650-D62004E0AA53, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:65129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C5489D37-EC35-474F-AABB-D6248952098D amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2024-11-21T09:15:49.115217Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B501D3F8-D802-4F8A-9650-D62004E0AA53, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2024-11-21T09:15:49.115395Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 9D4B98AB-E6BB-4F20-9CCE-66DEFA024A05, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:65129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2887105B-F825-4F79-85A0-D07B50AB5AB6 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2024-11-21T09:15:49.116121Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 9D4B98AB-E6BB-4F20-9CCE-66DEFA024A05, response# UploadPartCopyResult { } 2024-11-21T09:15:49.116234Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1D441AEE-F85E-4E6C-AF4C-E141200D2020, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:65129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
0E3A7364-10AB-4750-A8DC-4F50544522F6 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2024-11-21T09:15:49.117363Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1D441AEE-F85E-4E6C-AF4C-E141200D2020, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2024-11-21T09:15:49.117488Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2FE12AE0-E014-405C-87CA-B2688C7D0E70, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:65129 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6665D718-B41D-44A3-B1E0-2F7CF0E77381 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2024-11-21T09:15:49.118054Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2FE12AE0-E014-405C-87CA-B2688C7D0E70, response# GetObjectResult { } >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs |93.7%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> IntermediateDirsReboots::Fake [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> TSolomonReboots::CreateDropSolomonWithReboots |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::Fake [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> 
test_public_api.py::TestCRUDOperations::test_tcl >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> BasicUsage::WaitEventBlocksBeforeDiscovery >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop [GOOD] >> BasicUsage::FallbackToSingleDb >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateDirWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:49.186317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:49.186356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:49.186367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:49.186379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T09:15:49.186396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:49.186406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:49.200958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:49.200982Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.203786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:49.203912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:49.203953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:49.207335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:49.207449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:49.207579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.207879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.209747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:49.212302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.212309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:49.212372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:49.214541Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.235287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.236162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:49.236270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:49.237805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:49.237822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:49.237826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:49.238533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:49.239210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.239224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.239230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.239238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.240039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:49.240713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:49.241548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:49.241847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T09:15:49.241886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:49.241895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.241992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:49.242004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.242040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:49.242056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.242739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.242810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:49.242916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:49.242940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:49.242945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.242950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:49.242957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.242962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:49.242967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:49.242983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:49.242989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:49.242994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ogress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:15:59.625160Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T09:15:59.625178Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:15:59.625183Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:15:59.625189Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T09:15:59.625194Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:15:59.625199Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:15:59.625204Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:15:59.625224Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:15:59.625230Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T09:15:59.625234Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T09:15:59.625238Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T09:15:59.625242Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:15:59.625246Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:15:59.625651Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:15:59.625764Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:15:59.625771Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:302:2294] 2024-11-21T09:15:59.625921Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.625943Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.625949Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:59.625954Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:15:59.625958Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T09:15:59.625965Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:15:59.625969Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T09:15:59.626124Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626141Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626147Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:15:59.626152Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:15:59.626158Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:15:59.626340Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626354Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626358Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:15:59.626363Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:15:59.626368Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:15:59.626663Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:15:59.626678Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:15:59.626696Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:15:59.626892Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626907Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.626913Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:15:59.626918Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:15:59.626923Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:15:59.627325Z node 41 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.627345Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.627351Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:15:59.627356Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:15:59.627361Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:15:59.627377Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T09:15:59.627383Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [41:301:2293] 2024-11-21T09:15:59.627605Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.628172Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.628242Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T09:15:59.628293Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:15:59.628300Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:15:59.628315Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:15:59.628321Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:15:59.628327Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:59.628374Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.628661Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:15:59.628684Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:15:59.628690Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:302:2294] 2024-11-21T09:15:59.629173Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK 
eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:15:59.629302Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:15:59.629348Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 59us result status StatusPathDoesNotExist 2024-11-21T09:15:59.629391Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success 
[GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:49.186296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:49.186344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:49.186357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:49.186375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:49.186379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:49.186389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:49.199150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:49.199176Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.201257Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:49.201404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:49.201455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:49.205749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:49.205874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:49.207086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.208190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.209718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.212269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:49.212281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.212290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:49.212367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:49.214434Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.235292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.236162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:49.236292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2024-11-21T09:15:49.237439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:49.237503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:49.237519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:49.237524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:49.238208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:49.238745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.238787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.243356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:49.244086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:49.244170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:49.244464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.244502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:49.244512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.244611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:49.244622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:15:49.244655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:49.244671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.245339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.245353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.245404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.245409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:49.245499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.245508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:49.245522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:49.245526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.245533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:49.245539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.245545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:49.245549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:49.245565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:49.245571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:49.245576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475021Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:01.475026Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:16:01.475030Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T09:16:01.475033Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:16:01.475037Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T09:16:01.475104Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:01.475112Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T09:16:01.475125Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:16:01.475130Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:01.475136Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T09:16:01.475140Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:01.475144Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:16:01.475148Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:16:01.475176Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:01.475181Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T09:16:01.475185Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T09:16:01.475188Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T09:16:01.475191Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:16:01.475194Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:16:01.475359Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475373Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475377Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:01.475382Z node 49 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:16:01.475386Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:01.475559Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475571Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475576Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:01.475580Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:16:01.475584Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:16:01.475870Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475885Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.475890Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:01.475894Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:01.475898Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:01.476082Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:01.476117Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.476126Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.476130Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:01.476134Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:16:01.476142Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:01.476153Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T09:16:01.476157Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:301:2293] Leader for TabletID 72057594037968897 is [49:213:2213] sender: [49:339:2058] recipient: [49:15:2062] 2024-11-21T09:16:01.476633Z node 49 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:16:01.476719Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.476796Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:01.476870Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:16:01.477142Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:01.477152Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:16:01.477165Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:16:01.477171Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:16:01.477176Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:01.477180Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:01.477186Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:01.477374Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.477435Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.477608Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:01.477625Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:01.477630Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:302:2294] 2024-11-21T09:16:01.477987Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:01.478033Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 
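The Version and PathId values of 18446744073709551615 reported above for the dropped paths (and below for the unresolved "/MyRoot/x" path) are the maximum unsigned 64-bit integer, which this log apparently uses as a "removed / does not exist" marker. A quick standalone check in plain Python (illustrative only, not YDB code):

    # uint64 max, matching the sentinel Version/PathId seen in the schemeshard log
    UINT64_MAX = 2**64 - 1
    assert UINT64_MAX == 18446744073709551615
    print(hex(UINT64_MAX))  # 0xffffffffffffffff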
TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:01.478112Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:01.478144Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 42us result status StatusPathDoesNotExist 2024-11-21T09:16:01.478176Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpSqlIn::KeyTypeMissmatch_Str >> KqpRanges::IsNull >> KqpKv::BulkUpsert >> KqpNewEngine::Select1 >> KqpRanges::WhereInSubquery >> KqpNotNullColumns::InsertNotNullPk |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> KqpNewEngine::LocksMultiShard >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace >> KqpSqlIn::KeySuffix >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_NonExistentKeys >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey+EnableKqpDataQueryStreamLookup >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> KqpRanges::WhereInSubquery [GOOD] >> KqpRanges::ValidatePredicatesDataQuery |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> KqpNewEngine::LocksMultiShard [GOOD] >> KqpNewEngine::LocksMultiShardOk >> KqpNotNullColumns::ReplaceNotNullPk >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNullInValue >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
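The SQS test stderr above points at ydb/tests/library/sqs/requests_client.py:140, where logger.warn(...) raises a DeprecationWarning. In the standard logging module, Logger.warn is only a deprecated alias of Logger.warning, so switching the call site to warning() silences it. A minimal sketch of the pattern, assuming a module-level logger and illustrative argument names rather than the actual requests_client.py code:

    import logging

    logger = logging.getLogger(__name__)

    def report_failed_request(code, reason, text):
        # Logger.warn is a deprecated alias of Logger.warning; call warning() directly.
        # Lazy %-formatting also avoids building the message when the level is disabled.
        logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                       code, reason, text)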
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg [GOOD] >> KqpNotNullColumns::InsertNotNull >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksEffects >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_PgValue >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail >> KqpSqlIn::SecondaryIndex_PgKey+EnableKqpDataQueryStreamLookup [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey-EnableKqpDataQueryStreamLookup >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ReadRangeWithParams >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] >> KqpNotNullColumns::ReplaceNotNull >> KqpNewEngine::SimpleUpsertSelect >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:52.123546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:52.123574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.123580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:52.123585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:52.123591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:52.123595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:52.123605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.123691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:52.135537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:52.135559Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.138124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:52.138236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:52.138280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:52.141813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:52.141926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:52.142035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.142324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.143593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.143976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.143993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.144009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:52.144017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.144023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:52.144076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T09:15:52.146012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.165527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:52.165627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.165698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:52.165772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:52.165782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.166739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.166778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:52.166861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.166876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:52.166881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:52.166887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:52.169263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.169293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:52.169302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:52.170120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.170139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.170146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.170156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.170893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:52.171567Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:52.171633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:52.171865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.171902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:52.171911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.171987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:52.171997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.172032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:52.172046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.172679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.172694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.172742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.172748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:52.172835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.172843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:52.172857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:52.172862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.172868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:52.172874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.172879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:52.172883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:52.172899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:52.172906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:52.172909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 39Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [53:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T09:16:05.770822Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:05.770830Z node 53 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T09:16:05.770844Z node 53 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:16:05.770849Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:05.770854Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T09:16:05.770858Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:05.770863Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:16:05.770868Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:16:05.770900Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:05.770907Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T09:16:05.770911Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T09:16:05.770914Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T09:16:05.770918Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:16:05.770921Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:16:05.771024Z node 53 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771035Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771042Z node 53 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:05.771047Z node 53 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:16:05.771051Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:05.771154Z node 53 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771165Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771169Z node 53 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:05.771173Z node 53 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:16:05.771177Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:05.771594Z node 53 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771613Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771618Z node 53 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:05.771622Z node 53 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:05.771626Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:05.771786Z node 53 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771799Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.771803Z node 53 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:05.771808Z node 53 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:16:05.771812Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:05.771822Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T09:16:05.771828Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [53:301:2293] 2024-11-21T09:16:05.771921Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:05.771928Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 
72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [53:215:2215] sender: [53:339:2058] recipient: [53:15:2062] 2024-11-21T09:16:05.772319Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.772462Z node 53 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:16:05.772527Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:05.772596Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:16:05.772699Z node 53 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T09:16:05.772729Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:05.772758Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:16:05.772986Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:05.772997Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:16:05.773011Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:16:05.773018Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:16:05.773023Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:05.773027Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:05.773033Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:05.773295Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.773539Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.773661Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:05.773682Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:05.773688Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [53:302:2294] 2024-11-21T09:16:05.774261Z node 53 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:05.774295Z 
node 53 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:05.774343Z node 53 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:05.774453Z node 53 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:05.774498Z node 53 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 55us result status StatusPathDoesNotExist 2024-11-21T09:16:05.774544Z node 53 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LocksNoMutations >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit [GOOD] >> KqpNewEngine::DependentSelect >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ScalarFunctions >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> KqpSqlIn::SecondaryIndex_PgKey-EnableKqpDataQueryStreamLookup [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> KqpReturning::ReturningTwice >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNullPartial >> KqpRanges::NullInKeySuffix >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksInRoTx >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount >> KqpNotNullColumns::InsertNotNullPg [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> KqpNewEngine::Delete >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::PassLimit >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReturningSerial >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> 
KqpNotNullColumns::FailedMultiEffects [GOOD] >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LiteralKeys ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> KqpKv::ReadRows_PgKey [GOOD] >> KqpKv::ReadRows_Decimal >> KqpNewEngine::Delete [GOOD] >> KqpNewEngine::DeleteOn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 13804, MsgBus: 22805 2024-11-21T09:16:03.172397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658870188629948:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.172418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e57/r3tmp/tmpMeeyhf/pdisk_1.dat 2024-11-21T09:16:03.287683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.287712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.291912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:03.295307Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13804, node 1 2024-11-21T09:16:03.374861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.374877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.374878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.374916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22805 TClient is connected to server localhost:22805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.496044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.607953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870188630342:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.607986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.750056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.813751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870188630443:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.813773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.813788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870188630448:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.814562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.816594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658870188630450:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:03.968984Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658870188630562:2330], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2024-11-21T09:16:03.969450Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk0Zjg5MTktN2VhNWFkOGYtODIxZWU1ZjMtZWEyZjY3MmI=, ActorId: [1:7439658870188630339:2297], ActorState: ExecuteState, TraceId: 01jd703nzwdvtqh2s7vxm5xj6x, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T09:16:03.973129Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658870188630571:2334], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:03.973208Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk0Zjg5MTktN2VhNWFkOGYtODIxZWU1ZjMtZWEyZjY3MmI=, ActorId: [1:7439658870188630339:2297], ActorState: ExecuteState, TraceId: 01jd703p026bmyzza63szzzhs4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 30175, MsgBus: 9632 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e57/r3tmp/tmpLvRcaY/pdisk_1.dat 2024-11-21T09:16:04.277430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:04.278214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30175, node 2 2024-11-21T09:16:04.293836Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.293849Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.293852Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.293905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9632 TClient is connected to server localhost:9632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.363494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.363541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.364561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:04.365316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.631348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658875172190453:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.631377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.648930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.660843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658875172190551:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.660879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.660928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658875172190556:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.661818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:04.670262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658875172190558:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:04.843464Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:04.845419Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658875172190669:2330], status: BAD_REQUEST, iss ... 9:16:07.175493Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5479 TClient is connected to server localhost:5479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:07.238289Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:07.238337Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:07.239124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.239406Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:07.240795Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:07.478893Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658888748629002:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.478913Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.480558Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.497639Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658888748629100:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.497670Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.497708Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658888748629105:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.498504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:07.502431Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658888748629107:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:07.614693Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7439658888748629218:2330], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2024-11-21T09:16:07.614758Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OGE4MTQ3MWItZGI2MmJjYWItNWE3ZDYxYjEtMzIxMzgzZGI=, ActorId: [5:7439658888748628974:2295], ActorState: ExecuteState, TraceId: 01jd703shw6xzcsh35cj458rpk, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:07.639893Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:07.640501Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7439658888748629227:2334], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2024-11-21T09:16:07.640582Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OGE4MTQ3MWItZGI2MmJjYWItNWE3ZDYxYjEtMzIxMzgzZGI=, ActorId: [5:7439658888748628974:2295], ActorState: ExecuteState, TraceId: 01jd703shz2pp7ny3jxwd3kxs2, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 26847, MsgBus: 5068 2024-11-21T09:16:07.852383Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658888327775297:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e57/r3tmp/tmpNLY08y/pdisk_1.dat 2024-11-21T09:16:07.855598Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:07.862343Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26847, node 6 2024-11-21T09:16:07.878503Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:07.878518Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:07.878520Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:07.878579Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5068 TClient is connected to server localhost:5068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:07.951545Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:07.951591Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:07.954294Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:07.955454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:07.960434Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:08.237274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658892622743036:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.237300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.240556Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.249554Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658892622743134:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.249579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.249645Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658892622743139:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.250321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:08.259431Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658892622743141:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:08.343959Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658892622743230:2324], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:08.344041Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NTYxMGQxMTItNDc3ZWU1MjQtZjBiNzBiMjItMTg2YTljMzA=, ActorId: [6:7439658892622743018:2297], ActorState: ExecuteState, TraceId: 01jd703t8m0prevmccy8rqvmjn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:08.356099Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> KqpSort::PassLimit [GOOD] >> KqpSort::Offset >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::InWithCast >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> KqpKv::ReadRows_Decimal [GOOD] >> KqpNewEngine::LiteralKeys [GOOD] >> KqpSqlIn::PhasesCount [GOOD] >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::Like >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::DqSourceLiteralRange >> KqpReturning::ReturningSerial [GOOD] >> KqpReturning::ReturningColumnsOrder >> KqpNewEngine::DeleteOn [GOOD] >> KqpNewEngine::DeleteWithBuiltin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 29111, MsgBus: 18666 2024-11-21T09:16:03.172497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658869004187016:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.172513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e78/r3tmp/tmpm3inxe/pdisk_1.dat 2024-11-21T09:16:03.272461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.272491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.273362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:03.279744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29111, node 1 2024-11-21T09:16:03.373004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.373019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.373021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.373094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18666 TClient is connected to server localhost:18666 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.521871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.524158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.607860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869004187408:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.607900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.750081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.824766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869004187516:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.824781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869004187521:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.824793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.825544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.827288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658869004187523:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 16768, MsgBus: 5357 2024-11-21T09:16:04.281849Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e78/r3tmp/tmpispPDn/pdisk_1.dat 2024-11-21T09:16:04.295121Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16768, node 2 2024-11-21T09:16:04.309445Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.309457Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.309460Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.309503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5357 TClient is connected to server localhost:5357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.367448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.367498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.368632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:04.374580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.375952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:04.580171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658876347693786:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.580228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.582872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2024-11-21T09:16:04.622138Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 26936, MsgBus: 21677 2024-11-21T09:16:04.820484Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658874558165655:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.820508Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e78/r3tmp/tmpG9enKs/pdisk_1.dat 2024-11-21T09:16:04.838134Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26936, node 3 2024-11-21T09:16:04.856817Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.856834Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.856837Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.856884Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21677 TClient is connected to server localhost:21677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.920947Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.920985Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.922847Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:04.926423Z nod ... 
s from node 4, TabletId: 72075186224037929 not found 2024-11-21T09:16:07.493786Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.495339Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037930 not found 2024-11-21T09:16:07.518824Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037931 not found 2024-11-21T09:16:07.518858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.546737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715748:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.548313Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037932 not found 2024-11-21T09:16:07.575037Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.576517Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037933 not found 2024-11-21T09:16:07.602701Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.603493Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037934 not found 2024-11-21T09:16:07.630951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.632996Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037935 not found 2024-11-21T09:16:07.658856Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.659306Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037936 not found 2024-11-21T09:16:07.687407Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037937 not found 2024-11-21T09:16:07.688028Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715758:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.716420Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037938 not found 2024-11-21T09:16:07.717667Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.761077Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 2447, MsgBus: 27605 2024-11-21T09:16:08.084675Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658893196221754:2194];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:08.094589Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e78/r3tmp/tmppX7Cnj/pdisk_1.dat 2024-11-21T09:16:08.105866Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2447, node 5 2024-11-21T09:16:08.129958Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:08.129968Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:08.129969Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:08.130004Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27605 2024-11-21T09:16:08.184389Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:08.184421Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:27605 2024-11-21T09:16:08.185507Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:08.189329Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:08.192334Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:08.479366Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.560967Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2024-11-21T09:16:08.561508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.583213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.584226Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found 2024-11-21T09:16:08.612699Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found Trying to start YDB, gRPC: 15570, MsgBus: 22207 2024-11-21T09:16:08.818137Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658891276678092:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:08.818404Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e78/r3tmp/tmp3nRafa/pdisk_1.dat 2024-11-21T09:16:08.830651Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15570, node 6 2024-11-21T09:16:08.849203Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:08.849217Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:08.849218Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:08.849257Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22207 TClient is connected to server localhost:22207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:08.918659Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:08.918714Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:08.919762Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:08.921426Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.161461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658895571645988:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.161487Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.165220Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.193477Z node 6 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2024-11-21T09:16:09.194117Z node 6 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> KqpSqlIn::TableSource >> KqpSort::Offset [GOOD] >> KqpSort::OffsetPk >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 24743, MsgBus: 23557 2024-11-21T09:16:03.440291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658870162984002:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.440310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e31/r3tmp/tmpoGtQo0/pdisk_1.dat 2024-11-21T09:16:03.516918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24743, node 1 2024-11-21T09:16:03.538609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.538631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.538634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.538697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:03.544516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.544548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.545883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23557 TClient is connected to server localhost:23557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:03.596099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:03.605717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.672387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.695549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.709008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.793849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870162985531:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.793882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.835007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.842115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.898113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.955059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.968958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.982628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.991660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870162986048:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.991685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.991730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870162986053:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.992494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.995567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658870162986055:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17136, MsgBus: 7056 2024-11-21T09:16:04.539604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658873203215773:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.541391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e31/r3tmp/tmpUesIML/pdisk_1.dat 2024-11-21T09:16:04.552270Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17136, node 2 2024-11-21T09:16:04.559472Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.559485Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.559487Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.559528Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7056 TClient is connected to server localhost:7056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.638799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.638829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.639915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:04.641685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.647223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.665136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:04.685098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.695513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.878628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658873203217171:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.878683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.884802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.893381Z node 2 :FLAT_TX_SCHEMESHARD ... 9:16:07.668895Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:07.669127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.676489Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:07.691502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.708046Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.735969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:07.751040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.907773Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658886046043283:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.907810Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.910988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.919629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.931728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.945202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.960346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.973604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.993579Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658886046043789:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.993611Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.993811Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658886046043794:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.994602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:07.999984Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658886046043796:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27956, MsgBus: 17978 2024-11-21T09:16:08.686789Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658891293777295:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e31/r3tmp/tmpb9XrxO/pdisk_1.dat 2024-11-21T09:16:08.693686Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:08.703165Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27956, node 6 2024-11-21T09:16:08.712445Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:08.712458Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:08.712460Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:08.712501Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17978 TClient is connected to server localhost:17978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:08.792618Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:08.792646Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:08.792901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.793239Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:08.805198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.816350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:08.837677Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.849263Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.049796Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658895588745994:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.049822Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.056356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.067534Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.078609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.085211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.092585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.107701Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.122249Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658895588746486:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.122264Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658895588746491:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.122274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.122898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.128791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658895588746493:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 17724, MsgBus: 30037 2024-11-21T09:16:03.182846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658872522003388:2227];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.225780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e43/r3tmp/tmpuOp72h/pdisk_1.dat 2024-11-21T09:16:03.274756Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:03.283401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.283428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.284359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17724, node 1 2024-11-21T09:16:03.374361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.374381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.374383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.374417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30037 TClient is connected to server localhost:30037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.520899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.523985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.527923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:03.591104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.650927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.662605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.704068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658872522004742:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.704106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.750061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.760143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.773691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.786623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.801258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.815610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.830081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658872522005248:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.830103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.830106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658872522005253:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.830786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.834670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658872522005255:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
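[editor's note] The repeated "Resource pool default not found" warnings and the TPoolCreatorActor "Scheduled retry ... completed, doublechecking" sequence above are the workload service racing the automatic creation of the default resource pool during database startup (the ESchemeOpCreateResourcePool suboperation). Where a test or database needs a pool available up front, one can be created explicitly. A minimal YQL sketch follows; the pool name and the WITH options are illustrative assumptions, and the supported option set depends on the YDB version:

    -- Hypothetical example: pre-create a workload-service resource pool.
    -- Option names are assumptions; check the YDB docs for the exact supported set.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on queries running concurrently in this pool
        QUEUE_SIZE = 100              -- how many queries may queue once the limit is reached
    );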
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27845, MsgBus: 18195 2024-11-21T09:16:04.259049Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658873594684319:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.260652Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e43/r3tmp/tmpo9Eq4l/pdisk_1.dat 2024-11-21T09:16:04.285624Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27845, node 2 2024-11-21T09:16:04.304828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.304850Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.304853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.304901Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18195 TClient is connected to server localhost:18195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.362585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.362613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.362974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.363658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:04.368850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.381328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
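[editor's note] The code 1108 warning above is YQL flagging that IN over a nullable operand can behave unexpectedly unless the suggested pragma is enabled, which switches IN to ANSI-compatible handling of NULLs and empty collections. A minimal sketch, assuming a hypothetical table T with an optional Uint64 column Value (the pragma name is taken verbatim from the warning):

    -- Without the pragma, `Value IN (...)` on a nullable column may yield a surprising
    -- result for NULL values; the pragma requests ANSI-style IN semantics instead.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM T
    WHERE Value IN (1, 2, 3);  -- Value is Uint64?; with the pragma, NULL rows simply don't match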
2024-11-21T09:16:04.399853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.411138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.636492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658873594685724:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, ... sed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.139532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.156257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.442341Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658887171873773:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.442427Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.447269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.456663Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.468765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.524915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.533555Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.545862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.561508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658887171874290:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.561532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.561551Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658887171874295:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.562212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:07.567821Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439658887171874297:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:07.822086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.830213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.840675Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29411, MsgBus: 7611 2024-11-21T09:16:08.550302Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658893157053223:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:08.550366Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e43/r3tmp/tmpS0fA1t/pdisk_1.dat 2024-11-21T09:16:08.562314Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29411, node 5 2024-11-21T09:16:08.572178Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:08.572197Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:08.572199Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:08.572270Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7611 TClient is connected to server localhost:7611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:08.650573Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:08.650606Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:08.651762Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:08.652948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:08.664735Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:08.677328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.688501Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.720789Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:08.780442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.924266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658893157054788:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.924299Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.929802Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.937061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.945901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.003533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.010217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.023890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.040721Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658897452022592:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.040757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658897452022597:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.040757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.041629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.050251Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658897452022599:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSqlIn::InWithCast [GOOD] >> KqpRanges::NullInKey >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 24399, MsgBus: 28864 2024-11-21T09:16:04.619560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658876513265011:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.619705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1b/r3tmp/tmpwNCbUQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24399, node 1 2024-11-21T09:16:04.702553Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:04.712059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.712077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.712079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.712125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:04.718848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.718894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.719951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28864 TClient is connected to server localhost:28864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.777139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.964975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658876513265472:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.965009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.000037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:05.064879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658880808232868:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.064904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.065004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658880808232873:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.065864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:05.067529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658880808232875:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:05.193246Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658880808232964:2324], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2024-11-21T09:16:05.193666Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDg0YjA5ZTUtMmE5NWMyZTgtYmYyN2IxOTMtOTU0ZDI0ZTg=, ActorId: [1:7439658876513265469:2297], ActorState: ExecuteState, TraceId: 01jd703q64bf9vy7fnnv4x311x, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T09:16:05.197249Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658880808232973:2328], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:05.197356Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDg0YjA5ZTUtMmE5NWMyZTgtYmYyN2IxOTMtOTU0ZDI0ZTg=, ActorId: [1:7439658876513265469:2297], ActorState: ExecuteState, TraceId: 01jd703q6a887kaqjxzqqabm56, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 5729, MsgBus: 5974 2024-11-21T09:16:05.530821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658881427882824:2194];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:05.532637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1b/r3tmp/tmpSUMiVf/pdisk_1.dat 2024-11-21T09:16:05.547217Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5729, node 2 2024-11-21T09:16:05.576460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:05.576475Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:05.576477Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:05.576523Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5974 TClient is connected to server localhost:5974 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:05.630191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:05.630226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:05.631305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:05.633824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
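[editor's note] The two compile errors above (codes 2029 and 2031) show the two ways a write into a table with a non-nullable primary key fails type-checking: omitting the key column from the input row, and passing NULL for it. A minimal YQL sketch, assuming a hypothetical table shaped like the test's /Root/TestReplaceNotNullPk (Key Uint64 NOT NULL, Value String?):

    -- Fails with code 2029: the key column is missing from the input row.
    REPLACE INTO TestReplaceNotNullPk (Value) VALUES ("a");

    -- Fails with code 2031: NULL cannot be converted to the non-nullable Uint64 key.
    REPLACE INTO TestReplaceNotNullPk (Key, Value) VALUES (NULL, "b");

    -- Succeeds: the key is present and non-null (1u widens implicitly to Uint64).
    REPLACE INTO TestReplaceNotNullPk (Key, Value) VALUES (1u, "c");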
2024-11-21T09:16:05.639694Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:05.910292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658881427883254:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.910353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.911893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:05.936693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658881427883367:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.936724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.936827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658881427883372:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:05.937621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:05.939881Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:16:05.939979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:743965888142788 ... 644480 2024-11-21T09:16:08.161989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.181731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.378623Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658893663883724:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.378689Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.381590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.440776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.449163Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.462899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.476775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.491307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.507095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658893663884226:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.507122Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.507163Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658893663884231:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.507961Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:08.510528Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658893663884233:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:08.688118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16127, MsgBus: 22646 2024-11-21T09:16:09.139595Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658898819136217:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:09.139614Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e1b/r3tmp/tmpRV3KtQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16127, node 6 2024-11-21T09:16:09.162536Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:09.162810Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:09.162822Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:09.162824Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:09.162867Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22646 TClient is connected to server localhost:22646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:09.239755Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.239788Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:09.240817Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.242588Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:09.253893Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.269768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:09.292846Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.303331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.506762Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658898819137786:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.506794Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.509470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.518647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.527295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.541074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.547377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.562681Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.577471Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658898819138288:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.577493Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.577517Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658898819138293:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.578169Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.581678Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658898819138295:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:09.792885Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.853634Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180569896, txId: 281474976715673] shutting down 2024-11-21T09:16:09.874366Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180569917, txId: 281474976715675] shutting down 2024-11-21T09:16:09.893976Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180569938, txId: 281474976715677] shutting down >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::ReturningTypes >> KqpRanges::Like [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 28440, MsgBus: 14320 2024-11-21T09:16:04.066591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658876735026486:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.066616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e26/r3tmp/tmpZSxlnu/pdisk_1.dat 2024-11-21T09:16:04.132538Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28440, node 1 2024-11-21T09:16:04.146477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.146490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.146492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.146529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14320 2024-11-21T09:16:04.166735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.166804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.167866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14320 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.209242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.213072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:04.215679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.234507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.259676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.273533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.404749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658876735028018:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.404784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.444991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.453589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.465985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.522542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.536278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.550077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.565578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658876735028535:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.565602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.565617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658876735028540:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.566423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:04.570236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658876735028542:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:04.786627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.796796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.809451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14131, MsgBus: 3104 2024-11-21T09:16:05.581376Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658877477287467:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:05.583752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e26/r3tmp/tmp7pCKYH/pdisk_1.dat 2024-11-21T09:16:05.598445Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14131, node 2 2024-11-21T09:16:05.604661Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:05.604679Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:05.604681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:05.604731Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3104 TClient is connected to server localhost:3104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ... eTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.231949Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.424927Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658894549617406:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.424952Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.431419Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.438629Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.448661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.456072Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.469782Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.483751Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.500820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658894549617897:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.500851Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.500899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658894549617902:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.501669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:08.510922Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439658894549617904:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:08.782218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.796225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.810443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 17881, MsgBus: 21488 2024-11-21T09:16:09.413425Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658894692906933:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:09.415341Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e26/r3tmp/tmpWPtos9/pdisk_1.dat 2024-11-21T09:16:09.423765Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17881, node 5 2024-11-21T09:16:09.441712Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:09.441724Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:09.441725Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:09.441761Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21488 TClient is connected to server localhost:21488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:09.514606Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.514636Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:09.514938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.515558Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.523429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
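The repeated warning with code 1108 above flags IN predicates over nullable arguments and names the pragma that opts into standard semantics. A minimal YQL sketch of applying that suggestion, with a hypothetical table and column (only the pragma and the IN predicate correspond to what the log reports):

    -- Hypothetical table/column names; the pragma is the one the warning recommends.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/KeyValueTable`        -- assumed test table
    WHERE Value IN ("one", "two");    -- Value is an Optional (nullable) column

With the pragma enabled, IN over empty or nullable item collections follows standard SQL semantics, which is what the warning asks the query author to opt into.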
2024-11-21T09:16:09.534258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.554432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.564908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.739842Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658894692908325:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.739872Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.746257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.752424Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.766455Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.780799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.792794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.809100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.825391Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658894692908825:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.825427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.825476Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658894692908830:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.826273Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.834880Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658894692908832:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpNewEngine::DeleteWithBuiltin [GOOD] >> KqpNewEngine::DeleteON >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::UpdateMulti ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ScalarMultiUsage [GOOD] Test command err: Trying to start YDB, gRPC: 22106, MsgBus: 63887 2024-11-21T09:16:03.172817Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658869083917967:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.173213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e89/r3tmp/tmpEXjQb6/pdisk_1.dat 2024-11-21T09:16:03.279075Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:03.284400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.284433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.285929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22106, node 1 2024-11-21T09:16:03.373212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.373227Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.373229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.373271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63887 TClient is connected to server localhost:63887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.498294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.609081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869083918425:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.609107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.609113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869083918433:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.613944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.615928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658869083918439:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 5623, MsgBus: 18764 2024-11-21T09:16:04.051299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658874426856854:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e89/r3tmp/tmpTUDbFc/pdisk_1.dat 2024-11-21T09:16:04.055213Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:04.064342Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5623, node 2 2024-11-21T09:16:04.082787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.082800Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.082802Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.082841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18764 TClient is connected to server localhost:18764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.152578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.152619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.154161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:04.154667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.158801Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:04.163701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.185213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:04.206515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.273035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.371954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658874426858232:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.372032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.377028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.385477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.396013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.409957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.425470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.437971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.462021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658874426858735:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.462044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.462196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658874426858740:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:04.463134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:04.465842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658874426858748:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:04.775531Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658874426859127:2481], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:3:26: Error: At function: KiWriteTable! ... 74976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.272797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.287804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.300813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.318035Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658888167361863:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.318066Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.318100Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658888167361868:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.318924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:07.328083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658888167361870:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:07.549825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.556816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.566956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.623559Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.261288Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568223, txId: 281474976715692] shutting down 2024-11-21T09:16:08.362843Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568356, txId: 281474976715695] shutting down 2024-11-21T09:16:08.467122Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568454, txId: 281474976715698] shutting down 2024-11-21T09:16:08.600981Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568573, txId: 281474976715701] shutting down 2024-11-21T09:16:08.719399Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568713, txId: 281474976715704] shutting down 2024-11-21T09:16:08.817571Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568811, txId: 281474976715707] shutting down 2024-11-21T09:16:08.928945Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180568923, txId: 281474976715710] shutting down 2024-11-21T09:16:08.974844Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180569021, txId: 281474976715713] shutting down 2024-11-21T09:16:09.016851Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180569056, txId: 281474976715715] shutting down Trying to start YDB, gRPC: 29736, MsgBus: 28702 2024-11-21T09:16:09.300419Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658897535094036:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e89/r3tmp/tmp1lvp5a/pdisk_1.dat 2024-11-21T09:16:09.306149Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:09.321495Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29736, node 6 2024-11-21T09:16:09.332415Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:09.332430Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:09.332432Z node 6 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:09.332481Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28702 TClient is connected to server localhost:28702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:09.399691Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.399739Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:09.400831Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.403040Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.405666Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:09.409815Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.420646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.443011Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:09.455196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.652741Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658897535095446:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.652776Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.658686Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.672495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.686705Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.695044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.751621Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.766982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.789560Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658897535095952:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.789586Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.789706Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658897535095957:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.790287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.792077Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658897535095959:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::TupleParameter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: Trying to start YDB, gRPC: 3044, MsgBus: 63952 2024-11-21T09:16:03.172848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658869379780721:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.173179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e80/r3tmp/tmpzzhZVL/pdisk_1.dat 2024-11-21T09:16:03.276390Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:03.290655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.290687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.291755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3044, node 1 2024-11-21T09:16:03.374572Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.374590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.374592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.374626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63952 TClient is connected to server localhost:63952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.500886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.507634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.519355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
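The NOT_FOUND warnings for the default resource pool, followed by the ESchemeOpCreateResourcePool suboperation and the "Scheduled retry ... doublechecking" message, indicate that the workload service creates the default pool on first use, so these warnings are transient during test startup. For reference, a hedged sketch of creating a pool explicitly, assuming the CREATE RESOURCE POOL syntax of recent YDB releases (pool name and limits are illustrative, not taken from the log):

    -- Assumed YQL syntax for recent YDB releases; name and values are illustrative only.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );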
2024-11-21T09:16:03.550038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.576721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.587021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.614794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869379782105:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.619856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.750087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.758111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.767372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.822510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.835760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.850524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.872282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869379782621:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.872343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.872396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658869379782627:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.873435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.877081Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T09:16:03.877145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658869379782629:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:04.045882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.086726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.180843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.210275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.272407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25174, MsgBus: 27820 2024-11-21T09:16:04.724588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658874229725964:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.724607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e80/r3tmp/tmpb3ojDp/pdisk_1.dat 2024-11-21T09:16:04.737207Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25174, node 2 2024-11-21T09:16:04.747051Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.747064Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.747065Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.747099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27820 TClient is connected to server localhost:27820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:04.799484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.802258Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:04.808513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.824565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.824616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.825724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node ... VICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658892008637621:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.922735Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.928318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.939247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.961405Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.971608Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.980677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.987733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.005112Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658896303605412:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.005135Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.005236Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658896303605417:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.006063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:09.009120Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658896303605419:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:09.211453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.248830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.284343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.315943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.360516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28081, MsgBus: 16061 2024-11-21T09:16:09.731647Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658898178395182:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:09.733440Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e80/r3tmp/tmp7NVDFA/pdisk_1.dat 2024-11-21T09:16:09.749784Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28081, node 6 2024-11-21T09:16:09.760084Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:09.760097Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:09.760099Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:09.760133Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16061 TClient is connected to server localhost:16061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:16:09.836842Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.836877Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:09.838604Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.838931Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.840914Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:09.842777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.858141Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.880007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.892445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.093501Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658902473363893:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.093530Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.099940Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.110126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.121732Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.136595Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.149966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.164522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.182043Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658902473364384:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.182087Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.182123Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658902473364389:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.183042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.190672Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658902473364391:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:10.377607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpNewEngine::InShardsWrite >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled >> KqpNewEngine::BlindWrite >> KqpSort::TopSortParameter |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-short_polling-fifo] [GOOD] >> KqpRanges::NullInKey [GOOD] >> KqpRanges::LiteralOrCompisite >> KqpNewEngine::DqSourceLimit [GOOD] >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> KqpNotNullColumns::UpsertNotNullPk >> KqpRanges::UpdateMulti [GOOD] >> KqpRanges::UpdateWhereInBigLiteralList >> KqpReturning::ReturningTypes [GOOD] >> KqpSort::OffsetTopSort [GOOD] >> KqpRanges::UpdateWhereInNoFullScan >> KqpNewEngine::ContainerRegistryCombiner ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLimit [GOOD] Test command err: Trying to start YDB, gRPC: 21450, MsgBus: 10391 2024-11-21T09:16:05.959473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658878653675107:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:05.959608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dba/r3tmp/tmpPyNVi5/pdisk_1.dat 2024-11-21T09:16:06.030905Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21450, node 1 2024-11-21T09:16:06.048769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:06.048779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:06.048781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:06.048822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:06.058509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:06.058536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:06.059551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10391 TClient is connected to server localhost:10391 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:06.102448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.105113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:06.112818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.175664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.197999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.212672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.299026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658882948643802:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.299053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.339831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.347819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.355197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.362279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.369711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.383931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.401530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658882948644309:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.401575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.401628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658882948644315:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.402441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:06.411079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658882948644317:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 19764, MsgBus: 6513 2024-11-21T09:16:06.826703Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658882603172040:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:06.826789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dba/r3tmp/tmpVvmzjq/pdisk_1.dat 2024-11-21T09:16:06.839138Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19764, node 2 2024-11-21T09:16:06.850440Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:06.850453Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:06.850458Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:06.850488Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6513 TClient is connected to server localhost:6513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:06.926912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:06.926939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:06.927942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:06.929655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.931644Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:06.937371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.949115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:06.972335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.987551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.184524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658886898140746:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.184557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.933643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.944004Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.944036Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:09.945240Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.993094Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.013713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.025355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.196624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901771663544:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.196653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.200068Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.210905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.268015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.276751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.289985Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.303672Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.320509Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901771664048:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.320554Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.320557Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901771664053:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.321500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.330995Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658901771664055:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 11658, MsgBus: 18672 2024-11-21T09:16:10.781544Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658901989302538:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:10.781564Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002dba/r3tmp/tmp8jV7PV/pdisk_1.dat 2024-11-21T09:16:10.807216Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11658, node 6 2024-11-21T09:16:10.833973Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:10.833989Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:10.833991Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:10.834034Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18672 2024-11-21T09:16:10.881530Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:10.881557Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:10.882646Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:10.907359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.916711Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.938900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:11.002513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.016404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.137733Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658906284271374:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.137763Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.144555Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.152583Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.165487Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.178584Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.193263Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.207752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.225345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658906284271888:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.225378Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658906284271893:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.225377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.226119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.234069Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658906284271895:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 19734, MsgBus: 7357 2024-11-21T09:16:05.812331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658879020854597:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:05.812435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e14/r3tmp/tmpYQOCQ7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19734, node 1 2024-11-21T09:16:05.880199Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:05.890269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:05.890281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:05.890283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:05.890315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7357 2024-11-21T09:16:05.914603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:05.914623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:05.915697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:05.957811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:05.961386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:05.969425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:06.031745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.095309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.112791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.147478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883315823311:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.147508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.190548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.199191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.208344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.215536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.229509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.244639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.260145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883315823811:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.260179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.260193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883315823816:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.260976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:06.271370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658883315823818:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 5572, MsgBus: 19106 2024-11-21T09:16:06.881772Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658884842620333:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:06.881821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e14/r3tmp/tmpTAHZMR/pdisk_1.dat 2024-11-21T09:16:06.895709Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5572, node 2 2024-11-21T09:16:06.904835Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:06.904846Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:06.904848Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:06.904877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19106 TClient is connected to server localhost:19106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:06.981340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:06.981376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:06.982984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:06.985489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:06.990534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.011731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:07.036811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.053273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.252074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658889137589031:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.252094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.260806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... T09:16:10.140934Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.142006Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:10.144475Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:10.145925Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.158773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.183027Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.193567Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.349213Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658900629839032:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.349232Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.354866Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.361941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.374292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.387493Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.402576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.418407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.431376Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658900629839536:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.431423Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658900629839541:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.431428Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.432160Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.435467Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658900629839543:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 8999, MsgBus: 10724 2024-11-21T09:16:10.999383Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658902735577218:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:10.999577Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e14/r3tmp/tmppC5JB8/pdisk_1.dat 2024-11-21T09:16:11.013477Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8999, node 6 2024-11-21T09:16:11.023755Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.023767Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.023769Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.023810Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10724 TClient is connected to server localhost:10724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:11.099818Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.099857Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.100952Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:11.102662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:11.116282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.137766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:11.169553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.182562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.373459Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658907030546072:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.373491Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.381481Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.390078Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.407209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.417929Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.432428Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.445920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.462784Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658907030546572:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.462811Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.462861Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658907030546577:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.463719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.471662Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658907030546579:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 25967, MsgBus: 24858 2024-11-21T09:16:03.172456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658870454609842:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:03.172487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e60/r3tmp/tmpKCgRix/pdisk_1.dat 2024-11-21T09:16:03.290879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:03.290906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:03.293760Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:03.294443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25967, node 1 2024-11-21T09:16:03.374263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:03.374280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:03.374281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:03.374320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24858 TClient is connected to server localhost:24858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.524769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.532772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.545110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:03.622235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:03.641424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.652445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:03.684770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870454611161:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.684814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.750028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.758490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.766544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.779269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.786946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.842550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.851385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870454611679:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.851415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.851560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658870454611684:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:03.852295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:03.855873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658870454611686:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 13762, MsgBus: 8793 2024-11-21T09:16:04.468344Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658874779406652:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.470775Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e60/r3tmp/tmpUMQSVe/pdisk_1.dat 2024-11-21T09:16:04.483375Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13762, node 2 2024-11-21T09:16:04.492793Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:04.492810Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:04.492812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:04.492854Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8793 TClient is connected to server localhost:8793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:04.571563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:04.571594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:04.572042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.572671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:04.573966Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:04.582521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:04.595690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:04.617103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:04.638014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:04.79 ... RN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:09.881144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.893374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:09.921487Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:09.933865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.112461Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658902522854064:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.112501Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.119508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.129586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.143739Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.156891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.171416Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.187052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.202573Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658902522854565:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.202605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.202650Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658902522854570:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.203318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.213698Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658902522854572:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:10.429227Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 5421, MsgBus: 17809 2024-11-21T09:16:10.745658Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658901633870052:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002e60/r3tmp/tmpKREC2I/pdisk_1.dat 2024-11-21T09:16:10.747328Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:10.760800Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5421, node 6 2024-11-21T09:16:10.769558Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:10.769573Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:10.769575Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:10.769638Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17809 TClient is connected to server localhost:17809 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:10.844910Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:10.844954Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:10.846382Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:10.849062Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.850947Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:10.857294Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:10.869021Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.894703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.908017Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.132281Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658905928838760:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.132314Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.137750Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.148693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.157718Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.172509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.186669Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.201030Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.217454Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658905928839252:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.217482Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.217539Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658905928839257:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.218216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.226482Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658905928839259:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> KqpSort::TopSortParameter [GOOD] >> KqpSort::TopSortExprPk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSolomonWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:49.762316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:49.762339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.762344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:49.762350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:49.762356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:49.762360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:49.762369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.762447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:49.774155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:49.774182Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.777215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:49.777354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:49.777399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:49.781082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:49.781206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:49.781323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.781624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.782802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.783161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.783172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.783187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:49.783193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.783200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:49.783250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:49.784756Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.801020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:49.801113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.801180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:49.801242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:49.801251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.802086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.802114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:49.802168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.802178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:49.802183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T09:15:49.802188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:49.802633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.802645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:49.802650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:49.802998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.803007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.803013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.803020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.803596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:49.804008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:49.804053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:49.804274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.804301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:49.804308Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.804380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:49.804387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.804417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:49.804430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.806441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.806458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.806512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.806518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:49.806607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.806616Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:49.806629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:49.806634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.806640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:49.806647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.806653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:49.806658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:49.806672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:49.806679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:49.806683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
hId: 3] was 3 2024-11-21T09:16:11.701869Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:16:11.701872Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:16:11.701878Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:11.701881Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:16:11.701884Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:16:11.701888Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:11.701892Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:16:11.701894Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:16:11.701912Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 5 2024-11-21T09:16:11.701917Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T09:16:11.701921Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:11.701924Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T09:16:11.701928Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T09:16:11.701934Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T09:16:11.701938Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2024-11-21T09:16:11.702725Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.702758Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.702764Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:11.702770Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:11.702776Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:11.703067Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703080Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703084Z 
node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:11.703088Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:16:11.703092Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:11.703193Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703203Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703206Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:11.703210Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T09:16:11.703213Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:11.703710Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703733Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703738Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:11.703744Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:16:11.703752Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:16:11.703880Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703892Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.703896Z node 86 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:11.703900Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2024-11-21T09:16:11.703904Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T09:16:11.703913Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T09:16:11.706666Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.706697Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.706709Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.706732Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:11.706745Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:16:11.707909Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:16:11.707922Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:16:11.707998Z node 86 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:16:11.708019Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:11.708024Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [86:426:2399] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:11.708095Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:11.708137Z node 86 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 52us result status StatusSuccess 2024-11-21T09:16:11.708226Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSolomonVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SolomonVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SolomonDescription { Name: "z" PathId: 6 PartitionCount: 2 Partitions { PartitionId: 0 TabletId: 72075186233409546 ShardIdx: 1 } Partitions { PartitionId: 1 TabletId: 72075186233409547 ShardIdx: 2 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T09:16:11.708290Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:11.708309Z node 86 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 22us result status StatusPathDoesNotExist 2024-11-21T09:16:11.708329Z node 86 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpNewEngine::Aggregate >> KqpRanges::IsNotNullSecondComponent >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNull >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> KqpNewEngine::DeleteByKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 24628, MsgBus: 9237 2024-11-21T09:16:06.431769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658883488289884:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:06.431829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d9d/r3tmp/tmpJ7ZSF3/pdisk_1.dat 2024-11-21T09:16:06.493568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24628, node 1 2024-11-21T09:16:06.511147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:06.511161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:06.511163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2024-11-21T09:16:06.511195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9237 2024-11-21T09:16:06.535428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:06.535447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:06.536962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:06.563473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:06.793706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883488290282:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.793746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.828667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:06.896001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883488290382:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.896031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.896136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658883488290387:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:06.897139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:06.899933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658883488290389:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 62891, MsgBus: 27717 2024-11-21T09:16:07.343901Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658886563726990:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:07.346098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d9d/r3tmp/tmpw6VsbH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62891, node 2 2024-11-21T09:16:07.368747Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:07.376000Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:07.376010Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:07.376011Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:07.376043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27717 TClient is connected to server localhost:27717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:07.446666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:07.446716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:07.447069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.447928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:07.461060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.470843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:07.491072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.503672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.668471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658886563728385:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.668488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.674670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.729867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.741482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.748273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.762912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.776858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.795019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658886563728892:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.795049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.795142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658886563728897:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.795770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... 9:16:10.590632Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:10.593594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.596685Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:10.607209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.617520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.638213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.649369Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.872516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901685465759:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.872544Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.880483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.887791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.899259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.915318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.928610Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.942168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.963675Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901685466252:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.963701Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658901685466257:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.963701Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.964364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.967427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658901685466259:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12713, MsgBus: 10877 2024-11-21T09:16:11.399977Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658903666666205:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d9d/r3tmp/tmpIIL3wm/pdisk_1.dat 2024-11-21T09:16:11.404165Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:11.413835Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12713, node 6 2024-11-21T09:16:11.423866Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.423883Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.423885Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.423921Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10877 TClient is connected to server localhost:10877 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:11.497343Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.497377Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.499950Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:11.501664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.504991Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:11.518823Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.544109Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.557312Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.754619Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658903666667606:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.754671Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.761573Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.769982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.781056Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.838502Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.850988Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.865813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.883142Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658903666668110:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.883185Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.883403Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658903666668115:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.884356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.892373Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658903666668117:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpNewEngine::DeleteWithInputMultiConsumption >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::ScanKeyPrefix >> KqpRanges::UpdateWhereInNoFullScan [GOOD] >> KqpRanges::UpdateWhereInWithNull >> KqpNewEngine::Join [GOOD] >> KqpNewEngine::ItemsLimit >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> KqpAgg::AggWithLookup >> KqpNewEngine::PkSelect1 |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> KqpSqlIn::TupleLiteral [GOOD] >> KqpSqlIn::TupleSelect >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> KqpNewEngine::JoinIdxLookup >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::DeferredEffects >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNotNullInValue >> KqpRanges::ScanKeyPrefix [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> KqpNewEngine::Update >> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots [GOOD] >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 22367, MsgBus: 9930 2024-11-21T09:16:07.344296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658889950770799:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:07.344334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d7a/r3tmp/tmpoLBJMt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 
22367, node 1 2024-11-21T09:16:07.416366Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:07.432429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:07.432446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:07.432447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:07.432496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:07.443037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:07.443065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:07.444258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9930 TClient is connected to server localhost:9930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:07.493088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.495736Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:07.505268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.521129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.547423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.558290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:07.687752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658889950772193:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.687782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.728594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.737213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.751251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.763419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.778401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.793923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:07.809498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658889950772703:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.809527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.809568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658889950772708:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:07.810265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:07.821755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658889950772710:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:08.018302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.056353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.104516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.143398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.225062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22955, MsgBus: 18795 2024-11-21T09:16:08.646651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658891922761967:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d7a/r3tmp/tmpACGdy8/pdisk_1.dat 2024-11-21T09:16:08.649608Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:08.655606Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22955, node 2 2024-11-21T09:16:08.666269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:08.666289Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:08.666291Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:08.666335Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18795 TClient is connected to server localhost:18795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:08.745580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:08.745628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:08.748683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:08.751393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.760953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.777000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.837430Z no ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.246688Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658910731552792:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.246728Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.253504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.261711Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.274034Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.284486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.299090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.313256Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.333489Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658910731553292:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.333523Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.333592Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658910731553297:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.334421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:12.339903Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658910731553299:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:12.531753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.582356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.619479Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.651830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.711170Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25981, MsgBus: 27018 2024-11-21T09:16:13.024170Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658913655858583:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.025800Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d7a/r3tmp/tmpBLTH6V/pdisk_1.dat 2024-11-21T09:16:13.039296Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25981, node 6 2024-11-21T09:16:13.055436Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.055451Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.055453Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.055499Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27018 2024-11-21T09:16:13.124358Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.124395Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.127295Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:13.134547Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.141297Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.154046Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.181868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.195443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.392796Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658913655859987:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.392835Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.404992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.412296Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.426768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.443277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.454139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.468233Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.492021Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658913655860477:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.492094Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.492464Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658913655860484:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.493339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.495725Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T09:16:13.495830Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658913655860486:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteWithInputMultiConsumption [GOOD] Test command err: Trying to start YDB, gRPC: 13288, MsgBus: 24007 2024-11-21T09:16:07.894122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658887192746375:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:07.894230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d50/r3tmp/tmpz4e11L/pdisk_1.dat 2024-11-21T09:16:07.971421Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13288, node 1 2024-11-21T09:16:07.992421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:07.992443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:07.992445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:07.992489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:07.996565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:07.996609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:07.997592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24007 TClient is connected to server localhost:24007 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:08.064607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.067786Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:08.079428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:08.144503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.169267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.185357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:08.271821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658891487715204:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.271867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.318773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.375120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.387236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.402677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.413667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.428224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:08.444170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658891487715721:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.444197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.444312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658891487715726:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:08.445077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:08.448544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T09:16:08.448614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658891487715728:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 2194, MsgBus: 12132 2024-11-21T09:16:08.997302Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658894083934497:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:09.000804Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d50/r3tmp/tmpdFN4eB/pdisk_1.dat 2024-11-21T09:16:09.025110Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2194, node 2 2024-11-21T09:16:09.036595Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:09.036614Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:09.036617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:09.036666Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12132 TClient is connected to server localhost:12132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:09.100497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:09.100535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:09.100824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.101859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:09.102012Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:09.110028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.122725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:09.145320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.157344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:09.342070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658898378903213:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:09.342094Z node 2 :KQP_WORKLOAD_SERVIC ... eState: Disconnected -> Connecting 2024-11-21T09:16:11.873174Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.873654Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:11.875189Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:11.887420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.904970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:11.925182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.940577Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.124584Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658908758734192:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.124613Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.129306Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.141216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.153989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.166350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.182767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.193896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.211736Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658908758734694:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.211768Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.211962Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658908758734699:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.213168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:12.221446Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658908758734701:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17374, MsgBus: 3847 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d50/r3tmp/tmpddipiF/pdisk_1.dat 2024-11-21T09:16:12.952424Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:12.953466Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17374, node 6 2024-11-21T09:16:12.961883Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.961898Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.961900Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.961935Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3847 TClient is connected to server localhost:3847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.037317Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.037360Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.038443Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.040104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.041310Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:13.051947Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.061143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.084980Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:13.100182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.294511Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658914175801688:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.294601Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.297869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.306054Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.365606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.376610Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.390764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.404890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.421014Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658914175802186:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.421056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658914175802191:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.421063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.421796Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.424258Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658914175802193:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortTableExprOffset >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] >> KqpNewEngine::PureExpr >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::DecimalColumn >> KqpNotNullColumns::UpdateNotNullPk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropNotNullColumnTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:53.780560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:53.780585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:53.780590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:53.780595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:53.780601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:53.780605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:53.780614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:53.780697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:53.797470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:53.797491Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:53.799599Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Complete 2024-11-21T09:15:53.799727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:53.799768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:53.802495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:53.802592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:53.802683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:53.802901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:53.803618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:53.803885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:53.803897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:53.803910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:53.803917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:53.803923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:53.803962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:53.805237Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:53.822858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:53.822921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.822970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:53.823042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:53.823049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.823662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2024-11-21T09:15:53.823683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:53.823728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.823735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:53.823739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:53.823742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:53.824307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.824324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:53.824330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:53.824792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.824808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.824814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:53.824821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:53.825502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:53.825961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:53.826008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:53.826185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:53.826214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:53.826221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:53.826287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:53.826294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:15:53.826315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:53.826327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:53.826898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:53.826914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:53.826952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:53.826958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:53.827036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:53.827044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:53.827056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:53.827061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:53.827067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:53.827074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:53.827079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:53.827082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:53.827097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:53.827103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:53.827107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
CHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:14.196937Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:14.196963Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:16:14.197006Z node 78 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:14.197012Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [78:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:16:14.197017Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [78:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:16:14.197073Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:16:14.197082Z node 78 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:14.197099Z node 78 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:16:14.197104Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:16:14.197111Z node 78 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T09:16:14.197274Z node 78 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.197288Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.197293Z node 78 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:16:14.197298Z node 78 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T09:16:14.197303Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:14.197464Z node 78 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.197477Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.197481Z node 78 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:16:14.197485Z node 78 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 
18446744073709551615 2024-11-21T09:16:14.197489Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:16:14.197501Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T09:16:14.198325Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:16:14.198342Z node 78 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:14.198420Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:14.198450Z node 78 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:16:14.198455Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:16:14.198461Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T09:16:14.198466Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:16:14.198472Z node 78 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:16:14.198476Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:16:14.198497Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:14.198822Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.198905Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.200203Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 335007451427 } TabletId: 72075186233409546 State: 4 2024-11-21T09:16:14.200246Z node 78 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:16:14.200639Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:14.200728Z node 78 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:14.201295Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:14.201353Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T09:16:14.201530Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:14.201535Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 
2024-11-21T09:16:14.201548Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:14.202165Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:14.202182Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:14.202374Z node 78 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:16:14.202435Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:16:14.202442Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:16:14.202503Z node 78 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:16:14.202522Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:16:14.202528Z node 78 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [78:500:2475] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409556 is deleted 2024-11-21T09:16:14.202584Z node 78 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:16:14.202598Z node 78 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409556 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409556 2024-11-21T09:16:14.202681Z node 78 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:14.202734Z node 78 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 63us result status StatusSuccess 2024-11-21T09:16:14.202820Z node 78 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:14.202886Z node 78 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/TestNotNullTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:14.202908Z node 78 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/TestNotNullTable" took 25us result status StatusPathDoesNotExist 2024-11-21T09:16:14.202930Z node 78 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/TestNotNullTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirB/TestNotNullTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue >> KqpNewEngine::DuplicatedResults >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex >> KqpNewEngine::Update [GOOD] >> KqpNewEngine::UpdateFromParams >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:54.901640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:54.901664Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:54.901671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:54.901676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:54.901682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:54.901685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:54.901703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:54.901783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:54.914734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:54.914756Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:54.917259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:54.917372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:54.917425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:54.922553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:54.922678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:54.922780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.923013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:54.923822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.924140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:54.924154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.924168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:54.924175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:54.924181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:54.924242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:54.925575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:54.944806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:54.944897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.944957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:54.945033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:54.945042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.945675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.945705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:54.945757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.945767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:54.945772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:54.945777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:54.946254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.946268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:54.946273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:54.946735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.946747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.946754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.946762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.947402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:54.947841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:54.947890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:54.948086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.948113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:54.948133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.948202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:54.948231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.948270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:54.948283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:54.948815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:54.948843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:54.948882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.948888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:54.948970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.948977Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:54.948989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:54.948994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.949000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:54.949006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.949011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:54.949015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:54.949028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:54.949034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T09:15:54.949038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 4] was 4 Forgetting tablet 72075186233409548 2024-11-21T09:16:14.611250Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:14.611287Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:14.611327Z node 74 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2024-11-21T09:16:14.611360Z node 74 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:14.611730Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:14.611759Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 Forgetting tablet 72075186233409546 2024-11-21T09:16:14.612090Z node 74 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612099Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612102Z node 74 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:16:14.612105Z node 74 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2024-11-21T09:16:14.612108Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2024-11-21T09:16:14.612118Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 Forgetting tablet 72075186233409547 2024-11-21T09:16:14.612168Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:14.612188Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:16:14.612377Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:14.612385Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612394Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T09:16:14.612398Z node 74 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612402Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:14.612405Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612408Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:16:14.612410Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612413Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:16:14.612416Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612419Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:14.612421Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:14.612425Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:14.612445Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612618Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612750Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612764Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.612771Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.613234Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.613254Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:14.613262Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2024-11-21T09:16:14.613288Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:14.613295Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T09:16:14.613302Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:14.613348Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:14.613354Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 
72075186233409547 2024-11-21T09:16:14.613549Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 6 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:16:14.613604Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:16:14.613611Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:16:14.613660Z node 74 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:16:14.613672Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:16:14.613676Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [74:601:2556] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T09:16:14.613727Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:16:14.613735Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:16:14.613740Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T09:16:14.613745Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T09:16:14.613749Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T09:16:14.613754Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T09:16:14.613759Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T09:16:14.613764Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T09:16:14.613770Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T09:16:14.613776Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T09:16:14.613844Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:14.613869Z node 74 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status StatusSuccess 2024-11-21T09:16:14.613936Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" 
PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::TopSortResults >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::JoinPure >> KqpKv::ReadRows_SpecificKey >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:52.843136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:52.843163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.843169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:52.843174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:52.843181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:52.843185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:52.843195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.843289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:52.855197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:52.855224Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.857734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:52.857855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:52.857904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:52.861256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:52.861379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:52.861479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.861776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.863224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.863561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.863577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.863590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:52.863598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.863604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:52.863667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:52.865472Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.884724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:52.884800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.884855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:52.884922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:52.884930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.888700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.888742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:52.888806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.888821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:52.888826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:52.888831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:52.889524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.889543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:52.889549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:52.890000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.890012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.890019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.890028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.890752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:52.891295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:52.891353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:52.891572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.891605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:52.891614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.891691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:52.891700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.891727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:52.891741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.892331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.892346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.892392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.892399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:52.892493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.892501Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:52.892516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:52.892520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.892526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:52.892532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.892537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:52.892541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:52.892553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:52.892559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T09:15:52.892563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... sg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:15.234399Z node 87 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:15.234403Z node 87 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T09:16:15.234407Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:16:15.234418Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:16:15.234936Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 272 } } 2024-11-21T09:16:15.234951Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:15.234972Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 272 } } 2024-11-21T09:16:15.234985Z node 87 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 272 } } 2024-11-21T09:16:15.235443Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:16:15.235455Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:15.235473Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:16:15.235479Z node 87 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:16:15.235488Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:16:15.235498Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, 
txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:15.235502Z node 87 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:15.235507Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:16:15.235513Z node 87 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T09:16:15.235744Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:15.235758Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:15.236134Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:15.236160Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:15.236249Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:16:15.236261Z node 87 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:16:15.236272Z node 87 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:16:15.236276Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:15.236282Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:16:15.236287Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:16:15.236292Z node 87 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:16:15.236296Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:16:15.236314Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1002 2024-11-21T09:16:15.237051Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:16:15.237061Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2024-11-21T09:16:15.237075Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:16:15.237079Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:16:15.237144Z node 87 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:16:15.237162Z node 87 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:16:15.237171Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:16:15.237175Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [87:424:2399] 2024-11-21T09:16:15.237191Z node 87 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:15.237194Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [87:424:2399] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:15.237257Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:15.237289Z node 87 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 42us result status StatusSuccess 2024-11-21T09:16:15.237369Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:15.237423Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/TestNotNullTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:15.237449Z node 87 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/TestNotNullTable" took 28us result status StatusSuccess 2024-11-21T09:16:15.237524Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/TestNotNullTable" PathDescription { Self { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TestNotNullTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: true 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UnionAllPure >> KqpNewEngine::MultiOutput >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::UpdateWhereInFullScan >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 1807, MsgBus: 20585 2024-11-21T09:16:10.044307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658898989260038:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d3e/r3tmp/tmpYqlQoh/pdisk_1.dat 2024-11-21T09:16:10.081704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2024-11-21T09:16:10.099843Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1807, node 1 2024-11-21T09:16:10.120064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:10.120076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:10.120078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:10.120120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:10.138139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:10.138162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:10.139233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20585 TClient is connected to server localhost:20585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:10.187096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.195899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.258244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.277160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.286326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.371472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658898989261420:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.371504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.409514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.416605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.429231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.435948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.443361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.450113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.458907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658898989261924:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.458930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.458935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658898989261929:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.459583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.463991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658898989261931:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:10.653835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.664568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.675637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 15161, MsgBus: 19419 2024-11-21T09:16:11.115220Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658905743005685:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:11.115455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d3e/r3tmp/tmpCqoAWN/pdisk_1.dat 2024-11-21T09:16:11.132001Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15161, node 2 2024-11-21T09:16:11.140194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.140221Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.140223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.140256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19419 TClient is connected to server localhost:19419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:11.215357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.215383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.216535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:11.220708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.224400Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemesh ... 
EMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.153961Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.169299Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658918380134959:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.169321Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.169341Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658918380134964:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.169945Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:14.173714Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439658918380134966:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:14.408362Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.415880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.428708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 24751, MsgBus: 5030 2024-11-21T09:16:15.114522Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658923163300809:2073];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:15.115527Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d3e/r3tmp/tmppo4dW5/pdisk_1.dat 2024-11-21T09:16:15.137015Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24751, node 5 2024-11-21T09:16:15.146012Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.146022Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.146023Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.146054Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5030 TClient is connected to server localhost:5030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.214861Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.214903Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.215777Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:15.220131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.228757Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:15.232084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:15.243486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.271909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.295527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.492286Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923163302346:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.492325Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.496271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.505386Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.520158Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.533994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.546936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.561041Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.576924Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923163302850:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.576957Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.576978Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923163302855:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.577759Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.580550Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658923163302857:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:15.786770Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.801220Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.813537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FullScanCount >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2024-11-21T09:15:59.716798Z :FallbackToSingleDb INFO: Random seed for debugging is 1732180559716777 2024-11-21T09:15:59.818272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658853262373142:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:15:59.818469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:15:59.828907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658854619229147:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:15:59.829141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:15:59.864614Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:15:59.864614Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00329d/r3tmp/tmpzNlucS/pdisk_1.dat 2024-11-21T09:15:59.916570Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:15:59.920628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:15:59.920653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:15:59.923200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16294, node 1 2024-11-21T09:15:59.968563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00329d/r3tmp/yandexHPHdBc.tmp 2024-11-21T09:15:59.968586Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00329d/r3tmp/yandexHPHdBc.tmp 2024-11-21T09:15:59.968656Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00329d/r3tmp/yandexHPHdBc.tmp 2024-11-21T09:15:59.968699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:15:59.969217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:15:59.969254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:15:59.976749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:15:59.977331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:00.016462Z INFO: TTestServer started on Port 25611 GrpcPort 16294 
TClient is connected to server localhost:25611 PQClient connected to localhost:16294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:00.049076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:00.076898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:00.092386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:00.175533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658857557341335:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.175562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.175789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658857557341348:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.178265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T09:16:00.179105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658857557341382:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.179125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.185460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658857557341350:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T09:16:00.330759Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658858914196747:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:00.330906Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTI3MWJjNjQtODJkYjc3N2YtNWI3ZjE5MTktNzg4ODUwMmU=, ActorId: [2:7439658858914196706:2277], ActorState: ExecuteState, TraceId: 01jd703j9qdws1ra1qpfc6938z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:00.331169Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658857557341442:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:00.332882Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM4OGU3NjAtMWRmMWE5Y2UtZmUyNDQxNDYtZGNiYjk4MGM=, ActorId: [1:7439658857557341333:2299], ActorState: ExecuteState, TraceId: 01jd703j9f7m3gywfwa01rerdr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:00.333020Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:00.333094Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:00.358768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:00.430898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:00.505970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16294", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T09:16:00.555741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd703jmd7b87d7x9g42h7mb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI2ZmMyOGItNzA4ZWQyM2EtOTAyNTZlOWUtNmUwYjI0ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439658857557341820:2923] 2024-11-21T09:16:04.818760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439658853262373142:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.818819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:04.832357Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439658854619229147:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.832440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:16:06.605746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhos ... opic AND SourceId == $SourceId; 2024-11-21T09:16:14.964012Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:16:14.964013Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:16:14.964018Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:16:14.964451Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:16:14.982300Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T09:16:14.982368Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439658918577378301:2468] connected; active server actors: 1 2024-11-21T09:16:14.982382Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T09:16:14.982389Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T09:16:14.982475Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439658918577378301:2468] disconnected; active server actors: 1 2024-11-21T09:16:14.982485Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439658918577378301:2468] disconnected 
no session 2024-11-21T09:16:15.000064Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:16:15.000088Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:16:15.000091Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439658918577378269:2468] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T09:16:15.000103Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:16:15.000598Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:16:15.000619Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439658918577378318:2468], now have 1 active actors on pipe 2024-11-21T09:16:15.000680Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T09:16:15.000780Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:16:15.000793Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:16:15.000842Z node 4 :PERSQUEUE INFO: new Cookie src|2300774f-7566ab1c-76a8f073-611feae_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T09:16:15.000898Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T09:16:15.000920Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:16:15.001178Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:16:15.001182Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:16:15.001212Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:16:15.001321Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|2300774f-7566ab1c-76a8f073-611feae_0 2024-11-21T09:16:15.004526Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732180575004 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:15.004588Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|2300774f-7566ab1c-76a8f073-611feae_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T09:16:15.004820Z :INFO: [] MessageGroupId [src] SessionId [src|2300774f-7566ab1c-76a8f073-611feae_0] Write session: close. 
Timeout = 0 ms 2024-11-21T09:16:15.004827Z :INFO: [] MessageGroupId [src] SessionId [src|2300774f-7566ab1c-76a8f073-611feae_0] Write session will now close 2024-11-21T09:16:15.004833Z :DEBUG: [] MessageGroupId [src] SessionId [src|2300774f-7566ab1c-76a8f073-611feae_0] Write session: aborting 2024-11-21T09:16:15.004965Z :INFO: [] MessageGroupId [src] SessionId [src|2300774f-7566ab1c-76a8f073-611feae_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:16:15.004970Z :DEBUG: [] MessageGroupId [src] SessionId [src|2300774f-7566ab1c-76a8f073-611feae_0] Write session: destroy 2024-11-21T09:16:15.005196Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|2300774f-7566ab1c-76a8f073-611feae_0 grpc read done: success: 0 data: 2024-11-21T09:16:15.005208Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2300774f-7566ab1c-76a8f073-611feae_0 grpc read failed 2024-11-21T09:16:15.005216Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2300774f-7566ab1c-76a8f073-611feae_0 grpc closed 2024-11-21T09:16:15.005222Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2300774f-7566ab1c-76a8f073-611feae_0 is DEAD 2024-11-21T09:16:15.005409Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:16:15.005655Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:16:15.005671Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439658918577378318:2468] destroyed 2024-11-21T09:16:15.005691Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. PORTS 24784 3328 Session was created >>> Ready to answer: ok 2024-11-21T09:16:16.052744Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2024-11-21T09:16:16.052784Z :INFO: [/Root] [] [67d084b0-c45e4212-b0e51c45-d1f73e73] Open read subsessions to databases: { name: , endpoint: localhost:3328, path: /Root } 2024-11-21T09:16:16.052864Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Starting read session 2024-11-21T09:16:16.052869Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Starting single session 2024-11-21T09:16:16.053126Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T09:16:16.053132Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T09:16:16.053138Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] Reconnecting session to cluster in 0.000000s 2024-11-21T09:16:16.053187Z :ERROR: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3328
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3328. 2024-11-21T09:16:16.053195Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T09:16:16.053198Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T09:16:16.053214Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:3328" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3328
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3328. " } 2024-11-21T09:16:16.053308Z :NOTICE: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:16:16.053315Z :DEBUG: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:3328" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3328
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3328. " } 2024-11-21T09:16:16.053337Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Closing read session. Close timeout: 0.010000s 2024-11-21T09:16:16.053346Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:16.053354Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:16.053360Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:16.053362Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:16.053364Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:16.053368Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:16.053370Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:16.053374Z :INFO: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:16.053378Z :NOTICE: [/Root] [/Root] [2c69ab39-b4d2a3c-d5069e25-c8551da4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:55.105915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:55.105942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:55.105947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:55.105953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:55.105959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:55.105963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:55.105973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:55.106050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:55.118142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:55.118166Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:55.120855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:55.120942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:55.120985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:55.125507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:55.125631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:55.125753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:55.126115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T09:15:55.127252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:55.127592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:55.127603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:55.127616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:55.127623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:55.127629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:55.127677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:55.129177Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:55.147481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:55.147575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.147642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:55.147715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:55.147725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.148613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:55.148645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:55.148713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.148725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:55.148729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:55.148735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:55.149329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.149355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:55.149364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:55.149955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.149974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.149982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:55.149992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:55.150821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:55.151385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:55.151468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:55.151706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:55.151741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:55.151749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:55.151820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:55.151827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:55.151857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:55.151869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:55.152548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:55.152565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:55.152614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T09:15:55.152619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:55.152714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:55.152722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:55.152734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:55.152738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:55.152744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:55.152749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:55.152754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:55.152758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:55.152772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:55.152778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:55.152782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... r pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:16.411301Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:16:16.411303Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:16:16.411307Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:16.411310Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:16:16.411313Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:16:16.411316Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:16.411319Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:16:16.411322Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:16:16.411339Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 5 2024-11-21T09:16:16.411343Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T09:16:16.411347Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:16.411349Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T09:16:16.411352Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T09:16:16.411355Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T09:16:16.411358Z 
node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T09:16:16.411837Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.411856Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.411861Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:16.411865Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:16.411873Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:16.412020Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412032Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412036Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:16.412039Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:16:16.412043Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:16.412426Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412445Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412451Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:16.412456Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T09:16:16.412463Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:16.412546Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412555Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, 
cookie: 1003 2024-11-21T09:16:16.412559Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:16.412562Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:16:16.412565Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:16:16.412632Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412640Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.412644Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:16.412647Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T09:16:16.412653Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T09:16:16.412662Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T09:16:16.413309Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.413338Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.413349Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.413411Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:16.413708Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:16:16.413777Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:16:16.413784Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:16:16.413842Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:16:16.413860Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:16.413865Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:448:2417] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:16.413933Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T09:16:16.413971Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 49us result status StatusSuccess 2024-11-21T09:16:16.414061Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 6 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 6 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:16.414112Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:16.414135Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 26us result status StatusPathDoesNotExist 2024-11-21T09:16:16.414155Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> KqpNewEngine::StreamLookupWithView >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::LocksSingleShard >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] >> KqpRanges::IsNotNullInJsonValue2 [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpSort::TopSortResults [GOOD] >> KqpNewEngine::PkRangeSelect3 
[GOOD] >> KqpNewEngine::PkRangeSelect4 >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 >> KqpNewEngine::ComplexLookupLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] Test command err: Trying to start YDB, gRPC: 61791, MsgBus: 18604 2024-11-21T09:16:11.422500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658906226863068:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:11.422575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ccc/r3tmp/tmpyQ4lgh/pdisk_1.dat 2024-11-21T09:16:11.497012Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61791, node 1 2024-11-21T09:16:11.520087Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.520102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.520105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.520144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18604 TClient is connected to server localhost:18604 2024-11-21T09:16:11.565192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.565222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.566375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:11.585201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.588757Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:11.597549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.667934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.694391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.707966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.807863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906226864469:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.807892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.848257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.856812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.865664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.879588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.893053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.908808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.925505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906226864984:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.925543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.925632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906226864989:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.926443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.934094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658906226864991:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 25823, MsgBus: 19491 2024-11-21T09:16:12.515497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658909508664496:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.519776Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ccc/r3tmp/tmpLFZcix/pdisk_1.dat 2024-11-21T09:16:12.530132Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25823, node 2 2024-11-21T09:16:12.541694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.541708Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.541710Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.541748Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19491 TClient is connected to server localhost:19491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.617647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.617678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.617951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.619145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:12.624781Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:12.635057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.644900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:12.666284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.678555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.874535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658909508665910:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.874573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not fou ... SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.607467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.615782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.637173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:15.647025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.828386Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923872810361:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.828412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.833679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.843339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.855182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.869490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.886769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.896812Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.912762Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923872810862:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.912790Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.912879Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658923872810867:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.913561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.917465Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T09:16:15.917558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658923872810869:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 18500, MsgBus: 16918 2024-11-21T09:16:16.433762Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658926080091543:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:16.433780Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ccc/r3tmp/tmpNNfUyo/pdisk_1.dat 2024-11-21T09:16:16.448889Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18500, node 6 2024-11-21T09:16:16.458120Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.458137Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.458139Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.458180Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16918 TClient is connected to server localhost:16918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.534085Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:16.534122Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:16.535203Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:16.540763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.548653Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.559201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:16.579843Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.639030Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.800515Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658926080093113:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.800556Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.803599Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.815188Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.832129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.852603Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.870048Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.882369Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.904448Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658926080093611:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.904485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.904660Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658926080093616:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.905629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.909184Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T09:16:16.909361Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658926080093618:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopSortResults [GOOD] Test command err: Trying to start YDB, gRPC: 31516, MsgBus: 19111 2024-11-21T09:16:11.421826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658906767548130:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:11.421902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cae/r3tmp/tmp2jdal2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31516, node 1 2024-11-21T09:16:11.503640Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:11.516419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.516431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.516432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.516465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:11.523688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.523718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.524837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19111 TClient is connected to server localhost:19111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:11.576404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.578640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:11.585291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:11.600501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.618215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.629549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.792908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906767549562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.792936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.838892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.849622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.857357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.912387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.920225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.936927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.952468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906767550077:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.952498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.952598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906767550083:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.953460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.955020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658906767550085:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 32581, MsgBus: 13642 2024-11-21T09:16:12.513298Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658907852024279:2126];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cae/r3tmp/tmpHGbpIA/pdisk_1.dat 2024-11-21T09:16:12.515986Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:12.527471Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32581, node 2 2024-11-21T09:16:12.540163Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.540179Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.540182Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.540275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13642 TClient is connected to server localhost:13642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.613376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.613411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.614365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:12.616159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.617852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:12.627486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.638917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:12.662885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.724903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.830488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658907852025743:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.830514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not fou ... f is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.655660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.671061Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.694265Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.707002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.885578Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658917728105904:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.885619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.891270Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.900511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.909975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.924438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.938840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.951754Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.028862Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658922023073727:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.028890Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.028930Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658922023073732:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.029748Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.032113Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439658922023073734:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12181, MsgBus: 5819 2024-11-21T09:16:15.684411Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658921815532959:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:15.684491Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cae/r3tmp/tmpqU2C0L/pdisk_1.dat 2024-11-21T09:16:15.699947Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12181, node 5 2024-11-21T09:16:15.710378Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.710392Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.710395Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.710445Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5819 TClient is connected to server localhost:5819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.760846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.762593Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:15.770024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.784521Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.784556Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.785715Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:15.837335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:15.877548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.890874Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.036990Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926110501782:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.037044Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.039676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.047169Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.058392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.074335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.086662Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.142407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.161796Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926110502295:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.161823Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.161947Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926110502300:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.162627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.168863Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658926110502302:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:16.373780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpRanges::UpdateWhereInFullScan [GOOD] >> KqpRanges::ValidatePredicates >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> KqpExtractPredicateLookup::OverflowLookup >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> KqpNewEngine::FullScanCount [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:54.437026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:54.437071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:54.437078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:54.437083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:54.437090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:54.437094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:54.437104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:54.437200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:54.449520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:54.449542Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:54.451854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T09:15:54.451963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:54.452006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:54.454782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:54.454878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:54.454979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.455199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:54.456137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.456453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:54.456466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.456478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:54.456486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:54.456492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:54.456531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:54.457881Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:54.477182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:54.477278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.477350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:54.477422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:54.477432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.478620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.478651Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:54.478722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.478736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:54.478741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:54.478746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:54.479239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.479254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:54.479259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:54.479673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.479684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.479691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.479699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.480419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:54.480989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:54.481088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:54.481321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:54.481358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:54.481367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.481442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:54.481452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:54.481499Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:54.481513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:54.482256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:54.482274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:54.482325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:54.482331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:54.482421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:54.482428Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:54.482442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:54.482446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.482452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:54.482457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:54.482463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:54.482466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:54.482481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:54.482487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:54.482491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:16:17.251836Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:4, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:17.251840Z node 88 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T09:16:17.251844Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:4, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:16:17.251850Z node 88 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:4 129 -> 240 2024-11-21T09:16:17.252012Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252025Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252029Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:17.252033Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 5 2024-11-21T09:16:17.252037Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T09:16:17.252474Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252495Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252500Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:17.252504Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 5 2024-11-21T09:16:17.252509Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2024-11-21T09:16:17.252590Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252601Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.252604Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:16:17.252608Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 
11], version: 3 2024-11-21T09:16:17.252612Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2024-11-21T09:16:17.252620Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: true 2024-11-21T09:16:17.253643Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.253681Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T09:16:17.253698Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.253712Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.253724Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T09:16:17.253787Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.253821Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T09:16:17.253828Z node 88 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:4 ProgressState 2024-11-21T09:16:17.253841Z node 88 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:4 progress is 5/5 2024-11-21T09:16:17.253846Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T09:16:17.253852Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: true 2024-11-21T09:16:17.253857Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T09:16:17.253864Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:16:17.253868Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:16:17.253881Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T09:16:17.253888Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:16:17.253891Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:16:17.253896Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T09:16:17.253899Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:16:17.253901Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:16:17.253905Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T09:16:17.253909Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:16:17.253911Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:16:17.253916Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] 
was 2 2024-11-21T09:16:17.253919Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:4 2024-11-21T09:16:17.253922Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:4 2024-11-21T09:16:17.253935Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2024-11-21T09:16:17.254005Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:16:17.254022Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:16:17.254604Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:16:17.254614Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:16:17.254679Z node 88 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:16:17.254700Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:16:17.254705Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [88:418:2393] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:16:17.254776Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:17.254826Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 63us result status StatusSuccess 2024-11-21T09:16:17.254920Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_name" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:17.254985Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:17.255008Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 25us result status StatusPathDoesNotExist 2024-11-21T09:16:17.255030Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 62842, MsgBus: 25338 2024-11-21T09:16:12.139999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658908809769127:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.140321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b37/r3tmp/tmpryOcjQ/pdisk_1.dat 2024-11-21T09:16:12.204865Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62842, node 1 2024-11-21T09:16:12.216391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.216405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.216407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.216449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25338 2024-11-21T09:16:12.240395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.240426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.241344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25338 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:12.277924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.488973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658908809769722:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.489001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.526328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.591951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658908809769822:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.591978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.592049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658908809769827:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.592751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:12.594603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658908809769829:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:12.789908Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 21862, MsgBus: 65198 2024-11-21T09:16:13.010673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658912323752983:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.010702Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:13.022269Z node 2 :IMPORT WARN: Table profiles were not loaded test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b37/r3tmp/tmpb2pyzn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21862, node 2 2024-11-21T09:16:13.042556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.042569Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.042570Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.042607Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65198 TClient is connected to server localhost:65198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.114182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.114208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.114432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.115223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.117714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.129726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:13.149487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.163888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.348606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658912323754529:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.348630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.354541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.363748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.371838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.384073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.397694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.411613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.425294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658912323755030:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.425313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.425400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658912323755035:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.426110Z node 2 :FLAT_TX_SCHEMESHARD WAR ... sed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.928383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.949343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.960357Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.201236Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658928371664355:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.201257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.207498Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.262868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.323817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.346525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.359624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.373364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.392747Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658928371664873:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.392778Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.392784Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658928371664878:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.393587Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.399902Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658928371664880:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:16.584063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22033, MsgBus: 3188 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b37/r3tmp/tmpZc9i1w/pdisk_1.dat 2024-11-21T09:16:16.948313Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658924766513229:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:16.950668Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:16.957638Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22033, node 6 2024-11-21T09:16:16.972707Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.972725Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.972728Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.972768Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3188 TClient is connected to server localhost:3188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.028304Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.030787Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.037748Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:17.046407Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.046448Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.050694Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.095653Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.118492Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:17.175775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.289330Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658929061481910:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.289362Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.300604Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.312330Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.327236Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.341622Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.361872Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.375761Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.392967Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658929061482414:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.393000Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658929061482419:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.393003Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.393894Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.401967Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658929061482421:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:17.594394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> KqpNewEngine::MultiSelect >> KqpSort::UnionAllSortLimit >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateOnNotNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 9630, MsgBus: 23466 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c25/r3tmp/tmpj5qVr8/pdisk_1.dat 2024-11-21T09:16:11.879618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 9630, node 1 2024-11-21T09:16:11.903223Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:11.923958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.923969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.923971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.924012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:11.936468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.936507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.940669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23466 TClient is connected to server localhost:23466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:11.991493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:11.994251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:12.237934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658907972024085:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.237967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.275932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.303043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658907972024183:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.303092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658907972024188:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.303092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.303761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:12.305726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658907972024190:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:12.397006Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658907972024279:2324], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2024-11-21T09:16:12.397446Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNmMGI4ZDQtMWM5YmRiNjktODIxNDgzN2UtYWJmNmU1Mzg=, ActorId: [1:7439658907972024067:2297], ActorState: ExecuteState, TraceId: 01jd703y7910kk05f4tfsrhmhe, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T09:16:12.400767Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658907972024288:2328], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:12.400877Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNmMGI4ZDQtMWM5YmRiNjktODIxNDgzN2UtYWJmNmU1Mzg=, ActorId: [1:7439658907972024067:2297], ActorState: ExecuteState, TraceId: 01jd703y7e3dh8rcxp4b0vmhcw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 22355, MsgBus: 18478 2024-11-21T09:16:12.754316Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658910734933421:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.756978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c25/r3tmp/tmpQG4lb2/pdisk_1.dat 2024-11-21T09:16:12.780526Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22355, node 2 2024-11-21T09:16:12.793837Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.793854Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.793856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.793897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18478 TClient is connected to server localhost:18478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.856876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.856914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.857164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.858089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.098186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658915029901162:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.098238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.100196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.112730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658915029901261:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.112754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658915029901266:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.112773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.113445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.116355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658915029901268:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:13.210831Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658915029901357:2324], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: A ... : 2024-11-21T09:16:15.018701Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7439658924622982762:2455], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:73: Error: At function: KiUpdateTable!
:1:73: Error: Can't set NULL or optional value to not null column: fk, code: 2031 2024-11-21T09:16:15.018791Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGM1NDM0YjMtOTJlOWZjNTctYjc0MWYwZGQtYWJmMDkxZWY=, ActorId: [4:7439658920328015019:2355], ActorState: ExecuteState, TraceId: 01jd7040s6dbs07fg6s1462chk, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 21047, MsgBus: 6020 2024-11-21T09:16:15.393622Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658922138520672:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:15.397828Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c25/r3tmp/tmpODDoHh/pdisk_1.dat 2024-11-21T09:16:15.408462Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21047, node 5 2024-11-21T09:16:15.424934Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.424956Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.424959Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.425018Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6020 TClient is connected to server localhost:6020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.493703Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.493735Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:16:15.494483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.494905Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:15.734159Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658922138521094:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.734210Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.734415Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658922138521129:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.735111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.737085Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:16:15.737189Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658922138521131:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:16:15.810634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.692200Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70427v2zsry07tsk7rnkqg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzI4MmVmZGMtYzE0ZWFmNS05OTI1ZDctYmYwNDVhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:16.692313Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MzI4MmVmZGMtYzE0ZWFmNS05OTI1ZDctYmYwNDVhNTg=, ActorId: [5:7439658926433489246:2487], ActorState: ExecuteState, TraceId: 01jd70427v2zsry07tsk7rnkqg, Create QueryResponse for error on request, msg: 2024-11-21T09:16:16.871354Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7042dr1czthznx92y9qp1f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OWIyZTBhZjEtMmU3MjM5ODAtNjdjMzBiMTMtNTAyNjEwZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:16.871441Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OWIyZTBhZjEtMmU3MjM5ODAtNjdjMzBiMTMtNTAyNjEwZGU=, ActorId: [5:7439658926433489311:2509], ActorState: ExecuteState, TraceId: 01jd7042dr1czthznx92y9qp1f, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15832, MsgBus: 22743 2024-11-21T09:16:17.132564Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658933218407206:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:17.132940Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002c25/r3tmp/tmpSEBXrq/pdisk_1.dat 2024-11-21T09:16:17.162034Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15832, node 6 2024-11-21T09:16:17.183348Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.183365Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.183367Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.183420Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22743 TClient is connected to server localhost:22743 2024-11-21T09:16:17.239237Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.239272Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.240302Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.249017Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.252895Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.512534Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658933218407807:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.512563Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.516628Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.534744Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658933218407952:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.534767Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.534905Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658933218407957:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.535587Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.540815Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658933218407959:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 25203, MsgBus: 63098 2024-11-21T09:16:11.287537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658906672792021:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:11.287679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cf0/r3tmp/tmp42TknY/pdisk_1.dat 2024-11-21T09:16:11.346369Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25203, node 1 2024-11-21T09:16:11.364489Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.364509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.364512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.364555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63098 2024-11-21T09:16:11.385097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.385141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.386203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:11.430242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.435960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.465724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
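[Editor's note] On the KqpNotNullColumns compile errors reported earlier in this log (code 2029 "Missing key column in input: Key", code 2031 "Failed to convert 'Key': Null to Uint64" and "Can't set NULL or optional value to not null column: fk"): the actual test queries are not printed here, so the YQL below is only a minimal sketch of statements that produce this class of error. Only the table path /Root/TestUpsertNotNullPk appears in the log; the table shape, the TestUpdateNotNull table, and the column names are assumptions for illustration.

    -- Assumed table shape for /Root/TestUpsertNotNullPk (not taken from the log).
    CREATE TABLE TestUpsertNotNullPk (
        Key Uint64 NOT NULL,
        Value String,
        PRIMARY KEY (Key)
    );

    -- Code 2029: the NOT NULL key column is missing from the input row.
    UPSERT INTO TestUpsertNotNullPk (Value) VALUES ("foo");

    -- Code 2031: NULL cannot be converted to the non-optional Uint64 key.
    UPSERT INTO TestUpsertNotNullPk (Key, Value) VALUES (NULL, "bar");

    -- Code 2031 in the UpdateTable_* variants: assigning NULL to a hypothetical
    -- NOT NULL non-key column named fk in a hypothetical table.
    UPDATE TestUpdateNotNull SET fk = NULL WHERE Key = 1;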
2024-11-21T09:16:11.492619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.504964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.644702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906672793413:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.644735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.699049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.708293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.717390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.724345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.780180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.790056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.804415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906672793931:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.804451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.804450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658906672793936:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:11.805332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:11.815078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658906672793938:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 10119, MsgBus: 8616 2024-11-21T09:16:12.385825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658911588962112:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cf0/r3tmp/tmpQDm57D/pdisk_1.dat 2024-11-21T09:16:12.389605Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:12.400030Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10119, node 2 2024-11-21T09:16:12.407663Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.407677Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.407680Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.407719Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8616 TClient is connected to server localhost:8616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.486913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.486944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.488147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:12.488514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.493029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:12.496799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.507210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:12.534377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.546816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.717468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658911588963505:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.717506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.721056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... elf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.171857Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.345686Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926986361815:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.345712Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.351741Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.360478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.373263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.387057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.400545Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.414756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.432081Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926986362315:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.432113Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.432116Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658926986362320:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.432746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.442248Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658926986362322:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9276, MsgBus: 8037 2024-11-21T09:16:16.934815Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658926580858936:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cf0/r3tmp/tmpjBzhng/pdisk_1.dat 2024-11-21T09:16:16.940687Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:16.952379Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9276, node 6 2024-11-21T09:16:16.958330Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.958345Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.958347Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.958392Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8037 TClient is connected to server localhost:8037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.034043Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.034073Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.035121Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.040803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.044687Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.051363Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.060816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:17.082170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.093197Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.292564Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658930875827645:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.292593Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.296057Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.304945Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.363405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.377866Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.392024Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.402943Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.417781Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658930875828148:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.417808Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.417900Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658930875828153:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.418984Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.421607Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658930875828155:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
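The warning above recommends enabling AnsiInForEmptyOrNullableItemsCollections. A minimal YQL sketch of how that pragma might be applied in a query (the table and column names are hypothetical illustrations, not taken from this test run):

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    -- With the pragma enabled, IN over an empty or nullable collection follows
    -- ANSI SQL three-valued logic instead of the default behaviour the warning
    -- refers to, so NULL items no longer produce surprising matches/misses.
    SELECT Key, Value
    FROM `/Root/SampleTable`             -- hypothetical table
    WHERE MaybeValue IN (1, 2, NULL);    -- nullable column and NULL item: the case the warning flags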
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] >> KqpNewEngine::PruneWritePartitions [GOOD] >> KqpNewEngine::PruneEffectPartitions >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 12572, MsgBus: 14580 2024-11-21T09:16:12.534792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658910522682926:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.534894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b0b/r3tmp/tmpAA7nsQ/pdisk_1.dat 2024-11-21T09:16:12.611603Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12572, node 1 2024-11-21T09:16:12.630569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.630580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.630582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.630619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:12.636400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.636430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.637525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14580 TClient is connected to server localhost:14580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:12.688494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.701475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:12.725342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.747469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.757017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.881561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658910522684338:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.881590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.918333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.924766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.935285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.942355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.949455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.955997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.017573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658914817652149:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.017609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.017617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658914817652154:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.018323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.020313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658914817652156:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12854, MsgBus: 14969 2024-11-21T09:16:13.434647Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658912925536667:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b0b/r3tmp/tmpKjNhSw/pdisk_1.dat 2024-11-21T09:16:13.441599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:13.445830Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12854, node 2 2024-11-21T09:16:13.455422Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.455431Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.455432Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.455468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14969 TClient is connected to server localhost:14969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.535533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.535554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.535828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.536639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.539271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.553394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:13.573416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.593481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.772469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658912925538055:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.772504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.775346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.784144Z node 2 :FLAT_TX_SCHEMESH ... opose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.718796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.781199Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.797367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.972588Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658927241098974:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.972615Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.978792Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.988841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.003030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.022804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.032025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.045929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.064202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658931536066773:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.064244Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.064382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658931536066778:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.065216Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.071718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658931536066780:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:17.313010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14714, MsgBus: 20454 2024-11-21T09:16:17.741274Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658930691204165:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:17.741322Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b0b/r3tmp/tmplaYAzT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14714, node 6 2024-11-21T09:16:17.764973Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.764989Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.764991Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.765026Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:17.765125Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20454 TClient is connected to server localhost:20454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.843422Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.843456Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.844240Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.844519Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:17.848407Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.859587Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:17.869295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.892251Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:17.905446Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.098950Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934986173025:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.098985Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.103826Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.111205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.122657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.137832Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.150853Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.164632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.180946Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934986173527:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.180977Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.181159Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934986173532:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.182013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.184524Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658934986173534:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:18.372183Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpNotNullColumns::SelectNotNullColumns >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::MultiEffects >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::DqSourceSequentialLimit >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiStatement >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] >> KqpRanges::MergeRanges >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> KqpNewEngine::PrecomputeKey [GOOD] >> KqpNewEngine::PruneEffectPartitions [GOOD] >> KqpNewEngine::PrimaryView |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> KqpNewEngine::JoinMultiConsumer [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] Test command err: Trying to start YDB, gRPC: 24332, MsgBus: 64758 2024-11-21T09:16:12.651685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658909911697151:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.652628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ade/r3tmp/tmpt6pqBi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24332, node 1 2024-11-21T09:16:12.732944Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:12.747119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.747131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.747132Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.747166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:12.751565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.751590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.752712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:64758 TClient is connected to server localhost:64758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.793712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.804930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.822539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.842747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:12.854974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.988479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658909911698669:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.988516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.028569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.038278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.048710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.061638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.076183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.090722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.106655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658914206666477:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.106702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.106789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658914206666482:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.107581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.116737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658914206666484:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:13.324113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.411797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.448007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.485725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.599065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10196, MsgBus: 31334 2024-11-21T09:16:13.943795Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658912076174693:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.943838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ade/r3tmp/tmpCqHkNq/pdisk_1.dat 2024-11-21T09:16:13.956819Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10196, node 2 2024-11-21T09:16:13.968187Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.968201Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.968202Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.968254Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31334 TClient is connected to server localhost:31334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:14.043836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.043867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.044970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:14.046110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.048425Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:14.060932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.074802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.097604Z ... 97 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.484477Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.485247Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:17.486867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.493443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.505146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.529589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.540138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.728283Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658931010036782:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.728337Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.731377Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.743676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.751502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.757899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.765841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.828646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.841264Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658931010037290:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.841293Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.841391Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658931010037295:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.842048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.848590Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658931010037297:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27816, MsgBus: 11338 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ade/r3tmp/tmpjisKW3/pdisk_1.dat 2024-11-21T09:16:18.324307Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:18.332427Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27816, node 6 2024-11-21T09:16:18.336821Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.336832Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.336834Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.336872Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11338 TClient is connected to server localhost:11338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.410182Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.410229Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.411251Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:18.413020Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.414952Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.418609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.433344Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.458891Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:18.472428Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.632863Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934383350078:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.632947Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.635544Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.643133Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.655193Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.711487Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.727149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.739496Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.807952Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934383350595:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.807976Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.808126Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658934383350600:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.808948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.811346Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658934383350602:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> KqpMergeCn::SortBy_Int32 >> TSolomonReboots::CreateDropSolomonWithReboots [GOOD] >> KqpNotNullColumns::AlterAddIndex [GOOD] >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::UpdateNotNull >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpRanges::DateKeyPredicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrecomputeKey [GOOD] Test command err: Trying to start YDB, gRPC: 17982, MsgBus: 10569 2024-11-21T09:16:13.603414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658912260757126:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.603485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abb/r3tmp/tmp4ttphF/pdisk_1.dat 2024-11-21T09:16:13.675258Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17982, node 1 2024-11-21T09:16:13.696100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.696115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.696117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.696158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:13.704633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.704661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.705884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10569 TClient is connected to server localhost:10569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.749709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:13.756061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.777933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.803683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.817629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.980296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658912260758531:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.980330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.024927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.032757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.042352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.048681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.063528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.078569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.097345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658916555726335:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.097370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.097410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658916555726340:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.098231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:14.104141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658916555726342:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 28973, MsgBus: 12935 2024-11-21T09:16:14.776363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658918262801028:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:14.776576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abb/r3tmp/tmpWorvhk/pdisk_1.dat 2024-11-21T09:16:14.792262Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28973, node 2 2024-11-21T09:16:14.800953Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.800968Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.800969Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.801006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12935 TClient is connected to server localhost:12935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:14.876702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.876743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.877945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:14.881442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.892595Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:14.908885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.926319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:14.949322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.009536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.088954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658922557769852:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.090503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.091157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.685822Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.699264Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.722023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.757364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.780556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.923810Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658929391206883:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.923839Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.928298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.935623Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.953856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.962861Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.980516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.992796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.006094Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658933686174682:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.006117Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.006126Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658933686174687:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.006782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.009466Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658933686174689:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27127, MsgBus: 25689 2024-11-21T09:16:18.528271Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658936704359576:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.528333Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abb/r3tmp/tmpS7einc/pdisk_1.dat 2024-11-21T09:16:18.540942Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27127, node 6 2024-11-21T09:16:18.551521Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.551539Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.551540Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.551604Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25689 TClient is connected to server localhost:25689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.628643Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.628677Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.629691Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:18.631421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.632684Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.643209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:18.659210Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...2024-11-21T09:16:18.693166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.706225Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.881232Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658936704361136:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.881272Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.888651Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.897935Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.907252Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.921116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.935347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.991585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.008883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940999328945:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.008883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940999328940:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.008896Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.009583Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.020320Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658940999328947:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinMultiConsumer [GOOD] Test command err: Trying to start YDB, gRPC: 25333, MsgBus: 16887 2024-11-21T09:16:13.792586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658914128601242:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.792661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a7d/r3tmp/tmpHmhrP7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25333, node 1 2024-11-21T09:16:13.853135Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:13.861110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.861123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.861124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.861155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16887 2024-11-21T09:16:13.892128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.892165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.893267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.930258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.941767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:14.010249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.030115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.040292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.128486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918423569932:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.128514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.167148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.178944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.188723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.202343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.218877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.230953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.250183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918423570448:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.250224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.250456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918423570453:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.251214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:14.257940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658918423570455:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21365, MsgBus: 62746 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a7d/r3tmp/tmpy9XITk/pdisk_1.dat 2024-11-21T09:16:14.702689Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:14.708456Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21365, node 2 2024-11-21T09:16:14.718933Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.718947Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.718949Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.718985Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62746 TClient is connected to server localhost:62746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:14.791382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.791431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.792438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:14.794805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.808656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.819180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:14.842599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.902577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.064980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658922511421308:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.065007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.071093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.077870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.091985Z no ... 97 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.848293Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.848792Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.850060Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.851595Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.857240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.918177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.937176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.950187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.059597Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936334896018:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.059621Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.062398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.069166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.080896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.097596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.108748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.123454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.141661Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936334896531:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.141685Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.141773Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936334896536:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.142576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.150370Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658936334896538:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 15903, MsgBus: 15914 2024-11-21T09:16:18.677791Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a7d/r3tmp/tmppVzJXq/pdisk_1.dat 2024-11-21T09:16:18.698200Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15903, node 6 2024-11-21T09:16:18.721077Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.721092Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.721094Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.721132Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15914 2024-11-21T09:16:18.771461Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.771489Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.772671Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.790162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.805705Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:18.868922Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.889503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:18.899794Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.020236Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658941249889499:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.020262Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.026448Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.035234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.046509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.060721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.074942Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.089261Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.104915Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658941249890003:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.104947Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.105050Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658941249890008:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.105753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.108182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658941249890010:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] Test command err: Trying to start YDB, gRPC: 17318, MsgBus: 20861 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ada/r3tmp/tmptvlyki/pdisk_1.dat 2024-11-21T09:16:13.546703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:13.549010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17318, node 1 2024-11-21T09:16:13.589529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.589541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.589543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.589580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:13.601497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.601525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.602711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20861 TClient is connected to server localhost:20861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.638599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.642675Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:13.651501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.714046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:13.737279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:13.750634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.854998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658913346599887:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.855034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.897661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.907473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.915806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.923339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.936763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.952665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.963787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658913346600402:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.963815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.963952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658913346600407:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:13.964751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:13.970787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658913346600409:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:14.162473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.270116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180574313, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 29530, MsgBus: 21722 2024-11-21T09:16:14.585081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658917088376458:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ada/r3tmp/tmp5qyAZ8/pdisk_1.dat 2024-11-21T09:16:14.591809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:14.595415Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29530, node 2 2024-11-21T09:16:14.607090Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.607103Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.607105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.607148Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21722 TClient is connected to server localhost:21722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:14.687083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.687113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.687485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.688138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:16:14.694342Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:14.705263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.718300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.734993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.750279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.913968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658917088377851:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09: ... 594046644480 waiting... 2024-11-21T09:16:17.913552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.926055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.132504Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934161497456:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.132533Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.137069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.145359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.158041Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.171673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.179048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.193584Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.208412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934161497956:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.208439Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.208480Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934161497961:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.209188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.212889Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658934161497963:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:18.414033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.522409Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180578562, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 23749, MsgBus: 16004 2024-11-21T09:16:18.694236Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658936152132441:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.694404Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ada/r3tmp/tmp6L46oi/pdisk_1.dat 2024-11-21T09:16:18.709905Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23749, node 6 2024-11-21T09:16:18.732741Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.732758Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.732760Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.732803Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16004 TClient is connected to server localhost:16004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.794784Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.794812Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.795856Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:18.797982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:18.799124Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.810216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.824799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.846430Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.861126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.035773Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940447101281:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.035944Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.039038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.045798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.053667Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.067716Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.074683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.088869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.104381Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940447101782:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.104410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.104480Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940447101787:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.105216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.109063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658940447101789:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:19.282290Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.416801Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180579458, txId: 281474976715673] shutting down >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> KqpNewEngine::PureTxMixedWithDeferred >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::MultiStatementMixPure >> KqpNewEngine::DqSourceLocksEffects >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::StaleRO_Immediate >> KqpRanges::MergeRanges [GOOD] >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::ComplexKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 19481, MsgBus: 30839 2024-11-21T09:16:14.899788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658918579622349:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:14.900016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a4a/r3tmp/tmpbIaQ35/pdisk_1.dat 2024-11-21T09:16:14.975909Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19481, node 1 2024-11-21T09:16:14.995997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.996009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.996011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.996046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:14.999977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.000007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.001227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30839 TClient is connected to server localhost:30839 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:15.060868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.289950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922874590241:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.289982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.322796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.347714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922874590339:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.347736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.347803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922874590344:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.348444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.350322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658922874590346:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:15.483364Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658922874590457:2330], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2024-11-21T09:16:15.483497Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmM5NzBjNjUtZGNlMGY2NGMtNGFmYWMxZWEtYTM5ODUzOGY=, ActorId: [1:7439658922874590223:2297], ActorState: ExecuteState, TraceId: 01jd70417qbhnx7ktgkpw85z76, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:15.487057Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658922874590466:2334], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2024-11-21T09:16:15.487172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmM5NzBjNjUtZGNlMGY2NGMtNGFmYWMxZWEtYTM5ODUzOGY=, ActorId: [1:7439658922874590223:2297], ActorState: ExecuteState, TraceId: 01jd70417w163fq4d3668rystp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 32447, MsgBus: 9761 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a4a/r3tmp/tmpLYFRsQ/pdisk_1.dat 2024-11-21T09:16:15.804435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:15.807043Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32447, node 2 2024-11-21T09:16:15.824708Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.824724Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.824726Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.824773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9761 TClient is connected to server localhost:9761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.895064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.895091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:16:15.895440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.896804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:15.897356Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:16.185448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658924904440911:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.185496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.188973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.201199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658924904441009:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.201222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.201244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658924904441014:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.201977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.204101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658924904441016:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:16.339653Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658924904441127:2330], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:76: Error: At function: KiUpdateTable!
:1:76: Error: Cannot update primary key column: Key 2024-11-21T09:16:16.340103Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjllZTgwNTMtZGU4NjMyZDktNTNiNmMyOTEtNjZmOGZmM2M=, ActorId: [2:7439658924904440908:2297], Act ... :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658934187641236:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.642503Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a4a/r3tmp/tmpmhy3Y5/pdisk_1.dat 2024-11-21T09:16:18.660475Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24695, node 5 2024-11-21T09:16:18.683624Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.683637Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.683638Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.683690Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25137 TClient is connected to server localhost:25137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.748073Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.748110Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.750832Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:18.751830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.763041Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.962318Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934187641840:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.962346Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.964304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.973194Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934187641938:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.973229Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.973273Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658934187641943:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.973995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.982938Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658934187641945:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:19.112270Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7439658938482609359:2333], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:19.112350Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MzllNWJjMTMtY2M3MmRiMGYtN2MyZDI4ZWYtYTNhNzVmMGY=, ActorId: [5:7439658934187641822:2297], ActorState: ExecuteState, TraceId: 01jd7044s6ff8j0by3ja3h1rdb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 31401, MsgBus: 1674 2024-11-21T09:16:19.393785Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a4a/r3tmp/tmp1wFruc/pdisk_1.dat 2024-11-21T09:16:19.421879Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31401, node 6 2024-11-21T09:16:19.434790Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.434802Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.434803Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.434830Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1674 TClient is connected to server localhost:1674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.492620Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.492653Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.493055Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.495421Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.495552Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.744915Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940380150544:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.744937Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.748289Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.760893Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940380150642:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.760923Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.761051Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658940380150647:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.761825Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.766739Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658940380150649:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:19.920725Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:19.922398Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658940380150768:2333], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2024-11-21T09:16:19.922464Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NjY5ZTA5ZDItNzVjYmRlZWItNmIzYWNlNGYtYTAxYjAxYg==, ActorId: [6:7439658940380150526:2297], ActorState: ExecuteState, TraceId: 01jd7045hw9rdvp4520vxfjcdy, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateDropSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:52.375294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:52.375317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.375323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:52.375328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:52.375334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:52.375338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:52.375346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:52.375415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:52.386771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:52.386799Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.389693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:52.389808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:52.389855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:52.393328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:52.393453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T09:15:52.393590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.393982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.395023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.395387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.395405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.395421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:52.395430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.395436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:52.395490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:52.397426Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:52.415617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:52.415693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.415757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:52.415814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:52.415821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.416779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.416840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:52.416923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.416937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T09:15:52.416941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:52.416947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:52.417698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.417720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:52.417727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:52.418240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.418252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.418258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.418265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.419016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:52.419492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:52.419553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:52.419786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:52.419811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:52.419817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.419881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:52.419888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:52.419920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:52.419934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:52.420346Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:52.420358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:52.420418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:52.420424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:52.420510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:52.420516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:52.420527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:52.420531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.420537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:52.420542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:52.420547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:52.420551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:52.420561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:52.420568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:52.420572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId#1004:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:16:19.764992Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Solomon type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 1004 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:19.764996Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:19.765021Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:19.765049Z node 106 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 130 2024-11-21T09:16:19.765072Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:19.765079Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:19.765220Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.765477Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:16:19.765774Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:19.765781Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:19.765810Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:19.765829Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:19.765834Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T09:16:19.765838Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:16:19.765891Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:16:19.765898Z node 106 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T09:16:19.765906Z node 106 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:16:19.765910Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:16:19.765915Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:16:19.765919Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:16:19.765924Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:16:19.765927Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 
2024-11-21T09:16:19.765956Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:19.765961Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T09:16:19.765967Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T09:16:19.765970Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T09:16:19.766054Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.766065Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.766069Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:16:19.766073Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:19.766076Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:19.766112Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.766118Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.766122Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:16:19.766125Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:16:19.766128Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:19.766134Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:16:19.766747Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:19.766758Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:19.766886Z node 106 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:19.766946Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 
2024-11-21T09:16:19.767016Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2024-11-21T09:16:19.767198Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.767220Z node 106 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:19.767307Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:19.767351Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:16:19.767509Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:16:19.767544Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:19.767550Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:19.767560Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:19.768147Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:19.768159Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:19.768188Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:19.768194Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:19.768250Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:16:19.768307Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:16:19.768314Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:16:19.768367Z node 106 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:16:19.768382Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:16:19.768386Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [106:446:2419] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T09:16:19.768436Z node 106 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:16:19.768447Z node 106 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted 
tabletId 72075186233409547 2024-11-21T09:16:19.768513Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:19.768536Z node 106 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 44us result status StatusPathDoesNotExist 2024-11-21T09:16:19.768569Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 20636, MsgBus: 64249 2024-11-21T09:16:14.236385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658916401607292:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:14.236482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a74/r3tmp/tmpjvIFir/pdisk_1.dat 2024-11-21T09:16:14.289318Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20636, node 1 2024-11-21T09:16:14.312130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.312146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.312148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.312186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64249 2024-11-21T09:16:14.337403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.337426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.338621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64249 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:14.377270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.379804Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:14.384470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.401387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.428574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.439906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.617086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658916401608685:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.617139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.657480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.665748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.679176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.692870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.707527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.720648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.736587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658916401609189:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.736623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.736774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658916401609194:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.737665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:14.740324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658916401609196:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 29115, MsgBus: 11106 2024-11-21T09:16:15.346796Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658924134905854:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a74/r3tmp/tmpiesoAD/pdisk_1.dat 2024-11-21T09:16:15.348961Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:15.356116Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29115, node 2 2024-11-21T09:16:15.366173Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.366185Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.366187Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.366236Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11106 TClient is connected to server localhost:11106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.448558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.448586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.448910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.449578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:15.450217Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:15.457338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:15.467467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.486884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.499085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.680602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658924134907242:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.680627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not fou ... node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.608885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936459536815:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.608908Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.612602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.621419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.636448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.647735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.662286Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.677282Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.703714Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936459537309:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.703794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.703929Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658936459537318:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.704779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.708161Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T09:16:18.708405Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658936459537320:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:18.922459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7973, MsgBus: 10448 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a74/r3tmp/tmpvbsWYw/pdisk_1.dat 2024-11-21T09:16:19.133949Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658939077127078:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:19.135599Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:19.158389Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7973, node 6 2024-11-21T09:16:19.184550Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.184576Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.184578Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.184621Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10448 TClient is connected to server localhost:10448 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:19.233485Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.233521Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.234511Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.236952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.240390Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.244091Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:19.301486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.330593Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.354829Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.494082Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658939077128456:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.494106Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.504855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.563361Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.571407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.586342Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.604063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.617073Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.632972Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658939077128971:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.633005Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.633071Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658939077128976:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.633752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.640606Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658939077128978:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:19.831110Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.850795Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:19.859591Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> KqpNewEngine::PrimaryView [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> LocalPartitionReader::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::MergeRanges [GOOD] Test command err: Trying to start YDB, gRPC: 22542, MsgBus: 16095 2024-11-21T09:16:10.394951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658900194537680:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:10.395140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1a/r3tmp/tmpI7uXbR/pdisk_1.dat 2024-11-21T09:16:10.456681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22542, node 1 2024-11-21T09:16:10.475831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:10.475843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:10.475845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:10.475893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:10.495452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:10.495575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16095 2024-11-21T09:16:10.496614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16095 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:10.533327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.561815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.627520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:10.647558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:10.657443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.728474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658900194539215:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.728505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.774758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.781752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.793803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.808313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.823160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.837725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:10.853371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658900194539731:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.853398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.853477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658900194539736:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:10.854170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:10.863187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658900194539738:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:11.071615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.160998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.189793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.213731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.254786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15876, MsgBus: 17839 2024-11-21T09:16:11.534180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658905599463695:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:11.534198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1a/r3tmp/tmptKmBVK/pdisk_1.dat 2024-11-21T09:16:11.553848Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15876, node 2 2024-11-21T09:16:11.567502Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:11.567513Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:11.567515Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:11.567560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17839 TClient is connected to server localhost:17839 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:11.634684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:11.634722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:11.635849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:11.640187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:11.649001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:11.660255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:11.682278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660 ... r: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.648804Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.653761Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.667858Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.685085Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.698064Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.711404Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.726859Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.744889Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439658936598293994:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.744920Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.745045Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439658936598293999:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.745775Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.752338Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439658936598294001:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:18.967985Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.031017Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180579073, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2024-11-21T09:16:19.075825Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180579122, txId: 281474976715675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ Trying to start YDB, gRPC: 8509, MsgBus: 16654 2024-11-21T09:16:19.437925Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439658941259778065:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:19.437950Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d1a/r3tmp/tmpLxdvSG/pdisk_1.dat 2024-11-21T09:16:19.464132Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8509, node 9 2024-11-21T09:16:19.471476Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.471489Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.471491Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.471534Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16654 TClient is connected to server localhost:16654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.539116Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.539149Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:16:19.545057Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.545472Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.546725Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.557532Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.569521Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.599539Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.617512Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.849431Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439658941259779627:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.849471Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.858849Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.867636Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.883587Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.895443Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.910039Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.922977Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.937820Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439658941259780127:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.937845Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439658941259780132:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.937851Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.938561Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.941392Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439658941259780134:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:20.130290Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.218309Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180580256, txId: 281474976715673] shutting down |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::LookupColumns >> KqpMergeCn::SortBy_Int32 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::ReadAfterWrite >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiUsagePrecompute ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 29984, MsgBus: 5464 2024-11-21T09:16:14.810338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658918682583078:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:14.810401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a61/r3tmp/tmpCFWODi/pdisk_1.dat 2024-11-21T09:16:14.863431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29984, node 1 2024-11-21T09:16:14.888010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.888033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.888035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.888078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5464 2024-11-21T09:16:14.910251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.910279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.911372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:14.980926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.987116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.061276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:15.081368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.095549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.180882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922977551908:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.180919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.220714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.235371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.245882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.259830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.275014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.288503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.307278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922977552422:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.307314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.307369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658922977552427:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.308410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.315245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658922977552429:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17153, MsgBus: 23539 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a61/r3tmp/tmpQKjoGo/pdisk_1.dat 2024-11-21T09:16:15.730418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:15.738411Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17153, node 2 2024-11-21T09:16:15.749432Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.749445Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.749446Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.749492Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23539 TClient is connected to server localhost:23539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:15.828407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.828438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.828745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.829963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:15.836709Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:15.845221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.858900Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:16:15.861481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:15.881426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.897479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.069762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658928424908945:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.069800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.075292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... Table, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.847947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.869160Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.885787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.071549Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658941126427225:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.071595Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.077336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.085005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.095853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.110063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.124247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.138755Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.157082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658941126427728:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.157109Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.157148Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658941126427733:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.157849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.165192Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658941126427735:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 62047, MsgBus: 5019 2024-11-21T09:16:19.664401Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658938200177312:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a61/r3tmp/tmpQU0A3V/pdisk_1.dat 2024-11-21T09:16:19.666296Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:19.677981Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62047, node 6 2024-11-21T09:16:19.686490Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.686501Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.686503Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.686552Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5019 TClient is connected to server localhost:5019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.764065Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.764100Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.765189Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.766936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.777333Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.787120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
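[Editor's note, not part of the test output.] The warnings above repeat a pattern: the workload service cannot find the `default` resource pool yet, the pool creator actor logs "Scheduled retry for error", and a later attempt succeeds once the creating transaction has settled. The sketch below is a minimal, generic illustration of that bounded retry-with-backoff idea; it is not YDB code, and every name in it is invented for the example.

```cpp
// Illustrative sketch only: generic bounded retry with exponential backoff,
// similar in spirit to the "Scheduled retry for error" behaviour in the log.
// Not YDB code; RetryWithBackoff and its parameters are made up for this example.
#include <algorithm>
#include <chrono>
#include <functional>
#include <thread>

// Calls `attempt` until it returns true or maxAttempts is exhausted,
// sleeping `delay` between tries and doubling it each time (capped at maxDelay).
bool RetryWithBackoff(const std::function<bool()>& attempt,
                      int maxAttempts = 5,
                      std::chrono::milliseconds delay = std::chrono::milliseconds(50),
                      std::chrono::milliseconds maxDelay = std::chrono::milliseconds(2000)) {
    for (int i = 0; i < maxAttempts; ++i) {
        if (attempt()) {
            return true;  // e.g. the resource pool finally resolved
        }
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, maxDelay);  // back off before the next try
    }
    return false;  // out of attempts; the caller decides whether this is fatal
}
```

A caller would wrap the "fetch pool info" step in the lambda passed to `RetryWithBackoff`, so transient NOT_FOUND results during startup are absorbed instead of failing the test immediately.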
2024-11-21T09:16:19.809780Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.825497Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.002892Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658942495146021:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.002919Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.008174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.014904Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.027077Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.040763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.054296Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.061594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.077825Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658942495146521:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.077854Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.077890Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658942495146526:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.078694Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.081450Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658942495146528:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:20.251815Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.261568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.271763Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpNewEngine::DqSourceLocksEffects [GOOD] >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> LocalPartitionReader::Booting [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 9525, MsgBus: 22287 2024-11-21T09:16:15.851228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658923737653025:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:15.851306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0029a5/r3tmp/tmpgZqO08/pdisk_1.dat 2024-11-21T09:16:15.914946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9525, node 1 2024-11-21T09:16:15.936420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.936437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.936438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.936476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22287 2024-11-21T09:16:15.955749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.955774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.957828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22287 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.000798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.007454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:16.219720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658928032620795:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.219755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.255389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 18156, MsgBus: 25657 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0029a5/r3tmp/tmp8DappJ/pdisk_1.dat 2024-11-21T09:16:16.546664Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:16.546756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 18156, node 2 2024-11-21T09:16:16.566555Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.566567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.566569Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.566621Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25657 TClient is connected to server localhost:25657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.629797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:16.629832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:16.630241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.630702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:16.866975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658928462788616:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.867022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.868396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.876702Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 27007, MsgBus: 21212 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0029a5/r3tmp/tmppW6ZYt/pdisk_1.dat 2024-11-21T09:16:17.276342Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 27007, node 3 2024-11-21T09:16:17.292451Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:17.301588Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.301602Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.301605Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.301644Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21212 TClient is connected to server localhost:21212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.361329Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.361355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.368531Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.368688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:17.585811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 7304, MsgBus: 31661 2024-11-21T09:16:17.791081Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658930154624945:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:17.792831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0029a5/r3tmp/tmpccS2vp/pdisk_1.dat 2024-11-21T09:16:17.811307Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7304, node 4 2024-11-21T09:16:17.840401Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.840416Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.840418Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.840456Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31661 TClient is connected to server localhost:31661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 Statu ... SHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.901713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.913014Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.114068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938067651777:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.114109Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.118878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.127851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.137475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.152631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.166405Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.180022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.196585Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938067652267:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.196627Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938067652272:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.196636Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.197381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.199833Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658938067652274:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:19.417476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.523032Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180579563, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 25828, MsgBus: 6486 2024-11-21T09:16:19.911404Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658941832659156:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0029a5/r3tmp/tmpKpJN9B/pdisk_1.dat 2024-11-21T09:16:19.914137Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:19.924167Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25828, node 6 2024-11-21T09:16:19.936867Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.936879Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.936881Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.936923Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6486 TClient is connected to server localhost:6486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:20.010932Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:20.010980Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:20.012010Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:20.013787Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.020720Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:20.031268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.053907Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.065129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.253547Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658946127627857:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.253577Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.323250Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.331332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.341598Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.348697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.363489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.379522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.439342Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658946127628376:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.439372Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.439471Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658946127628381:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.440202Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.446463Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658946127628383:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:20.640843Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.739543Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180580781, txId: 281474976715673] shutting down >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 9235, MsgBus: 26612 2024-11-21T09:16:15.276262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658920471062694:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:15.276297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a36/r3tmp/tmp98vCP4/pdisk_1.dat 2024-11-21T09:16:15.342595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9235, node 1 2024-11-21T09:16:15.370502Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:15.370532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:15.370534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:15.370574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:15.376064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:15.376091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:15.377197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26612 TClient is connected to server localhost:26612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
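[Editor's note, not part of the test output.] Each test block above starts a server, connects a client, and loops "WaitRootIsUp 'Root'..." until the scheme root answers, ending with "WaitRootIsUp 'Root' success." The sketch below shows the general deadline-based polling shape of such a readiness wait; it is not the actual TClient implementation, and all names are invented for the example.

```cpp
// Illustrative sketch only: poll a readiness check until it succeeds or a
// deadline passes, similar in spirit to the WaitRootIsUp loop in the log.
// Not YDB code; WaitUntilReady and its parameters are made up for this example.
#include <chrono>
#include <functional>
#include <thread>

bool WaitUntilReady(const std::function<bool()>& isReady,
                    std::chrono::seconds timeout = std::chrono::seconds(10),
                    std::chrono::milliseconds pollInterval = std::chrono::milliseconds(100)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (isReady()) {
            return true;   // e.g. the scheme root answered a describe request
        }
        std::this_thread::sleep_for(pollInterval);
    }
    return false;          // timed out; the caller reports the failure
}
```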
2024-11-21T09:16:15.441318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.443601Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:15.445228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.508148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.531820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.542810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:15.653574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658920471064233:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.653622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.695287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.702512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.715313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.730037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.743011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.757573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.774695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658920471064747:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.774731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.774829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658920471064752:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.775617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:15.784321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658920471064754:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1603, MsgBus: 29018 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a36/r3tmp/tmput6BDQ/pdisk_1.dat 2024-11-21T09:16:16.380374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:16.381221Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1603, node 2 2024-11-21T09:16:16.396664Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.396689Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.396692Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.396734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29018 TClient is connected to server localhost:29018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.471808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:16.471838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:16.472232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.473640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:16.474252Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:16.483238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:16.493308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.513962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:16.524751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.699160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658926332588430:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.699181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.703796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... 40500Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.549099Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:19.579412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.604058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.614657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.786015Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938198267610:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.786043Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.792133Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.799895Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.810077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.824180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.837548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.844836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.861391Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938198268111:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.861422Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658938198268116:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.861425Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.862063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.864681Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658938198268118:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21829, MsgBus: 25552 2024-11-21T09:16:20.378833Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658945633473944:2070];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a36/r3tmp/tmpeT1NBf/pdisk_1.dat 2024-11-21T09:16:20.385936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 21829, node 6 2024-11-21T09:16:20.396761Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:20.397585Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:20.397596Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:20.397600Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:20.397654Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25552 TClient is connected to server localhost:25552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:20.476613Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:20.476647Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:20.479258Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:20.481127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.488662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.545660Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:20.565744Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.576928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.730044Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658945633475460:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.730110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.737015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.747938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.806299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.818184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.834788Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.849013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.862549Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658945633475970:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.862571Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.862579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658945633475975:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.863213Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.865738Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658945633475977:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:21.127394Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YWJiMjY3ZDgtYzBhZWVjNzMtZWM5NzFlZGYtZTEzY2JmYTE=, ActorId: [6:7439658949928443571:2454], ActorState: ExecuteState, TraceId: 01jd7046qv2a1d3cq7vdswje1w, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`, code: 2001 |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> KqpNewEngine::LookupColumns [GOOD] >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::SimpleRange >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD] Test command err: Trying to start YDB, gRPC: 9086, MsgBus: 16312 2024-11-21T09:16:16.242143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658927310122619:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:16.242276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00285a/r3tmp/tmpPrjR8p/pdisk_1.dat 2024-11-21T09:16:16.296680Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9086, node 1 2024-11-21T09:16:16.315738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.315770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.315773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.315827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16312 2024-11-21T09:16:16.343164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:16.343194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:16.344259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16312 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.395533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.399329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.460668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.479697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.495385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.596687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927310124022:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.596711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.637563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.651479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.659314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.673635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.680466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.694811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.713816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927310124535:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.713855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.713996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927310124540:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.714843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.722053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658927310124542:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12418, MsgBus: 24203 2024-11-21T09:16:17.360379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658931748607239:2126];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00285a/r3tmp/tmpMDMILz/pdisk_1.dat 2024-11-21T09:16:17.364174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:17.374311Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12418, node 2 2024-11-21T09:16:17.384778Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.384802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.384804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.384847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24203 TClient is connected to server localhost:24203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.460260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.460291Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.461449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.463603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.471523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.481123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:17.503937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.515392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.713547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658931748608718:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.713576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.721218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.732698Z node 2 :FLAT_TX_SCHEMESHAR ... SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.292588Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:20.299691Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.313894Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.335491Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.346327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.534538Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943709566499:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.534561Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.542247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.549811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.558445Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.565532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.572340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.579281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.596713Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943709566992:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.596744Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.596832Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943709566997:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.597588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.606910Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658943709566999:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 25180, MsgBus: 15001 2024-11-21T09:16:21.120190Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658948123247739:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:21.120482Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00285a/r3tmp/tmpPF3vOw/pdisk_1.dat 2024-11-21T09:16:21.133829Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25180, node 6 2024-11-21T09:16:21.148377Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:21.148403Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:21.148405Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:21.148449Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15001 TClient is connected to server localhost:15001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:21.223557Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:21.223586Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:21.223953Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.224580Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:21.224995Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:21.231895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.248553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:21.268643Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:21.279560Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.484170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948123249294:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.484197Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.488171Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.498309Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.513266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.525117Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.538644Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.554306Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.574755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948123249785:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.574791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.574901Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948123249790:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.575895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:21.579294Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658948123249792:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> LocalPartitionReader::FeedSlowly >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] Test command err: Trying to start YDB, gRPC: 20939, MsgBus: 16680 2024-11-21T09:16:16.016257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658927655483655:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:16.016286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00287b/r3tmp/tmp4El04A/pdisk_1.dat 2024-11-21T09:16:16.077976Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20939, node 1 2024-11-21T09:16:16.102455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:16.102472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:16.102474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:16.102528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16680 2024-11-21T09:16:16.149488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:16.149518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:16.150647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:16.187048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.195694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:16.215728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.239104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.250956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:16.384438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927655485213:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.384467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.438247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.446178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.502024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.512286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.519408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.535070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.550246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927655485729:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.550289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.550377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658927655485734:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:16.551199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:16.553369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658927655485736:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:16.758685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:16.870107Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180576910, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 28010, MsgBus: 20958 2024-11-21T09:16:17.136766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658933246679582:2070];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:17.140141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00287b/r3tmp/tmpJT2sLz/pdisk_1.dat 2024-11-21T09:16:17.156286Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28010, node 2 2024-11-21T09:16:17.161602Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.161626Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.161628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.161661Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20958 TClient is connected to server localhost:20958 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.240890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.240960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.240974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:16:17.242000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.242083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:17.253317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.317894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.349261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.362628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.480746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658933246681106:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you do ... 594046644480 waiting... 2024-11-21T09:16:20.469765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.480432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.672736Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943960695533:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.672759Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.679532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.687959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.699731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.712472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.727871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.740909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.758603Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943960696023:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.758637Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.758718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658943960696028:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.759622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.762085Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658943960696030:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:20.958181Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.074833Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180581117, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 19182, MsgBus: 64162 2024-11-21T09:16:21.250892Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658948259008077:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00287b/r3tmp/tmpGJ8lZ3/pdisk_1.dat 2024-11-21T09:16:21.253551Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:21.273393Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19182, node 6 2024-11-21T09:16:21.280768Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:21.280795Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:21.280797Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:21.280844Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64162 TClient is connected to server localhost:64162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:21.352654Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:21.352687Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:21.353168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.353481Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:16:21.364680Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:21.377353Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.449444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.472895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.486120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.614444Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948259009470:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.614473Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.621125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.629920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.643777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.658120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.671805Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.690914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.704028Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948259009981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.704061Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.704075Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658948259009986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.704906Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:21.713117Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658948259009988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:21.916182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.030616Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582069, txId: 281474976715673] shutting down >> LocalPartitionReader::FeedSlowly [GOOD] >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpNewEngine::MultipleBroadcastJoin |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |93.9%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27868, MsgBus: 8735 2024-11-21T09:16:17.323334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658932654478604:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002816/r3tmp/tmplkqPYz/pdisk_1.dat 2024-11-21T09:16:17.365518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:17.392084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27868, node 1 2024-11-21T09:16:17.420085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.420098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.420100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.420131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:17.421932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.421958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.423078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8735 TClient is connected to server localhost:8735 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.478085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.480417Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.652814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658932654479057:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.652847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.684068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.757651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.767945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.785544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.801916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.814611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.834476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.842711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.862637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.871746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.890514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.908431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.926349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.939197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.955082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.974690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2024-11-21T09:16:17.992231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.004502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658936949447652:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.004523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.024342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658936949447940:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.024363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.024372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658936949447945:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.024964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.030897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658936949447947:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } [] Trying to start YDB, gRPC: 21312, MsgBus: 20683 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002816/r3tmp/tmpDi8mci/pdisk_1.dat 2024-11-21T09:16:18.639370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:18.639846Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21312, node 2 2024-11-21T09:16:18.653925Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.653937Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.653939Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.653975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20683 TClient is connected to server localhost:20683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.729685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11 ... ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:21.503561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.509165Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:21.525456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.552849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.569643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:21.729202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658949968622745:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.729251Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.734846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.742082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.755720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.771005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.784751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.798289Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.813541Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658949968623264:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.813562Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.813608Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658949968623269:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.814267Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:21.817610Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658949968623271:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 7814, MsgBus: 10152 2024-11-21T09:16:22.313904Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658951454329064:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002816/r3tmp/tmpdnHiqp/pdisk_1.dat 2024-11-21T09:16:22.317554Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:22.323620Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7814, node 6 2024-11-21T09:16:22.331612Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:22.331627Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:22.331628Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:22.331665Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10152 TClient is connected to server localhost:10152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:22.415971Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:22.416000Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:22.416359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.416941Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:22.417476Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:22.419657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.428827Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:22.450006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.460075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.625889Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658951454330465:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.625909Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.632697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.639838Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.651959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.666218Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.679961Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.693812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.711157Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658951454330967:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.711181Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658951454330972:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.711193Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.711915Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:22.721077Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658951454330974:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |93.9%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::PostgreSQLSelectCount >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> GenericFederatedQuery::PostgreSQLFilterPushdown >> GenericFederatedQuery::YdbManagedSelectAll >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8038, MsgBus: 65387 2024-11-21T09:16:16.953905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658926532393441:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:16.954048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002842/r3tmp/tmpd6jIK3/pdisk_1.dat 2024-11-21T09:16:17.018959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8038, node 1 2024-11-21T09:16:17.035186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:17.035199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:17.035201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:17.035236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:17.052699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:17.052731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:17.053896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65387 TClient is connected to server localhost:65387 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:17.100413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.102673Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:17.109156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.124521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:17.145622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:17.158902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.329532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658930827362140:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.329557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.369944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.378942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.387538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.443427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.501749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.514273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.528893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658930827362668:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.528919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.529007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658930827362673:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:17.529701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:17.533691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658930827362675:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:17.722998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:17.759724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29527, MsgBus: 11086 2024-11-21T09:16:18.040926Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658934924777077:2132];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.043036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002842/r3tmp/tmpHV9nOW/pdisk_1.dat 2024-11-21T09:16:18.055975Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29527, node 2 2024-11-21T09:16:18.068847Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.068864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.068866Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.068906Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11086 TClient is connected to server localhost:11086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.140220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.140261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.141387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:18.145173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:18.146652Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.151453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.160598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.185441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.202517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.367666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadS ... OpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.641898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.653704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.847927Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658947709043562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.847951Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.854717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.861295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.875074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.888502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.895716Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.909877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.925241Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658947709044056:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.925277Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658947709044061:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.925278Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.926016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:21.929362Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658947709044063:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:22.098615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.125628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22409, MsgBus: 62866 2024-11-21T09:16:22.416438Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658953877916231:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:22.416754Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002842/r3tmp/tmp1shnCm/pdisk_1.dat 2024-11-21T09:16:22.429206Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22409, node 6 2024-11-21T09:16:22.439519Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:22.439535Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:22.439539Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:22.439586Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62866 TClient is connected to server localhost:62866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:22.516794Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:22.516821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:22.517989Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:22.519683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.529813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:22.539162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.557462Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.569071Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.746267Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658953877917783:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.746296Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.751866Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.759659Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.770681Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.777279Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.784256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.791319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.807516Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658953877918283:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.807539Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.807585Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658953877918288:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.808202Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:22.811812Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658953877918290:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:22.997210Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.022252Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpRanges::ValidatePredicates [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpRanges::DeleteNotFullScan [GOOD] >> KqpRanges::CastKeyBounds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpSqlIn::Delete [GOOD] |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 2284, MsgBus: 25284 2024-11-21T09:16:19.213808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658937825076951:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002745/r3tmp/tmpf2pw9w/pdisk_1.dat 2024-11-21T09:16:19.249599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 2284, node 1 2024-11-21T09:16:19.281891Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:19.301457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.301481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.301483Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.301520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:19.308137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.308172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.309194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25284 TClient is connected to server localhost:25284 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.347873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.350934Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.608536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658937825077383:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.608584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.645756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.716184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658937825077484:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.716223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.716318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658937825077489:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.717140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.719585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658937825077491:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 14911, MsgBus: 31508 2024-11-21T09:16:20.096666Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658943101403587:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:20.096868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002745/r3tmp/tmpxLji76/pdisk_1.dat 2024-11-21T09:16:20.115757Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14911, node 2 2024-11-21T09:16:20.136475Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:20.136493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:20.136496Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:20.136543Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31508 TClient is connected to server localhost:31508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:20.198397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:20.198429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:16:20.198834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.199643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:20.453198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658943101404186:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.453226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.454605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.464730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658943101404284:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.464762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.464768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658943101404289:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.465406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.473345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658943101404291:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:20.629958Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658943101404424:2336], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Can't set NULL or optional value to not null column: Value, code: 2031 2024-11-21T09:16:20.630392Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhkMDM4MDMtNGE0OTBhZi02ZTAyMjFlYy03OTM5NDhlMw==, ActorId: [2:7439658943101404159:2296], ActorState: ExecuteState, TraceId: 01jd70468j2374wg5y6w1gbhfe, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 28321, MsgBus: 12079 2024-11-21T09:16:20.814815Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658945589574060:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002745/r3tmp/tmp8aW1g3/pdisk_1.dat 2024-11-21T09:16:20.819166Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:20.828807Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28321, node 3 2024-11-21T09:16:20.848372Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:20.848384Z node 3 :NET_CLASS ... x with tx_id: Trying to start YDB, gRPC: 24173, MsgBus: 24159 2024-11-21T09:16:22.947312Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658952508978069:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002745/r3tmp/tmp3oqp2o/pdisk_1.dat 2024-11-21T09:16:22.950731Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:22.961368Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24173, node 6 2024-11-21T09:16:22.978686Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:22.978702Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:22.978704Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:22.978763Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24159 TClient is connected to server localhost:24159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:23.047179Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.047219Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.048373Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.049957Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.282400Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956803945819:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.282431Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.285993Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.295653Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956803945963:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.295684Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.295727Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956803945968:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.296473Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:23.301649Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658956803945970:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:23.508097Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7439658956803946154:2347], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd704916a1q42mh4816tapyq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2024-11-21T09:16:23.508285Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7439658956803946155:2348], TxId: 281474976715664, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd704916a1q42mh4816tapyq. SessionId : ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7439658956803946150:2297], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:23.508331Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7439658956803946156:2349], TxId: 281474976715664, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd704916a1q42mh4816tapyq. SessionId : ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7439658956803946150:2297], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:23.508360Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7439658956803946158:2350], TxId: 281474976715664, task: 4. Ctx: { TraceId : 01jd704916a1q42mh4816tapyq. SessionId : ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7439658956803946150:2297], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:23.508659Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd704916a1q42mh4816tapyq, Create QueryResponse for error on request, msg: 2024-11-21T09:16:23.516539Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946187:2354], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2024-11-21T09:16:23.516616Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd70492tdevf2b6j3pvwxpv5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:23.519354Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946205:2362], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2024-11-21T09:16:23.519405Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd70492x5pqc4979225wqgx5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:23.521726Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946222:2370], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2024-11-21T09:16:23.521788Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd70492z30tzjdm6vm129w8k, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:23.547151Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:23.548118Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946239:2378], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2024-11-21T09:16:23.548234Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd7049329ekc4vvprefc891c, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:23.594473Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:23.595381Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946257:2387], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2024-11-21T09:16:23.595462Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd70493xcsqvkg91q5tb465s, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2024-11-21T09:16:23.642749Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:23.643630Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439658956803946275:2396], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2024-11-21T09:16:23.643914Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NGVkY2QyMTUtZGQ5NDAyMmMtZDFhN2NjYzMtZGYzMTc1OWU=, ActorId: [6:7439658956803945808:2297], ActorState: ExecuteState, TraceId: 01jd70495c5p23mcrr1gft5bb1, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicates [GOOD] Test command err: Trying to start YDB, gRPC: 5543, MsgBus: 2313 2024-11-21T09:16:12.049753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658909181751688:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:12.049994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b65/r3tmp/tmpCYvMOb/pdisk_1.dat 2024-11-21T09:16:12.110240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5543, node 1 2024-11-21T09:16:12.131327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:12.131349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:12.131351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:12.131387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2313 2024-11-21T09:16:12.150141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:12.150171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:12.151329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:12.197185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:12.201201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.268887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.301272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.311695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:12.406480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658909181753220:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.406516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.451058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.458578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.466058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.473378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.487719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.502255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.520543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658909181753734:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.520575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.520675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658909181753739:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:12.521382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:12.529626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658909181753741:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:12.722406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.767235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.799340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.843282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:12.895661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14956, MsgBus: 22168 2024-11-21T09:16:13.163410Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658915373637596:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.163613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b65/r3tmp/tmpsto7ub/pdisk_1.dat 2024-11-21T09:16:13.178413Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14956, node 2 2024-11-21T09:16:13.188400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.188410Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.188412Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.188453Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22168 TClient is connected to server localhost:22168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:13.263781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.263819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.265056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.266611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.279214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.297596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itse ... 
7Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582741, txId: 281474976715733] shutting down 2024-11-21T09:16:22.730739Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582776, txId: 281474976715735] shutting down EXPECTED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 <= 2000 AND Key2 <= 2 AND Key3 <= "resource_3" AND Key4 <= "uid:11" ORDER BY `Value`; 2024-11-21T09:16:22.763806Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582804, txId: 281474976715737] shutting down EXPECTED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 > 8 ORDER BY `Value`; 2024-11-21T09:16:22.792583Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582839, txId: 281474976715739] shutting down 2024-11-21T09:16:22.816637Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582860, txId: 281474976715741] shutting down 2024-11-21T09:16:22.840563Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582881, txId: 281474976715743] shutting down EXPECTED: [[[19u]];[[20u]]] RECEIVED: [[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 < 9 ORDER BY `Value`; 2024-11-21T09:16:22.865873Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582909, txId: 281474976715745] shutting down 2024-11-21T09:16:22.886423Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582930, txId: 281474976715747] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 <= 2 AND Key3 <= "resource_3" AND Key4 <= "uid:11" ORDER BY `Value`; 2024-11-21T09:16:22.915003Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582958, txId: 281474976715749] shutting down 2024-11-21T09:16:22.942797Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180582986, txId: 281474976715751] shutting down EXPECTED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] RECEIVED: [[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 = 2000 AND Key2 = 2 AND Key3 = "resource_3" AND Key4 = "uid:11" ORDER BY `Value`; 2024-11-21T09:16:22.969338Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583014, txId: 281474976715753] shutting down 2024-11-21T09:16:22.991980Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583035, txId: 281474976715755] shutting down EXPECTED: [[[10u]]] RECEIVED: [[[10u]]] Execute query SELECT `Value` FROM 
`/Root/TestPredicates` WHERE Key1 != 2000 AND Key2 != 2 AND Key3 != "resource_3" AND Key4 != "uid:11" ORDER BY `Value`; 2024-11-21T09:16:23.043846Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583084, txId: 281474976715757] shutting down 2024-11-21T09:16:23.088254Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583133, txId: 281474976715759] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 IS NULL ORDER BY `Value`; 2024-11-21T09:16:23.109074Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583154, txId: 281474976715761] shutting down 2024-11-21T09:16:23.131744Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583175, txId: 281474976715763] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key2 IS NULL ORDER BY `Value`; 2024-11-21T09:16:23.159182Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583203, txId: 281474976715765] shutting down 2024-11-21T09:16:23.186138Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583231, txId: 281474976715767] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 IS NOT NULL ORDER BY `Value`; 2024-11-21T09:16:23.210532Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583252, txId: 281474976715769] shutting down 2024-11-21T09:16:23.233207Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583273, txId: 281474976715771] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 > 1000 AND Key2 IS NULL ORDER BY `Value`; 2024-11-21T09:16:23.260302Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583301, txId: 281474976715773] shutting down 2024-11-21T09:16:23.296013Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583336, txId: 281474976715775] shutting down EXPECTED: [] RECEIVED: [] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 > 1000 OR Key2 IS NULL ORDER BY `Value`; 2024-11-21T09:16:23.328801Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583371, txId: 281474976715777] shutting down 2024-11-21T09:16:23.355875Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583399, txId: 281474976715779] shutting down EXPECTED: [[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: 
[[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 >= 1000 OR Key2 IS NOT NULL ORDER BY `Value`; 2024-11-21T09:16:23.386384Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583427, txId: 281474976715781] shutting down 2024-11-21T09:16:23.413971Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583455, txId: 281474976715783] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NOT NULL ORDER BY `Value`; 2024-11-21T09:16:23.443752Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583490, txId: 281474976715785] shutting down 2024-11-21T09:16:23.471494Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583511, txId: 281474976715787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2024-11-21T09:16:23.501017Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583546, txId: 281474976715789] shutting down 2024-11-21T09:16:23.526649Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583567, txId: 281474976715791] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2024-11-21T09:16:23.547208Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583588, txId: 281474976715793] shutting down 2024-11-21T09:16:23.566929Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583609, txId: 281474976715795] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2024-11-21T09:16:23.611594Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583651, txId: 281474976715797] shutting down 2024-11-21T09:16:23.657503Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583700, txId: 281474976715799] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: 
[[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2024-11-21T09:16:23.671327Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583669, txId: 281474976715801] shutting down 2024-11-21T09:16:23.683453Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180583682, txId: 281474976715803] shutting down EXPECTED: [] RECEIVED: [] 2024-11-21T09:16:23.694434Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037919 not found 2024-11-21T09:16:23.694547Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037920 not found 2024-11-21T09:16:23.694567Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037921 not found >> KqpNewEngine::MultipleBroadcastJoin [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 6686, MsgBus: 12595 2024-11-21T09:16:18.517799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658935452377346:2150];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.518447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027b0/r3tmp/tmpxU1ami/pdisk_1.dat 2024-11-21T09:16:18.572696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6686, node 1 2024-11-21T09:16:18.591844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.591856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.591858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.591893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12595 2024-11-21T09:16:18.618033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.618076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.622385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:18.652780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.656351Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.659837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.732967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.757534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.769709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.825876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658935452378780:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.825904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.860257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.867278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.925381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.935032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.949169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.956375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.971734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658935452379298:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.971774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.971794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658935452379303:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.972571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.976078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658935452379305:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21991, MsgBus: 22859 2024-11-21T09:16:19.473237Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658937903518333:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:19.473400Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027b0/r3tmp/tmpNVBDrO/pdisk_1.dat 2024-11-21T09:16:19.485250Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21991, node 2 2024-11-21T09:16:19.503016Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.503030Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.503032Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.503084Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22859 TClient is connected to server localhost:22859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.573667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.573698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.574853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.576004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.580016Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.585215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.594953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:19.641929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.656365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.797493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658937903519860:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.797519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.115104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.126640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.140811Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.154988Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.170465Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658953234319584:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.170500Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.170517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439658953234319589:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.171212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:22.174420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439658953234319591:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:22.444385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.453941Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.462968Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 14507, MsgBus: 25437 2024-11-21T09:16:23.031020Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658957067982215:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.031206Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027b0/r3tmp/tmppriLXM/pdisk_1.dat 2024-11-21T09:16:23.045721Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14507, node 5 2024-11-21T09:16:23.050925Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.050939Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.050940Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.050974Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25437 TClient is connected to server localhost:25437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:23.132096Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.132126Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.133184Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.134600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.140965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.149941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:23.170211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.182131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.340889Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658957067983767:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.340913Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.346905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.353604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.365877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.379913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.394065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.407410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.416404Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658957067984274:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.416429Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658957067984279:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.416434Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.417251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:23.420455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658957067984281:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:23.601166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.609144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.617237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |93.9%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23208, MsgBus: 29350 2024-11-21T09:16:18.532487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658933783429285:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.532504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027c6/r3tmp/tmpqn1jfU/pdisk_1.dat 2024-11-21T09:16:18.596384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23208, node 1 2024-11-21T09:16:18.621840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.621856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.621858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.621897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:18.632756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.632786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.633795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29350 TClient is connected to server localhost:29350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.687587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.692691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.695325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.718111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.740815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.753785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.884873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658933783430819:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.884907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.917619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.927466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.937489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.949335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.956462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.970030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.978677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658933783431325:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.978704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.978730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658933783431332:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.979396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.982452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658933783431334:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 28971, MsgBus: 17782 2024-11-21T09:16:19.447933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658938703482843:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027c6/r3tmp/tmpIbO5md/pdisk_1.dat 2024-11-21T09:16:19.458605Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:19.484812Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28971, node 2 2024-11-21T09:16:19.496772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.496791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.496793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.496837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17782 TClient is connected to server localhost:17782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.547803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.547834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.548322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.552376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.553992Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.565513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.625075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:19.644478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.656290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.788864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658938703484221:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.788885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not fou ... e, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.547555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.565437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.575428Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:22.756672Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658952254293050:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.756720Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.761737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.769502Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.777090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.784114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.791126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.797957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:22.807505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658952254293552:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.807530Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.807540Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658952254293557:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:22.808187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:22.811433Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658952254293559:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6403, MsgBus: 21029 2024-11-21T09:16:23.349067Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658956734827742:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.349132Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027c6/r3tmp/tmpgaYNJF/pdisk_1.dat 2024-11-21T09:16:23.361979Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6403, node 6 2024-11-21T09:16:23.371878Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.371892Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.371894Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.371947Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21029 TClient is connected to server localhost:21029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:23.449607Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.449635Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.450709Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.452474Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.463611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.472139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:23.493009Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.505427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:23.698209Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956734829309:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.698252Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.704727Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.712314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.722553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.729101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.743398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.758214Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.777360Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956734829810:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.777390Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.777493Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658956734829815:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.778343Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:23.789819Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658956734829817:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:23.978966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.986018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.996232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage >> KqpRanges::CastKeyBounds [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::AccessServiceAuthenticationOk >> TTicketParserTest::LoginGood >> TTicketParserTest::BulkAuthorizationRetryError >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> KqpExtractPredicateLookup::ComplexRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 10535, MsgBus: 15189 2024-11-21T09:16:19.224597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658938904095186:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:19.225077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002787/r3tmp/tmp36ltjL/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10535, node 1 2024-11-21T09:16:19.305241Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:19.305252Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:19.308281Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:19.325242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:19.325267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:19.326346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:19.328382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.328394Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.328396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T09:16:19.328431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15189 TClient is connected to server localhost:15189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:19.393804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:19.395685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:19.604696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658938904095585:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.604723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.638804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:19.706196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658938904095685:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.706224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.706258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658938904095690:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:19.706907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:19.708741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658938904095692:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T09:16:19.816756Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658938904095781:2324], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2024-11-21T09:16:19.816838Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjdhNWI0NWItNGM3ZjhhMWItNjlkMWU1MmUtN2RmZmEyZDY=, ActorId: [1:7439658938904095567:2297], ActorState: ExecuteState, TraceId: 01jd7045f5dhv2f34vz7makrzm, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T09:16:19.827826Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:16:19.829480Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658938904095790:2328], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Key, code: 2031 2024-11-21T09:16:19.829557Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjdhNWI0NWItNGM3ZjhhMWItNjlkMWU1MmUtN2RmZmEyZDY=, ActorId: [1:7439658938904095567:2297], ActorState: ExecuteState, TraceId: 01jd7045f9dmyfwwmzqhbym12x, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 9077, MsgBus: 19745 2024-11-21T09:16:20.109924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658943319878560:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:20.110314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002787/r3tmp/tmp0nGNbb/pdisk_1.dat 2024-11-21T09:16:20.125410Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9077, node 2 2024-11-21T09:16:20.139192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:20.139204Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:20.139206Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:20.139246Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19745 TClient is connected to server localhost:19745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:20.214117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:20.214141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:20.214385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.215183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:20.225659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:20.237734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.256173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.266864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.407524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658943319880089:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.407555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.414220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.422699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.433457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:23.262586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.421901Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658955086950260:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.421929Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.426366Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.432617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.443036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.456837Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.463496Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.478152Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.494088Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658955086950765:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.494111Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658955086950770:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.494127Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:23.494856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:23.497412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658955086950772:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":5,"FinishTimeMs":1732180583708,"TaskId":1,"Host":"ghrun-qcxhsi27zq","ComputeTimeUs":47}],"CpuTimeUs":177}],"UseLlvm":"undefined","Tasks":1,"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732180583708,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":177,"Max":177,"Min":177}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":29052,"CpuTimeUs":27290},"ProcessCpuTimeUs":469,"TotalDurationUs":32531,"ResourcePoolId":"default","QueuedTimeUs":218},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Cpu":0.177,"Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 7932, MsgBus: 29577 2024-11-21T09:16:24.057437Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658962398206926:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:24.060252Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002787/r3tmp/tmpy2Sgsr/pdisk_1.dat 2024-11-21T09:16:24.072638Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7932, node 6 2024-11-21T09:16:24.083515Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:24.083528Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:24.083530Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:24.083583Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29577 TClient is connected to server localhost:29577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:24.160140Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:24.160189Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:24.160478Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.161220Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:24.172330Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.181230Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.200700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.211335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.362468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658962398208315:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.362495Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.368471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.376378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.387775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.402401Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.415985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.429562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.437941Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658962398208827:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.437973Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.438020Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658962398208832:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.438668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:24.442688Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658962398208834:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 22927, MsgBus: 16586 2024-11-21T09:16:13.676010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658914063137616:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:13.676223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aab/r3tmp/tmpKKzeQN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22927, node 1 2024-11-21T09:16:13.759294Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:13.759632Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:13.759761Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:13.776248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:13.776282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:13.777323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:13.791779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:13.791800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:13.791801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:13.791829Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16586 TClient is connected to server localhost:16586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:13.847920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:13.858310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:13.872641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.891055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:13.903412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.024155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918358106436:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.024184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.068569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.084732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.094054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.104610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.127650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.140342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:14.156168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918358106950:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.156223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.156324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658918358106955:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:14.157231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:14.160129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658918358106957:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 24936, MsgBus: 14011 2024-11-21T09:16:14.788634Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658916991863669:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:14.790002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aab/r3tmp/tmpfSuec9/pdisk_1.dat 2024-11-21T09:16:14.802890Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:14.806522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:14.806552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:14.812545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24936, node 2 2024-11-21T09:16:14.820200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:14.820222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:14.820223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:14.820253Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14011 TClient is connected to server localhost:14011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:14.868437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.869753Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:14.880176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:14.904508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:14.940078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:14.949853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:15.065522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658921286832355:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:15.065548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool d ... oadService] [TPoolCreatorActor] ActorId: [9:7439658956698669934:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:23.704140Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.710890Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.722510Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.728967Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.736530Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.743347Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.750418Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.765646Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.778879Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:16:23.793974Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7843, MsgBus: 5135 2024-11-21T09:16:24.250767Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439658960330794649:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002aab/r3tmp/tmppch9E8/pdisk_1.dat 2024-11-21T09:16:24.253983Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:24.264024Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7843, node 10 2024-11-21T09:16:24.270999Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:24.271013Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:24.271015Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:24.271054Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5135 TClient is connected to server localhost:5135 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:24.350525Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:24.350553Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:24.351593Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:24.352692Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.369729Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.379071Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.398983Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.408860Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.596507Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439658960330796094:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.596533Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.602232Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.608958Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.618669Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.632639Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.639391Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.646872Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.662845Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439658960330796585:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.662867Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439658960330796590:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.662870Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.663426Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:24.666078Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439658960330796592:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:24.863084Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.870090Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.877325Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.891863Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.905863Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.919718Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.934466Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.947757Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.954884Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.968443Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> TGroupMapperTest::Block42_2disk >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> 
TGroupMapperTest::Mirror3dc >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> TGroupMapperTest::SanitizeGroupTest3dc >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> TGroupMapperTest::NonUniformCluster2 >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 63528, MsgBus: 29727 2024-11-21T09:16:20.404071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658944458349302:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:20.404108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00271d/r3tmp/tmpXmpl5R/pdisk_1.dat 2024-11-21T09:16:20.465166Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63528, node 1 2024-11-21T09:16:20.491619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:20.491632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:20.491634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:20.491670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:20.505219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:20.505249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:20.506097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29727 TClient is connected to server localhost:29727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:20.540197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.544336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:20.549777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.618456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.636119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.647272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:20.738507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658944458350839:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.738540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.783723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.791096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.848595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.860685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.873770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.887847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:20.903923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658944458351357:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.903955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.904047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658944458351362:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:20.904724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:20.907199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658944458351364:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1626, MsgBus: 18340 2024-11-21T09:16:21.294663Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658947798439833:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00271d/r3tmp/tmp2nlutv/pdisk_1.dat 2024-11-21T09:16:21.297926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:21.308592Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1626, node 2 2024-11-21T09:16:21.322673Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:21.322686Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:21.322688Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:21.322731Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18340 TClient is connected to server localhost:18340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:21.397529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:21.397562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:21.398010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.398422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:21.405929Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:21.421320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.479290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:21.498957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:21.509893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:21.630664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658947798441217:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:21.630702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... wn -> Disconnected 2024-11-21T09:16:24.142316Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:24.143101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.143342Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:24.151548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.161262Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.180631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.192543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.342863Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658961224005892:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.342887Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.349379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.356370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.411702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.422786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.429364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.436545Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.445239Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658961224006406:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.445266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.445327Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439658961224006411:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.446049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:24.449433Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439658961224006413:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 8077, MsgBus: 29734 2024-11-21T09:16:24.958815Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658960693472458:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:24.958844Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00271d/r3tmp/tmpES1Avm/pdisk_1.dat 2024-11-21T09:16:24.974260Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8077, node 6 2024-11-21T09:16:24.982758Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:24.982789Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:24.982792Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:24.982839Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29734 TClient is connected to server localhost:29734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.057696Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.057740Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.058843Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.062149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.069460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.077810Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.098815Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.155465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.329638Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658964988441312:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:25.329670Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:25.337273Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.346156Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.356364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.368418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.382697Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.400258Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.412742Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658964988441815:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:25.412768Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:25.412864Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439658964988441820:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:25.413687Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:25.423708Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439658964988441822:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TGroupMapperTest::Mirror3dc [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 2934, MsgBus: 61397 2024-11-21T09:16:23.779611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658957202456834:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.779665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ea7/r3tmp/tmpYGYSln/pdisk_1.dat 2024-11-21T09:16:23.859406Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2934, node 1 2024-11-21T09:16:23.878844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.878869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.879973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.933535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.933554Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.933555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.933582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61397 TClient is connected to server localhost:61397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:24.045000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:24.052430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:24.118117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961497424519:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.118146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.786896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.854397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961497424653:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961497424658:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.855961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.857824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658961497424660:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:25.142026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.192976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.249300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.322880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.366211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.437554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.447154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.713177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 24067, MsgBus: 63420 2024-11-21T09:16:23.779688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658957215268081:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.779725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e96/r3tmp/tmpCViwtt/pdisk_1.dat 2024-11-21T09:16:23.862669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24067, node 1 2024-11-21T09:16:23.878922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.878955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.880081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.932917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.932942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.932944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.932973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63420 TClient is connected to server localhost:63420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:24.032711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.036545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:24.104558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961510235765:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.104589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.786855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.854424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961510235898:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658961510235903:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.855993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.857448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658961510235905:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T09:16:25.142140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.204775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.265957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.332476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.390166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.447192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.505440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.747360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TTicketParserTest::LoginBad >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 11249, MsgBus: 16959 2024-11-21T09:16:23.784596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658957852122795:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.784756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e99/r3tmp/tmpEgO6V4/pdisk_1.dat 2024-11-21T09:16:23.861164Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11249, node 1 2024-11-21T09:16:23.885673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.885713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.886802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.934349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.934370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.934371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.934410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16959 TClient is connected to server localhost:16959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:24.034097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:24.041186Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:24.109247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962147090706:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.109282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.786815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.854520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962147090838:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962147090844:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.854579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.855976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.857449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658962147090846:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:25.142247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.193359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.265869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.379943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.436782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.492425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.500685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.747481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 22150, MsgBus: 32099 2024-11-21T09:16:23.799541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658956170663668:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.799706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ec1/r3tmp/tmpwaoyOf/pdisk_1.dat 2024-11-21T09:16:23.867137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22150, node 1 2024-11-21T09:16:23.899636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.899667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.900744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.932473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.932497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.932499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.932538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32099 TClient is connected to server localhost:32099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:24.035327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:24.112444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658960465631560:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.112470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.801490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.860734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658960465631691:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.860756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.860771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658960465631696:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.861351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.869105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658960465631698:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T09:16:25.142029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.199022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.266268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.343009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.409006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.468117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.480411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.735996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710693:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T09:16:02.835040Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658865348161575:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:02.835601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003d89/r3tmp/tmpFI9GRs/pdisk_1.dat 2024-11-21T09:16:02.885591Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:16:02.888629Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:16:02.894172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:02.925487Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7670, node 1 2024-11-21T09:16:02.939107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:02.939149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:02.940579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:02.968524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003d89/r3tmp/yandexjcFMAn.tmp 2024-11-21T09:16:02.968534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003d89/r3tmp/yandexjcFMAn.tmp 2024-11-21T09:16:02.968600Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/jptk/003d89/r3tmp/yandexjcFMAn.tmp 2024-11-21T09:16:02.968656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:02.980825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:02.980859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:02.985025Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:16:02.985433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:03.020644Z INFO: TTestServer started on Port 20002 GrpcPort 7670 TClient is connected to server localhost:20002 PQClient connected to localhost:7670 === TenantModeEnabled() = 0 === Init PQ - start server on port 7670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:03.061374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:16:03.061470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.061604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T09:16:03.061723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:16:03.061739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.064875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T09:16:03.064938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2024-11-21T09:16:03.065963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.065991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:16:03.066011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T09:16:03.066017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2024-11-21T09:16:03.066471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.066487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T09:16:03.066492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T09:16:03.068989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.069007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:16:03.069014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T09:16:03.069862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.069900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.069907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T09:16:03.069915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:16:03.070785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:03.072398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T09:16:03.072463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:16:03.073582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180563120, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:16:03.073646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439658865348161956 RawX2: 4294969644 } } Step: 1732180563120 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:16:03.073673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 
72057594046644480 2024-11-21T09:16:03.073778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T09:16:03.073792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T09:16:03.073838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:16:03.073883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T09:16:03.074701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:16:03.074713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:16:03.074779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:16:03.074784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439658865348161990:2378], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T09:16:03.074793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:03.074800Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T09:16:03.074819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T09:16:03.074822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:16:03.074828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T09:16:03.074833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:16:03.074838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T09:16:03.074841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T09:16:03.074854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:16:03.074860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T09:16:03.074873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T09:16:03.075534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at ... 
nradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T09:16:25.897817Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_4285635975284269708_v1" (Sender=[5:7439658966001645606:2562], Pipe=[5:7439658966001645609:2562], Partitions=[], ActiveFamilyCount=0) 2024-11-21T09:16:25.897833Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_4285635975284269708_v1" sender [5:7439658966001645606:2562] lock partition 0 for ReadingSession "shared/cli_5_1_4285635975284269708_v1" (Sender=[5:7439658966001645606:2562], Pipe=[5:7439658966001645609:2562], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T09:16:25.897851Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T09:16:25.897861Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000050s 2024-11-21T09:16:25.898134Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_4285635975284269708_v1" ClientId: "cli" PipeClient { RawX1: 7439658966001645609 RawX2: 4503621102209538 } Path: "/Root/PQ/rt3.dc1--topic1" } 2024-11-21T09:16:25.898161Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T09:16:25.898220Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2024-11-21T09:16:25.898258Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_4285635975284269708_v1:1 with generation 1 2024-11-21T09:16:25.899988Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732180585895 } Cookie: 18446744073709551615 } 2024-11-21T09:16:25.900008Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T09:16:25.900031Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 sending to client partition status 2024-11-21T09:16:25.900360Z :INFO: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T09:16:25.900547Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T09:16:25.900627Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T09:16:25.900652Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T09:16:25.900655Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T09:16:25.900690Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T09:16:25.900706Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1TEvPartitionReady. Aval parts: 1 2024-11-21T09:16:25.900741Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 performing read request: guid# 328a7a2-db253586-62aa544b-5e6a5e5d, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T09:16:25.900817Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 328a7a2-db253586-62aa544b-5e6a5e5d 2024-11-21T09:16:25.901096Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732180585794 CreateTimestampMS: 1732180585794 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732180585795 CreateTimestampMS: 1732180585794 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732180585795 CreateTimestampMS: 1732180585794 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T09:16:25.901147Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T09:16:25.901167Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 328a7a2-db253586-62aa544b-5e6a5e5d has messages 1 2024-11-21T09:16:25.901226Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 read done: guid# 328a7a2-db253586-62aa544b-5e6a5e5d, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2024-11-21T09:16:25.901251Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 response to read: guid# 328a7a2-db253586-62aa544b-5e6a5e5d 2024-11-21T09:16:25.901367Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 Process answer. Aval parts: 0 2024-11-21T09:16:25.901463Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T09:16:25.901496Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T09:16:25.901600Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T09:16:25.901618Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] Returning serverBytesSize = 371 to budget 2024-11-21T09:16:25.901624Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2024-11-21T09:16:25.901728Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T09:16:25.901780Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T09:16:25.901793Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T09:16:25.901798Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T09:16:25.901809Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T09:16:25.901817Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] Returning serverBytesSize = 0 to budget 2024-11-21T09:16:25.901821Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2024-11-21T09:16:25.901851Z :INFO: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] Closing read session. 
Close timeout: 0.000000s 2024-11-21T09:16:25.901858Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T09:16:25.901870Z :INFO: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 6 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:25.901890Z :NOTICE: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:16:25.901897Z :DEBUG: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] [] Abort session to cluster 2024-11-21T09:16:25.901874Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 got read request: guid# 5a7bcbe3-5c59a0c6-a879efdf-dc6a68ce 2024-11-21T09:16:25.902073Z :NOTICE: [] [] [d8391ca2-c6469e1-8cdc15dd-a696551a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:16:25.902190Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 grpc read done: success# 0, data# { } 2024-11-21T09:16:25.902201Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 grpc read failed 2024-11-21T09:16:25.902206Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 grpc closed 2024-11-21T09:16:25.902223Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_4285635975284269708_v1 is DEAD 2024-11-21T09:16:25.902304Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_4285635975284269708_v1 2024-11-21T09:16:25.902611Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439658966001645609:2562] disconnected; active server actors: 1 2024-11-21T09:16:25.902626Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439658966001645609:2562] client cli disconnected session shared/cli_5_1_4285635975284269708_v1 >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 15971, MsgBus: 21603 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ea4/r3tmp/tmpXI374y/pdisk_1.dat 2024-11-21T09:16:23.839704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:23.860332Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15971, node 1 2024-11-21T09:16:23.891521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.891547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.892559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.932393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.932420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.932423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.932470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21603 TClient is connected to server 
localhost:21603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:24.033561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.040664Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:24.114524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962848089142:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.114551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.841857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.908443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962848089274:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.908490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.908626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962848089280:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.909518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.911527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658962848089282:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:25.142090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.192978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.250198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.319888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.385202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.472733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.487587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.748056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.754197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.754412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.754546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2024-11-21T09:16:26.111568Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180586157, txId: 281474976715716] shutting down >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> TGroupMapperTest::MakeDisksForbidden [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 18897, MsgBus: 5413 2024-11-21T09:16:23.803991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658958597945685:2139];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:23.804062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001eb7/r3tmp/tmpHWtpmh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18897, node 1 2024-11-21T09:16:23.879852Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:23.880155Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:23.880165Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:16:23.904433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:23.904460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:23.908575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:23.932953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:23.932974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:23.932975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:23.933004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5413 TClient is connected to server localhost:5413 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:24.031903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:24.036515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:24.111559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962892913486:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.111594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.805698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.866651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962892913617:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.866714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.866720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658962892913623:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:24.867251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T09:16:24.868797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658962892913625:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T09:16:25.142053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.193525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 2024-11-21T09:16:25.312810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.365014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.421530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.470251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T09:16:25.480411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.711020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710693:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.723762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.724329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.724593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2024-11-21T09:16:26.111949Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180586150, txId: 281474976710716] shutting down >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |94.0%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |94.0%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TTicketParserTest::AuthorizationModify [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] |94.0%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2024-11-21T09:16:25.200841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658964745797043:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.200906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a35/r3tmp/tmpXvkZ0C/pdisk_1.dat 2024-11-21T09:16:25.294055Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2826, node 1 2024-11-21T09:16:25.303275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.303308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.304396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.339803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.339819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.339820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.339856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.420152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
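The Call DescribeTable / ListSplits / ReadSplits entries logged earlier in this section embed the federated data source's basic credentials in clear text (username "crab", password "qwerty12345"). A minimal redaction sketch, assuming plain text-proto log lines shaped like the ones above and using only the Python standard library; the helper name is illustrative and not part of the YDB tooling:

```python
import re

# Matches password fields exactly as they appear in the text-proto dumps above,
# e.g. credentials { basic { username: "crab" password: "qwerty12345" } }
_PASSWORD_RE = re.compile(r'(password:\s*")[^"]*(")')

def redact_connector_log_line(line: str) -> str:
    """Replace the value of every password field with a fixed mask."""
    return _PASSWORD_RE.sub(r'\1****\2', line)

if __name__ == "__main__":
    sample = 'credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true'
    print(redact_connector_log_line(sample))
    # -> credentials { basic { username: "crab" password: "****" } } use_tls: true
```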
2024-11-21T09:16:25.424536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.432579Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:25.432633Z node 1 :GRPC_CLIENT DEBUG: [45473f082690] Connect to grpc://localhost:29380 2024-11-21T09:16:25.436793Z node 1 :GRPC_CLIENT DEBUG: [45473f082690] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T09:16:25.439365Z node 1 :GRPC_CLIENT DEBUG: [45473f082690] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:25.440298Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T09:16:25.440570Z node 1 :GRPC_CLIENT DEBUG: [45473f082b10] Connect to grpc://localhost:15582 2024-11-21T09:16:25.440717Z node 1 :GRPC_CLIENT DEBUG: [45473f082b10] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T09:16:25.446960Z node 1 :GRPC_CLIENT DEBUG: [45473f082b10] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T09:16:25.448355Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a35/r3tmp/tmp2px7HX/pdisk_1.dat 2024-11-21T09:16:25.672444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:25.680438Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22038, node 2 2024-11-21T09:16:25.696407Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.696427Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.696429Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.696469Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:25.760024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.760075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.761140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.762851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.764775Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2024-11-21T09:16:26.122389Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658968814223676:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.122414Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a35/r3tmp/tmpleYExQ/pdisk_1.dat 2024-11-21T09:16:26.137017Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63398, node 3 2024-11-21T09:16:26.146108Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.146121Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.146123Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.146165Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.222848Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.222885Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.223958Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.224628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:26.226699Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2024-11-21T09:16:26.530408Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658969621325605:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a35/r3tmp/tmpNnVdm8/pdisk_1.dat 2024-11-21T09:16:26.533236Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:26.544005Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23702, node 4 2024-11-21T09:16:26.557107Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.557123Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.557125Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.557184Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depr ... 
Denied" retryable:0 2024-11-21T09:16:26.657492Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.connect now has a valid subject "user1@as" 2024-11-21T09:16:26.657522Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:26.657712Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.657773Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } api_key: "ApiK****alid (AB5B5EA8)" } 2024-11-21T09:16:26.658281Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Response AuthorizeResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2024-11-21T09:16:26.658328Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) permission something.read now has a valid subject "ApiKey-value-valid@as" 2024-11-21T09:16:26.658355Z node 4 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2024-11-21T09:16:26.658522Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T09:16:26.658584Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:26.663363Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Status 16 Access Denied 2024-11-21T09:16:26.663476Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:26.663491Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T09:16:26.663702Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.663759Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:26.664345Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Status 16 Access Denied 2024-11-21T09:16:26.664421Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:26.664434Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2024-11-21T09:16:26.664661Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.664698Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:26.665147Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Status 16 Access Denied 2024-11-21T09:16:26.665178Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:26.665187Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 
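The TICKET_PARSER TRACE entries above record one outcome per (ticket, permission) pair: either `now has a valid subject "<subject>"` or `now has a permanent error "<message>" retryable:0`. A small sketch that folds such lines into a per-ticket summary, assuming log text in exactly that shape; the helper is illustrative and not part of the test suite:

```python
import re
from typing import Dict, Tuple

# Covers the two TICKET_PARSER TRACE outcomes visible in the log above.
_OUTCOME_RE = re.compile(
    r'Ticket (?P<ticket>\S+ \(\w+\)) permission (?P<permission>\S+) now has '
    r'(?:a valid subject "(?P<subject>[^"]+)"|a permanent error "(?P<error>[^"]+)")'
)

def summarize_permissions(log_text: str) -> Dict[Tuple[str, str], str]:
    """Map (ticket, permission) to the last outcome recorded for it."""
    summary: Dict[Tuple[str, str], str] = {}
    for m in _OUTCOME_RE.finditer(log_text):
        outcome = m.group("subject") or "error: " + m.group("error")
        summary[(m.group("ticket"), m.group("permission"))] = outcome
    return summary
```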
2024-11-21T09:16:26.665364Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.665436Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2024-11-21T09:16:26.666034Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Status 16 Access Denied 2024-11-21T09:16:26.666077Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:26.666088Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T09:16:26.666222Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.666254Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:26.666696Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:26.666741Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T09:16:26.666774Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:26.666923Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:26.666987Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2024-11-21T09:16:26.667439Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:26.667483Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T09:16:26.667509Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:26.667609Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2024-11-21T09:16:26.667639Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2024-11-21T09:16:26.667982Z node 4 :GRPC_CLIENT DEBUG: [45473f083890] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:26.668015Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2024-11-21T09:16:26.668035Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:27.007237Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658974043699036:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a35/r3tmp/tmpkRPKOe/pdisk_1.dat 2024-11-21T09:16:27.008986Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:27.017394Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14039, node 5 2024-11-21T09:16:27.036422Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.036437Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.036439Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.036481Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.106577Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.106619Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.107745Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.109515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:27.111715Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:27.111743Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Connect to grpc://localhost:16096 2024-11-21T09:16:27.111931Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:27.117315Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:27.117383Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T09:16:27.117414Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:27.117559Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:27.117572Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T09:16:27.117610Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:27.117754Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:27.118161Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:27.118238Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T09:16:27.118324Z node 5 :GRPC_CLIENT DEBUG: [45473f084190] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:27.118387Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T09:16:27.118410Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2024-11-21T09:16:24.664356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658959544968745:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:24.664599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmpM760JJ/pdisk_1.dat 2024-11-21T09:16:24.755355Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15312, node 1 2024-11-21T09:16:24.768045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T09:16:24.768074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:24.769192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:24.811529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:24.811543Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:24.811545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:24.811580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:24.959237Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:24.965054Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:16410, port: 16410 2024-11-21T09:16:24.965091Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:25.021151Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:25.076370Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:25.124891Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****rTxw (A901816B) () has now valid token of ldapuser@ldap 2024-11-21T09:16:25.359263Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658966912957508:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.359467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmpPTXcUS/pdisk_1.dat 2024-11-21T09:16:25.373126Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17178, node 2 2024-11-21T09:16:25.384939Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.384952Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.384956Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.385006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:25.442313Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:25.444763Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:23718, port: 23718 2024-11-21T09:16:25.444795Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:25.459817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.459849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.460983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.496458Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:25.540380Z node 2 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:25.540561Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T09:16:25.540575Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.584417Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.633828Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.634243Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****6sKw (C984069C) () has now valid token of ldapuser@ldap 2024-11-21T09:16:25.841465Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658965333393362:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.841646Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmpRIJzRc/pdisk_1.dat 2024-11-21T09:16:25.855261Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5301, node 3 2024-11-21T09:16:25.862122Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.862135Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.862137Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.862171Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:25.935992Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:25.938509Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:16898, port: 16898 2024-11-21T09:16:25.938540Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:25.943034Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.943076Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.944195Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.988453Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:26.032616Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****gy-A (0E33C880) () has now valid token of ldapuser@ldap 2024-11-21T09:16:26.324137Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658967975987311:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.324156Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmpp38XEm/pdisk_1.dat 2024-11-21T09:16:26.334964Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14024, node 4 2024-11-21T09:16:26.344379Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.344400Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.344402Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.344439Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:26.413458Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:26.415499Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:17402 ldaps://localhost:17402 ldaps://localhost:11111, port: 17402 2024-11-21T09:16:26.415537Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:26.426236Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.426265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.427515Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.472413Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:26.516342Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:26.516506Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T09:16:26.516522Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:26.560648Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:26.608385Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:26.608806Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****JgPg (B7720C7C) () has now valid token of ldapuser@ldap 2024-11-21T09:16:26.812807Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658970865426776:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.813039Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmphANBnx/pdisk_1.dat 2024-11-21T09:16:26.825789Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9256, node 5 2024-11-21T09:16:26.844425Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.844452Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.844453Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.844501Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:26.894633Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:26.896718Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:21607, port: 21607 2024-11-21T09:16:26.896750Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:26.912952Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.912994Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.915444Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.944442Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T09:16:26.992405Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:26.992654Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T09:16:26.992672Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T09:16:27.040437Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T09:16:27.088431Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T09:16:27.088929Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****vjRA (94F1255B) () has now valid token of ldapuser@ldap 2024-11-21T09:16:27.296957Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439658972810623387:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.297120Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032f4/r3tmp/tmprAjEJZ/pdisk_1.dat 2024-11-21T09:16:27.308294Z node 6 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2749, node 6 2024-11-21T09:16:27.316772Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.316786Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.316788Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.316835Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:27.397204Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.397238Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.398791Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.411510Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:27.413369Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:3294, port: 3294 2024-11-21T09:16:27.413405Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:27.460478Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T09:16:27.460506Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:3294. Bad search filter 2024-11-21T09:16:27.460731Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****1NXw (BCB79268) () has now permanent error message 'Could not login via LDAP' >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TGroupMapperTest::MapperSequentialCalls >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> TSchemeShardSubDomainTest::LS >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TSchemeShardSubDomainTest::Delete >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::DeleteAndRestart >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] Test command err: 2024-11-21T09:16:25.555736Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658963546596661:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.555791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a2e/r3tmp/tmphUTH9P/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18119, node 1 2024-11-21T09:16:25.617299Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:25.618470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.618473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.618475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.618509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.658091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.658121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.659192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.685069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
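The LDAP_AUTH_PROVIDER entries earlier in this section show the nested-group lookup: a first search filtered with the Active Directory matching-rule-in-chain OID 1.2.840.113556.1.4.1941 on the user DN, followed by `(|(entryDn=...)...)` searches that walk up the group tree ("Try to get nested groups - tree traversal"). A minimal sketch of how such filters can be assembled, assuming the same filter syntax as in the log; the function names are illustrative and not taken from the YDB LDAP provider:

```python
# OID of Active Directory's LDAP_MATCHING_RULE_IN_CHAIN, as seen in the filters above.
MATCHING_RULE_IN_CHAIN = "1.2.840.113556.1.4.1941"

def member_in_chain_filter(user_dn: str) -> str:
    """Filter matching every group the DN belongs to, directly or transitively."""
    return f"(member:{MATCHING_RULE_IN_CHAIN}:={user_dn})"

def parents_filter(group_dns: list) -> str:
    """OR-filter over the groups found so far, used to fetch their own memberOf/groupDN."""
    return "(|" + "".join(f"(entryDn={dn})" for dn in group_dns) + ")"

print(member_in_chain_filter("uid=ldapuser,dc=search,dc=yandex,dc=net"))
# -> (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net)
```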
2024-11-21T09:16:25.688187Z node 1 :TICKET_PARSER DEBUG: Ticket FFDCEEDBB43F465DDA24CB33267F29986FBEB053 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T09:16:26.463214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658970226178318:2079];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.463321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a2e/r3tmp/tmphHAATf/pdisk_1.dat 2024-11-21T09:16:26.476703Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26721, node 2 2024-11-21T09:16:26.484173Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.484195Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.484197Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.484244Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.563758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.563788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.564996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.566545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:26.568982Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:26.572655Z node 2 :TICKET_PARSER DEBUG: Ticket 5CE7490426BFC00EF6DD0B6EA4F63416EFFC21FF () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T09:16:27.312298Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658975736027150:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.312315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a2e/r3tmp/tmp2cZ95l/pdisk_1.dat 2024-11-21T09:16:27.327338Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14953, node 3 2024-11-21T09:16:27.338054Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.338065Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.338067Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.338108Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.413198Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.413233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.414334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.418155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:27.419843Z node 3 :TICKET_PARSER DEBUG: Ticket AF000DF8448440BD1EE2D1C0DAD6B141B03AD11F () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T09:16:27.419924Z node 3 :TICKET_PARSER ERROR: Ticket AF000DF8448440BD1EE2D1C0DAD6B141B03AD11F: Cannot create token from certificate. 
Client certificate failed verification 2024-11-21T09:16:27.890190Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658975281883899:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.890208Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a2e/r3tmp/tmp3wyga9/pdisk_1.dat 2024-11-21T09:16:27.901943Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13549, node 4 2024-11-21T09:16:27.912375Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.912390Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.912391Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.912432Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.990731Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.990761Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.991857Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.993068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
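Interleaved with the per-test command output, the log carries verdict markers of the form `>> Suite::Test [GOOD]`. A small sketch that tallies verdicts per suite from such text, assuming markers in that shape; entries without a verdict (tests only queued) are skipped, and the helper is illustrative rather than part of the CI tooling:

```python
import re
from collections import Counter

# Verdict markers like ">> TTicketParserTest::AuthorizationModify [GOOD]".
_VERDICT_RE = re.compile(r">> (?P<suite>[\w.]+)::(?P<test>\S+) \[(?P<verdict>\w+)\]")

def tally_verdicts(log_text: str) -> Counter:
    """Count (suite, verdict) pairs, e.g. ('TTicketParserTest', 'GOOD')."""
    return Counter(
        (m.group("suite"), m.group("verdict"))
        for m in _VERDICT_RE.finditer(log_text)
    )
```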
2024-11-21T09:16:27.994633Z node 4 :TICKET_PARSER DEBUG: Ticket 32FC1024ED70B4CA42E67666F66286DEE7DD0275 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] Test command err: 2024-11-21T09:16:25.494050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658965461985388:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.494117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a32/r3tmp/tmpJJtUhc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63116, node 1 2024-11-21T09:16:25.568745Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:25.574594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.574611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.574613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.574657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:25.595506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.595547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.596684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.636050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.639339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.640016Z node 1 :TICKET_PARSER DEBUG: Ticket 3A99FB2E832BD26C509C8434E543217AB251C36C () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T09:16:26.355182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658970057769280:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.355345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a32/r3tmp/tmpABORoJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13538, node 2 2024-11-21T09:16:26.372058Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:26.376824Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.376838Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.376840Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.376877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.455191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.455225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.456565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.458105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:26.468391Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:26.469234Z node 2 :TICKET_PARSER DEBUG: Ticket 92F14F181F27485B915EB4FEDC9AB9A448D6DC11 () has now permanent error message 'Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers' 2024-11-21T09:16:26.469306Z node 2 :TICKET_PARSER ERROR: Ticket 92F14F181F27485B915EB4FEDC9AB9A448D6DC11: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2024-11-21T09:16:26.956227Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658970530601635:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.956271Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a32/r3tmp/tmpi9S3PV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17124, node 3 2024-11-21T09:16:26.976519Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:26.979165Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.979179Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.979180Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.979221Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.056221Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.056257Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.057356Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:27.060683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:27.061917Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:27.062584Z node 3 :TICKET_PARSER DEBUG: Ticket EFAEB6E139AD74DFDBBBA75E5115AD5B6F385116 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T09:16:27.062655Z node 3 :TICKET_PARSER ERROR: Ticket EFAEB6E139AD74DFDBBBA75E5115AD5B6F385116: Cannot create token from certificate. 
Client certificate failed verification 2024-11-21T09:16:27.611458Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658973466485971:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.611639Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a32/r3tmp/tmpouOBsk/pdisk_1.dat 2024-11-21T09:16:27.622223Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28392, node 4 2024-11-21T09:16:27.631372Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.631385Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.631387Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.631436Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.711636Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.711676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.712719Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.714133Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:27.715999Z node 4 :TICKET_PARSER DEBUG: Ticket 1939A662AD955B8CE8B2A8F210905C7AB6C8934F () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T09:16:27.716093Z node 4 :TICKET_PARSER ERROR: Ticket 1939A662AD955B8CE8B2A8F210905C7AB6C8934F: Cannot create token from certificate. 
Client certificate failed verification 2024-11-21T09:16:27.947839Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658972465123414:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.948012Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a32/r3tmp/tmpNGqe6B/pdisk_1.dat 2024-11-21T09:16:27.954707Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63062, node 5 2024-11-21T09:16:27.964693Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.964706Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.964708Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.964759Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:28.049492Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:28.049528Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:28.049847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:28.050576Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:28.051776Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T09:16:28.051793Z node 5 :GRPC_CLIENT DEBUG: [6573f082450] Connect to grpc://localhost:21878 2024-11-21T09:16:28.052293Z node 5 :GRPC_CLIENT DEBUG: [6573f082450] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2024-11-21T09:16:28.054419Z node 5 :GRPC_CLIENT DEBUG: [6573f082450] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2024-11-21T09:16:28.054477Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T09:16:28.054509Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:28.054614Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T09:16:28.054661Z node 5 :GRPC_CLIENT DEBUG: [6573f082450] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2024-11-21T09:16:28.055160Z node 5 :GRPC_CLIENT DEBUG: [6573f082450] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2024-11-21T09:16:28.055192Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "" 2024-11-21T09:16:28.055207Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> 
TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2024-11-21T09:16:26.521381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658969262457939:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.522095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a26/r3tmp/tmp8LJ1ky/pdisk_1.dat 2024-11-21T09:16:26.576297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6534, node 1 2024-11-21T09:16:26.589873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.589886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.589888Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.589925Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.616985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:26.624665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.624695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.627181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.650615Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:26.656584Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2024-11-21T09:16:26.656615Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2024-11-21T09:16:26.892959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658971830791957:2119];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a26/r3tmp/tmpkwBQCw/pdisk_1.dat 2024-11-21T09:16:26.897858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:26.901561Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15079, node 2 2024-11-21T09:16:26.912513Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.912526Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.912529Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.912569Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.992114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.992147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.993226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.994413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:26.996101Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:26.996125Z node 2 :GRPC_CLIENT DEBUG: [47663f0831d0] Connect to grpc://localhost:23261 2024-11-21T09:16:26.996877Z node 2 :GRPC_CLIENT DEBUG: [47663f0831d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:27.003033Z node 2 :GRPC_CLIENT DEBUG: [47663f0831d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2024-11-21T09:16:27.003175Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T09:16:27.003205Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:27.003430Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:27.003476Z node 2 :GRPC_CLIENT DEBUG: [47663f0831d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:27.004080Z node 2 :GRPC_CLIENT DEBUG: [47663f0831d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2024-11-21T09:16:27.004136Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T09:16:27.004147Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2024-11-21T09:16:27.273714Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658973306293041:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.273991Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a26/r3tmp/tmp90P9v9/pdisk_1.dat 2024-11-21T09:16:27.283885Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6194, node 3 2024-11-21T09:16:27.295608Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.295619Z node 3 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2024-11-21T09:16:27.295620Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.295653Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:27.374067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.374099Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.375191Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.375644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:27.377196Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:27.378299Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 202 ... 
de 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:27.386176Z node 3 :GRPC_CLIENT DEBUG: [47663f085390] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:27.386715Z node 3 :GRPC_CLIENT DEBUG: [47663f085390] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:27.386750Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T09:16:27.386799Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T09:16:27.699631Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658973356874929:2260];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:27.699655Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a26/r3tmp/tmpe3aJ1I/pdisk_1.dat 2024-11-21T09:16:27.708129Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10176, node 4 2024-11-21T09:16:27.720308Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:27.720326Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:27.720328Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:27.720373Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:27.801072Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:27.801112Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:27.801500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:27.802033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:27.803399Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2024-11-21T09:16:27.803415Z node 4 :GRPC_CLIENT DEBUG: [47663f083f50] Connect to grpc://localhost:61513 2024-11-21T09:16:27.803620Z node 4 :GRPC_CLIENT DEBUG: [47663f083f50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2024-11-21T09:16:27.805259Z node 4 :GRPC_CLIENT DEBUG: [47663f083f50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2024-11-21T09:16:27.805338Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2024-11-21T09:16:27.805350Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2024-11-21T09:16:27.805354Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2024-11-21T09:16:27.805358Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2024-11-21T09:16:27.805363Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T09:16:27.805407Z node 4 :GRPC_CLIENT DEBUG: [47663f0831d0] Connect to grpc://localhost:13143 2024-11-21T09:16:27.805524Z node 4 :GRPC_CLIENT DEBUG: [47663f0831d0] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T09:16:27.806649Z node 4 :GRPC_CLIENT DEBUG: [47663f0831d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T09:16:27.806742Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T09:16:28.044847Z node 5 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439658976813718078:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:28.045142Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a26/r3tmp/tmplnKDUd/pdisk_1.dat 2024-11-21T09:16:28.054427Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11820, node 5 2024-11-21T09:16:28.066786Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:28.066800Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:28.066802Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:28.066846Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:28.146640Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:28.146671Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:28.147554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:28.147640Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:16:28.149514Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:28.149532Z node 5 :GRPC_CLIENT DEBUG: [47663f082d50] Connect to grpc://localhost:63452 2024-11-21T09:16:28.149727Z node 5 :GRPC_CLIENT DEBUG: [47663f082d50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:28.151589Z node 5 :GRPC_CLIENT DEBUG: [47663f082d50] Status 14 Service Unavailable 2024-11-21T09:16:28.151678Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:28.151688Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:28.151694Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:28.151707Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:28.151766Z node 5 :GRPC_CLIENT DEBUG: [47663f082d50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:28.152347Z node 5 :GRPC_CLIENT DEBUG: [47663f082d50] Status 1 CANCELLED 2024-11-21T09:16:28.152408Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2024-11-21T09:16:28.152423Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2024-11-21T09:16:28.152429Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> TSchemeShardSubDomainTest::CreateDropNbs >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: 
[1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.620345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.620367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.620376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.633229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.633243Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.637871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645707Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.664975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.665660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.667949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.667979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668560Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.668568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.668574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.668584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.668592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.668602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.668607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.668611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.668888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.668914Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.668918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.687449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:28.687475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:28.687530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.687548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.687554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:28.687631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:28.687644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:28.687664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.687673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:28.687680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 
2024-11-21T09:16:28.688086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.688094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.688117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.688131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.688136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:28.688140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T09:16:28.688148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.688154Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T09:16:28.688161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T09:16:28.688165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:28.688170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:28.688174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:28.688179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:28.688182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:28.688223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:28.688232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T09:16:28.688236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:28.688239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:28.688437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.688450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.688454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:28.688458Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:28.688463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T09:16:28.688522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.688530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.688533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:28.688537Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:28.688540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:28.688550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T09:16:28.689348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:28.689429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:16:28.689468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:28.689480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T09:16:28.689528Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:28.689563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.689568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:452:2407] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:28.689622Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.689649Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2024-11-21T09:16:28.689740Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 
Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.689824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.689838Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 16us result status StatusSuccess 2024-11-21T09:16:28.689881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T09:16:28.619684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.621548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.621561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.621572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.621644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632863Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.637981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.646030Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.661873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.661924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661969Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.662033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.662040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.662587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.662598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.662602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.663012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.663365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.663390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.663945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.664348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.664673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2024-11-21T09:16:28.666573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.698667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:16:28.698673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.698676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.698693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T09:16:28.698714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.698721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.698931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.699082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:28.699203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.699209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.699227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.699243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.699246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:28.699249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:28.699279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.699283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T09:16:28.699289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 
2024-11-21T09:16:28.699291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.699294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:28.699296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.699299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:28.699301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:28.699309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:28.699312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:28.699314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:28.699319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:28.699397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.699406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.699410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.699414Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:28.699418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.699485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.699490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.699492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.699495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:28.699497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.699503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:28.699673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2024-11-21T09:16:28.699680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.699695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:28.699745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:28.699750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.699757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.700161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.700637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.700662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:28.700675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.700713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:28.700718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:28.700769Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.700784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.700788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:28.700844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.700864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 29us result status StatusPathDoesNotExist 2024-11-21T09:16:28.700907Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.700968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.700984Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2024-11-21T09:16:28.701056Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632863Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645771Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.665006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.665695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.667794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:16:28.667811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.667818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.668629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.668633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.668645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.668654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.668673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.668680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.668684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.669175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.669212Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.669218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.669235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:28.679377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T09:16:28.679429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679435Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.679466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.679549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.679566Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:16:28.679570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:28.679869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.679887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:16:28.679891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:28.679902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:16:28.680037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:28.680057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T09:16:28.680327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.680346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.680353Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:16:28.680373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:28.680393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.680399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:28.680644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.680718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:28.681046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.681073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:28.681084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:28.681093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T09:16:28.681155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:28.681170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:28.681174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.681179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:28.681184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.681188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2024-11-21T09:16:28.681191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:28.681201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:28.681206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:28.681210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T09:16:28.681214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T09:16:28.681284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.681292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.681296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.681303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:16:28.681306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:28.681385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.681393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.681397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.681400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:28.681403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:28.681410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:28.682113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.682379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T09:16:28.683023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.683091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2024-11-21T09:16:28.683118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::PointJoin+EnableKqpDataQueryStreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.628126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.628152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.628157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.628162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.628174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.628178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.628187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.628271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.638415Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2024-11-21T09:16:28.638429Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.640476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.641117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.641144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.642389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.642556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.642632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.642688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.643464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645744Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.665006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2024-11-21T09:16:28.665661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.667861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.667892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.669521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.669530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.669557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.669562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.669636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.669643Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.669653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.669657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.669662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.669667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.669672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.669675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.669684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.669689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.669692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.669993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.670010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.670015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.670020Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.670025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.670040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
pose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.761118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:28.761149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:28.761302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.761331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.761341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:28.761469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:28.761486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:28.761523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.761537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:28.761549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.762135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.762153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.762189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.762212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.762219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:28.762226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:16:28.762314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.762322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 
ProgressState 2024-11-21T09:16:28.762332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T09:16:28.762336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:28.762341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:28.762346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:28.762354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:28.762358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:28.762433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:28.762439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T09:16:28.762443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:28.762446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:28.762591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.762603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.762608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:28.762612Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:28.762616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.762690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.762699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:28.762702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:28.762706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:28.762710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:28.762718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T09:16:28.763652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 100 2024-11-21T09:16:28.763695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:16:28.763748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:28.763764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T09:16:28.763825Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.763844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:475:2425] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:28.763906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763935Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 38us result status StatusSuccess 2024-11-21T09:16:28.764024Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.764093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.764109Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T09:16:28.764150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.633009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.633043Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T09:16:28.637928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645676Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.661360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.661428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.661569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.661576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.662315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.662327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.662331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.662797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.663232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.663256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.663757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.664106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.664671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666614Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
2024-11-21T09:16:28.691207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.691210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.691214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:28.691217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:28.691224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:28.691665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:28.691676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:28.691680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:28.691762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.692067Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T09:16:28.693060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.693107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:28.693153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.693180Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T09:16:28.693247Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T09:16:28.693410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:28.693443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T09:16:28.693522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:28.693540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2024-11-21T09:16:28.693678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:28.693687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.693706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:28.693738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:28.693743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.693750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.694357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:28.694369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:28.694460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:28.694467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:28.694476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:28.694481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:28.694836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:28.694858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.694901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:28.694906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:28.694956Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.694969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.694974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:492:2447] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:28.695029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.695053Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusPathDoesNotExist 2024-11-21T09:16:28.695087Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.695202Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.695222Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2024-11-21T09:16:28.695271Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T09:16:28.695342Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:16:28.695359Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:16:28.695367Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T09:16:28.695422Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.695437Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status 
StatusSuccess 2024-11-21T09:16:28.695473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632933Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.636176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.637112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.637147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.639038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645911Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.664556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.665421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665436Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.668000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.668019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.668574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.668577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.668584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.668590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.668597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.668601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.668604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.668848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.668865Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.668868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
for txId: 101 at step: 5000003 2024-11-21T09:16:28.679202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:28.679301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:28.679307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:28.679325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:28.679333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:28.679340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:28.679697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.679728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:28.679743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:28.679751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:28.679801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.679807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:28.679816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:28.679820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.679825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:28.679830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.679834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:28.679838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:28.679846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:28.679851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:28.679854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T09:16:28.679858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T09:16:28.679945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.679958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.679962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:16:28.679966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.680047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.680056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.680059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.680063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:28.680069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:28.680077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:28.680751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.681059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T09:16:28.681128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:28.681147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.681169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send 
EvNotifyTxCompletion 2024-11-21T09:16:28.681172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:28.681231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.681253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:327:2319] 2024-11-21T09:16:28.681303Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.681313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:28.681372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.681399Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 39us result status StatusSuccess 2024-11-21T09:16:28.681511Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.682311Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.682342Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 38us result status StatusSuccess 2024-11-21T09:16:28.682416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632917Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.637775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.637971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645791Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.665180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.665951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665963Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.668017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.668023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.668040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668659Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.668668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.668671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.668681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.668689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.668720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.668725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.668729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.669002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.669035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.669040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.669053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:28.673480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:16:28.673525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T09:16:28.673578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.673595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.673600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:28.673645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:28.673655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:28.673672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.673679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.673687Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.673792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.674115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.674123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.674142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:28.674155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.674159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:28.674163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:28.674206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.674211Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:28.674218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:28.674222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.674226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:28.674231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.674235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:28.674238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:28.674247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:28.674251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:28.674255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:28.674260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:28.674368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.674378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.674382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.674385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:28.674389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.674482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.674491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.674494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.674497Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:28.674501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.674510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:28.675395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.675465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T09:16:28.676247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.676281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2024-11-21T09:16:28.676286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2024-11-21T09:16:28.676322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.676329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.676870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.676894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.676949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:28.676964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T09:16:28.676984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:28.676987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:16:28.677075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.677094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.677099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:304:2296] 2024-11-21T09:16:28.677113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:28.677130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.677134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:304:2296] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.644531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.644551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.644555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.644559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.644571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.644574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.644582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.644655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.655232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T09:16:28.655257Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.658494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.659321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.659365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.661481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.661695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.661808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.663082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.663537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.663545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.663558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665271Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.682609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.682683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.682765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.682853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.682861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T09:16:28.683508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.683520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.683525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.683877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.683890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.684190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.684197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.684202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.684225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.684820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.685203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.685243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.685399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.685420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.685430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.685494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.685501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.685522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.685532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.685911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.685917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.685942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.685946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.686010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.686016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.686025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.686029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.686034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.686039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.686043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.686047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.686057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.686062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.686066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.686328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.686339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.686344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.686348Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.686353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.686365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:28.775872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:28.776069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776121Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:16:28.776130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:16:28.776133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:28.776136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:16:28.776145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:632:2561] message: TxId: 102 2024-11-21T09:16:28.776149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:28.776152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:16:28.776154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:16:28.776168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:28.776532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.776542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:633:2562] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2024-11-21T09:16:28.777120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.777147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 
2024-11-21T09:16:28.777539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2024-11-21T09:16:28.777602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777620Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 20us result status StatusSuccess 2024-11-21T09:16:28.777676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 13us result status StatusSuccess 2024-11-21T09:16:28.777777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } 
Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777838Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 8us result status StatusSuccess 2024-11-21T09:16:28.777856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777882Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777888Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 7us result status StatusSuccess 2024-11-21T09:16:28.777908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632863Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T09:16:28.637742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.637925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645668Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.663555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.663610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.663718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.663724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.664399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.664411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T09:16:28.664415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.664771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.665225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.665802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.666241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.666289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.666445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.666540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.667052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.667089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.667174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667182Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.667193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.667197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.667206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.667213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.667224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.667228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.667232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.667512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.667529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.667534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.667538Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.667543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.667556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.822082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:16:28.822088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T09:16:28.822486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:28.822506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:28.823109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.823138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.823207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.823214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:16:28.823225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:16:28.823229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:28.823235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:16:28.823248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:488:2439] message: TxId: 103 2024-11-21T09:16:28.823254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:28.823259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:16:28.823263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:16:28.823282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:28.823663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.823674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:489:2440] TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:28.823764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.823804Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusSuccess 2024-11-21T09:16:28.823905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.823989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.824011Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 23us result status StatusSuccess 2024-11-21T09:16:28.824080Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.824132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.824145Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 14us result status StatusSuccess 2024-11-21T09:16:28.824180Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.824239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.824257Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 19us result status StatusSuccess 2024-11-21T09:16:28.824303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: 
false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.633015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.633047Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.636981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.637689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.637718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638986Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.639172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645663Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.663744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.663787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.663883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.663889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.664492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.664503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T09:16:28.664507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.664928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.665323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.665884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.666300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.666340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.666484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.666568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.667718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.667753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.667822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667829Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.667837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.667843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.667852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.667859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.667869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.667873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.667877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.668161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.668180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.668184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.668188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:16:28.768336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T09:16:28.768646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.768661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:28.768665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:28.768669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:28.768674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:28.768685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:16:28.768734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 197 } } 2024-11-21T09:16:28.768743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2024-11-21T09:16:28.768759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 197 } } 2024-11-21T09:16:28.768770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 197 } } 2024-11-21T09:16:28.769147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:16:28.769159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2024-11-21T09:16:28.769173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 
2024-11-21T09:16:28.769178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:16:28.769186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:16:28.769193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.769197Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.769202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2024-11-21T09:16:28.769206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:16:28.769830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.769851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:28.770044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:28.770172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:28.770176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.770182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:16:28.770193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:272:2264] message: TxId: 101 2024-11-21T09:16:28.770198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:28.770203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:28.770206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:28.770226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:28.770611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.770623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:273:2265] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:28.770726Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770759Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusSuccess 2024-11-21T09:16:28.770848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.770986Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 29us result status StatusSuccess 2024-11-21T09:16:28.771063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632942Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T09:16:28.638915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.642000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645786Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.664566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.664771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.664779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.665427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.665438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.665443Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.665902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.666387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.666411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.667538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.667592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.667780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.667909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.667951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668537Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668625Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.668635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.668639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.668649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.668653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.668657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.668668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.668673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.668677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.668996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.669041Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.669045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.669062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
NFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.756554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:28.756568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:601:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.756695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.756716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.756719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:599:2522] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:28.756790Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.756818Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 45us result status StatusPathDoesNotExist 2024-11-21T09:16:28.756852Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.757033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:605:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:608:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:609:2058] recipient: [1:607:2527] Leader for TabletID 72057594046678944 is [1:610:2528] sender: [1:611:2058] recipient: [1:607:2527] 2024-11-21T09:16:28.761665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.761690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.761696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.761702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T09:16:28.761707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.761711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.761720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.761769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.762679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.762892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.762923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.762968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.762973Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.763027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.763080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.763397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.764512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.764526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.764535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.764542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.764548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.764965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:610:2528] sender: [1:668:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.795695Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.795740Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 74us result status StatusPathDoesNotExist 2024-11-21T09:16:28.795774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.795849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.795881Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 32us result status 
StatusSuccess 2024-11-21T09:16:28.795941Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.633121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.633139Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.637920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.659447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.660331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.660435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.660539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.660547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.661386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.661399Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.661403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.661753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.662138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.662895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.663490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.663876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.664650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666594Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6:28.780489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2024-11-21T09:16:28.780576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:28.780595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:28.780680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T09:16:28.780703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:28.780767Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T09:16:28.780953Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2024-11-21T09:16:28.781235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:28.781271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T09:16:28.781684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409549 2024-11-21T09:16:28.781797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:16:28.781835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:28.781898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:28.782036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:28.782045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.782066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:28.782142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:16:28.782150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T09:16:28.782165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T09:16:28.782168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:7 tabletId 72075186233409552 2024-11-21T09:16:28.782864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:28.782934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:28.782941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:28.782954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.782992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:28.782997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:28.783006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:28.783009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:28.783233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T09:16:28.783242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T09:16:28.783254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:28.783258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:28.783320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:16:28.783326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:16:28.783369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:28.783706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T09:16:28.783758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:28.783767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T09:16:28.783781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:28.783784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:16:28.783844Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:28.783867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.783872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:720:2605] 2024-11-21T09:16:28.783887Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:28.783900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:28.783903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:720:2605] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T09:16:28.783966Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.783994Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusPathDoesNotExist 2024-11-21T09:16:28.784032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.784075Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.784088Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 14us result status StatusPathDoesNotExist 2024-11-21T09:16:28.784104Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:28.784142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:28.784158Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 18us result status StatusSuccess 
2024-11-21T09:16:28.784237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.909682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.909712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.909718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.909725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.909741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.909745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.909755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.909838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.921550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.921572Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.924329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.925096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.925128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T09:16:28.926604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.926821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.926911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.926977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.928127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.928412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.928426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.928465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.928474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.928480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.928493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.929906Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.948623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.948710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.948777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.948851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.948861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.949626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.949652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.949697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.949706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.949711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.949716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.950123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.950624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.950653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.951189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.951596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.951641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.951810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.951831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.951839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.951880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.951885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.951906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.951915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.952337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.952342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.952375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.952378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.952444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.952450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.952461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.952465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.952471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.952476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.952481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.952485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.952496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.952501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.952505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.952731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.952744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.952749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.952754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.952758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.952773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
d: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:29.027372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:29.027375Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:29.027377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:29.027474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:29.027484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:29.027488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:29.027492Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:29.027496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:29.027504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:16:29.027796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.027807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.027813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.027817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.028019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:29.028092Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:29.028283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.028321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:29.028368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2024-11-21T09:16:29.028492Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 
2024-11-21T09:16:29.028516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:29.028537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:29.028558Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:29.028576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:29.028592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T09:16:29.028966Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T09:16:29.029044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:16:29.029078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2024-11-21T09:16:29.029168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.029173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:29.029182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:29.029253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.029257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.029272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:29.029306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:29.029658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:29.029669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:29.029685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:29.029690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:29.029702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:29.029706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:29.030134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:16:29.030145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:16:29.030180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:29.030189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:29.030196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.030200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.030209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.030457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:16:29.030499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:29.030504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:16:29.030560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:29.030574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.030579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:536:2491] TestWaitNotification: OK eventTxId 102 2024-11-21T09:16:29.031908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.031951Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 60us result status StatusPathDoesNotExist 2024-11-21T09:16:29.031997Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 
18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:29.032077Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.032090Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 15us result status StatusPathDoesNotExist 2024-11-21T09:16:29.032105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.910119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.910141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.910147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.910152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.910162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.910166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.910174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.910249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.921571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.921588Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.924704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T09:16:28.925598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.925642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.927439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.927680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.927767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.927842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.929039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.929296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.929309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.929349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.929358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.929365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.929378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.930703Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.946953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.947035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.947102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.947179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.947187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.947839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.947870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.947908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:16:28.947918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.947923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.947928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.948432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.948448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.948453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.948876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.948886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.948892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.948899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.949518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.949967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.950020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.950158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.950227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.950232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.950254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.950263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T09:16:28.950772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.950832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.950936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.950945Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.950958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.950963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.950970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.950976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.950981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.950985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.950999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.951005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.951010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.951331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.951350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.951355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.951360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.951365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.951383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
UG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T09:16:29.043402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:29.043959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.043971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:29.044025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:29.044042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.044046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:29.044050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T09:16:29.044121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.044127Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:29.044137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:29.044140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:29.044145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:29.044148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:29.044152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:29.044155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:29.044187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T09:16:29.044192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2024-11-21T09:16:29.044195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T09:16:29.044197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T09:16:29.044337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.044347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.044352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 
2024-11-21T09:16:29.044355Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T09:16:29.044361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:29.044699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.044713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.044717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:29.044721Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:29.044725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T09:16:29.044737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T09:16:29.044742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:908:2742] 2024-11-21T09:16:29.045333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:29.045402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:29.045414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.045418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:909:2743] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:29.045528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.045559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 40us result status StatusSuccess 2024-11-21T09:16:29.045639Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.045712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.045728Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 18us result status StatusSuccess 2024-11-21T09:16:29.045764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.045803Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.045812Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 12us result status StatusSuccess 2024-11-21T09:16:29.045843Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:29.139255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:29.139286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.139292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:29.139298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:29.139315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:29.139319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:29.139330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.139404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:29.149737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:29.149756Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:29.152375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:29.152456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T09:16:29.152486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:29.154617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:29.154863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:29.154961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.155028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:29.156069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.156445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.156457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.156502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:29.156510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.156516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:29.156528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.157696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:29.171706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.171811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.171900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:29.171992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:29.171998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.172920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.172957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:29.173013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.173037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:29.173043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:29.173048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:29.173594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.173609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:29.173614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:29.173973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.173984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.173990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.173997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.174667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.175100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:29.175163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:29.175383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.175411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.175423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.175492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:29.175501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.175535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.175549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:29.175990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T09:16:29.175999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.176066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.176072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:29.176175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.176183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:29.176196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:29.176201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.176225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:29.176233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.176239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:29.176244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:29.176258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.176265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:29.176271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:29.176668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.176685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.176691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:29.176698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:29.176703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.176721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.186692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.186696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:29.186700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:29.186705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:29.186718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:16:29.186909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:29.186937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T09:16:29.187222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.187243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.187251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:16:29.187261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.187264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:29.187288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T09:16:29.187308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.187315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:29.187597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:29.187684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:29.187954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.187961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.187982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:29.188003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.188007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:29.188011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:29.188063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.188072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T09:16:29.188079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:29.188082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:29.188088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:29.188092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:29.188097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:29.188103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:29.188114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:29.188119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:29.188122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:29.188126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:29.188201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.188232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.188237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:29.188240Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:29.188244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.188327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.188334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:29.188337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:29.188341Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:29.188344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:29.188353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:29.188384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.188389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.188402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:29.188448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.188453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.188461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.189185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:29.189485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:29.189504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:29.189515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T09:16:29.189562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:29.189568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:29.189632Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:29.189648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.189652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2329] TestWaitNotification: 
OK eventTxId 101 2024-11-21T09:16:29.189724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.189748Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2024-11-21T09:16:29.189788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.629160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.629180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.629185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.629190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.629199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.629202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.629210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.629273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.639525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.639540Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T09:16:28.642040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.642708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.642736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.643906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.644107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.644180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.645346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.645625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.645630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.645641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.646802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.662431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.662495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.662617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.662624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.663317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.663329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.663333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.663797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.664251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.664268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.664281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.664802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.665148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.665176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666739Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.244747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2024-11-21T09:16:29.244779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2024-11-21T09:16:29.244886Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.244909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.244917Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId#108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2024-11-21T09:16:29.244979Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2024-11-21T09:16:29.245042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.245060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T09:16:29.245635Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.245644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.245692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:16:29.245723Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.245732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:333:2311], at schemeshard: 72057594046678944, txId: 108, path id: 1 2024-11-21T09:16:29.245738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:333:2311], at schemeshard: 72057594046678944, txId: 108, path id: 5 2024-11-21T09:16:29.245753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.245759Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId#108:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:29.245766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts 
operationId#108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2024-11-21T09:16:29.246041Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T09:16:29.246058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T09:16:29.246063Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T09:16:29.246068Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:16:29.246074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:29.246386Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T09:16:29.246401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T09:16:29.246405Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T09:16:29.246410Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:16:29.246414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:16:29.246427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2024-11-21T09:16:29.246785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2024-11-21T09:16:29.246922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T09:16:29.247220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T09:16:29.258247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2024-11-21T09:16:29.258274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:29.258303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2024-11-21T09:16:29.258315Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 
2024-11-21T09:16:29.258407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2024-11-21T09:16:29.258413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:29.258423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2024-11-21T09:16:29.258433Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 108 2024-11-21T09:16:29.259161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.259456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.259492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.259501Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2024-11-21T09:16:29.259516Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T09:16:29.259523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T09:16:29.259530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2024-11-21T09:16:29.259545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:485:2436] message: TxId: 108 2024-11-21T09:16:29.259555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T09:16:29.259560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T09:16:29.259565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T09:16:29.259600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:29.260057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.260071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:855:2782] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2024-11-21T09:16:29.260883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.260939Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.261044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:29.261575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.261610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2024-11-21T09:16:29.261689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2024-11-21T09:16:29.261696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2024-11-21T09:16:29.261787Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2024-11-21T09:16:29.261808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.261812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:893:2820] TestWaitNotification: OK eventTxId 109 >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SimultaneousDefine ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.744710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.744733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.744737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.744741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.744753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.744756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.744763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.744823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.753310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.753330Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.755611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.756229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.756264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T09:16:28.757721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.757929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.758039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.758105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.759091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.759397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.759408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.759436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.759441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.759445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.759456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.760357Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.774857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.774931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.774994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.775068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.775076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.775663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.775683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.775711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.775718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.775720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.775724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.776015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.776286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.776295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.776299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.776695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.776948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.776980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.777116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.777187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.777191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.777208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.777215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.777544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.777569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.777622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.777626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.777632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.777635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.777638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.777641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.777643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.777645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.777652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.777655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.777657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.777839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.777847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.777850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.777852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.777855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.777864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0 2024-11-21T09:16:29.419631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Source { RawX1: 429 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T09:16:29.419639Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:16:29.419648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 429 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T09:16:29.419663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.419668Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.419673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2024-11-21T09:16:29.419678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:16:29.419683Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2024-11-21T09:16:29.420146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.420190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.420198Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2024-11-21T09:16:29.420222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:29.420228Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2024-11-21T09:16:29.420238Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T09:16:29.420243Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2024-11-21T09:16:29.420824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.420840Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T09:16:29.420856Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T09:16:29.420861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T09:16:29.420867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T09:16:29.420882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:652:2566] message: TxId: 106 
2024-11-21T09:16:29.420889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T09:16:29.420895Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T09:16:29.420899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T09:16:29.420937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:16:29.420942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:29.421433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.421448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:809:2712] TestWaitNotification: OK eventTxId 106 2024-11-21T09:16:29.421603Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.421657Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 60us result status StatusSuccess 2024-11-21T09:16:29.421764Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T09:16:29.421856Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.421879Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 24us result status StatusSuccess 2024-11-21T09:16:29.421933Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.421997Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.422016Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 21us result status StatusSuccess 2024-11-21T09:16:29.422077Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 
SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:29.543778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:29.543811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.543817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:29.543824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:29.543841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:29.543846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:29.543858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.543961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:29.554549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:29.554574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:29.556999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:29.557597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:29.557627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T09:16:29.558837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:29.558982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:29.559054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.559123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:29.559962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.560272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.560283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.560327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:29.560335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.560341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:29.560357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.561589Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:29.579462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.579561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.579646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:29.579727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:29.579736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.580639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.580667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:29.580718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.580729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:29.580735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:29.580740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:29.581126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.581137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:29.581141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:29.581460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.581469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.581475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.581482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.582131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.582495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:29.582547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:29.582751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.582774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.582784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.582841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:29.582848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.582878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.582891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:29.583328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.583336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.583381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.583386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:29.583476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.583483Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:29.583495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:29.583500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.583506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:29.583512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.583517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:29.583521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:29.583532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.583538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:29.583542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:29.583873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.583887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.583892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:29.583898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:29.583903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.583917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
46678944 2024-11-21T09:16:29.664530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.664534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.664539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:29.664791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:29.665048Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T09:16:29.665124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T09:16:29.665180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T09:16:29.665264Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:29.665397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.665425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:29.665492Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T09:16:29.665536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:29.665557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:29.665662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2024-11-21T09:16:29.665823Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T09:16:29.665975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T09:16:29.666002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2024-11-21T09:16:29.666095Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:29.666221Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 
2024-11-21T09:16:29.666251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:29.666274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2024-11-21T09:16:29.666636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:16:29.666667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2024-11-21T09:16:29.667095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.667105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.667129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:29.667180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:29.667186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:29.667196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.667360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:16:29.667369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T09:16:29.667389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:29.667394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:29.667988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:29.667997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:29.668012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T09:16:29.668016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T09:16:29.668025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:29.668029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:29.668053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:16:29.668058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:16:29.668413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2024-11-21T09:16:29.668489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:29.668496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T09:16:29.668512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T09:16:29.668515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T09:16:29.668584Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.668612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:655:2559] 2024-11-21T09:16:29.668631Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:29.668646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:655:2559] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:29.668721Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668760Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 55us result status StatusPathDoesNotExist 2024-11-21T09:16:29.668804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668851Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:29.668872Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 
2024-11-21T09:16:29.668938Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632854Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645729Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.662238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.662293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.662393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.662398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.663051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.663063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.663068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.663430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.663761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.663774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.663787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.664333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.664759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.664801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T09:16:28.666091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666231Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
37 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.727348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.727363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2024-11-21T09:16:29.727436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2024-11-21T09:16:29.727451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2024-11-21T09:16:29.727458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2024-11-21T09:16:29.727475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2024-11-21T09:16:29.727480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2024-11-21T09:16:29.727603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:29.727610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.727626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2024-11-21T09:16:29.727637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2024-11-21T09:16:29.728179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2024-11-21T09:16:29.728220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2024-11-21T09:16:29.728266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.728272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:29.728314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2024-11-21T09:16:29.728328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.728333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1015:2885], at schemeshard: 72057594046678944, txId: 137, path id: 2 2024-11-21T09:16:29.728339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:1015:2885], at schemeshard: 72057594046678944, txId: 137, path id: 10 2024-11-21T09:16:29.728426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.728435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet72057594046678944 2024-11-21T09:16:29.728472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2024-11-21T09:16:29.728619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T09:16:29.728630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T09:16:29.728634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2024-11-21T09:16:29.728640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2024-11-21T09:16:29.728645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2024-11-21T09:16:29.728747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T09:16:29.728756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T09:16:29.728760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2024-11-21T09:16:29.728764Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2024-11-21T09:16:29.728768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2024-11-21T09:16:29.728776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2024-11-21T09:16:29.729634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2024-11-21T09:16:29.729669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2024-11-21T09:16:29.729676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2024-11-21T09:16:29.729845Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2024-11-21T09:16:29.729904Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2024-11-21T09:16:29.729937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T09:16:29.729944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2024-11-21T09:16:29.729959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T09:16:29.729968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:16:29.729974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T09:16:29.729994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2024-11-21T09:16:29.730395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2024-11-21T09:16:29.730556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2024-11-21T09:16:29.731144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.731204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.731212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#137:0 ProgressState at tabletId# 72057594046678944 2024-11-21T09:16:29.731223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2024-11-21T09:16:29.731301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 965 RawX2: 4294970141 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2024-11-21T09:16:29.732082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2024-11-21T09:16:29.732123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TSchemeShardSubDomainTest::CreateDropSolomon >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:29.967778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:29.967797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.967800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:29.967804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:29.967814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2024-11-21T09:16:29.967817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:29.967823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.967877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:29.975630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:29.975652Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:29.978030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:29.978529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:29.978553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:29.979739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:29.979888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:29.979950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.980007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:29.980974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.981243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.981254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.981293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:29.981299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.981305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:29.981320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.982456Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:29.995086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.995159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.995218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:29.995294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:29.995301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.996052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.996085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:29.996145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.996157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:29.996162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:29.996170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:29.996745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.996759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:29.996764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:29.997251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.997264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.997270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.997277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.997829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.998289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:29.998337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:29.998472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.998495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.998502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:16:29.998542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:29.998548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.998568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.998576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:29.998966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.998971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.999000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.999003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:29.999063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.999068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:29.999076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:29.999079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.999083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:29.999086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.999089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:29.999092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:29.999099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.999103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:29.999106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:29.999324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.999334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.999337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:29.999340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:29.999343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.999353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 46 2024-11-21T09:16:30.011613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409547 2024-11-21T09:16:30.011615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409548 2024-11-21T09:16:30.019744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:30.019793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2024-11-21T09:16:30.019800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T09:16:30.019807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2024-11-21T09:16:30.020413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.020828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:16:30.020852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2024-11-21T09:16:30.020857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T09:16:30.020862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2024-11-21T09:16:30.020940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2024-11-21T09:16:30.020947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2024-11-21T09:16:30.020949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T09:16:30.020952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2024-11-21T09:16:30.020956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T09:16:30.021480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.021503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.021516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2024-11-21T09:16:30.021520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.021524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.021529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.021553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.021857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:30.021879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T09:16:30.021943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.021957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.021961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.022003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:30.022009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.022036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:30.022045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:30.022391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.022396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.022428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.022431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:30.022506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.022513Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 
2024-11-21T09:16:30.022524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:30.022528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.022534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:30.022539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.022544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:30.022548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:30.022579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:30.022584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2024-11-21T09:16:30.022586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2024-11-21T09:16:30.022660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:30.022668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:30.022671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:30.022674Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:16:30.022677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:30.022684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T09:16:30.022687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:305:2297] 2024-11-21T09:16:30.023163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:30.023177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.023181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:312:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:30.023258Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.023282Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 30us result status StatusSuccess 2024-11-21T09:16:30.023345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.434485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.434516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.434522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.434528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.434544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.434548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.434559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.434663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.442802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.442820Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.445051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.445750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.445784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.446906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.447072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.447161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.447237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.448194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.448533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.448543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.448585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.448594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.448599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.448617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.449695Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.463306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.463381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.463441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.463504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.463512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T09:16:30.464143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.464164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.464199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.464234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.464239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.464244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.464667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.464678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.464683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.465010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.465016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.465031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.465036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.465541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.465906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.465954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.466096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.466116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.466125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.466174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T09:16:30.466179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.466205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.466216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.466624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.466631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.466658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.466662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.466740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.466746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.466759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.466763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.466769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.466774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.466779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.466783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.466794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.466799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.466804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.467081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.467093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.467098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.467103Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.467107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T09:16:30.467120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:30.467689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:30.467777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T09:16:30.467912Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:30.469162Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:30.469718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.469768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.469783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.469947Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:30.470558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.470588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2024-11-21T09:16:30.470692Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:16:30.470734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:30.470752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T09:16:30.470809Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:30.470824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.470828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:30.470883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.470903Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusPathDoesNotExist 2024-11-21T09:16:30.470949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.307072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.307097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.307103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.307112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.307128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.307132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.307143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.307217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.319302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.319327Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.322299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.323141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.323176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.324786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.325013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.325124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2024-11-21T09:16:30.325208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.326167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.326454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.326464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.326501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.326509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.326515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.326530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.327859Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.342012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.342086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.342147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.342216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.342222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.342811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.342831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.342861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.342867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.342870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.342874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.343210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.343218Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.343221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.343641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.343658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.343664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.343684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.344351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.344821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.344877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.345076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.345101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.345110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.345168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.345176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.345211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.345224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.345727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.345735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.345794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.345801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.345900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.345909Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.345923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.345927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.345933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.345939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.345944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.345948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.345960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.345966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.345970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.346303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.346316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.346322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.346326Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.346331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.346341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
de 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.415529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.415561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.415875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:30.415902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:30.415970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.415991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.415997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.416100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:30.416108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.416137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.416148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T09:16:30.416156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:16:30.416585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.416594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.416631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.416648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.416653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:30.416657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T09:16:30.416727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.416733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T09:16:30.416742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T09:16:30.416745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.416749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:30.416752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.416756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:30.416758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:30.416787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T09:16:30.416791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2024-11-21T09:16:30.416793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:30.416795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:30.416874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.416880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.416883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.416888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:30.416890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.416981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.416988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.416990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.416992Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:30.416994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T09:16:30.416999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2024-11-21T09:16:30.417002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:563:2474] 2024-11-21T09:16:30.417441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:30.417627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:30.417642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.417646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:564:2475] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:30.417735Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.417767Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2024-11-21T09:16:30.417856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:30.418433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.418459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.418473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, 
propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2024-11-21T09:16:30.418841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.418860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.455571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.455600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.455606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.455611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.455625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.455629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.455640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.455714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.466937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.466958Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.469635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.470422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.470454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T09:16:30.471912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.472090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.472180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.472270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.473305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.473574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.473586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.473622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.473630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.473636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.473650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.474895Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.493486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.493567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.493626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.493700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.493708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.494381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.494394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.494399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.494748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.495076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.495083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.495088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.495094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.495755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.496146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.496186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.496378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.496400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.496410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.496458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.496465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.496487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.496499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.496891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.496898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.496923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.496928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.496992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.496998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.497008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.497012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.497031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.497036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.497041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.497045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.497055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.497060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.497065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.497351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.497363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.497368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.497373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.497377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.497389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1T09:16:30.526124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.526237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:16:30.526614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.526641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.526661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T09:16:30.526670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:16:30.526680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2024-11-21T09:16:30.526693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:16:30.526697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:30.526702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:16:30.526706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:30.526710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:16:30.526713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:16:30.526742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:30.526747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:16:30.526750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:30.526754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:30.526907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.526918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.526922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 
2024-11-21T09:16:30.526927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:30.526931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.527211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.527227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.527231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:30.527235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:30.527240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:30.527252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:16:30.527615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.527626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.527630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.527951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.528019Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:30.528072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.528145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2024-11-21T09:16:30.528446Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T09:16:30.528488Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:30.528519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:30.528554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 
72075186233409548 2024-11-21T09:16:30.528717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:30.528740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.528807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:30.528812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:30.528829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:16:30.528916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:30.528920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:30.528929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.529014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.529365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:30.529377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:30.529815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:30.529826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:30.529840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:30.529845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:30.529877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:30.529887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:16:30.529935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:30.529940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:16:30.529995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:30.530011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.530015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:520:2475] 
TestWaitNotification: OK eventTxId 102 2024-11-21T09:16:30.530079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.530108Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 38us result status StatusPathDoesNotExist 2024-11-21T09:16:30.530154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.509842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.509862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.509866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.509869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.509881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.509884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.509891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.509947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.518909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.518927Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.521391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.522183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.522214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.523647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.523876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.523961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.524028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.525307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.525663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.525669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.525681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526960Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.542159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.542229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.542362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.542370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.542973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.542984Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.542987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.543280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.543506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.543518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.543900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.544181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.544244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.544374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.544439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.544444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.544466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.544478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.544801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.544835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.544898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.544910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.544913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.544917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.544920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.544922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.544925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.544932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.544936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.544939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.545138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.545146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.545149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.545152Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.545155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.545165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
xecute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.625705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:30.625710Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2024-11-21T09:16:30.625716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:30.625814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.625824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.625827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:30.625831Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:30.625835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:30.625843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:16:30.626025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 310 } } 2024-11-21T09:16:30.626036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:30.626052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 310 } } 2024-11-21T09:16:30.626064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 310 } } 2024-11-21T09:16:30.626389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:16:30.626398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:30.626412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:16:30.626417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:16:30.626424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:16:30.626434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.626438Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.626443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:16:30.626448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:16:30.627133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.627164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.627177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.627194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.627237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.627247Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:16:30.627258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:16:30.627261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:30.627266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:16:30.627278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:305:2297] message: TxId: 102 2024-11-21T09:16:30.627284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:30.627289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:16:30.627293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:16:30.627314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:30.627716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.627728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:533:2475] 
TestWaitNotification: OK eventTxId 102 2024-11-21T09:16:30.627824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.627861Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 44us result status StatusSuccess 2024-11-21T09:16:30.627955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.628065Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.628091Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 28us result status StatusSuccess 2024-11-21T09:16:30.628180Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.489893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.489921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.489927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.489932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.489944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.489947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.489955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.490018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.498987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.499002Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.501343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.502067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.502103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.503472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.503652Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.503734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.503796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.504689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.504909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.504916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.504940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.504945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.504950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.504962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.506173Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.518857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.518920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.518981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.519061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.519066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.519691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.519736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.519775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.519783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.519786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.519789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T09:16:30.520243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.520254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.520259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.520588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.520597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.520602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.520608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.521240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.521622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.521661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.521821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.521843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.521851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.521892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.521897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.521915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.521924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.522259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.522265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.522286Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.522289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.522342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.522347Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.522353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.522355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.522359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.522362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.522364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.522366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.522373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.522376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.522378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.522609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.522620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.522623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.522625Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.522629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.522641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:30.523145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:30.523206Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T09:16:30.523365Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:30.524388Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:30.525038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" 
TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.525078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525219Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:30.525928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2024-11-21T09:16:30.526077Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:16:30.526122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:30.526139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T09:16:30.526200Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.526222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:30.526282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526304Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusPathDoesNotExist 2024-11-21T09:16:30.526351Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 
18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.512621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.512639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.512643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.512646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.512656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.512659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.512665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.512709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.520743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.520757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.522616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.523175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.523194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.524847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.525062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.525148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.525223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.526129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.526415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.526422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:16:30.526428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.526440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.527540Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.541133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.541222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.541297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.541390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.541399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.541975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.542048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.542063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.542069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.542480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.542880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.542893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.542898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.543350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.543654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.543705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.543868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.543901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.543950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.543956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.543982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.543993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.544432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.544481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.544576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.544583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.544595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.544599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.544605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.544611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.544615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T09:16:30.544619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.544630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.544635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.544640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.544847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.544859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.544864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.544868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.544873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.544886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... T09:16:30.550013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.550267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:30.550290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:30.550345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.550361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.550367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.550430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:30.550437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.550454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.550461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.550468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:16:30.550858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.550864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.550887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.550900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.550904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:30.550908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T09:16:30.550961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.550967Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T09:16:30.550988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T09:16:30.550996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.551001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:30.551006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.551010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:30.551014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:30.551023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:30.551028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T09:16:30.551031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:30.551034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:30.551115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.551124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.551128Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.551132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:30.551135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.551207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.551215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.551218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.551221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:30.551224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.551232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T09:16:30.551938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:30.551981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:16:30.552028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:30.552048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T09:16:30.552116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:30.552136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.552141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 100 2024-11-21T09:16:30.552224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.552252Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 55us result status StatusSuccess 2024-11-21T09:16:30.552349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.552421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.552433Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 14us result status StatusSuccess 2024-11-21T09:16:30.552472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.324417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.324441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.324447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.324453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.324469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.324473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.324483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.324558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.334618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.334638Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.337303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.338073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.338109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.339436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.339675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.339763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.339835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.341218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.341553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.341567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.341611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.341620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.341627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.341644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.343150Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.359758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.359839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.359915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.359988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.359996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.360647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.360675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.360719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.360729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.360733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.360738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.361110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.361121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.361126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.361443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.361453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.361459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.361465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.361998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.362320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.362362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.362510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.362527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.362535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.362575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.362579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.362601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.362610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.362928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.362933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.362970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.362976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.363053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.363059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.363068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.363070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.363074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.363078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.363081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.363084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.363092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.363096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.363099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.363302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.363311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.363315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.363318Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.363321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.363331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 04, tablet: 72075186233409552, partId: 0 2024-11-21T09:16:30.490148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409552 Status: OK 2024-11-21T09:16:30.490156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T09:16:30.490168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409552 Status: OK at tablet72057594046678944 2024-11-21T09:16:30.490392Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.492671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2024-11-21T09:16:30.492708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409550 Status: OK 2024-11-21T09:16:30.492717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T09:16:30.492725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409550 Status: OK at tablet72057594046678944 2024-11-21T09:16:30.493111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.493130Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 51us result status StatusSuccess 2024-11-21T09:16:30.493214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/PQGroup_2" PathDescription { Self { Name: "PQGroup_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: false CreateTxId: 104 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 } BalancerTabletID: 72075186233409552 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 5 ShardsInside: 7 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 50 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.494281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.495748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2024-11-21T09:16:30.495775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409551 Status: OK 2024-11-21T09:16:30.495783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T09:16:30.495791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409551 Status: OK at tablet72057594046678944 2024-11-21T09:16:30.495799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2024-11-21T09:16:30.498153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.498202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.498210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.498220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T09:16:30.498251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.499695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T09:16:30.499730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000003 2024-11-21T09:16:30.499834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.499856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.499865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 5000003, at 
tablet: 72057594046678944 2024-11-21T09:16:30.499920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T09:16:30.499957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.499969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T09:16:30.502329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.502339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.502379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:30.502422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.502427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T09:16:30.502433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T09:16:30.502550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.502559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T09:16:30.502572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T09:16:30.502577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:30.502586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T09:16:30.502592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:30.502598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:16:30.502603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:16:30.502636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T09:16:30.502642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T09:16:30.502647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:30.502651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:16:30.502815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.502827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 
2024-11-21T09:16:30.502832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T09:16:30.502837Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:30.502842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:30.503042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.503053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.503057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T09:16:30.503062Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:16:30.503066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:16:30.503076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T09:16:30.504002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:16:30.504527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.553603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.553627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.553632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.553638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.553654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.553658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.553668Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.553736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.564804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.564824Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.567122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.567672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.567695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.568981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.569167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.569231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.569303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.570115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.570377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.570385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.570415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.570421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.570425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.570435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.571449Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.583780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.583859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.583922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.584003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.584013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.584610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 
1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.584637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.584680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.584690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.584695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.584701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.585145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.585155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.585159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.585490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.585498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.585503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.585507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.585933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.586338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.586398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.586602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.586629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.586638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.586685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.586690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T09:16:30.586714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.586724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.587232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.587244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.587293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.587310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.587406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.587426Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.587439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.587444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.587450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.587456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.587461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.587465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.587479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.587485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.587489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.587804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.587818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.587837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.587842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.587846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.587863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
emeshard# 72057594046678944 2024-11-21T09:16:30.654934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2024-11-21T09:16:30.654945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2024-11-21T09:16:30.654950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T09:16:30.654954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2024-11-21T09:16:30.654960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T09:16:30.655639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.655982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.656053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.656086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.656484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:30.656518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T09:16:30.656594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.656625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.656725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 
128 -> 240 2024-11-21T09:16:30.656734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T09:16:30.656766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.656776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T09:16:30.656785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:30.657322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.657332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.657380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.657396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.657401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:30.657406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:30.657480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.657488Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:30.657501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:30.657506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.657512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:30.657517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:30.657523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:30.657527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:30.657564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T09:16:30.657574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2024-11-21T09:16:30.657578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:30.657581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:30.657716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T09:16:30.657728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:30.657733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:30.657738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:30.657742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.657839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:30.657849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:30.657853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:30.657858Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:30.657861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T09:16:30.657871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T09:16:30.657876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:559:2469] 2024-11-21T09:16:30.658602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:30.658626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:30.658639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.658644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:560:2470] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:30.658759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.658795Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 45us result status StatusSuccess 2024-11-21T09:16:30.658916Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.613146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.613174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.613180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.613186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.613200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.613204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.613214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.613291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.624869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.624890Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.627607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.628316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.628351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.629880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.630088Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.630180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.630253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.631223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.631555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.631560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.631574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.632935Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.651206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.651301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.651379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.651460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.651471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.652384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.652415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.652465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.652476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.652481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.652486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T09:16:30.652980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.652994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.652999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.653409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.653422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.653432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.653439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.654088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.654614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.654669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.654867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.654902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.654913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.654973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.654981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.655011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.655024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.655568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.655578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.655620Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.655626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.655717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.655725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.655737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.655742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.655748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.655753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.655758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.655762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.655775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.655781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.655786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.656111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.656130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.656135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.656140Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.656145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.656162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
calPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.667200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:30.667204Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:30.667208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.667510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.667524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:30.667528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:30.667533Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:30.667537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T09:16:30.667548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2024-11-21T09:16:30.667553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:273:2265] 2024-11-21T09:16:30.667945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.667956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.667960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.667964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.667968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.667971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.668193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.668528Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2024-11-21T09:16:30.668595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T09:16:30.668679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T09:16:30.668762Z node 1 :HIVE 
INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:16:30.668792Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T09:16:30.668812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.668840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:16:30.668878Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2024-11-21T09:16:30.668906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:30.668926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:30.668954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T09:16:30.668970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:30.668994Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T09:16:30.669011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:30.669045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.669050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:274:2266] 2024-11-21T09:16:30.669061Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T09:16:30.669102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:30.669126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:30.669164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:16:30.669184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.669242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:30.669247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2024-11-21T09:16:30.669276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:30.669358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:30.669366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:30.669376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.669754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:16:30.669776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:30.670413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:30.670437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T09:16:30.670448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:30.670462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:16:30.670489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:30.670515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T09:16:30.670622Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.670662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 53us result status StatusPathDoesNotExist 2024-11-21T09:16:30.670724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:30.670791Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.670814Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 24us result status 
StatusSuccess 2024-11-21T09:16:30.670887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.597557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.597577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.597581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.597585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.597598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.597600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.597608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.597666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.605495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.605517Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.608357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.609157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.609188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.610290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.610443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.610514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.610563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.611560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.611844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.611853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.611892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.611900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.611906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.611918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.613004Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.629760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.629852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.629929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.630002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.630010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.630658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.630684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.630722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.630732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.630736Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.630740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.631084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.631418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.631432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.631438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.632029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.632452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.632501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.632702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.632728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.632738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.632791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.632797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.632827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.632839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.633274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.633284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.633332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.633341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.633451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.633465Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.633481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.633486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.633493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.633499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.633504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.633508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.633524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.633530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.633535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.633899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.633921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.633926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.633931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.633936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.633952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:30.640797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:30.640827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:30.640892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.640908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.640922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.640994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:30.641002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T09:16:30.641041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.641048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.641060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:16:30.641500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.641508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.641544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.641558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.641563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:30.641568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T09:16:30.641656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.641664Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T09:16:30.641677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2024-11-21T09:16:30.641680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.641686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:30.641691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:30.641696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:30.641700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:30.641710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:30.641716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T09:16:30.641720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:30.641724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:16:30.641803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.641811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.641816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.641821Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:30.641825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.641919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.641930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:30.641933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:30.641936Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:16:30.641939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:30.641946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T09:16:30.642588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:30.642627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T09:16:30.642673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T09:16:30.642689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T09:16:30.642709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:30.642711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:30.642771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T09:16:30.642786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.642789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:308:2300] 2024-11-21T09:16:30.642802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:30.642812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.642814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:30.642859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.642884Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 32us result status StatusSuccess 2024-11-21T09:16:30.642967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T09:16:30.643025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.643039Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 15us result status StatusPathDoesNotExist 2024-11-21T09:16:30.643055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.451193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.451220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.451225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.451229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.451243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.451247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.451257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.451327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.462160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.462182Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.464983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.465767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.465802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.467473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.467720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.467808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.467889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.469475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.469735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.469747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.469782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.469790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.469796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.469812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.471084Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.487984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.488061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.488128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.488199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.488226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.488889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.488913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.488953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.488963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.488968Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.488972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.489428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.489440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.489445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.489810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.489821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.489826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.489832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.490410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.490874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.490925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.491099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.491124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.491134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.491187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.491193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.491219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.491230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.491713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.491723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.491762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.491767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.491845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.491852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.491863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.491867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.491873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.491879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.491883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.491886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.491898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.491904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.491908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.492171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.492187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.492191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.492195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.492199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.492233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
LAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.747672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.747703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:30.747728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.747734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T09:16:30.747739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T09:16:30.747801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.747813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2024-11-21T09:16:30.747824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T09:16:30.747828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:30.747834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T09:16:30.747838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:30.747843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:16:30.747847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:16:30.747883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:30.747890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T09:16:30.747895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:16:30.747899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:30.748023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.748035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.748040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T09:16:30.748045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:30.748049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T09:16:30.748156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.748168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:16:30.748172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T09:16:30.748176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:30.748180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:30.748190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T09:16:30.748643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.748656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:16:30.748965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:16:30.749017Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:16:30.749143Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:16:30.749402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.749470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2024-11-21T09:16:30.749753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:30.749802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2024-11-21T09:16:30.750015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:30.750023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:30.750045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:30.750131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2024-11-21T09:16:30.750136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:30.750146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.750223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:16:30.750623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:16:30.750638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:16:30.750669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:16:30.750674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:16:30.750988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:16:30.751010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T09:16:30.751076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T09:16:30.751083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T09:16:30.751155Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T09:16:30.751173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:16:30.751178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2089:3694] TestWaitNotification: OK eventTxId 104 2024-11-21T09:16:30.752333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.752373Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 55us result status StatusPathDoesNotExist 2024-11-21T09:16:30.752425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T09:16:30.752508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:30.752520Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 13us result status StatusPathDoesNotExist 2024-11-21T09:16:30.752535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] |94.1%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryService::ExecuteQueryPg >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryServiceScripts::ExecuteScriptPg >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex >> KqpQueryService::DmlNoTx >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> KqpQueryService::StreamExecuteQueryPure >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpQueryService::ExecuteQueryPure >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:31.866899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:31.866924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:31.866928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:31.866932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:31.866945Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:31.866948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:31.866955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:31.867017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:31.874685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:31.874706Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:31.877357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:31.877960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:31.877990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:31.879498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:31.879689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:31.879781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:31.879860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:31.880920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:31.881209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:31.881217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:31.881250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:31.881257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:31.881262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:31.881274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.882468Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:31.897153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:31.897251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.897331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:31.897423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2024-11-21T09:16:31.897432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.898274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:31.898297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:31.898340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.898348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:31.898352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:31.898355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:31.898723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.898730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:31.898733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:31.899008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.899015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.899019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:31.899024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:31.899469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:31.899845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:31.899891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:31.900070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:31.900088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:31.900096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:31.900137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:31.900143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:31.900169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:31.900177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:31.900674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:31.900686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:31.900735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:31.900741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:31.900828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:31.900836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:31.900849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:31.900853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:31.900860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:31.900865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:31.900870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:31.900874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:31.900886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:31.900893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:31.900897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:31.901159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:31.901170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:31.901173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:31.901177Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
version: 3 2024-11-21T09:16:31.901180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:31.901190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ode 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:16:32.053378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T09:16:32.053647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:32.053663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:32.053891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:32.053909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:32.053950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:32.053955Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:16:32.053964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:16:32.053967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:32.053971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:16:32.053982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:488:2439] message: TxId: 103 2024-11-21T09:16:32.053989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:32.053995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:16:32.053999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:16:32.054017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:16:32.054301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:32.054308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:489:2440] TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:32.054392Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054423Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusSuccess 2024-11-21T09:16:32.054524Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054607Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054625Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 19us result status StatusSuccess 2024-11-21T09:16:32.054683Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054721Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054729Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 9us result status StatusSuccess 2024-11-21T09:16:32.054759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:32.054812Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 17us result status StatusSuccess 2024-11-21T09:16:32.054858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryService::TableSink_OlapInsert >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpService::CloseSessionsWithLoad >> KqpQueryService::SessionFromPoolError >> KqpService::CloseSessionsWithLoad [FAIL] >> KqpService::PatternCache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.263003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.263022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.263026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.263030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.263040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.263043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.263049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.263103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T09:16:30.271576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.271596Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.273991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.274675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.274702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.275942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.276095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.276154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.276236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.277260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.277544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.277556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.277592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.277599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.277605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.277618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.278850Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.293466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.293541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.293595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.293651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.293656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.294251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.294269Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.294299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.294308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.294312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.294317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.294694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.294702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.294705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.295002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.295012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.295018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.295024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.295538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.295915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.295960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.296126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.296150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.296156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.296219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.296228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.296256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T09:16:30.296268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.296742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.296750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.296787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.296792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.296868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.296874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.296884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.296888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.296894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.296899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.296904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.296908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.296918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.296924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.296928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.297226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.297238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.297242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.297245Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.297248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.297256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
poseLatency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 163 } } 2024-11-21T09:16:32.490945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:32.490951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:32.490963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:32.490967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T09:16:32.491520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.491556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T09:16:32.491564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:32.491569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T09:16:32.491583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T09:16:32.491614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T09:16:32.491638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T09:16:32.491648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:32.492073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:32.492554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T09:16:32.492577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T09:16:32.492587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T09:16:32.492648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T09:16:32.492668Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T09:16:32.492678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T09:16:32.492847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:32.492857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:32.492861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:32.492865Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2024-11-21T09:16:32.492871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:16:32.493068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:32.493079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:32.493083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:32.493089Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:32.493093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T09:16:32.493104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T09:16:32.493863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:32.493876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:16:32.493937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:32.493960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#104:0 progress is 1/1 2024-11-21T09:16:32.493963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:32.493967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T09:16:32.493976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T09:16:32.493979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:32.493982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:16:32.493984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:16:32.493997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T09:16:32.494071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:32.494076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:32.494117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:32.494459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:32.494662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:32.494668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T09:16:32.494701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:16:32.494704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:821:2742] 2024-11-21T09:16:32.494784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T09:16:32.494960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T09:16:32.494981Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 27us result status StatusSuccess 2024-11-21T09:16:32.495037Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |94.1%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpUpsert >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::StreamExecuteQuery >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::SessionFromPoolSuccess >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> KqpQueryService::DdlColumnTable >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpInsert >> KqpQueryService::DmlNoTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:29.904917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:29.904949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.904956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2024-11-21T09:16:29.904962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:29.904979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:29.904983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:29.904995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.905095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:29.917128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:29.917153Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:29.920069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:29.920903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:29.920939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:29.922534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:29.922720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:29.922811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.922885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:29.924167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.924524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.924538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.924587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:29.924596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.924604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:29.924622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.926166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:29.943696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.943791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.943878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:29.943960Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:29.943969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.944873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.944903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:29.944955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.944964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:29.944969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:29.944974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:29.945458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.945471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:29.945477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:29.945837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.945849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.945855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.945863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.946489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.946915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:29.946974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:29.947171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.947195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.947206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.947260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:29.947267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.947298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.947310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:29.947807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.947816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.947863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.947868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:29.947960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.947967Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:29.947980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:29.947984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.947989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:29.947994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.947999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:29.948003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:29.948016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.948021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:29.948025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:29.948344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.948363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.948368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:29.948373Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:29.948378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.948394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... Status: COMPLETE TxId: 104 Step: 10100 OrderId: 104 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 148 } } 2024-11-21T09:16:33.177733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:33.177741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:33.177755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:33.177760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T09:16:33.177855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.177861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T09:16:33.177868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:33.177872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T09:16:33.177883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T09:16:33.177909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T09:16:33.177929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T09:16:33.177938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:33.178544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:33.178880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T09:16:33.178912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T09:16:33.178919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T09:16:33.178974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T09:16:33.178992Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.178996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T09:16:33.179000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T09:16:33.179144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:33.179153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:33.179157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:33.179160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2024-11-21T09:16:33.179164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:16:33.179297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:33.179304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:33.179307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:33.179309Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:33.179313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T09:16:33.179322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T09:16:33.179829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:33.179839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:16:33.179937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:33.179965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T09:16:33.179968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:33.179971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T09:16:33.179980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T09:16:33.179984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:33.179987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:16:33.179990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:16:33.180001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T09:16:33.180166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:33.180171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:33.180440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:33.180505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:33.180754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:33.180760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T09:16:33.180796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:16:33.180800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:751:2671] 2024-11-21T09:16:33.180891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T09:16:33.181013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T09:16:33.181049Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 42us result status StatusSuccess 2024-11-21T09:16:33.181107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: 
"MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::Tcl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.227672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.227704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.227710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.227716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.227732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.227736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.227748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.227876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.236622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.236645Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.239072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.239624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:30.239666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.241359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.241599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.241707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.241828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.242785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.243129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.243140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.243181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.243189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.243195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.243216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.244515Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.259621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.259716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.259803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.259880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.259889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.260864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.260900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.260953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.260963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.260968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.260973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.261476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.261492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.261497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.261856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.261866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.261870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.261876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.262424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.262843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.262896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.263094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.263119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.263130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.263196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.263204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T09:16:30.263237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.263250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.263723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.263732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.263780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.263786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.263896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.263904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.263918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.263922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.263929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.263935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.263940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.263944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.263957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.263964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.263969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.264305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.264321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.264326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.264331Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.264336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.264353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
poseLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 212 } } 2024-11-21T09:16:33.486923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:16:33.486932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T09:16:33.486945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:16:33.486950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T09:16:33.487078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:33.487082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T09:16:33.487094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T09:16:33.487124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T09:16:33.487145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:33.487156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:33.487801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:33.487950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:33.487976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:33.487981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T09:16:33.487985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T09:16:33.488092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.488102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:33.488117Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.488122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:16:33.488126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T09:16:33.488365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:33.488379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:33.488384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:16:33.488389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T09:16:33.488394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:33.488687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:33.488699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:33.488703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:16:33.488707Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:33.488712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:33.488724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T09:16:33.489139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:33.489150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:33.489235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:33.489271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#103:0 progress is 1/1 2024-11-21T09:16:33.489275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:33.489280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:16:33.489292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T09:16:33.489297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:33.489302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:16:33.489308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:16:33.489324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:33.489436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:33.489442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:33.489558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:33.489757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:33.489994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:33.490003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T09:16:33.490022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:33.490027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:662:2597] 2024-11-21T09:16:33.490162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:33.490331Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:33.490362Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 38us result status StatusSuccess 2024-11-21T09:16:33.490445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::ExecuteQueryScalar >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::Ddl_Dml >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::Explain >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex+withOltpSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T09:16:34.058840Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.058855Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.058860Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.059026Z 
:DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.061707Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:34.061732Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062267Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062274Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062278Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.062384Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.062457Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:34.062468Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062693Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062696Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.062698Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.063250Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:16:34.063264Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063267Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063423Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T09:16:34.063652Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063655Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063657Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.063725Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:16:34.063735Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063737Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063745Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T09:16:34.063959Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063964Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:16:34.063967Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.064026Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.064170Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.065757Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:16:34.065855Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.066764Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T09:16:34.067401Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T09:16:34.067479Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.067489Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:16:34.067493Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:16:34.067497Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T09:16:34.067502Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T09:16:34.067505Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T09:16:34.067509Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T09:16:34.067513Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T09:16:34.067525Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T09:16:34.067528Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T09:16:34.067532Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T09:16:34.067535Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T09:16:34.067538Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T09:16:34.067541Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T09:16:34.067545Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T09:16:34.067548Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T09:16:34.067580Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T09:16:34.067583Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T09:16:34.067587Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T09:16:34.067590Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T09:16:34.067593Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T09:16:34.067596Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T09:16:34.067600Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T09:16:34.067603Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T09:16:34.067606Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T09:16:34.067609Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T09:16:34.067612Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T09:16:34.067615Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T09:16:34.067618Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T09:16:34.067622Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T09:16:34.067625Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T09:16:34.067628Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T09:16:34.067656Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T09:16:34.067659Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T09:16:34.067662Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T09:16:34.067665Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T09:16:34.067670Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T09:16:34.067673Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T09:16:34.067676Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T09:16:34.067679Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T09:16:34.067682Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T09:16:34.067685Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T09:16:34.067688Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T09:16:34.067691Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T09:16:34.067694Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T09:16:34.067700Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T09:16:34.067702Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T09:16:34.067706Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T09:16:34.067709Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T09:16:34.067712Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T09:16:34.067724Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:16:34.067820Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T09:16:34.067853Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T09:16:34.067859Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T09:16:34.067862Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T09:16:34.067866Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T09:16:34.067869Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T09:16:34.067873Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T09:16:34.067876Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T09:16:34.067879Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T09:16:34.067883Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T09:16:34.067886Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T09:16:34.067889Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T09:16:34.067893Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T09:16:34.067896Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T09:16:34.067899Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T09:16:34.067902Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T09:16:34.067905Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T09:16:34.067910Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T09:16:34.067913Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T09:16:34.067916Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T09:16:34.067920Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T09:16:34.067923Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T09:16:34.067926Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T09:16:34.067929Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T09:16:34.067932Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T09:16:34.067935Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T09:16:34.067938Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T09:16:34.067941Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T09:16:34.067944Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T09:16:34.067947Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T09:16:34.067951Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T09:16:34.067954Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T09:16:34.067957Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T09:16:34.067964Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T09:16:34.067969Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T09:16:34.067972Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T09:16:34.067975Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T09:16:34.067978Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T09:16:34.067981Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T09:16:34.067984Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T09:16:34.067988Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T09:16:34.067991Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T09:16:34.067994Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T09:16:34.067998Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T09:16:34.068001Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T09:16:34.068004Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T09:16:34.068007Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T09:16:34.068010Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T09:16:34.068014Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T09:16:34.068017Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T09:16:34.068021Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T09:16:34.068025Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:16:34.068057Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:16:34.068469Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.068473Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.068477Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.068547Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:16:34.068650Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.068700Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.068799Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.169128Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.169212Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:16:34.169238Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.169246Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:16:34.169278Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:16:34.372295Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T09:16:34.472589Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:16:34.472684Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:16:34.472751Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:16:34.473194Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.473200Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.473204Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.473296Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.473423Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.473476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.473603Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.573959Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.574035Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:16:34.574058Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.574063Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:16:34.574109Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T09:16:34.574145Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:16:34.574208Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:16:34.574228Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T09:16:34.574259Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpExtractPredicateLookup::PointJoin+EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::PointJoin-EnableKqpDataQueryStreamLookup >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryService::ExecuteQueryScalar [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> KqpQueryService::Explain [GOOD] >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] Test command err: Trying to start YDB, gRPC: 13263, MsgBus: 32107 2024-11-21T09:16:32.647260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658996873136518:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.647456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abd/r3tmp/tmpJUpCuV/pdisk_1.dat 2024-11-21T09:16:32.723277Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13263, node 1 2024-11-21T09:16:32.748384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.748419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.749323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.781684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32107 TClient is connected to server localhost:32107 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.908761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.916551Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.929340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.993875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.007038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.019114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.039945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001168105355:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.039976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.107087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.117327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.137710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.151485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.159811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001168105847:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.159817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001168105853:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.159834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.160450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.164284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659001168105855:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17066, MsgBus: 5224 2024-11-21T09:16:33.513531Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658997885414085:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abd/r3tmp/tmpmOueX9/pdisk_1.dat 2024-11-21T09:16:33.516104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:33.524441Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17066, node 2 2024-11-21T09:16:33.534893Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.534909Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.534912Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.534943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5224 TClient is connected to server localhost:5224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.612920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.612945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.613221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.614017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.622896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.634804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.658482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.671329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.805612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658997885415466:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.805646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.810556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.816775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.822780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.830100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.837380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.843773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.853379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658997885415967:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.853400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.853442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658997885415972:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.854051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.857545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658997885415974:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 18655, MsgBus: 26106 2024-11-21T09:16:34.221108Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659003866146640:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.221282Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002abd/r3tmp/tmpVFFjxV/pdisk_1.dat 2024-11-21T09:16:34.231047Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18655, node 3 2024-11-21T09:16:34.241800Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:34.241814Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:34.241816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:34.241859Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26106 TClient is connected to server localhost:26106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:34.321630Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.321664Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.322693Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.323914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.332805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.342298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:34.363162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.373542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.522529Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003866148174:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.522575Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.526935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.532770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.544626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.558588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.565568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.580134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.594917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003866148686:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.594941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003866148691:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.594946Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.595774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.599539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659003866148693:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 21155, MsgBus: 25633 2024-11-21T09:16:32.629707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658997308251570:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.629940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002bd3/r3tmp/tmpMCAepm/pdisk_1.dat 2024-11-21T09:16:32.692552Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21155, node 1 2024-11-21T09:16:32.774168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.774190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.775196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.782540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.782552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.782554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.782599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25633 TClient is connected to server localhost:25633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.900994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:32.917144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:32.981702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.001595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.012082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.032989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001603220401:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.033012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.106993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.124050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.137417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.152587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001603220895:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001603220900:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.153316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659001603220902:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 7614, MsgBus: 4909 2024-11-21T09:16:33.508080Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659001287808155:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.508153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002bd3/r3tmp/tmpGgN1nV/pdisk_1.dat 2024-11-21T09:16:33.517825Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7614, node 2 2024-11-21T09:16:33.529004Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.529027Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.529030Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.529068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4909 TClient is connected to server localhost:4909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:33.608622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.608671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.609779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.610906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.808159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659001287808753:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.808180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.810585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.818855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659001287808854:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.818877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.818886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659001287808859:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.819486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.822525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659001287808861:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 14474, MsgBus: 23603 2024-11-21T09:16:34.178823Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659003524089307:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.179024Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002bd3/r3tmp/tmpeEKmrv/pdisk_1.dat 2024-11-21T09:16:34.189881Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14474, node 3 2024-11-21T09:16:34.201611Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:34.201624Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:34.201626Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:34.201668Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23603 TClient is connected to server localhost:23603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:34.279290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.279322Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.280466Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.281539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.282267Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:34.290123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.299053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:34.320051Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.332905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.504866Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003524090855:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.504902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.511178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.518186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.530763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.544730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.551032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.558249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.566908Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003524091356:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.566935Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003524091361:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.566937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.567556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.571259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659003524091363:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:37.845338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:37.845374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:37.845380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:37.845385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:37.845392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:37.845396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:37.845405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:37.845530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:37.864652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:37.864682Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:37.869090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:37.869240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:37.869284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:37.897568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:37.897746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:37.897886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:37.899676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T09:15:37.905506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:37.905931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:37.905950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:37.905966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:37.905977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:37.905984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:37.906040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:37.914075Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:37.935800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:37.935899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.935973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:37.936026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:37.936035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.937078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:37.937125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:37.937208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.937226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:37.937231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:37.937236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:37.938029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.938051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:37.938059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:37.938755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.938772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.938778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:37.938787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:37.939477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:37.943713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:37.943829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:37.944201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:37.944312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:37.944328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:37.944453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:37.944468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:37.944517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:37.944534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:37.945532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:37.945557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:37.945635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T09:15:37.945645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:37.945785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:37.945798Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:37.945825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:37.945833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:37.945846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:37.945857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:37.945866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:37.945873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:37.945903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:37.945910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:37.945914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 16 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 
50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:34.935719Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:16:34.935782Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 75us result status StatusSuccess 2024-11-21T09:16:34.935959Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:34.946351Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:16:34.946385Z node 46 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:16:34.946400Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T09:16:34.946416Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T09:16:34.946438Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180594932740 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732180594932740 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:16:34.946467Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1732180594932740 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:16:34.947428Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T09:16:34.947635Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T09:16:34.947825Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T09:16:34.947842Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 64239, MsgBus: 24085 2024-11-21T09:16:32.592326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994677088234:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.592368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ce8/r3tmp/tmp444NG3/pdisk_1.dat 2024-11-21T09:16:32.684111Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.694455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.694480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.701035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64239, node 1 2024-11-21T09:16:32.781702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24085 TClient is connected to server localhost:24085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:32.910968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:32.912699Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.917658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.980159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:32.991314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.004839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.026299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998972056843:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.026332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.108173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.123036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.145920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.159825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998972057343:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.159854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.159866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998972057348:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.160437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2024-11-21T09:16:33.164298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658998972057350:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 61586, MsgBus: 2987 2024-11-21T09:16:33.693083Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659000502019416:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.693107Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ce8/r3tmp/tmpnBNmvt/pdisk_1.dat 2024-11-21T09:16:33.710975Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61586, node 2 2024-11-21T09:16:33.732892Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.732909Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.732911Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.732952Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2987 TClient is connected to server localhost:2987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.793191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.793235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.794368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.796096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.800648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.810681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.827306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.838800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.976454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659000502020948:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.976477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.980604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.986528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.998229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.005211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.019245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.026574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.034218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004796988758:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.034248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004796988763:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.034249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.034855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2024-11-21T09:16:34.039583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659004796988765:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12598, MsgBus: 64254 2024-11-21T09:16:34.397943Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659003867439748:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.398216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ce8/r3tmp/tmparUQI6/pdisk_1.dat 2024-11-21T09:16:34.408998Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12598, node 3 2024-11-21T09:16:34.417042Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:34.417056Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:34.417058Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:34.417109Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64254 TClient is connected to server localhost:64254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:34.498216Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.498254Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.499402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.500595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.504165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.518034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:34.535174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.545302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.687355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003867441284:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.687383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.693266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.699690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.712577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.726792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.740340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.747266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.755813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003867441795:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.755844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.755863Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003867441800:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.756454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2024-11-21T09:16:34.760311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659003867441802:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryService::Ddl_Dml [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 23406, MsgBus: 22645 2024-11-21T09:16:32.648259Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658993542960144:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.648279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002868/r3tmp/tmptdAeEP/pdisk_1.dat 2024-11-21T09:16:32.746801Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.751839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.751858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.753063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23406, node 1 2024-11-21T09:16:32.782835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.782859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.782861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.782897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22645 TClient is connected to server localhost:22645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.927980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.935506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:32.998130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.012618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.019479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.039492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658997837928986:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.039521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.107226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.115989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.122943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.152688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658997837929479:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.152703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658997837929484:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.152710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:33.153289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2024-11-21T09:16:33.158160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658997837929486:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11007, MsgBus: 6035 2024-11-21T09:16:33.748728Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659001789950353:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.748784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002868/r3tmp/tmpksrPn7/pdisk_1.dat 2024-11-21T09:16:33.757292Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11007, node 2 2024-11-21T09:16:33.768429Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.768447Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.768449Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.768500Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6035 TClient is connected to server localhost:6035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.849074Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.849116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.850146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.851337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.865747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.874298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:33.893316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.903749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.060197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659006084919189:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.060238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.066235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.071550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.081926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.088926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.143728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.152609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.208743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659006084919700:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.208775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.208790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659006084919705:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T09:16:34.209353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2024-11-21T09:16:34.214357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659006084919707:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:34.377325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 10 Trying to start YDB, gRPC: 22059, MsgBus: 30577 2024-11-21T09:16:34.651459Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659003979112161:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.651744Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002868/r3tmp/tmpQ1T5mA/pdisk_1.dat 2024-11-21T09:16:34.661088Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22059, node 3 2024-11-21T09:16:34.672321Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:34.672336Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:34.672338Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:34.672386Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30577 TClient is connected to server localhost:30577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:34.752188Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.752240Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.753255Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.753951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.761436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:34.771717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.788793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.799104Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.935061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659003979113694:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.935082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.939824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.946406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.957355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.964224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.019477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.027672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.036309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659008274081505:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.036339Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.036350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659008274081510:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.037012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:35.040150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659008274081512:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> KqpQueryService::SeveralCTAS [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 6799, MsgBus: 23879 2024-11-21T09:16:32.592348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658997329822487:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.592373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a92/r3tmp/tmp3cf7zb/pdisk_1.dat 2024-11-21T09:16:32.685919Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.692515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.692542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.693845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6799, node 1 2024-11-21T09:16:32.781692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23879 TClient is connected to server localhost:23879 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.917889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.920848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.924636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.940424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.958004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:32.970174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:32.991624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658997329823812:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.991652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.106583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.122729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.137479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.149157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001624791611:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.149188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.149198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001624791616:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.150169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659001624791618:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6294, MsgBus: 8167 2024-11-21T09:16:33.694702Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658998452700899:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.694974Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a92/r3tmp/tmpxxmF8f/pdisk_1.dat 2024-11-21T09:16:33.714262Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6294, node 2 2024-11-21T09:16:33.722771Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.722791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.722793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.722839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8167 TClient is connected to server localhost:8167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.797215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.797244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.797593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.798299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.800186Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:33.801877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.810862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.829972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.839648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.027646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659002747669755:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.027668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or y ... 4:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.914602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.922057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.929732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.944991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659005712372997:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.945012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.945075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659005712373002:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.945688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.949677Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659005712373004:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:35.146285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.179540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.184226Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzQ0NTM4MjYtOGJiNmI2NTUtMzc0YjUzNTAtZGZkNjFjMzY=, ActorId: [3:7439659010007340669:2468], ActorState: ExecuteState, TraceId: 01jd704mehdwz97gx8z3vw7mqj, Create QueryResponse for error on request, msg: 2024-11-21T09:16:35.196784Z node 3 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2024-11-21T09:16:35.220553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.246218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.281645Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659010007341073:2549], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:35.281718Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTdjNDY3N2MtNTBkZjU0MWYtMjZmZmY2ZTQtMjM2YjUwMzQ=, ActorId: [3:7439659010007340936:2526], ActorState: ExecuteState, TraceId: 01jd704mh8b01dp7n1ys3cwxtn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:35.293223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.308852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.359862Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715697, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479 2024-11-21T09:16:35.359903Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDk4M2M0MDEtZTgzODA4MGEtNTZmNWZlNDctOWNmYjFjOWY=, ActorId: [3:7439659010007341338:2603], ActorState: ExecuteState, TraceId: 01jd704mmwd4tpm1qdghnmb7gf, Create QueryResponse for error on request, msg: 2024-11-21T09:16:35.363954Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479 2024-11-21T09:16:35.363993Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzM1YTY5ZGYtMzg3NTE2NWMtYzJhNGNkZDItMTZlZjA1YWU=, ActorId: [3:7439659010007341362:2610], ActorState: ExecuteState, TraceId: 01jd704mn0ezzaq2e4n17n1372, Create QueryResponse for error on request, msg: 2024-11-21T09:16:35.403287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.420782Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715705, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479 2024-11-21T09:16:35.420829Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGZhN2FjMTUtYjZmY2M2M2ItNWFmOTM2NDQtNWU0OGUxMDU=, ActorId: [3:7439659010007341446:2635], ActorState: ExecuteState, TraceId: 01jd704mnx5j4vkvk0jvn850qr, Create QueryResponse for error on request, msg: 2024-11-21T09:16:35.437729Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659010007341631:2673], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:35.437799Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Zjk1OWI3NDgtNmEwOWQ1NTYtNmQ0N2MzYTMtYzYwNTViYWE=, ActorId: [3:7439659010007341627:2671], ActorState: ExecuteState, TraceId: 01jd704mqb9tk7vfkjshra8w3s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:35.461386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.517145Z node 3 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [3:7439659010007341845:2724], owner: [3:7439659005712372460:2367], statement id: 1 2024-11-21T09:16:35.517221Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGQ2ODFmMzYtYjQyYWNlMjEtNjkzMzNmNDEtNDY4YzRkNjk=, ActorId: [3:7439659010007341843:2723], ActorState: ExecuteState, TraceId: 01jd704msv32ap86b2m2a39by0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:35.538849Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659010007341885:2741], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:35.538918Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjIxZDJlYmQtOWU2YmZjOWYtNjM2ZWE0Y2EtOGNlNDk0YTc=, ActorId: [3:7439659010007341870:2734], ActorState: ExecuteState, TraceId: 01jd704mt83bzftwzp99cv37rh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:35.547079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.562120Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659010007342001:2764], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:35.562205Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzZmNWJjZGItYmFjYjU4MGItNzhjN2I1YTMtZGUyNzZjNGM=, ActorId: [3:7439659010007341912:2750], ActorState: ExecuteState, TraceId: 01jd704mtpaq83xmsvmh4qq18x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS [GOOD] Test command err: Trying to start YDB, gRPC: 19154, MsgBus: 14245 2024-11-21T09:16:32.656333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994971971841:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.656407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ad8/r3tmp/tmpXxUluw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19154, node 1 2024-11-21T09:16:32.716681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.755430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.755454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.756537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.784078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.784092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.784094Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.784129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14245 TClient is connected to server localhost:14245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.907548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.919875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.983368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.997116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.005302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.028464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999266940530:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.028489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.107320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.115986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.122974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.152484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999266941023:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999266941028:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658999266941030:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22682, MsgBus: 15210 2024-11-21T09:16:33.527673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658999710998432:2088];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.529171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ad8/r3tmp/tmpwxNx2m/pdisk_1.dat 2024-11-21T09:16:33.541065Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22682, node 2 2024-11-21T09:16:33.547381Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.547393Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.547394Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.547422Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15210 TClient is connected to server localhost:15210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.627388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.627425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.628531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.629724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.642028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.656808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.673387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.684328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.857615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658999710999931:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.857638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.863401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.869229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.879401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.934327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.942532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.948892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.959181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658999711000434:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.959227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.959292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658999711000439:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.959965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.962468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658999711000441:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 5392, MsgBus: 12752 2024-11-21T09:16:35.424456Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659008857587001:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.424475Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002ad8/r3tmp/tmpdy1p6R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5392, node 3 2024-11-21T09:16:35.439968Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:35.440716Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.440728Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.440729Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.440764Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12752 TClient is connected to server localhost:12752 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:35.524886Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.524919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.526041Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.527243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.689386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659008857587597:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.689401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659008857587602:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.689407Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.689932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:35.691406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659008857587612:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:16:35.797007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2024-11-21T09:16:35.823220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.853582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] Test command err: Trying to start YDB, gRPC: 21104, MsgBus: 16002 2024-11-21T09:16:32.592612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994908680773:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.592647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d10/r3tmp/tmppUIU93/pdisk_1.dat 2024-11-21T09:16:32.691524Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.703028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.703053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.704258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21104, node 1 2024-11-21T09:16:32.782933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.782943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.782945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.782974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16002 TClient is connected to server localhost:16002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:32.916277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.920473Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.928354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.942939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.956909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.970034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.992542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658994908682090:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.992567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.106431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.123045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.131210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.159826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999203649899:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.159843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999203649904:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.159850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.160395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.164271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658999203649906:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:33.323927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.324340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.324579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.641072Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439658999203650855:2606] TxId: 281474976715691. Ctx: { TraceId: 01jd704jycceqb67jce6sjmfg7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJmMDcxYzYtZWM2YzhlNzctOWFlNzhkMC00MzcwZGU5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:33.643387Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180593689, txId: 281474976715690] shutting down Trying to start YDB, gRPC: 25186, MsgBus: 15505 2024-11-21T09:16:33.886725Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659000289009539:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.888006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d10/r3tmp/tmpbwqAd6/pdisk_1.dat 2024-11-21T09:16:33.897435Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25186, node 2 2024-11-21T09:16:33.908979Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.908994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.908996Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.909052Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15505 TClient is connected to server localhost:15505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:33.986233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.986257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.987379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.989048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.999930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.007742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subop ... peration type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.180616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004583978218:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.180641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.185301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.191988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.200896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.207954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.215211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.222743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.238068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004583978731:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.238100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004583978736:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.238099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.238799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.242599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659004583978738:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:34.406045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.406384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.406712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28706, MsgBus: 10228 2024-11-21T09:16:34.985424Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659002656509355:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.985439Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d10/r3tmp/tmpCxBmvT/pdisk_1.dat 2024-11-21T09:16:34.994755Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28706, node 3 2024-11-21T09:16:35.004787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.004802Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.004804Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.004840Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10228 TClient is connected to server localhost:10228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:35.086089Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.086119Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.087107Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.087819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:35.091705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.105080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.124790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.135300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.288447Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659006951478205:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.288505Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.293439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.299397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.307208Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.313994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.321381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.328390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.336668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659006951478705:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.336699Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.336741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659006951478710:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.337366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:35.341384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659006951478712:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:35.505484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.505800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.506145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 17742, MsgBus: 31747 2024-11-21T09:16:32.641591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658995681851057:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.641718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cac/r3tmp/tmptgJEvC/pdisk_1.dat 2024-11-21T09:16:32.708445Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17742, node 1 2024-11-21T09:16:32.740617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.740648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.741441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.781722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31747 TClient is connected to server localhost:31747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:32.910572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.916683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.923746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.941372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.960408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.972423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.994058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658995681852448:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.994086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.106715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.122789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.152408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999976820255:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658999976820260:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658999976820262:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:33.368184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.368501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.368662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.701975Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439658999976821281:2626] TxId: 281474976715693. Ctx: { TraceId: 01jd704k0c9959k9w82exxn96f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFlNGIxOTMtNGY0NmM4MC03OGZlYjFiOC0zNjZlODk5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:33.704074Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T09:16:33.704156Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439658999976821281:2626] TxId: 281474976715693. Ctx: { TraceId: 01jd704k0c9959k9w82exxn96f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFlNGIxOTMtNGY0NmM4MC03OGZlYjFiOC0zNjZlODk5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:33.704373Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmFlNGIxOTMtNGY0NmM4MC03OGZlYjFiOC0zNjZlODk5, ActorId: [1:7439658999976821260:2626], ActorState: ExecuteState, TraceId: 01jd704k0c9959k9w82exxn96f, Create QueryResponse for error on request, msg: 2024-11-21T09:16:33.704453Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180593745, txId: 281474976715692] shutting down 2024-11-21T09:16:33.704975Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439658999976821286:2631], TxId: 281474976715693, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmFlNGIxOTMtNGY0NmM4MC03OGZlYjFiOC0zNjZlODk5. CustomerSuppliedId : . TraceId : 01jd704k0c9959k9w82exxn96f. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439658999976821281:2626], status: ABORTED, reason: {
: Error: Terminate execution } Trying to start YDB, gRPC: 28038, MsgBus: 16782 2024-11-21T09:16:33.919191Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658999951432902:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.919217Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cac/r3tmp/tmpU4E1mP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28038, node 2 2024-11-21T09:16:33.937103Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:33.937406Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.937417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.937419Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.937458Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16782 TClient is connected to server localhost:16782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathI ... tion type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.298813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.306578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.314423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.329384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004246402248:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.329414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.329492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004246402253:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.330210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.333955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659004246402255:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:34.517655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.518101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.518335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.641118Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659004246402805:2476], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2024-11-21T09:16:34.641668Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGZiOTM0MzItZWQ4ZWY3MjMtOGNjMzEyYjUtNTEwM2JiYzc=, ActorId: [2:7439659004246402800:2475], ActorState: ExecuteState, TraceId: 01jd704ktm6h32y5t1eyfavs7p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:34.764159Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659004246403096:2573], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2024-11-21T09:16:34.764236Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmQ3N2FjZjAtZTZhNjYyNmUtYTY4YWQ1NzItZTZjYjUwNTY=, ActorId: [2:7439659004246403094:2572], ActorState: ExecuteState, TraceId: 01jd704m263c1rza7gw02qye15, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 28923, MsgBus: 27106 2024-11-21T09:16:35.020117Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659010018441774:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.020135Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002cac/r3tmp/tmpC5CDGs/pdisk_1.dat 2024-11-21T09:16:35.034706Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28923, node 3 2024-11-21T09:16:35.040590Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.040605Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.040607Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.040644Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27106 TClient is connected to server localhost:27106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:35.120633Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.120677Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.121739Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.122891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
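The two GENERIC_ERROR compile failures above come from queries containing explicit transaction-control statements: type annotation reports ":3:13: Error: COMMIT not supported inside YDB query, code: 2008" (and the same for ROLLBACK), i.e. the query service rejects the statement at compile time rather than at execution. The exact query text is not printed in the log; the fragment below is only an illustrative sketch of a query shape that would be rejected this way, with table and column names invented for the example:

    -- Hypothetical YQL sketch (table/column names invented).
    -- An explicit COMMIT inside a query-service query fails compilation
    -- with code 2008, matching the KQP_COMPILE_ACTOR errors above.
    SELECT * FROM example_table;
    UPSERT INTO example_table (id, value) VALUES (1u, "x");
    COMMIT;  -- rejected: "COMMIT not supported inside YDB query, code: 2008"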
2024-11-21T09:16:35.138668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.152600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.170718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.183216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.350429Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659010018443321:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.350455Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.355877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.362291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.370216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.377183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.383890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.391114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.399776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659010018443822:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.399801Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.399809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659010018443827:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.400421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:35.404718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659010018443829:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:35.572487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.572842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.573119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T09:16:35.865462Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.865468Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.865471Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.865563Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:16:35.865574Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.865576Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866231Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006483s 2024-11-21T09:16:35.866344Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.866455Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:35.866470Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866691Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866694Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866697Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.866739Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:16:35.866746Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866748Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.866761Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006219s 2024-11-21T09:16:35.866827Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:16:35.866951Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:35.867001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867241Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867245Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867247Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.867388Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:16:35.867395Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867397Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867407Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.156780s 2024-11-21T09:16:35.867454Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.867479Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:35.867486Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867650Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867654Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867656Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.867712Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:16:35.867716Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867717Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867724Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.214942s 2024-11-21T09:16:35.867773Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.867793Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:16:35.867799Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867952Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867954Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.867956Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.867998Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.868052Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:35.869029Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869084Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T09:16:35.869092Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869095Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869121Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.215534s 2024-11-21T09:16:35.869178Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.869187Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:16:35.869486Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869489Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869491Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:35.869564Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:35.869673Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:35.869725Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.869787Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:35.970111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:35.970194Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:16:35.970216Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:35.970222Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:16:35.970256Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:16:36.070446Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:16:36.070542Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:16:36.070917Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:36.070922Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:36.070925Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:36.070977Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:36.071176Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:36.071254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:36.071355Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:36.171578Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:36.171654Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:16:36.171680Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T09:16:36.171686Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:16:36.171724Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T09:16:36.171756Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:16:36.171789Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:16:36.171811Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:16:36.171847Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] |94.1%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 17529, MsgBus: 27082 2024-11-21T09:16:32.592768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994036569344:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.594364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00299b/r3tmp/tmp9foUY1/pdisk_1.dat 2024-11-21T09:16:32.678256Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.692338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.692365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.693482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17529, node 1 2024-11-21T09:16:32.781676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27082 TClient is connected to server localhost:27082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:32.910793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:32.932509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658994036569801:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.932534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.101336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.115352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:33.115377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:33.115407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:33.115426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:33.115454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:33.115474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:33.115476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:33.115494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:33.115499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:33.115515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:33.115521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:33.115533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:33.115538Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:33.115581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:33.115604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:33.115630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:33.115655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:33.115666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:33.115673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:33.115707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439658998331537229:2307];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:16:33.115713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:33.115743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:33.115762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:33.115785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439658998331537228:2306];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:16:33.119179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439658998331537324:2313];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:33.119206Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7439658998331537324:2313];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:33.119242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439658998331537324:2313];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:33.119261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439658998331537227:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:33.119270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439658998331537324:2313];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:33.119282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439658998331537227:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:33.119287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439658998331537324:2313];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descrip ... TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:36.242611Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:36.242621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:36.242639Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:36.242648Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:16:36.242660Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:36.242670Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:36.242693Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:36.242702Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:16:36.242713Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:36.242722Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:16:36.242840Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:16:36.242853Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:16:36.242863Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:16:36.242867Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:16:36.242886Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:16:36.242890Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:16:36.242900Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:16:36.242905Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:16:36.242915Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:16:36.242920Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:16:36.242927Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:16:36.242931Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:36.242964Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:36.242970Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:36.242988Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:36.242993Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:16:36.243005Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:36.243009Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:36.243026Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:36.243029Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:16:36.243040Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:36.243044Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:16:36.253028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.261446Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014758201025:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.261479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.261499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014758201030:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.262242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T09:16:36.265460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659014758201032:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T09:16:36.618882Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;local_tx_no=27;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037909;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:36.618890Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037905;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 3 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:36.618951Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037906;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 1 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715672}],"starts":[{"inc":{"count_not_include":2},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:36.830390Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;local_tx_no=69;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037909;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 20, 40 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:36.830390Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037900;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 10 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:36.830462Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037901;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 30 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715681}],"starts":[{"inc":{"count_not_include":2},"id":281474976715681}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715681}]},"p":{"include":2147483647}}]}; >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] Test command err: Trying to start YDB, gRPC: 
10066, MsgBus: 10826 2024-11-21T09:16:32.652957Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994672985833:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.652995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002af4/r3tmp/tmpgf2fqP/pdisk_1.dat 2024-11-21T09:16:32.718695Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10066, node 1 2024-11-21T09:16:32.783243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.783258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.783260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.783293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:32.796380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.796413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.797423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10826 TClient is connected to server localhost:10826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.927763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.930772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.939397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.003642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:33.015859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.025408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.088079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998967954698:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.088101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.122438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.128278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.137365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.144666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.151089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.159401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.173867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998967955190:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.173892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.173899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998967955195:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.174510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.178474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658998967955197:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:33.375966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.376420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.376686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30563, MsgBus: 18647 2024-11-21T09:16:35.013798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659010323363836:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.013815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002af4/r3tmp/tmp8mSuEF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30563, node 2 2024-11-21T09:16:35.029002Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:35.031183Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.031193Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.031194Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.031221Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18647 TClient is connected to server localhost:18647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:35.114033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.114080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.115273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.116334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:35.126408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.135291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.153797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.163718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024- ... ZjZlM2MtODI5NmVlMTMtNjM2Y2JkNjctMjIzY2JlMmQ=, TxId: 2024-11-21T09:16:35.899820Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: e8cc7b98-8de32044-31ef7d25-49ca0c48, reply NOT_FOUND, issues: {
: Error: No such execution } 2024-11-21T09:16:35.972533Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTUwOWIzN2EtNGY2OGE2YWUtNzJkNmYwMDUtNmRlMzZhZGE=, ActorId: [2:7439659010323367932:3007], ActorState: ExecuteState, TraceId: 01jd704n746dcewjtvzswmqs6k, Create QueryResponse for error on request, msg: 2024-11-21T09:16:35.973075Z node 2 :KQP_PROXY WARN: [TQueryBase] [TForgetScriptExecutionOperationQueryActor] TraceId: e8cc7b98-8de32044-31ef7d25-49ca0c48, State: Delete script results in range (-49998; 2], Finish with ABORTED, Issues: {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/result_sets`, code: 2001 }, SessionId: ydb://session/3?node_id=2&id=MTUwOWIzN2EtNGY2OGE2YWUtNzJkNmYwMDUtNmRlMzZhZGE=, TxId: 01jd704n7ydewb3gvfxpmcjth3 Trying to start YDB, gRPC: 7658, MsgBus: 25209 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002af4/r3tmp/tmpoBL05m/pdisk_1.dat 2024-11-21T09:16:36.106402Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:36.106485Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 7658, node 3 2024-11-21T09:16:36.117209Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:36.117224Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:36.117226Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:36.117265Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25209 TClient is connected to server localhost:25209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:36.196469Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:36.196512Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:36.197673Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:36.199324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.200227Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:36.204317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.212466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:36.231488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.241346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.374975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014477580779:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.375019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.381140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.388326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.399718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.406098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.460816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.469055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.478132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014477581295:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.478161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014477581300:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.478160Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.478723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:36.482567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659014477581302:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:36.647899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.648505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.648806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.992588Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 4c118b2e-4f3f9767-17753579-c45da3ee, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=3&id=ZDlmYTRhMDEtOWIwOTU5MDAtNzUwNmM4YzctYzA0ZDFlN2I=, TxId: 2024-11-21T09:16:36.995780Z node 3 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 4c118b2e-4f3f9767-17753579-c45da3ee, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=3&id=MzU4ZDM3YjQtZmIyNTNlMDEtZDExMWMyN2QtODRlMTE5Yjg=, TxId: 2024-11-21T09:16:36.998550Z node 3 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 4c118b2e-4f3f9767-17753579-c45da3ee, reply NOT_FOUND, issues: {
: Error: No such execution } 2024-11-21T09:16:37.055781Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWU0NjM2ZDUtYTVhMzM1YWYtMzM5NjQwMjItODkyNTc4NmU=, ActorId: [3:7439659014477582303:2632], ActorState: ExecuteState, TraceId: 01jd704p999p678dpbrc94v09w, Create QueryResponse for error on request, msg: 2024-11-21T09:16:37.055832Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjhjYTIwYzktNDAzZjFjOTgtZjVmMmM3Y2EtNjM1OWZlZjY=, ActorId: [3:7439659014477582300:2630], ActorState: ExecuteState, TraceId: 01jd704p993sm2a5jqf4bss6ep, Create QueryResponse for error on request, msg: 2024-11-21T09:16:37.055901Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGNjYjljYjctMWYwODZkMTgtZmE2MTIxYzYtMTQyMjQ5MmQ=, ActorId: [3:7439659014477582409:2668], ActorState: ExecuteState, TraceId: 01jd704p999q3zjjsbr4t4acyr, Create QueryResponse for error on request, msg: 2024-11-21T09:16:37.056377Z node 3 :KQP_PROXY WARN: [TQueryBase] [TForgetScriptExecutionOperationQueryActor] TraceId: 4c118b2e-4f3f9767-17753579-c45da3ee, State: Delete script results in range (-100000; 0], Finish with ABORTED, Issues: {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/result_sets`, code: 2001 }, SessionId: ydb://session/3?node_id=3&id=YWU0NjM2ZDUtYTVhMzM1YWYtMzM5NjQwMjItODkyNTc4NmU=, TxId: 01jd704p9xe0xg5029pzh12dcx 2024-11-21T09:16:37.056399Z node 3 :KQP_PROXY WARN: [TQueryBase] [TForgetScriptExecutionOperationQueryActor] TraceId: 4c118b2e-4f3f9767-17753579-c45da3ee, State: Delete script results in range (-100000; 0], Finish with ABORTED, Issues: {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/result_sets`, code: 2001 }, SessionId: ydb://session/3?node_id=3&id=OGNjYjljYjctMWYwODZkMTgtZmE2MTIxYzYtMTQyMjQ5MmQ=, TxId: 01jd704p9w9jj2wasmct73xwtw 2024-11-21T09:16:37.056404Z node 3 :KQP_PROXY WARN: [TQueryBase] [TForgetScriptExecutionOperationQueryActor] TraceId: 4c118b2e-4f3f9767-17753579-c45da3ee, State: Delete script results in range (-100000; 0], Finish with ABORTED, Issues: {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/result_sets`, code: 2001 }, SessionId: ydb://session/3?node_id=3&id=NjhjYTIwYzktNDAzZjFjOTgtZjVmMmM3Y2EtNjM1OWZlZjY=, TxId: 01jd704p9xe91wpc8zgwfpbvbh ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 11314, MsgBus: 62414 2024-11-21T09:16:32.612701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658996460459132:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.612762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a54/r3tmp/tmpXtKXEq/pdisk_1.dat 2024-11-21T09:16:32.684110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11314, node 1 2024-11-21T09:16:32.711823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.711840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.713229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.781683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62414 TClient is connected to server localhost:62414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.905882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:32.913272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.923988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.939037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:32.950445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:32.961287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.981124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658996460460447:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.981159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.107053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.123099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.137426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.149233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000755428256:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.149265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000755428261:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.149266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.150160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000755428263:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:33.319478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.319713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.319995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.650497Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180593696, txId: 281474976710690] shutting down Trying to start YDB, gRPC: 65528, MsgBus: 63305 2024-11-21T09:16:33.900927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658999431721343:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.901310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a54/r3tmp/tmpTFpE3v/pdisk_1.dat 2024-11-21T09:16:33.912025Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65528, node 2 2024-11-21T09:16:33.922105Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.922119Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.922121Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.922158Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63305 TClient is connected to server localhost:63305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:34.004164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.004192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.004542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.005221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.008800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.020823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.037091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:34.048622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Ope ... 1474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.399826Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659007719669760:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.399847Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659007719669765:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.399853Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.400436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:35.404347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659007719669767:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:35.565488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.565827Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.566061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.697281Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2JiOTg2ZWItMTk0NmQ2ZmUtMzkyMWY1YTYtOTQ5ZjlkY2E=, ActorId: [3:7439659007719670310:2475], ActorState: ExecuteState, TraceId: 01jd704mvc4jv8hw5gr5jmmg0y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21074, MsgBus: 14911 2024-11-21T09:16:36.122589Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659014083877082:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:36.122648Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a54/r3tmp/tmpry6SRI/pdisk_1.dat 2024-11-21T09:16:36.131282Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21074, node 4 2024-11-21T09:16:36.140729Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:36.140744Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:36.140746Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:36.140785Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14911 TClient is connected to server localhost:14911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:36.223195Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:36.223226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:36.224309Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:36.225068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.229147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.286360Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.309739Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.321332Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.417278Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659014083878614:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.417314Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.422180Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.429094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.441670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.455377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.462194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.469002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.477912Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659014083879115:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.477919Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659014083879120:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.477936Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.478543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:36.482629Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439659014083879122:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:36.693320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.693665Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.694083Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.959772Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 82850db1-e6f8e599-90ed2e77-dd5147bc, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=YjNiOTgzZjQtMjZkMjZlN2MtM2RkZTY3Zi00N2Y2ZGM5Yg==, TxId: 2024-11-21T09:16:37.045820Z node 4 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 82850db1-e6f8e599-90ed2e77-dd5147bc, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=YTZjZTI5ODYtYjI2NmFlZmUtZDg1M2EzODUtMmU5YjkyZGE=, TxId: 2024-11-21T09:16:37.064277Z node 4 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 82850db1-e6f8e599-90ed2e77-dd5147bc, reply NOT_FOUND, issues: {
: Error: No such execution } 2024-11-21T09:16:37.068287Z node 4 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 82850db1-e6f8e599-90ed2e77-dd5147bc, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=NDdlYzA1NTMtMmUyMDU5MjMtZWZmNDFmZi00MTE5Yzc0MA==, TxId: 2024-11-21T09:16:37.068343Z node 4 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 82850db1-e6f8e599-90ed2e77-dd5147bc, check lease failed 2024-11-21T09:16:37.099933Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 82850db1-e6f8e599-90ed2e77-dd5147bc, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=4&id=NzFhNGI3NjMtZDQ0MGM3ZmEtZDE1MGMxNDYtYTBjODY5MWI=, TxId: >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.619672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.619702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.619719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.619724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.620586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.620599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.620614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.620706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.632876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.632895Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.635634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.636493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.636552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.638347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.638560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.639783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.639913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.641804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:16:28.644435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.660291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.660380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.660480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.660566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.660575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.661360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.661374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.661379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.661859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.661877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.662308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.662326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.662918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.663566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.664059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.664688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.665517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.665630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.665650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.665661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.666327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.666364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.666450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666456Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:28.666465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.666467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666471Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.666474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.666477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.666480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.666488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.666492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.666494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.666769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.666783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.666786Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.666789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.666802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
Latency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 173 } } 2024-11-21T09:16:37.223540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:37.223546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:37.223561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:16:37.223566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T09:16:37.224039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.224053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T09:16:37.224061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:37.224066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T09:16:37.224077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T09:16:37.224107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T09:16:37.224131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T09:16:37.224142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:37.224573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.224855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:37.225225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T09:16:37.225251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T09:16:37.225262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T09:16:37.225401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T09:16:37.225426Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T09:16:37.225435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T09:16:37.225549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:37.225560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:37.225565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:37.225570Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2024-11-21T09:16:37.225576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:16:37.225742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:37.225752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T09:16:37.225756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T09:16:37.225762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:37.225766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T09:16:37.225776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T09:16:37.226442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T09:16:37.226454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:16:37.226591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T09:16:37.226630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 
2024-11-21T09:16:37.226635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:37.226640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T09:16:37.226653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T09:16:37.226658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:16:37.226662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:16:37.226666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:16:37.226686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T09:16:37.226865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:16:37.226871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:16:37.226923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:37.227150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T09:16:37.227422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:16:37.227431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T09:16:37.227513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:16:37.227518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1429:3342] 2024-11-21T09:16:37.227581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T09:16:37.227987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T09:16:37.228012Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 32us result status StatusSuccess 2024-11-21T09:16:37.228098Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 61979, MsgBus: 17422 2024-11-21T09:16:32.592385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658997208683337:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.592424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b27/r3tmp/tmpLDyljx/pdisk_1.dat 2024-11-21T09:16:32.682470Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.692622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.692653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.693928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61979, node 1 2024-11-21T09:16:32.783686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.783705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.783706Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.783742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17422 TClient is connected to server localhost:17422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.900965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.905714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.908769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.976697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.987775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.997579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.022475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001503651953:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.022502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.100830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.106792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.122789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.130959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.138743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.152920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001503652448:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.152939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.153098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001503652453:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.153658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.157739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659001503652459:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:33.327577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 121 Trying to start YDB, gRPC: 9123, MsgBus: 17111 2024-11-21T09:16:35.726402Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659009057401959:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.726540Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b27/r3tmp/tmpZuvHe1/pdisk_1.dat 2024-11-21T09:16:35.737760Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9123, node 2 2024-11-21T09:16:35.746002Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.746012Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.746013Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.746039Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17111 TClient is connected to server localhost:17111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:35.826714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.826754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.827880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.829007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.832785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:35.841129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.857170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.868382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:35.980374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659009057403491:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.980397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:35.985264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:35.992340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.046904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.056124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.062893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.070350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.078597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659013352371302:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.078624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.078644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659013352371307:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.079168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:36.083063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659013352371309:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:36.242760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.250621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.259751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27351, MsgBus: 23944 2024-11-21T09:16:36.625472Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659014589767684:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:36.625538Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b27/r3tmp/tmpBagFEg/pdisk_1.dat 2024-11-21T09:16:36.637084Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27351, node 3 2024-11-21T09:16:36.646612Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:36.646626Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:36.646628Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:36.646668Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23944 TClient is connected to server localhost:23944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:36.725927Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:36.725965Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:36.727077Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:36.728304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:36.734009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.742891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.760693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.772964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:36.942500Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014589769235:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.942525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.947980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.959070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.966380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.973366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.980018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.987295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:36.995875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014589769739:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.995899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.995906Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659014589769744:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:36.996530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:37.000024Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659014589769746:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2024-11-21T09:16:24.673278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658959464275095:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:24.673535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmpN9UcnK/pdisk_1.dat 2024-11-21T09:16:24.755494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16897, node 1 2024-11-21T09:16:24.774382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:24.774415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:24.775476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:24.811583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:24.811599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:24.811601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:24.811634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:25.077496Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:25.080053Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4512, port: 4512 2024-11-21T09:16:25.080088Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:25.132456Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:25.180393Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:25.180618Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T09:16:25.180636Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.228379Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.274116Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T09:16:25.274920Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****eqQQ (4F12269F) () has now valid token of ldapuser@ldap 2024-11-21T09:16:29.673584Z 
node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439658959464275095:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:29.673633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:30.676578Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****eqQQ (4F12269F) 2024-11-21T09:16:30.676632Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4512, port: 4512 2024-11-21T09:16:30.676652Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:30.728405Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:30.728572Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:4512 return no entries 2024-11-21T09:16:30.728746Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****eqQQ (4F12269F) () has now permanent error message 'Could not login via LDAP' 2024-11-21T09:16:35.378169Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659009830996230:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.378331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmps2YqR9/pdisk_1.dat 2024-11-21T09:16:35.387415Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4343, node 2 2024-11-21T09:16:35.398789Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.398802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.398806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.398856Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:35.451947Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:35.453587Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14747, port: 14747 2024-11-21T09:16:35.453622Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T09:16:35.457423Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:35.478812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.478852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.479934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.504464Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:35.552387Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:35.600666Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****GUdA 
(6C7B5BC6) () has now valid token of ldapuser@ldap 2024-11-21T09:16:35.833756Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659009554780163:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:35.833798Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmpIJSGbX/pdisk_1.dat 2024-11-21T09:16:35.843091Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32454, node 3 2024-11-21T09:16:35.855184Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:35.855197Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:35.855199Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:35.855239Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:35.934426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:35.934462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:35.935591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:35.956255Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:35.957971Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17922, port: 17922 2024-11-21T09:16:35.957996Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T09:16:35.962053Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:36.004453Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:36.048736Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****rTaw (632246FF) () has now valid token of ldapuser@ldap 2024-11-21T09:16:36.305267Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659011853085979:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:36.305546Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmplyXd8w/pdisk_1.dat 2024-11-21T09:16:36.316076Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19043, node 4 2024-11-21T09:16:36.328077Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:36.328091Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:36.328093Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:36.328134Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:36.407635Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:36.407673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2024-11-21T09:16:36.408657Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:36.474739Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:36.476655Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24677, port: 24677 2024-11-21T09:16:36.476704Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T09:16:36.484662Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:36.528409Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T09:16:36.572607Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****u8eQ (F6134101) () has now valid token of ldapuser@ldap 2024-11-21T09:16:36.770251Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659014050439122:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:36.770587Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmp2NZlx8/pdisk_1.dat 2024-11-21T09:16:36.781802Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1819, node 5 2024-11-21T09:16:36.794088Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:36.794100Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:36.794102Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:36.794152Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:36.859756Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:36.861955Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23525, port: 23525 2024-11-21T09:16:36.861981Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T09:16:36.865951Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:36.872994Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:36.873047Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:36.874034Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:36.908450Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T09:16:36.956394Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T09:16:36.956579Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T09:16:36.956609Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 
2024-11-21T09:16:37.000405Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T09:16:37.048400Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T09:16:37.048922Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****FD1Q (50C8CC8D) () has now valid token of ldapuser@ldap 2024-11-21T09:16:37.236087Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439659016708129790:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:37.236327Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032e9/r3tmp/tmpXC3A2K/pdisk_1.dat 2024-11-21T09:16:37.246861Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14326, node 6 2024-11-21T09:16:37.255646Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:37.255657Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:37.255659Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:37.255701Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:37.312182Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:37.313921Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7972, port: 7972 2024-11-21T09:16:37.313946Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T09:16:37.318219Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:37.336415Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:37.336460Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:37.337472Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:37.364440Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T09:16:37.364469Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:7972. 
Bad search filter 2024-11-21T09:16:37.364720Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****vPOQ (499C7ECC) () has now permanent error message 'Could not login via LDAP' >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TGRpcStreamingTest::SimpleEcho >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::ClientNeverWrites |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::ClientDisconnects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:28.625047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:28.625075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.625080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:28.625086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:28.625096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:28.625099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:28.625107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:28.625189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:28.635611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:28.635632Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:28.638605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:28.639391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:28.639419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:28.641305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:28.641540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:28.641633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.641715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:28.642800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T09:16:28.644393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.644456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:28.644466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.644472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:28.644486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.645834Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:28.665232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.665352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:28.665423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:28.665431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:28.666182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:28.666195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:28.666199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:28.666784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:28.666812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:28.667195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.667207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.667211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.667737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:28.668099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:28.668138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:28.668303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.668412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:28.668419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:28.668440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.668448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:28.668883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:28.668919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:28.668989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:28.668996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2024-11-21T09:16:28.669005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:28.669009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.669014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:28.669034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:28.669038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:28.669041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:28.669049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:28.669052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:28.669054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:28.669343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:28.669362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:28.669367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:28.669371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:28.669384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 136 } } 2024-11-21T09:16:37.831236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 762 RawX2: 4294969995 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T09:16:37.831242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2024-11-21T09:16:37.831254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 762 RawX2: 4294969995 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T09:16:37.831259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T09:16:37.831731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.831745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T09:16:37.831751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:37.831756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T09:16:37.831766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T09:16:37.831793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T09:16:37.831814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:37.831827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:37.832167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:37.832587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:16:37.832609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T09:16:37.832620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 4 2024-11-21T09:16:37.832707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:37.832731Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:16:37.832742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T09:16:37.832870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T09:16:37.832883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T09:16:37.832887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T09:16:37.832892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2024-11-21T09:16:37.832897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:37.833090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T09:16:37.833104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T09:16:37.833107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T09:16:37.833115Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:16:37.833119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:16:37.833132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T09:16:37.833625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:16:37.833636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:37.833709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:16:37.833733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#107:0 progress is 1/1 2024-11-21T09:16:37.833737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T09:16:37.833743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T09:16:37.833747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T09:16:37.833752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T09:16:37.833756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T09:16:37.833773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:16:37.833861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:37.833868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:37.833988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T09:16:37.834245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T09:16:37.834475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:37.834485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T09:16:37.834601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2024-11-21T09:16:37.834705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T09:16:37.834711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T09:16:37.834788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T09:16:37.834805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T09:16:37.834810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:992:2920] TestWaitNotification: OK eventTxId 107 2024-11-21T09:16:37.834898Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:37.834929Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 41us result status StatusSuccess 2024-11-21T09:16:37.835016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] |94.1%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapUpdate >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TWebLoginService::AuditLogLogout >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TWebLoginService::AuditLogLoginBadPassword >> TWebLoginService::AuditLogLoginSuccess >> TWebLoginService::AuditLogLdapLoginBadBind ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:29.808732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:29.808764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.808770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:29.808776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:29.808792Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:29.808796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:29.808809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:29.808897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:29.821649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:29.821677Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:29.825213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:29.826061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:29.826107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:29.827822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:29.828028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:29.828135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.828252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:29.829477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.829805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.829817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.829862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:29.829870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.829876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:29.829894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.831404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:29.850333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:29.850439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.850533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:29.850632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2024-11-21T09:16:29.850643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.851615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.851643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:29.851696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.851706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:29.851712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:29.851718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:29.852193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.852219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:29.852225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:29.852718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.852729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.852735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.852743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.853413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:29.853898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:29.853959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:29.854171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:29.854199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:29.854211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.854269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:29.854279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:29.854312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.854325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:29.854806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:29.854817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:29.854867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:29.854874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:29.854975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:29.854984Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:29.854998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:29.855003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.855009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:29.855015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:29.855021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:29.855025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:29.855038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:29.855044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:29.855049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:29.855379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.855396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:29.855402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:29.855407Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
version: 3 2024-11-21T09:16:29.855412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:29.855428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... ShardId: 72075186233409548 CpuTimeUsec: 180 } } 2024-11-21T09:16:38.321641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:16:38.321655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T09:16:38.321674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:16:38.321681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T09:16:38.321774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.321786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T09:16:38.321795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:16:38.321800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T09:16:38.321825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T09:16:38.321859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T09:16:38.321882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:16:38.321893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:38.322532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.322876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.322933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.322940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:38.322993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:38.323023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.323028Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T09:16:38.323033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T09:16:38.323165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.323174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:38.323192Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.323197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:16:38.323203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T09:16:38.323406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:38.323419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:38.323423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:16:38.323429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2024-11-21T09:16:38.323436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:16:38.323646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:38.323657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:38.323661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:16:38.323665Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:16:38.323669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:38.323680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T09:16:38.324153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.324163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.324304Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:38.324346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:16:38.324351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:38.324357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:16:38.324369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T09:16:38.324375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:38.324379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:16:38.324384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:16:38.324404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:38.324485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.324489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:38.324873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:38.324930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:38.325158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.325167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T09:16:38.325239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:38.325248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1341:3268] 2024-11-21T09:16:38.325382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:38.325903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:38.325947Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 56us result status StatusSuccess 2024-11-21T09:16:38.326047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TWebLoginService::AuditLogLdapLoginBadPassword >> TGRpcStreamingTest::ReadFinish >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2024-11-21T09:16:38.248584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659021903570151:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.248608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ba/r3tmp/tmpwtc5Wc/pdisk_1.dat 2024-11-21T09:16:38.327869Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.346714Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:34364 2024-11-21T09:16:38.346807Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] facade attach Name# Session actor# [1:7439659021903570446:2247] peer# ipv6:[::1]:34364 2024-11-21T09:16:38.346825Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] facade write Name# Session data# peer# ipv6:[::1]:34364 2024-11-21T09:16:38.346945Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] facade write Name# Session data# peer# ipv6:[::1]:34364 grpc status# (0) message# 2024-11-21T09:16:38.347108Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] write finished Name# Session ok# true peer# ipv6:[::1]:34364 2024-11-21T09:16:38.347150Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T09:16:38.347228Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:34364 2024-11-21T09:16:38.347406Z 
node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] write finished Name# Session ok# true peer# ipv6:[::1]:34364 2024-11-21T09:16:38.347435Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] stream finished Name# Session ok# true peer# ipv6:[::1]:34364 grpc status# (0) message# 2024-11-21T09:16:38.347438Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T09:16:38.347449Z node 1 :GRPC_SERVER DEBUG: [0x16f0be945400] deregistering request Name# Session peer# ipv6:[::1]:34364 (finish done) 2024-11-21T09:16:38.350720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.350746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.351848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2024-11-21T09:16:38.322287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659022430919115:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.322465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b7/r3tmp/tmpqU7Yfe/pdisk_1.dat 2024-11-21T09:16:38.375321Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.386694Z node 1 :GRPC_SERVER DEBUG: [0x445c7eb45400] stream accepted Name# Session ok# true peer# ipv6:[::1]:55158 2024-11-21T09:16:38.386790Z node 1 :GRPC_SERVER DEBUG: [0x445c7eb45400] facade attach Name# Session actor# [1:7439659022430919607:2247] peer# ipv6:[::1]:55158 2024-11-21T09:16:38.386899Z node 1 :GRPC_SERVER DEBUG: [0x445c7eb45400] stream done notification Name# Session ok# true peer# ipv6:[::1]:55158 2024-11-21T09:16:38.386939Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T09:16:38.387025Z node 1 :GRPC_SERVER DEBUG: [0x445c7eb45400] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2024-11-21T09:16:38.387037Z node 1 :GRPC_SERVER DEBUG: [0x445c7eb45400] deregistering request Name# Session peer# unknown (finish done) 2024-11-21T09:16:38.423899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.423943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.425052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2024-11-21T09:16:38.266667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659022821106356:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.266926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044b4/r3tmp/tmpYuAXuP/pdisk_1.dat 2024-11-21T09:16:38.327923Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.346780Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:57558 2024-11-21T09:16:38.346888Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] facade attach Name# Session actor# [1:7439659022821106853:2245] peer# ipv6:[::1]:57558 2024-11-21T09:16:38.346905Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] facade read Name# Session peer# ipv6:[::1]:57558 2024-11-21T09:16:38.346976Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] read finished Name# Session ok# false data# peer# ipv6:[::1]:57558 2024-11-21T09:16:38.347005Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T09:16:38.347025Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] facade finish Name# Session peer# ipv6:[::1]:57558 grpc status# (9) message# Everything is A-OK 2024-11-21T09:16:38.347166Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:57558 2024-11-21T09:16:38.347183Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] stream finished Name# Session ok# true peer# ipv6:[::1]:57558 grpc status# (9) message# Everything is A-OK 2024-11-21T09:16:38.347187Z node 1 :GRPC_SERVER DEBUG: [0x55df7e945400] deregistering request Name# Session peer# ipv6:[::1]:57558 (finish done) 2024-11-21T09:16:38.347188Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T09:16:38.367933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.367958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.369058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2024-11-21T09:16:38.281210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659019672421268:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.281488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c1/r3tmp/tmptLKEk1/pdisk_1.dat 2024-11-21T09:16:38.340367Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.352865Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:43928 2024-11-21T09:16:38.352947Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] facade attach Name# Session actor# [1:7439659019672421761:2247] peer# ipv6:[::1]:43928 2024-11-21T09:16:38.352969Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] facade read Name# Session peer# ipv6:[::1]:43928 2024-11-21T09:16:38.352989Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] facade write Name# Session data# peer# ipv6:[::1]:43928 2024-11-21T09:16:38.353091Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] facade finish Name# Session peer# ipv6:[::1]:43928 grpc status# (0) message# 2024-11-21T09:16:38.353210Z node 1 :GRPC_SERVER DEBUG: 
[0x1723be945400] write finished Name# Session ok# true peer# ipv6:[::1]:43928 2024-11-21T09:16:38.353232Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T09:16:38.353304Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] read finished Name# Session ok# false data# peer# ipv6:[::1]:43928 2024-11-21T09:16:38.353308Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:43928 2024-11-21T09:16:38.353317Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] stream finished Name# Session ok# true peer# ipv6:[::1]:43928 grpc status# (0) message# 2024-11-21T09:16:38.353321Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T09:16:38.353327Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T09:16:38.353333Z node 1 :GRPC_SERVER DEBUG: [0x1723be945400] deregistering request Name# Session peer# ipv6:[::1]:43928 (finish done) 2024-11-21T09:16:38.383074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.383113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.384185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2024-11-21T09:16:38.248692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659021682423810:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.248711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044c3/r3tmp/tmpTXZDCm/pdisk_1.dat 2024-11-21T09:16:38.331354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.346713Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:52260 2024-11-21T09:16:38.346800Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] facade attach Name# Session actor# [1:7439659021682424105:2247] peer# ipv6:[::1]:52260 2024-11-21T09:16:38.346813Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] facade read Name# Session peer# ipv6:[::1]:52260 2024-11-21T09:16:38.346868Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] read finished Name# Session ok# true data# peer# ipv6:[::1]:52260 2024-11-21T09:16:38.346879Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2024-11-21T09:16:38.346883Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] facade write Name# Session data# peer# ipv6:[::1]:52260 2024-11-21T09:16:38.346977Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] facade finish Name# Session peer# ipv6:[::1]:52260 grpc status# (0) message# 2024-11-21T09:16:38.346989Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] write finished Name# Session ok# true peer# ipv6:[::1]:52260 2024-11-21T09:16:38.347119Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:52260 2024-11-21T09:16:38.347120Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] stream finished Name# Session ok# true peer# ipv6:[::1]:52260 grpc status# (0) message# 2024-11-21T09:16:38.347140Z node 1 :GRPC_SERVER DEBUG: [0x7827e945400] deregistering request Name# Session peer# 
ipv6:[::1]:52260 (finish done) 2024-11-21T09:16:38.350571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.350604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.351651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TGRpcStreamingTest::ReadFinish [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLogout [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.894321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.894343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.894347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.894352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.895433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.895449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.895464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.910202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.910220Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.912820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.912843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.912865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.916098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.916154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.916295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921855Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.938747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.938823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.938872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.938920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.938927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.939714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.939724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.939727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.940105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 
72057594046678944 2024-11-21T09:16:38.940123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.940556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.940578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.940999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.941442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.941944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.942148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.942275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.942312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.942805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.942847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.942911Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.942939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.942943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.942952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.942960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.942970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.942975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.942979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.943277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.943306Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.943310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.943326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.943960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945479Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.946620Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T09:16:38.946655Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.946726Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29355, port: 29355 2024-11-21T09:16:38.953425Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:38.961467Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:29355. 
Invalid credentials 2024-11-21T09:16:38.961909Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T09:16:38.962031Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.962863Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T09:16:38.939665Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:38.961766Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:29355. Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T09:16:38.961766Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:29355. Invalid credentials, login_user=user1@ldap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.900512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.900541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.900547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.900551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.900567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.900571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.900580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.900671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.911606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.911624Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.914603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.914631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.914655Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.917867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.917915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.918030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.918095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921342Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.943573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.943645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.943752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.943760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.944552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T09:16:38.944564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.944568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.944993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.945443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.945463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.946016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.946497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.946547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.946703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.946731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.946744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.946831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.946839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.946865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.946876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.947400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.947409Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.947443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.947448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.947526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.947532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.947550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.947554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.947559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.947564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.947568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.947571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.947582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.947588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.947592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.947883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.947898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.947902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.947906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.947910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.947923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.948588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.948680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:38.948784Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.950226Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:38.950966Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.954272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.954317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:38.954322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.954333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.954344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:38.954348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.954352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:38.954357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T09:16:38.954361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:38.954560Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.955505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.955552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T09:16:38.955618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.955624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.955658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.955663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:38.955771Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:38.955807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.955816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.955820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:38.955824Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:38.955829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.955846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:38.956308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T09:16:38.956404Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.957384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T09:16:38.957395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T09:16:39.026945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjIzNzk5LCJpYXQiOjE3MzIxODA1OTksInN1YiI6InVzZXIxIn0.PhS6dryCVxtJAJp70vPgMEnZxvoAC5kzxVAxAKRHqVeIRskSFMv65UbdRo3sHi6FnHHMo4ZXE20C9aRWZvA6_8r8E3LewXTZZkNlLfjlSRZbhj5ytc2yCp2HkBMoJNy5RZeRy4gsbjx29f-PFIJ8KAQvUj4SW_sMHI0P6ckV1iuvjfnisnDtbT8wHmGZz96BtAbhtCQ-1h5D8bx-_NSRQlzTq0ggz-7d9R-gUr_A1yf-GW65Lwqr7CQHeqIRZ1yDbPp590D98CRoLOvr2S4O-eGuT2d8ThRTxuiWMktC1xnRuftmmD_sl0oQt8ZGV406xltBNCbOKmDKkrJs2xSs5g", at schemeshard: 72057594046678944 2024-11-21T09:16:39.027054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:39.027063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:39.027128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:39.027134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T09:16:39.027612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T09:16:38.944493Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:38.955545Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T09:16:39.027384Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 AUDIT LOG checked line: 2024-11-21T09:16:39.027384Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.894319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.894343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.894348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.894352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.895392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.895407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.895425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.908476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.908498Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.911143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.911166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.911187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.914453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.914510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.915822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921412Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] 
sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.940783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.940854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.940943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.940951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.941699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.941712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.941717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.942135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.942573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.943203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.943699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2024-11-21T09:16:38.943741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.943915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.944014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.944021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.944042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.944054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.944501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.944540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.944599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944606Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.944621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.944625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.944630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.944635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.944641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.944644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.944655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.944660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T09:16:38.944664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.944957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.944983Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.944988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.945006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.945668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.945765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.946274Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.947532Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T09:16:38.947580Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.947636Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22629, port: 22629 2024-11-21T09:16:38.953367Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:38.961454Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T09:16:39.008352Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T09:16:39.008594Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:22629. Invalid credentials 2024-11-21T09:16:39.008947Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T09:16:39.009092Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:39.009869Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T09:16:38.941651Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:39.008894Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:22629. 
Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T09:16:39.008894Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:22629. Invalid credentials, login_user=user1@ldap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.894319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.894341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.894345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.894348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.895414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.895429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.895448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.907989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.908010Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.910625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.910650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.910675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.914583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.914629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.915820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:16:38.919905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.937000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.937925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.938006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.938047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.938055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.938871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.938912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.938990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.939020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.939025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.939569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939584Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.939989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.940006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.940589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.940984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.941932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.942157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.942279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.942328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.942763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.942806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.942869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942873Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.942901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.942906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942912Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.942917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.942925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.942938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.942944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.942948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.943215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.943233Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.943237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.943246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.943778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943930Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.945434Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.946649Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T09:16:38.946696Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.946756Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12941, port: 12941 2024-11-21T09:16:38.953421Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:38.961453Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=bad_user, attributes: 1.1 2024-11-21T09:16:38.962521Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user bad_user does not exist. 
LDAP search for filter uid=bad_user on server ldap://localhost:12941 return no entries 2024-11-21T09:16:38.962726Z node 1 :HTTP ERROR: Login fail for bad_user@ldap: Could not login via LDAP 2024-11-21T09:16:38.962792Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.963287Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T09:16:38.938905Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:38.962694Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:12941 return no entries, login_user=bad_user@ldap AUDIT LOG checked line: 2024-11-21T09:16:38.962694Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:12941 return no entries, login_user=bad_user@ldap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:30.740412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:30.740435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.740441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:30.740446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:30.740458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:30.740463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:30.740473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:30.740544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:30.752090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:30.752108Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:30.754769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:30.755500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T09:16:30.755527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:30.756633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:30.756780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:30.756847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.756904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:30.757710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.757967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:30.757976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.758004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:30.758009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.758014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:30.758023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.759005Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:30.775168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:30.775272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.775360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:30.775443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:30.775453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.776360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.776387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:30.776437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.776448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:30.776453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:30.776458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:30.776878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.776888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:30.776892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:30.777218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.777226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.777232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.777240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.777877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:30.778272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:30.778325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:30.778520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:30.778542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:30.778552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.778608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:30.778614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:30.778645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.778656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:30.779039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T09:16:30.779045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:30.779089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:30.779094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:30.779179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:30.779186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:30.779197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:30.779201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.779207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:30.779212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:30.779217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:30.779221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:30.779231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:30.779237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:30.779241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:30.779553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.779567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:30.779572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:30.779577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:30.779582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:30.779595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
mplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.910609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:749:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:752:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:753:2058] recipient: [1:751:2676] Leader for TabletID 72057594046678944 is [1:754:2677] sender: [1:755:2058] recipient: [1:751:2676] 2024-11-21T09:16:38.925992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.926029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.926034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.926040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.926045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.926049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.926059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.926134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.927582Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.928039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.928089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.928122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.928127Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.928177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.928315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:38.928344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:38.928354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:16:38.928473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T09:16:38.928512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:38.928562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:38.928566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:38.928585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.928977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.932172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.932199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.932660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.932678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.932688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.932977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:754:2677] sender: [1:813:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.963951Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:38.964010Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 65us result status StatusSuccess 2024-11-21T09:16:38.964094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2024-11-21T09:16:38.777722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659020024382745:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.777745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044a9/r3tmp/tmpT5d7BN/pdisk_1.dat 2024-11-21T09:16:38.833868Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.847707Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:35264 2024-11-21T09:16:38.847818Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] facade attach Name# Session actor# [1:7439659020024383239:2245] peer# ipv6:[::1]:35264 2024-11-21T09:16:38.847835Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] facade read Name# Session peer# ipv6:[::1]:35264 2024-11-21T09:16:38.847874Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] facade finish Name# Session peer# ipv6:[::1]:35264 grpc status# (0) message# 2024-11-21T09:16:38.847994Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] read finished Name# Session ok# false data# peer# ipv6:[::1]:35264 2024-11-21T09:16:38.848011Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T09:16:38.848012Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:35264 2024-11-21T09:16:38.848019Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] stream finished Name# Session ok# true peer# ipv6:[::1]:35264 grpc status# (0) message# 2024-11-21T09:16:38.848038Z node 1 :GRPC_SERVER DEBUG: [0x5415fe945400] deregistering request Name# Session peer# ipv6:[::1]:35264 (finish done) 2024-11-21T09:16:38.879376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.879404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.880476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.895148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.895195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.895204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.896141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.896154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.896167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.896256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.907949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.907970Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.910584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.910618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.910663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.914525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.914565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.915828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921277Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.939601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.939673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.939767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.939774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.940524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.940536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.940540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.941007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.941430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.941450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942007Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.942389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.942430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.942578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.942681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.942715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.943130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.943167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.943230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.943257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.943261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.943266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.943270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.943274Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.943277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.943288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.943293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.943296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.943578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.943603Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.943607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.943620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.944232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:38.945323Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.946601Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:272:2264]) 2024-11-21T09:16:38.947209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.952325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.952369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:38.952375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.952391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.952403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:38.952410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.952416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2024-11-21T09:16:38.952421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T09:16:38.952426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:38.952608Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.953650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T09:16:38.953735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.953776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:38.953981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.953995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.954001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:38.954006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:38.954011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.954030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:38.954086Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:38.954482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T09:16:38.954576Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.957385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T09:16:38.957398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T09:16:39.151986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjIzNzk5LCJpYXQiOjE3MzIxODA1OTksInN1YiI6InVzZXIxIn0.DH-bGhBfY2LyQntJATAgt_6VDbrReiZC38vvgJlUDoRaXf_4zc06Pyo87CBrQ9sou5mOu7swRfIAGlEPt5tBiycRTyCmF666M1E4BgOH9ehUMo_B80EuEccde0uyXG4OEhczPMv1Y0HEC7dKHKGgH2-B0MrQBaZttNwgTLR6oFLH1vhDMuh6z8wKmXBESYnb8qtOnOhiZDsr7Pck8zbIUyn4iImoP8A5wiosrQ6TnXrJlfDyn9Ae1sNqQ0-Yw01c4_S4WoZOCqthlh1YZ1HicH-okHi4ePOpGHAReWVAy8EALiBjuV9APCkSkLcd4BL5zvxFwSM4twerkvpvWO8r1Q", at schemeshard: 72057594046678944 2024-11-21T09:16:39.152075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:39.152085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:39.152153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:39.152160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T09:16:39.152620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T09:16:39.152789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:39.152822Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 36us result status StatusSuccess 2024-11-21T09:16:39.152918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5B8UnTyQNnKx75jVkWVv\nQNxLtSjY0tqS+e97X6QqqgcAY3y6UU5Fo8hDHP5usUNgVmcCePlRsAYN38pzJ8fh\nGiQrMLpYQJruDToWSooexLb7AABgv7JoQD0F+C8uoQ+Y7YSXgM43ARDDqDWTzXPZ\n6++BS+OVnyERoNgmulSW37anuusz/XaJvfRgYvjUOb30Zz9deHCu7LvHlqAZPomD\nIPDufMbNRoePDcOFUPDIocDRGLIzPMl42Gsb10kzWCGt46jH6SHQ/Z3W8Kutw9bE\nvpq4HkOc8P+aIeUj78rFqDQAG0/BwNQAlyWG5MXNd1WYrQ8a5dr9AH1Y4en7guxY\nfwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732266999147 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:39.153717Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T09:16:39.153738Z node 1 :HTTP ERROR: Logout: No ydb_session_id cookie 2024-11-21T09:16:39.153810Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T09:16:39.157937Z node 1 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2024-11-21T09:16:39.158008Z node 1 :HTTP ERROR: Logout: Token is not in correct format 2024-11-21T09:16:39.158112Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2024-11-21T09:16:38.940477Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:38.953686Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T09:16:39.152415Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 2024-11-21T09:16:39.158356Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****8r1Q (93E11374), operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2024-11-21T09:16:39.158356Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****8r1Q (93E11374), operation=LOGOUT, status=SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:38.894397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.894424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.894429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.894433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.895434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.895453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.895470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.909121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.909142Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.911783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.912614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.912643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.914273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.914487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.915821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921339Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.940606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.940671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.940764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.940771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T09:16:38.941405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.941451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.941463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.941468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.941869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.941892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.942214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.943193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.943226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.943369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.943460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:38.943467Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.943488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.943501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.943899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.943930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.943935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.943996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.944002Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.944016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.944020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.944025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.944030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.944035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.944038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.944047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.944052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.944057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.944376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.944399Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.944405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2024-11-21T09:16:38.944417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.944971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.945069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:38.945303Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.946692Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:38.947278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.947304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Login authentication is disabled, at schemeshard: 72057594046678944 2024-11-21T09:16:38.947553Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.948328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Login authentication is disabled" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.948354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Login authentication is disabled, operation: CREATE USER, path: /MyRoot 2024-11-21T09:16:38.948481Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T09:16:38.948621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T09:16:38.948626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T09:16:39.048479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Login authentication is disabled", at schemeshard: 72057594046678944 2024-11-21T09:16:39.048524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:39.048532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:39.048587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:39.048595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T09:16:39.048724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T09:16:39.048797Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:39.048827Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 33us result status StatusSuccess 2024-11-21T09:16:39.048937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvGV0gvMBAoIIZupP93FS\nPiE6LmyJVk4nE/egdFS7kYOxkx/GzDwTVFFwS6SRZtvP5VYwe0jcqNo2jNzmiq3L\nWNyR/3n4IZmJm8gptnAMxm+r5u7tILj2gyXQPahTy7N5bKtxpxYRZddEbNGVlEqS\np5ZU02LnQdNwaXD6L5pO6xhc6CB8Kzmp/sX+x8Ly+Ji2NLFpLEfhQVIu7YUdWr9k\nnrPDWi4S8VXsomOSVrNkWDgl8henX/Edc3XL8U9ul1UE4NNnnN+NKjzfMJfvN1kc\npyxj/CcdoPRwrPxUVY70Mh09qQw5UKW70D75YJJ01rgNZrXop8DPJn5ghRL6Zpij\nKwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732266999047 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:38.894390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:38.894412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.894417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:38.894420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:38.895426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:38.895439Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:38.895452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:38.895529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:38.907993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:38.908011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:38.910782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:38.910809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:38.910835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:38.915619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:38.915668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:38.915808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.916530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:38.918129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.919899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:38.919906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.919913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:38.919926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.921752Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:38.938707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.938779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.938827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.938861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.938866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:38.939714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.939723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:38.939726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:38.939730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:38.940158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:38.940587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.940606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.940611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.941166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:38.941746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:38.941945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:38.942147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 128 -> 240 2024-11-21T09:16:38.942272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:38.942300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.942312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:38.942776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.942827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:38.942898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:38.942904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:38.942925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:38.942929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:38.942942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:38.942946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:38.942950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:38.942960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:38.942966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:38.942969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:38.943264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:38.943285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:38.943290Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:38.943294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.943306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:38.943960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:38.944067Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:38.945323Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:38.946590Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:38.947166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:38.952342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:38.952386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:38.952391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.952406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:38.952416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:38.952421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:38.952425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:38.952428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T09:16:38.952431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:38.952662Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:38.953575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T09:16:38.953676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:38.953708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:38.953714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:38.953813Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 
RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:38.953840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.953846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:38.953850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:38.953853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:38.953856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:38.953868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:38.954185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T09:16:38.954282Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:38.957429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T09:16:38.957442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T09:16:39.061651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2024-11-21T09:16:39.061703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:39.061709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:39.061755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:39.061761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T09:16:39.062090Z node 1 :HTTP ERROR: Login fail for user1: Invalid password 2024-11-21T09:16:39.062176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T09:16:38.939656Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:38.953611Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T09:16:39.062030Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 AUDIT LOG checked line: 
2024-11-21T09:16:39.062030Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 |94.2%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings >> TWebLoginService::AuditLogLdapLoginSuccess >> KqpQueryService::TableSink_OltpInteractive [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2024-11-21T09:16:39.939649Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:39.939804Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T09:16:39.939950Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T09:16:39.939961Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.939970Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = ERROR 2024-11-21T09:16:39.939990Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2024-11-21T09:16:39.940006Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2 2024-11-21T09:16:39.940009Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T09:16:39.940018Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = ERROR 2024-11-21T09:16:39.940021Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2024-11-21T09:16:39.940030Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5 2024-11-21T09:16:39.940033Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2024-11-21T09:16:39.940041Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T09:16:39.940050Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = ERROR 2024-11-21T09:16:39.940053Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 
2024-11-21T09:16:39.940058Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T09:16:39.940061Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.940066Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 8 2024-11-21T09:16:39.940078Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2024-11-21T09:16:39.940082Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T09:16:39.947202Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:39.947354Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T09:16:39.947905Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.947925Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T09:16:39.948471Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T09:16:39.948476Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.948532Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:39.948557Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:39.948576Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T09:16:39.948582Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T09:16:39.948589Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.948595Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T09:16:39.948598Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:39.948604Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T09:16:39.948608Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.948615Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T09:16:39.948617Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:16:39.948620Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T09:16:39.948622Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.948628Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T09:16:39.948630Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.948637Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T09:16:39.948645Z node 2 :STATISTICS DEBUG: Received 
TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T09:16:39.958832Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.958862Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:39.958888Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.958893Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:39.969230Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T09:16:39.969272Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T09:16:39.969279Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:39.969303Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.969307Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T09:16:39.969311Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.969314Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:39.969355Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.969360Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2024-11-21T09:16:39.950974Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:39.951147Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2, status = OK 2024-11-21T09:16:39.951214Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951229Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 3, status = OK 2024-11-21T09:16:39.951236Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951242Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = OK 2024-11-21T09:16:39.951252Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951261Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5, status = OK 2024-11-21T09:16:39.951267Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951272Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T09:16:39.951297Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = OK 2024-11-21T09:16:39.951304Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:7:2054], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951310Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T09:16:39.951313Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.951317Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T09:16:39.951326Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = 
[1:12:2059], server id = [1:12:2059], tablet id = 6, status = OK 2024-11-21T09:16:39.951331Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951336Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T09:16:39.951344Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T09:16:39.951347Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.951352Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = OK 2024-11-21T09:16:39.951357Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:39.951364Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T09:16:39.951367Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.951370Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2024-11-21T09:16:39.951376Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 7, status = ERROR 2024-11-21T09:16:39.951380Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.961484Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2024-11-21T09:16:39.961542Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T09:16:39.961574Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2024-11-21T09:16:39.961578Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2024-11-21T09:16:39.961581Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2024-11-21T09:16:39.961591Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2024-11-21T09:16:39.961597Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2024-11-21T09:16:39.961601Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2024-11-21T09:16:39.961603Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2024-11-21T09:16:39.961607Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:39.961631Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:39.961635Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T09:16:39.961654Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T09:16:39.961658Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.961663Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T09:16:39.961666Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:39.961684Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T09:16:39.961687Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 16532, MsgBus: 13463 2024-11-21T09:16:32.592287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658995714647913:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.592324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b5c/r3tmp/tmpqehFEo/pdisk_1.dat 2024-11-21T09:16:32.697673Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:32.700556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.700588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.704601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16532, node 1 2024-11-21T09:16:32.781724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.781741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.781749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.781784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13463 TClient is connected to server localhost:13463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.902835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.906494Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.934143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658995714648302:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.934169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.101726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.164313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000009615746:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.164345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000009615751:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.164356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.165146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.171689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000009615753:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 19281, MsgBus: 1568 2024-11-21T09:16:33.665756Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659000440423252:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.665777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b5c/r3tmp/tmpGF9nea/pdisk_1.dat 2024-11-21T09:16:33.676543Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19281, node 2 2024-11-21T09:16:33.691692Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.691707Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.691710Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.691755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1568 TClient is connected to server localhost:1568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.766033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.766067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.767380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.767965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.988818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659000440423852:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.988860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.990130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.045738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004735391249:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.045760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.045929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659004735391254:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.046592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.053374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659004735391256:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:34.165134Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Operation is aborting because an duplicate key;tx_id=4; 2024-11-21T09:16:34.165215Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2024-11-21T09:16:34.165271Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2024-11-21T09:16:34.165334Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659004735391393:2331], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7439659004735391377:2331]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 2]`. ShardID=72075186224037888, Sink=[2:7439659004735391393:2331].{
: Fatal: Operation is aborting because an duplicate key } 2024-11-21T09:16:34.165476Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659004735391386:2331], SessionActorId: [2:7439659004735391377:2331], Bad request. Table `/Root/DataShard`. {
: Fatal: Operation is aborting because an duplicate key }. statusCode=BAD_REQUEST. subIssues=
: Fatal: Operation is aborting because an duplicate key . sessionActorId=[2:7439659004735391377:2331]. isRollback=0 2024-11-21T09:16:34.165510Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDM1ZjcxMTYtZjM1MWYyODYtODkzMDVkMjUtOTg2MjZmN2Q=, ActorId: [2:7439659004735391377:2331], ActorState: ExecuteState, TraceId: 01jd704kf9arrqwjmdr4xq20yv, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [2:7439659004735391387:2331] from: [2:7439659004735391386:2331] 2024-11-21T09:16:34.165622Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659004735391387:2331] TxId: 281474976715663. Ctx: { TraceId: 01jd704kf9arrqwjmdr4xq20yv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDM1ZjcxMTYtZjM1MWYyODYtODkzMDVkMjUtOTg2MjZmN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table `/Root/DataShard`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key } 2024-11-21T09:16:34.166597Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDM1ZjcxMTYtZjM1MWYyODYtODkzMDVkMjUtOTg2MjZmN2Q=, ActorId: [2:7439659004735391377:2331], ActorState: ExecuteState, TraceId: 01jd704kf9arrqwjmdr4xq20yv, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:16:38.666220Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659000440423252:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.666258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7134, MsgBus: 30390 2024-11-21T09:16:39.372273Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659027336631414:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:39.373478Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002b5c/r3tmp/tmpcZaLTh/pdisk_1.dat 2024-11-21T09:16:39.383943Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7134, node 3 2024-11-21T09:16:39.398100Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:39.398113Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:39.398115Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:39.398159Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30390 TClient is connected to server localhost:30390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:39.471779Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:39.471812Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:39.472865Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:39.473540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:39.656971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659027336631866:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.656993Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.660493Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:39.719868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:39.742523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659027336633168:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.742546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.743089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659027336633172:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.743104Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.743119Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659027336633177:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.743756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:16:39.745244Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659027336633179:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2024-11-21T09:16:40.013425Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:40.013543Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:40.115609Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T09:16:40.115640Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:40.115649Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T09:16:40.115835Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T09:16:40.115847Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:40.115858Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17:2054], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T09:16:40.115862Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:40.115876Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T09:16:40.115884Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> KqpExtractPredicateLookup::PointJoin-EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::SqlInJoin+EnableKqpDataQueryStreamLookup >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T09:16:40.127885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:40.127910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:40.127916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:40.127920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:40.127937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:40.127941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:40.127949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T09:16:40.128020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:40.136791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:40.136813Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:40.138964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:40.138989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:40.139011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:40.141880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:40.141942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:40.142047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:40.142112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:40.142806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:40.143130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:40.143142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:40.143184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:40.143192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:40.143199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:40.143212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.145179Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:40.163618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:40.163734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.163802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:40.163858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:40.163866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.164811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2024-11-21T09:16:40.164846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:40.164904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.164913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:40.164918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:40.164923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:40.165323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.165332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:40.165336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:40.165618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.165625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.165630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:40.165637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:40.166281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:40.166650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:40.166701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:40.166895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:40.166918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:40.166937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:40.166996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:40.167002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:40.167031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:40.167045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:40.167455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:40.167463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:40.167505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:40.167510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:40.167591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:40.167598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:40.167610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:40.167614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:40.167620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:40.167625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:40.167630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:40.167634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:40.167643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:40.167649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:40.167653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:40.167966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:40.167978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:40.167983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:40.167988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:40.167993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:40.168008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:40.168704Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:40.168803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:40.169377Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:40.170769Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T09:16:40.170803Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T09:16:40.170869Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19254, port: 19254 2024-11-21T09:16:40.171236Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T09:16:40.175084Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T09:16:40.216340Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T09:16:40.216810Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:40.217893Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:40.218094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T09:16:40.218102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T09:16:40.303572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjIzODAwLCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3MzIxODA2MDAsInN1YiI6InVzZXIxIn0.VAI0rcMfUeFcky3xyDKT34C0Xd_vsXySyP3y1tgI9FUtXidwHgN9waMWwjbu2Zdb_6Z9aJ4KWk6KaRmiGG5enfp1P-v3bUTPJQbFmjBEwL89kCmwQxUF9rof9d5nIAR3SeGJErVdRR4WOhxYIm1GKm3I022Dbo4iT1GTfMrWfVDaN7jrT08CHgBLwyUlByJa_Yi9tbFKBbnitAeB-ZMbSYDF8Sh8tSa3Km8m8O2jbg1nxpS8O-7lsq6PzfdPGH7neGk5C_IYDvuch6T1eQZpvSsm8IW8nTMzjy3b-R0qTu13utjaiGzZank_KGavQ9oSfXjs7YJc8_FzOGG9MZ_eKg", at schemeshard: 72057594046678944 2024-11-21T09:16:40.303825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:40.303835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:40.303882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:40.303886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T09:16:40.304122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(2): 2024-11-21T09:16:40.164840Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T09:16:40.303772Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T09:16:40.303772Z: 
component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap |94.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] |94.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlapReboots::DropMultipleTables >> TOlapReboots::DropMultipleStandaloneTables >> TOlapReboots::CreateTable >> TOlapReboots::DropTableThenStore >> TOlapReboots::CreateDropTable >> TOlapReboots::CreateMultipleTables >> TOlapReboots::CreateStore >> TOlapReboots::CreateDropStandaloneTable >> TOlapReboots::CreateStandaloneTable |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2024-11-21T09:16:25.200539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658963388826632:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.200583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a42/r3tmp/tmpfGgfU2/pdisk_1.dat 2024-11-21T09:16:25.287491Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22495, node 1 2024-11-21T09:16:25.300464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.300494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.301555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.337306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.337321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.337323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.337362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.431194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:25.436817Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.438020Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:25.438060Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Connect to grpc://localhost:23057 2024-11-21T09:16:25.438691Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:25.444645Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Status 14 Service Unavailable 2024-11-21T09:16:25.444769Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:25.444779Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:25.444791Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:25.445518Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:25.446292Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Status 14 Service Unavailable 2024-11-21T09:16:25.446386Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:25.446392Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:26.203112Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:26.203146Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( 
something.read) 2024-11-21T09:16:26.203234Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:26.208607Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Status 14 Service Unavailable 2024-11-21T09:16:26.208676Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:26.208693Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:27.203439Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:27.203476Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:27.203566Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:27.204379Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Status 14 Service Unavailable 2024-11-21T09:16:27.204444Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T09:16:27.204459Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:29.204362Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:29.204420Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:29.204515Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:29.205397Z node 1 :GRPC_CLIENT DEBUG: [171abf082b10] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:29.205500Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2024-11-21T09:16:30.200716Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439658963388826632:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:30.200755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:37.687005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659016373200044:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:37.687152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a42/r3tmp/tmp1vSGUC/pdisk_1.dat 2024-11-21T09:16:37.693625Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23100, node 2 2024-11-21T09:16:37.703367Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:37.703383Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:37.703384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:37.703414Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:37.787142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:37.787166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:37.788251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:37.788930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:37.789881Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:37.790733Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:37.790760Z node 2 :GRPC_CLIENT DEBUG: [171abf088090] Connect to grpc://localhost:4950 2024-11-21T09:16:37.790995Z node 2 :GRPC_CLIENT DEBUG: [171abf088090] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_ ... 
Request { iam_token: "**** (8E120919)" actions { items { resource_path { id: "gizmo" type: "iam.gizmo" } permission: "monitoring.view" } } result_filter: ALL_FAILED } 2024-11-21T09:16:40.079923Z node 3 :GRPC_CLIENT DEBUG: [171abf087e50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:40.079963Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:40.347604Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659028087099774:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:40.347658Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a42/r3tmp/tmpLy0R7g/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25923, node 4 2024-11-21T09:16:40.361830Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:40.366195Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:40.366207Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:40.366208Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:40.366242Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:40.448000Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:40.448036Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:40.449118Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:40.449830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:40.451869Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T09:16:40.451905Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2024-11-21T09:16:40.451911Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2024-11-21T09:16:40.451919Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T09:16:40.451929Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2024-11-21T09:16:40.451950Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Connect to grpc://localhost:3947 2024-11-21T09:16:40.452566Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:40.452674Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:40.452705Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:40.452732Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:40.452773Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T09:16:40.454857Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Status 16 Access Denied 2024-11-21T09:16:40.454929Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:40.455093Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Status 16 Access Denied 2024-11-21T09:16:40.455124Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:40.455205Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:40.455224Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Status 16 Access Denied 2024-11-21T09:16:40.455227Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T09:16:40.455233Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:40.455285Z node 4 :GRPC_CLIENT DEBUG: [171abf084cd0] Status 16 Access Denied 2024-11-21T09:16:40.455306Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T09:16:40.455315Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for 
UserAccount(user1@as) 2024-11-21T09:16:40.455567Z node 4 :GRPC_CLIENT DEBUG: [171abf086e90] Connect to grpc://localhost:18487 2024-11-21T09:16:40.455692Z node 4 :GRPC_CLIENT DEBUG: [171abf086e90] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T09:16:40.457350Z node 4 :GRPC_CLIENT DEBUG: [171abf086e90] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T09:16:40.457463Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T09:16:40.728347Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659031442316033:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:40.728490Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a42/r3tmp/tmp9MqZaQ/pdisk_1.dat 2024-11-21T09:16:40.742624Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10098, node 5 2024-11-21T09:16:40.752810Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:40.752831Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:40.752833Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:40.752873Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:40.828476Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:40.828510Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:40.829552Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:40.830812Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:40.832617Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T09:16:40.832630Z node 5 :GRPC_CLIENT DEBUG: [171abf082210] Connect to grpc://localhost:20210 2024-11-21T09:16:40.832794Z node 5 :GRPC_CLIENT DEBUG: [171abf082210] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T09:16:40.834726Z node 5 :GRPC_CLIENT DEBUG: [171abf082210] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:40.834847Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:40.834967Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T09:16:40.835011Z node 5 :GRPC_CLIENT DEBUG: [171abf082210] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T09:16:40.835891Z node 5 :GRPC_CLIENT DEBUG: [171abf082210] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:40.835955Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2024-11-21T09:16:25.201781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658967629501777:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.202694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a39/r3tmp/tmpRNv4pR/pdisk_1.dat 2024-11-21T09:16:25.288184Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:25.300469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.300500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25366, node 1 2024-11-21T09:16:25.302882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.338993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.339005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.339006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.339045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4207 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:25.445123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.448358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.449446Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:25.449475Z node 1 :GRPC_CLIENT DEBUG: [546abf082690] Connect to grpc://localhost:20027 2024-11-21T09:16:25.450019Z node 1 :GRPC_CLIENT DEBUG: [546abf082690] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T09:16:25.462325Z node 1 :GRPC_CLIENT DEBUG: [546abf082690] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:25.462432Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:25.671121Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658964304104530:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a39/r3tmp/tmpIaBiGT/pdisk_1.dat 2024-11-21T09:16:25.678667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 1268, node 2 2024-11-21T09:16:25.688437Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:25.692580Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.692594Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.692596Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.692633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13794 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.772639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.772672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.773310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:25.773659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:25.775201Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2024-11-21T09:16:25.775234Z node 2 :GRPC_CLIENT DEBUG: [546abf082f90] Connect to grpc://localhost:26580 2024-11-21T09:16:25.775390Z node 2 :GRPC_CLIENT DEBUG: [546abf082f90] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2024-11-21T09:16:25.781254Z node 2 :GRPC_CLIENT DEBUG: [546abf082f90] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2024-11-21T09:16:25.781337Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2024-11-21T09:16:26.095183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658970879099671:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.095355Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a39/r3tmp/tmpD4PbCk/pdisk_1.dat 2024-11-21T09:16:26.105706Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5124, node 3 2024-11-21T09:16:26.119441Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.119450Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.119452Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.119494Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.197529Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.197559Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.197948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:26.198572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.199846Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:26.199865Z node 3 :GRPC_CLIENT DEBUG: [546abf083890] Connect to grpc://localhost:29938 2024-11-21T09:16:26.200010Z node 3 :GRPC_CLIENT DEBUG: [546abf083890] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T09:16:26.209105Z node 3 :GRPC_CLIENT DEBUG: [546abf083890] Status 14 Service Unavailable 2024-11-21T09:16:26.209175Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:26.209187Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:26.209237Z node 3 :GRPC_CLIENT DEBUG: [546abf083890] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T09:16:26.209853Z node 3 :GRPC_CLIENT DEBUG: [546abf083890] Status 1 CANCELLED 2024-11-21T09:16:26.209897Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2024-11-21T09:16:26.546295Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439658970053619545:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a39/r3tmp/tmp0VA4V0/pdisk_1.dat 2024-11-21T09:16:26.549101Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:26.559713Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18654, node 4 2024-11-21T09:16:26.568886Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.568896Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.568898Z node 4 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.568939Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.645555Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.645586Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.646558Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.649047Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:26.650180Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:26.651027Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:26.651057Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Connect to grpc://localhost:21315 2024-11-21T09:16:26.651419Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:26.658519Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Status 14 Service Unavailable 2024-11-21T09:16:26.658587Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:26.658595Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:26.658649Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:26.660470Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Status 14 Service Unavailable 2024-11-21T09:16:26.660553Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:27.545959Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:27.545976Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:27.546037Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 
2024-11-21T09:16:27.546873Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Status 14 Service Unavailable 2024-11-21T09:16:27.546933Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:28.546442Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:28.546459Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:28.546532Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:28.547352Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Status 14 Service Unavailable 2024-11-21T09:16:28.547401Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:30.547308Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T09:16:30.547333Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:30.547400Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:30.548275Z node 4 :GRPC_CLIENT DEBUG: [546abf085390] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:30.548356Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2024-11-21T09:16:31.545697Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439658970053619545:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:31.545733Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:38.894951Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659022894575472:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.894970Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a39/r3tmp/tmpE2g6k6/pdisk_1.dat 2024-11-21T09:16:38.908162Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23299, node 5 2024-11-21T09:16:38.918627Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:38.918640Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:38.918642Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:38.918703Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:38.994950Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.994985Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.996091Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:38.997281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:38.998825Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:38.998847Z node 5 :GRPC_CLIENT DEBUG: [546abf087c10] Connect to grpc://localhost:21204 2024-11-21T09:16:38.999029Z node 5 :GRPC_CLIENT DEBUG: [546abf087c10] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:39.000970Z node 5 :GRPC_CLIENT DEBUG: [546abf087c10] Status 14 Service Unavailable 2024-11-21T09:16:39.001030Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:39.001047Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2024-11-21T09:16:39.001092Z node 5 :GRPC_CLIENT DEBUG: [546abf087c10] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2024-11-21T09:16:39.001666Z node 5 :GRPC_CLIENT DEBUG: [546abf087c10] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2024-11-21T09:16:39.001735Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T09:16:41.229140Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:41.229265Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T09:16:41.229320Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:41.229335Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T09:16:41.229347Z node 1 :STATISTICS 
DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T09:16:41.229349Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:41.229388Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:41.229410Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T09:16:41.229428Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T09:16:41.229434Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T09:16:41.229438Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:41.229444Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T09:16:41.229447Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:41.229454Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T09:16:41.229458Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T09:16:41.229464Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T09:16:41.229466Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:16:41.229470Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T09:16:41.229471Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:41.229478Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T09:16:41.229479Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T09:16:41.229487Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T09:16:41.229496Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T09:16:41.239629Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:41.239655Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:41.239683Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:41.239688Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:41.249884Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T09:16:41.249916Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T09:16:41.249923Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T09:16:41.249950Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:41.249954Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T09:16:41.249958Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:41.249961Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:16:41.250000Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T09:16:41.250004Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> 
TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2024-11-21T09:16:25.201576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658964382809647:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.201665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3f/r3tmp/tmpjgB22G/pdisk_1.dat 2024-11-21T09:16:25.291515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2925, node 1 2024-11-21T09:16:25.297697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.297729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.300453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.337255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.337268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.337270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.337316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.444252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.447424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.448475Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:25.448505Z node 1 :GRPC_CLIENT DEBUG: [48f7f0828d0] Connect to grpc://localhost:5395 2024-11-21T09:16:25.448844Z node 1 :GRPC_CLIENT DEBUG: [48f7f0828d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2024-11-21T09:16:25.462048Z node 1 :GRPC_CLIENT DEBUG: [48f7f0828d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2024-11-21T09:16:25.462256Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3f/r3tmp/tmpdA8S88/pdisk_1.dat 2024-11-21T09:16:25.677348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:25.679178Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2394, node 2 2024-11-21T09:16:25.689133Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.689147Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.689148Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.689179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.769811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.769841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.770911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.772235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.774224Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:25.774249Z node 2 :GRPC_CLIENT DEBUG: [48f7f083650] Connect to grpc://localhost:62296 2024-11-21T09:16:25.774416Z node 2 :GRPC_CLIENT DEBUG: [48f7f083650] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2024-11-21T09:16:25.776439Z node 2 :GRPC_CLIENT DEBUG: [48f7f083650] Status 14 Service Unavailable 2024-11-21T09:16:25.776500Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:25.776513Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:25.776551Z node 2 :GRPC_CLIENT DEBUG: [48f7f083650] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T09:16:25.779251Z node 2 :GRPC_CLIENT DEBUG: [48f7f083650] Status 1 CANCELLED 2024-11-21T09:16:25.779309Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3f/r3tmp/tmpWI6Un0/pdisk_1.dat 2024-11-21T09:16:26.112453Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:26.112930Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4309, node 3 2024-11-21T09:16:26.127143Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.127157Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.127159Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.127201Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:16:26.204989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.205036Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:26.205461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:26.206157Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.210185Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:26.211120Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:26.211136Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Connect to grpc://localhost:25483 2024-11-21T09:16:26.211330Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2024-11-21T09:16:26.213306Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Status 14 Service Unavailable 2024-11-21T09:16:26.213368Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:26.213382Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:26.213416Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2024-11-21T09:16:26.213854Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Status 14 Service Unavailable 2024-11-21T09:16:26.213900Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:27.104502Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T09:16:27.104524Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:27.104574Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2024-11-21T09:16:27.116520Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Status 14 Service Unavailable 2024-11-21T09:16:27.116589Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:28.104838Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T09:16:28.104854Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:28.104902Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2024-11-21T09:16:28.105601Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Status 14 Service Unavailable 2024-11-21T09:16:28.105649Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:30.105748Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2024-11-21T09:16:30.105766Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:30.105811Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Request AuthenticateRequest { 
iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2024-11-21T09:16:30.106507Z node 3 :GRPC_CLIENT DEBUG: [48f7f080290] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2024-11-21T09:16:30.106588Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:38.427932Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659022581001753:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.428088Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3f/r3tmp/tmpVyV7rr/pdisk_1.dat 2024-11-21T09:16:38.439005Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20722, node 4 2024-11-21T09:16:38.450242Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:38.450258Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:38.450260Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:38.450307Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:38.530860Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:38.530889Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:38.531246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:38.531894Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:38.533235Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:38.533257Z node 4 :GRPC_CLIENT DEBUG: [48f7f083ad0] Connect to grpc://localhost:7094 2024-11-21T09:16:38.533438Z node 4 :GRPC_CLIENT DEBUG: [48f7f083ad0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2024-11-21T09:16:38.535432Z node 4 :GRPC_CLIENT DEBUG: [48f7f083ad0] Status 14 Service Unavailable 2024-11-21T09:16:38.535499Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T09:16:38.535511Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T09:16:38.535561Z node 4 :GRPC_CLIENT DEBUG: [48f7f083ad0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2024-11-21T09:16:38.536169Z node 4 :GRPC_CLIENT DEBUG: [48f7f083ad0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2024-11-21T09:16:38.536238Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T09:16:40.721571Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659027927856341:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:40.721589Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3f/r3tmp/tmpNmMvx7/pdisk_1.dat 2024-11-21T09:16:40.735018Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13978, node 5 2024-11-21T09:16:40.745699Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:40.745713Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:40.745716Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:40.745759Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:40.822113Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:40.822147Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:40.823190Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:40.824495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:40.826610Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] >> TOlapReboots::CreateMultipleStandaloneTables >> TSchemeShardViewTest::DropView |94.2%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::EmptyName >> TSchemeShardViewTest::EmptyQueryText >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::ReadOnlyMode |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::DropView [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> GroupWriteTest::Simple >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.117355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.117380Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.120561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.121396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.121437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:42.123114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.123305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.147209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.147263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.147350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.147357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.147961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.147985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.147989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.148329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.148647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.149632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.149665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.149800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:16:42.149882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.149890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.149909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.149920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.150243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.150271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.150349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150355Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.150365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.150369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.150374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.150378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.150382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.150386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.150394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.150399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.150403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.150668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.150679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.150683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.150687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.150691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.150702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:42.160958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:16:42.160961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T09:16:42.160965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:16:42.161067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:42.161073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:16:42.161084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T09:16:42.161323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.161340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:16:42.161344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:16:42.161414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.161429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:16:42.161432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:42.161439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:16:42.161748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T09:16:42.161811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:42.161829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T09:16:42.161852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:42.161855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T09:16:42.161864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T09:16:42.161868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T09:16:42.161932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.161960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:330:2322] 2024-11-21T09:16:42.161980Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.161990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2322] 2024-11-21T09:16:42.162003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.162007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:330:2322] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:42.162061Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162081Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 26us result status StatusSuccess 2024-11-21T09:16:42.162177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 
CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162225Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162238Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 14us result status StatusSuccess 2024-11-21T09:16:42.162288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162326Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T09:16:42.162336Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 11us result status StatusSuccess 2024-11-21T09:16:42.162366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.118077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.118095Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.120716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T09:16:42.121512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.121533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:42.122857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.123021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130089Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.144118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.145167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.145210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.145965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.146042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:16:42.146057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.146060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.146063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.146478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.146851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.146870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.147365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.147703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.148592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.148760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.148870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.148908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T09:16:42.149385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.149418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.149488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149495Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.149505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.149508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.149518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.149525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.149535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.149539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.149542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.149808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.149823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.149825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.149834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
160744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.160787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.161094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:16:42.161118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T09:16:42.161182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161207Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2024-11-21T09:16:42.161229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T09:16:42.161250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.161258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.161647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.161680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: 
[OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:42.161697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T09:16:42.161705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:16:42.161768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.161774Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:16:42.161782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:16:42.161785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:42.161790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:16:42.161794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:42.161798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:16:42.161801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:16:42.161814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:42.161818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:16:42.161822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:42.161825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:42.161898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.161912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.161917Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:42.161921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.162026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.162037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.162041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.162045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:42.162049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.162060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:16:42.162242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:42.162258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.162629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:42.162983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:42.163007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2024-11-21T09:16:42.163058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T09:16:42.163062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2024-11-21T09:16:42.163074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T09:16:42.163077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T09:16:42.163135Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T09:16:42.163151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.163155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:326:2318] 2024-11-21T09:16:42.163162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T09:16:42.163175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.163178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:326:2318] TestWaitNotification: OK eventTxId 103 
TestWaitNotification: OK eventTxId 104 2024-11-21T09:16:42.163235Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.163254Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 28us result status StatusPathDoesNotExist 2024-11-21T09:16:42.163286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.116894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.116913Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.119676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.120440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.120470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T09:16:42.122531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.122769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130051Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.145343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.145450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.145488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.146145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.146162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.146166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.146664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.147089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.147111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.147698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.148078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.148618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.148791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.148890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.148922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.149371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.149406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.149471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.149484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.149488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.149498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.149507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.149518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.149524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.149528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.149786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.149805Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.149808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.149820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:42.150550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:42.150646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:42.151870Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:42.153077Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:42.153688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.153714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2024-11-21T09:16:42.153718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2024-11-21T09:16:42.153730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, at schemeshard: 72057594046678944 2024-11-21T09:16:42.153945Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:42.154503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, operation: CREATE VIEW, path: /MyRoot/ 2024-11-21T09:16:42.154616Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> GroupWriteTest::TwoTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.118251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.118274Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.121183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.121984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.122017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:42.123632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.123858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.125007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.127033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130341Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.145401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.145494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.145532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.146193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.146209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.146213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.146751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.147372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.147402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.147944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.148327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.148624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.148826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.148937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
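
The entries above trace a single schemeshard operation through its internal state machine (for example 2 -> 3 -> 128 -> 240) before the TDone step publishes the result to the scheme board. A minimal, self-contained Python sketch for pulling those transitions out of a raw log chunk like this one; it assumes only the entry layout visible here (ISO-8601 timestamp, node id, component, severity, message) and is an illustration for reading the log, not part of the YDB tooling:

    import re

    # Entry layout observed in this log:
    #   "<ISO-8601 timestamp> node <N> :<COMPONENT> <SEVERITY>: <message>"
    ENTRY_RE = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (?P<node>\d+) "
        r":(?P<component>\w+) (?P<severity>\w+): (?P<message>.*?)(?=\d{4}-\d{2}-\d{2}T|\Z)",
        re.S,
    )
    # State-machine transitions such as "Change state for txid 1:0 128 -> 240".
    STATE_RE = re.compile(r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)")

    def state_transitions(raw_log):
        """Yield (timestamp, operation id, source state, target state) for each transition found."""
        for entry in ENTRY_RE.finditer(raw_log):
            m = STATE_RE.search(entry["message"])
            if m:
                yield entry["ts"], m["op"], int(m["src"]), int(m["dst"])

    sample = (
        "2024-11-21T09:16:42.146213Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
        "Change state for txid 1:0 2 -> 3 "
        "2024-11-21T09:16:42.148912Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
        "Change state for txid 1:0 128 -> 240 "
    )
    print(list(state_transitions(sample)))
    # [('2024-11-21T09:16:42.146213Z', '1:0', 2, 3),
    #  ('2024-11-21T09:16:42.148912Z', '1:0', 128, 240)]
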
2024-11-21T09:16:42.148954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.148966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.149471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.149515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.149575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149582Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.149594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.149598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.149605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.149611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.149623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.149628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.149644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.149922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.149947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.149951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.149982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T09:16:42.158476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.158491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2024-11-21T09:16:42.158504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:16:42.158508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.158965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:42.158981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.159002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2024-11-21T09:16:42.159011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T09:16:42.159024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.159339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:16:42.159354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T09:16:42.159394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply 
TEvOperationPlan, step: 5000003 2024-11-21T09:16:42.159431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T09:16:42.159446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.159453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.159774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.159794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:42.159808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T09:16:42.159813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:16:42.159858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159862Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:16:42.159867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:16:42.159870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:42.159873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:16:42.159878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:16:42.159880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:16:42.159883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:16:42.159889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:42.159893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:16:42.159895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:42.159897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:16:42.159950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.159956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T09:16:42.159958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.159961Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:42.159963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.160028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.160034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:16:42.160036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:16:42.160038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:16:42.160040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.160047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:16:42.160078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:42.160087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.160623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:42.160647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:16:42.160831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:16:42.160868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:42.160871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:16:42.160908Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.160921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:320:2312] TestWaitNotification: OK 
eventTxId 102 2024-11-21T09:16:42.160960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160974Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 21us result status StatusPathDoesNotExist 2024-11-21T09:16:42.160996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.116479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.116500Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.119204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.119993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.120040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T09:16:42.122489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.122710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130090Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.147487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.147541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.147628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.147635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.148294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.148310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.148314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.148786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.149187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.149206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.150238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.150280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.150426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.150515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.150523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.150543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.150554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.150969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.151003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.151008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.151072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.151079Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.151088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.151092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.151097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.151101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.151105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.151109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.151119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.151125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.151129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.151417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.151432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.151437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.151441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.151446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.151460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "Some query" } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.154132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2024-11-21T09:16:42.154140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2024-11-21T09:16:42.154153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:42.154164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:16:42.154171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.154318Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:42.154909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.154978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154985Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2024-11-21T09:16:42.154991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T09:16:42.155010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.155140Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:42.155411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T09:16:42.155432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T09:16:42.155490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 
Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T09:16:42.155534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T09:16:42.155561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.155571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.155965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.156000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:42.156015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.156020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T09:16:42.156024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:16:42.156076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.156082Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T09:16:42.156090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T09:16:42.156094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:42.156098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T09:16:42.156103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T09:16:42.156106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T09:16:42.156110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T09:16:42.156120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:42.156125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T09:16:42.156128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:42.156131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T09:16:42.156238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:42.156250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:42.156257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:42.156262Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:42.156266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.156371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:42.156381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T09:16:42.156385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T09:16:42.156389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T09:16:42.156392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.156402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T09:16:42.156865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T09:16:42.157053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2024-11-21T09:16:42.157084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:42.157160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T09:16:42.157733Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:42.157747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.157750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:296:2288] TestWaitNotification: OK eventTxId 101 2024-11-21T09:16:42.157806Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.157838Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 26us result status StatusSuccess 
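
The CreateView and DropView cases above drive the same ESchemeOpCreateView / ESchemeOpDropView operations that an application reaches through plain YQL. A rough client-side sketch of that pair, assuming the ydb Python SDK's QuerySessionPool / execute_with_retries interface and YDB's CREATE VIEW ... WITH (security_invoker = TRUE) syntax (both assumptions; check them against the SDK and server version in use), with placeholder endpoint, database and query text; the unit tests here run against an in-process schemeshard rather than a live server:

    import ydb  # assumption: ydb Python SDK v3+ with QuerySessionPool available

    def create_and_drop_view():
        # Placeholder connection parameters for illustration only.
        driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
        driver.wait(timeout=5)
        pool = ydb.QuerySessionPool(driver)  # assumption: query-service session pool
        # Server-side this becomes an ESchemeOpCreateView transaction, audited as "CREATE VIEW".
        pool.execute_with_retries(
            "CREATE VIEW MyView WITH (security_invoker = TRUE) AS SELECT 1 AS placeholder;"
        )
        # And this one an ESchemeOpDropView transaction, audited as "DROP VIEW".
        pool.execute_with_retries("DROP VIEW MyView;")
        pool.stop()
        driver.stop()

    if __name__ == "__main__":
        create_and_drop_view()
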
2024-11-21T09:16:42.157908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.124796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.124816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.124819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.124822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.124833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.124835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.124842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.124905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.133481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.133496Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.135837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.136562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.136596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:42.138063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.138281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.138395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.138476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.139404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.139677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.139688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.139745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.139753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.139760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.139774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.141031Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.158807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.158885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.158957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.159003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.159010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.159709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.159733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.159738Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.159742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.160255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.160814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.160835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.160842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.161489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.161984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.162041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.162238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.162354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.162362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.162391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.162404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.162908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.162956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.162961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.163045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.163053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.163064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.163069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.163074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.163080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.163084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.163088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.163100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.163106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.163110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.163437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.163454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.163459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.163464Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.163469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.163486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:42.168681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.169033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:42.169063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:16:42.169098Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T09:16:42.169175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.169195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.169205Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T09:16:42.169238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:42.169273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.169286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:42.169963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.169971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.170017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:42.170041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.170048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:42.170055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:16:42.170069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.170077Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:42.170092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:42.170097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:42.170104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:42.170112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:42.170119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:42.170124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:42.170141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:42.170148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:42.170153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:42.170157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T09:16:42.170388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.170407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.170411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:42.170416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:42.170420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.170527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.170535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.170539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:42.170543Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T09:16:42.170546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.170554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:42.171632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:42.171746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T09:16:42.171833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T09:16:42.171861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T09:16:42.171909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:16:42.171914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T09:16:42.171927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T09:16:42.171932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T09:16:42.172042Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T09:16:42.172072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.172079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:300:2292] 2024-11-21T09:16:42.172161Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:16:42.172182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.172187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:300:2292] 2024-11-21T09:16:42.172226Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T09:16:42.172243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:16:42.172248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:300:2292] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T09:16:42.172347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:42.172389Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 53us result status StatusSuccess 2024-11-21T09:16:42.172505Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.118365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.118388Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.121120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.121881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.121913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T09:16:42.123388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.123586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130024Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.145232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.145349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.145384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.145389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.146103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.146117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T09:16:42.146120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.146753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.146782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.147108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.147117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.147121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.147527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.147849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.148608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.148787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.148879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.148900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.148911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.149356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.149400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.149474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149480Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.149489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.149493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.149503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.149506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.149510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.149522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.149528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.149532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.149799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.149820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.149825Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.149829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.149844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:16:42.150465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:16:42.150540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:16:42.151888Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:16:42.152931Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:16:42.153448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.153483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2024-11-21T09:16:42.153488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2024-11-21T09:16:42.153503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:16:42.153515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:16:42.153520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.153655Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:16:42.154233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T09:16:42.154291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154295Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2024-11-21T09:16:42.154300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T09:16:42.154316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.154414Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:16:42.154735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T09:16:42.154754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T09:16:42.154798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 
5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.154816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T09:16:42.154832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T09:16:42.154853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.154860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.155186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.155215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:16:42.155227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:16:42.155233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:16:42.155292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.155303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:16:42.155312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:16:42.155315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:42.155318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T09:16:42.155321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:16:42.155324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:16:42.155326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:16:42.155333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:16:42.155336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T09:16:42.155339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T09:16:42.155340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T09:16:42.155405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T09:16:42.155411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.155416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:42.155419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T09:16:42.155421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.155485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.155491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:16:42.155494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:16:42.155498Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T09:16:42.155501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:16:42.155509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T09:16:42.156013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:16:42.156320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> TGroupMapperTest::Block42_2disk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:16:42.105439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:42.105466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105471Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:42.105475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:42.105521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:42.105525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:42.105533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:42.105603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:42.116475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:42.116499Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:42.119355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:42.120178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:42.120225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:42.122514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:42.122707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:42.124297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.124921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.126854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.128551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.128559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.128564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.128576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.130192Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.148064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.148121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:42.148201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.148230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:42.148859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.148872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:42.148876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:42.148880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:42.149310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:42.149333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:42.150562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.150581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.150587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.151168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.151625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.151664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.151809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.151833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.151841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.151900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.151908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.151929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.151941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.152396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.152405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.152432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.152436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:16:42.152496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.152503Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.152512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.152516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.152521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.152525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.152530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.152533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.152544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.152549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.152553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:16:42.152849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.152866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:16:42.152870Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:16:42.152876Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:16:42.152880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.152897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ead records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.249993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.250005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.250011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.250016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.251836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.251855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.251943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:42.251954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.251960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:42.252282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:376:2347] sender: [1:432:2058] recipient: [1:15:2062] 2024-11-21T09:16:42.283681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: 
"Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:42.283748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2024-11-21T09:16:42.283757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2024-11-21T09:16:42.283782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:16:42.283800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T09:16:42.283810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:42.284649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2024-11-21T09:16:42.284681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2024-11-21T09:16:42.284724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.284732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2024-11-21T09:16:42.284741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T09:16:42.284765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.285119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T09:16:42.285157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2024-11-21T09:16:42.285287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.285310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:42.285318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2024-11-21T09:16:42.285339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 
2024-11-21T09:16:42.285366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.285377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T09:16:42.285755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.285763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.285799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:42.285817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.285821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T09:16:42.285827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T09:16:42.285887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.285893Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:16:42.285903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:16:42.285907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:42.285912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T09:16:42.285917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:16:42.285922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:16:42.285926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:16:42.285936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:16:42.285941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T09:16:42.285945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T09:16:42.285948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:16:42.286048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:42.286060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:42.286067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 103 2024-11-21T09:16:42.286072Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:16:42.286076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:42.286174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:42.286182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:16:42.286185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:16:42.286189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:16:42.286192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:16:42.286201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T09:16:42.286781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:16:42.286991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_2disk [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] |94.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> GroupWriteTest::TwoTables [GOOD] |94.2%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 8469917646233253270 2024-11-21T09:16:42.724717Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T09:16:42.724745Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T09:16:42.728647Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T09:16:42.728668Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T09:16:42.728683Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T09:16:42.728687Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T09:16:42.729353Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T09:16:42.729369Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T09:16:42.744043Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:42.744069Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:42.744635Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T09:16:42.744646Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T09:16:43.570846Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:16:43.570876Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 
18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.570883Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.570888Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:16:43.570892Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.570896Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.570900Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:16:43.570904Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.570908Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.577254Z 1 00h01m20.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:3] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577570Z 4 00h01m20.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:6] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577583Z 8 00h01m20.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:2] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577591Z 3 00h01m20.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:5] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577598Z 7 00h01m20.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:1] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577605Z 2 00h01m20.010512s 
:BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [72058679074007041:2:23:0:11:2352192:4] barrier# {Soft# {Gen# 2 Step# 19} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.577985Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2024-11-21T09:16:43.578002Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2024-11-21T09:16:43.578008Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2024-11-21T09:16:43.578013Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2024-11-21T09:16:43.578018Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2024-11-21T09:16:43.578023Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} >> GroupWriteTest::Simple [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 7073579678393917005 2024-11-21T09:16:42.581965Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T09:16:42.588712Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T09:16:42.588731Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T09:16:42.589262Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T09:16:42.599677Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:42.600370Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T09:16:43.857673Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:16:43.857696Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 
PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.857703Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:16:43.857706Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:43.864897Z 1 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:4] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865324Z 3 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:6] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865341Z 8 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:3] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865351Z 7 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:2] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865359Z 2 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:5] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865368Z 6 00h01m30.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:33:0:11:3518760:1] barrier# {Soft# {Gen# 2 Step# 28} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T09:16:43.865855Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2024-11-21T09:16:43.865876Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> QueryStats::Ranges [GOOD] >> KqpQueryService::TableSink_OlapRWQueries |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] |94.3%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> KqpQueryService::TableSink_OltpUpdate [GOOD] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 23282, MsgBus: 12930 2024-11-21T09:16:32.611053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658995884910812:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.611248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a70/r3tmp/tmpYcwJqd/pdisk_1.dat 2024-11-21T09:16:32.677693Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23282, node 1 2024-11-21T09:16:32.711385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.711408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.712582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.782830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.782846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.782848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.782877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12930 TClient is connected to server localhost:12930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.911647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.932926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658995884911409:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.932952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.101130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.162306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000179878807:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.162331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.162345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000179878812:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.162944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.164437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000179878814:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 9141, MsgBus: 17072 2024-11-21T09:16:33.482871Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659000978101258:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.483155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a70/r3tmp/tmpPC9FLb/pdisk_1.dat 2024-11-21T09:16:33.494012Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9141, node 2 2024-11-21T09:16:33.509539Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.509557Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.509559Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.509601Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17072 TClient is connected to server localhost:17072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:33.584223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.584263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.585546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:33.586214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.789132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659000978101858:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.789153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.790880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.798501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659000978101957:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.798525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.798562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659000978101962:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.799373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.808675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659000978101964:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:33.905992Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659000978102109:2337], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2024-11-21T09:16:33.906091Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2UzMzNjNjktYjdmNDk0NGItZWEyM2YyZmMtYTkxZWViZmM=, ActorId: [2:7439659000978102107:2336], ActorState: ExecuteState, TraceId: 01jd704k7ebngw32tj5vd3y980, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:16:38.483099Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659000978101258:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:38.483142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16554, MsgBus: 9691 2024-11-21T09:16:39.172932Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659025332994469:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:39.173139Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a70/r3tmp/tmp9T398V/pdisk_1.dat 2024-11-21T09:16:39.181385Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16554, node 3 2024-11-21T09:16:39.197025Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:39.197041Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:39.197042Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:39.197077Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9691 TClient is connected to server localhost:9691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:39.273286Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:39.273331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:39.274399Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:39.275576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:39.494917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659025332995068:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.494942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.497948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:39.554837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659025332995169:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.554863Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659025332995174:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.554864Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:39.555496Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:39.562609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659025332995176:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:16:44.173444Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439659025332994469:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:44.173474Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::UpdatePk >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpPragma::ResetPerQuery >> KqpYql::NonStrictDml >> KqpYql::UuidPrimaryKeyDisabled >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 13370, MsgBus: 17156 2024-11-21T09:16:32.646621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658994183117572:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.646888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a3d/r3tmp/tmpNsNdwD/pdisk_1.dat 2024-11-21T09:16:32.715108Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13370, node 1 2024-11-21T09:16:32.746659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.746686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.747728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:32.782794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.782810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.782811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.782842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17156 TClient is connected to server localhost:17156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.915992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:32.920014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:32.971333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658994183118169:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:32.971356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.101284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.116810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:33.116871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:33.116920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:33.116943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:33.116966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:33.116990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:33.117006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:33.117042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:33.117070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:33.117090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:33.117111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:33.117133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439658998478085591:2308];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T09:16:33.117577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:16:33.117589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:16:33.117602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:16:33.117606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:16:33.117621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:16:33.117625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:16:33.117631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:16:33.117646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:16:33.117656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:16:33.117659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:16:33.117665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:16:33.117669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:33.117724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:33.117737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:33.117755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:33.117759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T09:16:33.117771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:33.117780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:33.117797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:33.117806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:16:33.117817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:33.117825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:16:33.120476Z ... .740693Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:44.740708Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:44.740723Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:44.740737Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:44.740755Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:44.740772Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:44.740787Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:44.740807Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:44.740818Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[3:7439659045893230623:2305];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:16:44.741144Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:16:44.741155Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:16:44.741163Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:16:44.741167Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:16:44.741178Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:16:44.741185Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:16:44.741191Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:16:44.741198Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:16:44.741204Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:16:44.741210Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:16:44.741214Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:16:44.741218Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:44.741244Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:44.741251Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:44.741261Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:44.741268Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:16:44.741276Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:44.741283Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:44.741300Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:44.741303Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:16:44.741320Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:44.741328Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:16:44.784977Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659045893230731:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:44.785018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:44.785050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659045893230736:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:44.785727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:44.791282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659045893230738:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:16:44.967127Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 101 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"101;"}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"102;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:44.967147Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 103 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"103;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:44.967207Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 102 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"101;"}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"102;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T09:16:44.967247Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 104 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"104;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; >> KqpYql::InsertCV >> KqpScripting::ScriptExplainCreatedTable >> KqpYql::DdlDmlMix >> KqpScripting::ExecuteYqlScriptScanScalar >> 
KqpScripting::LimitOnShard >> KqpYql::RefSelect >> KqpScripting::ScanQuery >> KqpYql::TableUseBeforeCreate >> KqpYql::EvaluateExpr2 >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpYql::EvaluateExpr1 >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpExtractPredicateLookup::SqlInJoin+EnableKqpDataQueryStreamLookup [GOOD] >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup >> KqpScripting::ScriptValidate >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> TConsistentOpsWithReboots::DropWithData [GOOD] >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] [GOOD] >> KqpYql::InsertCV [GOOD] >> KqpYql::InsertCVList >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::DdlDmlMix [GOOD] >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::CreateUseTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 9823, MsgBus: 22564 2024-11-21T09:16:45.514270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659049472793195:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.514463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004966/r3tmp/tmpDmk1R3/pdisk_1.dat 2024-11-21T09:16:45.570326Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9823, node 1 2024-11-21T09:16:45.588312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:45.614370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.614404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.615502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22564 TClient is connected to server localhost:22564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.697959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.700705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.831671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049472793788:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.831704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.930049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049472793818:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.930081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.937167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049472793833:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.937207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.942565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049472793848:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.942588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.945292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.007730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053767761231:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.007762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> KqpYql::UpdatePk [GOOD] >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateExpr3 >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 3063, MsgBus: 8983 2024-11-21T09:16:45.470703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659052800649762:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.470728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004972/r3tmp/tmpsSCOEh/pdisk_1.dat 2024-11-21T09:16:45.540583Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3063, node 1 2024-11-21T09:16:45.570509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.570531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.571250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:45.588294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588342Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8983 TClient is connected to server localhost:8983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:45.693408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.696083Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.700930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.726075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.742749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.752037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.785847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052800651093:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.785877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.921350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.927955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.941024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.995101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.051734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.059483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.074963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659057095618917:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.074992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.074998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659057095618924:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.075640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.079525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659057095618926:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 26502, MsgBus: 3267 2024-11-21T09:16:45.470489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659049348608442:2210];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.470510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00496f/r3tmp/tmpIyi4rB/pdisk_1.dat 2024-11-21T09:16:45.543131Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26502, node 1 2024-11-21T09:16:45.569616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.569649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.570699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:45.588394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3267 TClient is connected to server localhost:3267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.702536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.788868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049348608879:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.788895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.921389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.941994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049348608979:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.942020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.942039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049348608984:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.943344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:45.946161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659049348608986:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 18854, MsgBus: 6737 2024-11-21T09:16:45.766545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659051683975218:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.766554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00492c/r3tmp/tmpU3MGXw/pdisk_1.dat 2024-11-21T09:16:45.819128Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18854, node 1 2024-11-21T09:16:45.837288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.837311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.837313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.837344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6737 2024-11-21T09:16:45.866021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.866050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.867094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.900245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.903777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.917594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.940289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.000770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.101532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659055978944061:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.101568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.135834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.142957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.150709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.157616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.213992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.221044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.235816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659055978944576:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.235845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.235846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659055978944581:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.236482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.240672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659055978944583:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 >> KqpYql::InsertCVList [GOOD] >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::JoinIndexLookup [GOOD] >> KqpScripting::ScriptStats >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpPragma::Warning [GOOD] >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::ScriptUdf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropWithData [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:49.278607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:49.278636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.278642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:49.278647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:49.278654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:49.278659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:49.278668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.278759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:49.293134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:49.293176Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.296284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:49.296419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:49.296462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:49.300154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:49.300283Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:49.300447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.300790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.301943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.302311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.302326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.302340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:49.302349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.302356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:49.302408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:49.304022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.324797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:49.324914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.325011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:49.325126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:49.325139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.326189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.326258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:49.326329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.326341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 
ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:49.326346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:49.326351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:49.326969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.326996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:49.327003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:49.327441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.327453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.327459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.327467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.328227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:49.328722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:49.328784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:49.329026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.329076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:49.329084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.329166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:49.329175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.329209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:49.329224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.329716Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.329728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.329776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.329782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:49.329893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.329901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:49.329913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:49.329918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.329924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:49.329943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.329948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:49.329953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:49.329966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:49.329972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:49.329976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
de 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:16:46.217767Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:16:46.217788Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:16:46.217801Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409548 2024-11-21T09:16:46.218582Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:16:46.218596Z node 145 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:16:46.218928Z node 145 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1006 TestWaitNotification: OK eventTxId 1007 wait until 72075186233409546 is deleted wait until 72075186233409548 is deleted 2024-11-21T09:16:46.219044Z node 145 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:16:46.219059Z node 145 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409548 2024-11-21T09:16:46.219140Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219190Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 61us result status StatusSuccess 2024-11-21T09:16:46.219295Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 14 } } Children { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219363Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219386Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/src1" took 25us result status StatusPathDoesNotExist 2024-11-21T09:16:46.219405Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/src1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirB/src1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219460Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/src2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219489Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/src2" took 31us result status StatusSuccess 2024-11-21T09:16:46.219580Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/src2" PathDescription { Self { Name: "src2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "src2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { 
CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219646Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219659Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst1" took 14us result status StatusPathDoesNotExist 2024-11-21T09:16:46.219674Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/dst1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirB/dst1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219715Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/dst2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:46.219729Z node 145 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/dst2" took 16us result status StatusSuccess 2024-11-21T09:16:46.219786Z node 145 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/dst2" PathDescription { Self { Name: "dst2" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst2" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScripting::ScriptExplain [GOOD] >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::CreateUseTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList [GOOD] Test command err: Trying to start YDB, gRPC: 12563, MsgBus: 65283 2024-11-21T09:16:45.525454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659053097679601:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.525566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004961/r3tmp/tmpL29hkA/pdisk_1.dat 2024-11-21T09:16:45.580990Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12563, node 1 2024-11-21T09:16:45.596708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.596719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.596719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.596750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:45.623352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.623385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.624483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65283 TClient is connected to server localhost:65283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.691304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:45.701405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.730520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.750957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.761981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.824239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053097680987:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.824264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.921438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.928532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.941626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.952035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.963108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.975017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.983876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053097681500:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.983907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.983954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053097681505:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.984566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:45.988169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659053097681507:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:46.201394Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659057392649124:2468], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jd704z6b3gme86b5pa9zwysp. SessionId : ydb://session/3?node_id=1&id=OGZkNWVhYjQtMTQxNGU3YTUtNWE5ZTg0OTAtYzM1NzY4ZWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:16:46.201517Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659057392649126:2469], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd704z6b3gme86b5pa9zwysp. SessionId : ydb://session/3?node_id=1&id=OGZkNWVhYjQtMTQxNGU3YTUtNWE5ZTg0OTAtYzM1NzY4ZWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659057392649121:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:46.202692Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGZkNWVhYjQtMTQxNGU3YTUtNWE5ZTg0OTAtYzM1NzY4ZWE=, ActorId: [1:7439659057392649098:2459], ActorState: ExecuteState, TraceId: 01jd704z6b3gme86b5pa9zwysp, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 Trying to start YDB, gRPC: 9685, MsgBus: 9944 2024-11-21T09:16:46.396514Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659055950981905:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.396683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004961/r3tmp/tmpaU4TC2/pdisk_1.dat 2024-11-21T09:16:46.405876Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9685, node 2 2024-11-21T09:16:46.414815Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.414828Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.414830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.414866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9944 TClient is connected to server localhost:9944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.496691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.496737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.497866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.499457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.502635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.518649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.537186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.546719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.697294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659055950983440:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.697321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.701707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.707081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.718096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.732326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.745552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.752063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.760493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659055950983952:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.760514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659055950983957:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.760518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.761153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.765351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659055950983959:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:46.966225Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659055950984279:2468], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd704zyq0jakdrbj3d50szze. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWViMTUwZTYtMWE1ZGIwZTQtYzM5NWJlY2UtOTlmODYyNWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:16:46.966291Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659055950984280:2469], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWViMTUwZTYtMWE1ZGIwZTQtYzM5NWJlY2UtOTlmODYyNWE=. TraceId : 01jd704zyq0jakdrbj3d50szze. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659055950984276:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:46.966482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWViMTUwZTYtMWE1ZGIwZTQtYzM5NWJlY2UtOTlmODYyNWE=, ActorId: [2:7439659055950984253:2459], ActorState: ExecuteState, TraceId: 01jd704zyq0jakdrbj3d50szze, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpScripting::ScanQueryDisable [GOOD] >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup [GOOD] >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 27495, MsgBus: 15925 2024-11-21T09:16:45.651566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659049928259884:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00493b/r3tmp/tmpPc4vKO/pdisk_1.dat 2024-11-21T09:16:45.710811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:45.727499Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27495, node 1 2024-11-21T09:16:45.740380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.740416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.740871Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.740879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.740881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.740914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:45.741402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15925 TClient is connected to server localhost:15925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.816535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.819080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.829000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.848962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.866021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.878410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.971748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659049928261264:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.971787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.010159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.015817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.024047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.031083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.038313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.044996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.053871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659054223229066:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.053891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.053911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659054223229071:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.054586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.058605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659054223229073:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:46.261513Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659054223229399:2459] TxId: 281474976710672. Ctx: { TraceId: 01jd704z9b9rd09cqyy8k6mtfh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE3MzFiY2MtZDg3NGNiNGMtOTQzNmU3ZDMtNTY0YzM4OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:46.263447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606310, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 5024, MsgBus: 15731 2024-11-21T09:16:46.525429Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659056631356114:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.525656Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00493b/r3tmp/tmpMd49N1/pdisk_1.dat 2024-11-21T09:16:46.546125Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5024, node 2 2024-11-21T09:16:46.553298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.553311Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.553313Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.553356Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15731 TClient is connected to server localhost:15731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.630095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.630126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.630470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.631881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.817666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056631356698:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.817714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.819281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.827649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056631356814:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.827679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.831304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056631356825:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.831317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056631356830:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.831326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.832121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.835530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659056631356832:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 30151, MsgBus: 31653 2024-11-21T09:16:45.470498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659052802788470:2219];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.470610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00495b/r3tmp/tmp2iJcMB/pdisk_1.dat 2024-11-21T09:16:45.542206Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30151, node 1 2024-11-21T09:16:45.569774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.569807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.572485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:45.588287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31653 TClient is connected to server localhost:31653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.689154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.701022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.772280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.796991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.815632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.837401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052802789839:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.837424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.923219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.931137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.940881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.947366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.954022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.961270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.970152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052802790340:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.970177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.970198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052802790345:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.970818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:45.974689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659052802790347:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:46.198402Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659057097757968:2465], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yql:644:18: Error: At function: AggregationTraits /lib/yql/aggregate.yql:58:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2024-11-21T09:16:46.198515Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGVhZjEyODQtY2JjYjBiYjYtYzdiYTY5ZmMtNDAzZWUxYWQ=, ActorId: [1:7439659057097757927:2454], ActorState: ExecuteState, TraceId: 01jd704z7hfgbcbspyrsc62q7d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 1521, MsgBus: 11522 2024-11-21T09:16:46.337995Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659056763065102:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.338167Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00495b/r3tmp/tmpL6LrJu/pdisk_1.dat 2024-11-21T09:16:46.353265Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1521, node 2 2024-11-21T09:16:46.360549Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.360558Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.360561Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.360602Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11522 TClient is connected to server localhost:11522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.440445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.440476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.440951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.442082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.448786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.457658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.475935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.487737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.641855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056763066643:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.641880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.648078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.655804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.668586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.682618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.696745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.711910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.726718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056763067153:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.726738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.726836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056763067158:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.727628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.730657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659056763067160:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 5567, MsgBus: 29997 2024-11-21T09:16:45.611701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659050313259215:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.611749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004952/r3tmp/tmpda6N6f/pdisk_1.dat 2024-11-21T09:16:45.666265Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5567, node 1 2024-11-21T09:16:45.688414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.688430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.688432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.688473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29997 2024-11-21T09:16:45.710826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.710852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.711926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.741133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.745128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.751500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.818993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.837444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.850393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.913516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050313260755:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.913541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.950108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.955670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.961515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.967599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.977179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.989006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.997634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050313261247:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.997652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050313261252:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.997654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.998202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.002380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659050313261254:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:46.285117Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606317, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 14364, MsgBus: 63601 2024-11-21T09:16:46.503675Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659056748550970:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.503693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004952/r3tmp/tmpFfLwla/pdisk_1.dat 2024-11-21T09:16:46.515095Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14364, node 2 2024-11-21T09:16:46.522270Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.522282Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.522286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.522314Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63601 TClient is connected to server localhost:63601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.603764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.603793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.604834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.606507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.608832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.621135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.640827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:46.652954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.790907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056748552504:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.790928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.795790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.801680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.808195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.815577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.829663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.843570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.852626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056748553006:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.852645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056748553011:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.852651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.853374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.856575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659056748553013:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::FlexibleTypes >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] >> KqpYql::Discard [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 18853, MsgBus: 32036 2024-11-21T09:16:45.470492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659050819340972:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.470518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00496c/r3tmp/tmprHWAHl/pdisk_1.dat 2024-11-21T09:16:45.540365Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18853, node 1 2024-11-21T09:16:45.569881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.569913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.570964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:45.588278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588301Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32036 TClient is connected to server localhost:32036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.689231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.701048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.723731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.742371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.753954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.787595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050819342305:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.787614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.921430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.928186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.940290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.947246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.958850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.969069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.983976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050819342820:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.983997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050819342825:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.984002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.984594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:45.988611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659050819342827:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 14413, MsgBus: 11210 2024-11-21T09:16:46.344832Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659054402488083:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.345068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00496c/r3tmp/tmpyCY7dE/pdisk_1.dat 2024-11-21T09:16:46.355610Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14413, node 2 2024-11-21T09:16:46.365828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.365846Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.365848Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.365899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11210 TClient is connected to server localhost:11210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.444884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.444915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.445984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.447650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.455067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.463978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.482702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.494560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.652275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054402489624:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.652302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.655793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.661247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.716663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.723972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.731135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.738310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.746317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054402490129:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.746337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.746380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054402490134:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.746894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.751309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659054402490136:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 10976, MsgBus: 28072 2024-11-21T09:16:45.709808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659049475556886:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.710013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004935/r3tmp/tmpNWrmJd/pdisk_1.dat 2024-11-21T09:16:45.773665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10976, node 1 2024-11-21T09:16:45.786009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.786036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.786038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.786075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28072 2024-11-21T09:16:45.810460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.810482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.811646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.846430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.851681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.854614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.869378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:45.928870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.940308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.027596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053770525734:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.027633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.063933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.070418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.079782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.086971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.142860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.150201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.158952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053770526249:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.158976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.158979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053770526254:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.159531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.163159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659053770526256:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 24229, MsgBus: 29781 2024-11-21T09:16:46.604986Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659057780930017:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.605001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004935/r3tmp/tmpptbbUq/pdisk_1.dat 2024-11-21T09:16:46.618318Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24229, node 2 2024-11-21T09:16:46.628781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.628797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.628799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.628841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29781 TClient is connected to server localhost:29781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.706605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.706635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.707663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.708810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.712553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:46.717857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.725817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.743036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.753068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.895888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659057780931573:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.895914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.900874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.906984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.912598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.920054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.927476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.941615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.954293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659057780932087:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.954322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.954342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659057780932092:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.954937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.961488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659057780932094:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 3636, MsgBus: 5686 2024-11-21T09:16:45.659247Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659052695198770:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.659443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004950/r3tmp/tmpjBXQd5/pdisk_1.dat 2024-11-21T09:16:45.729845Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3636, node 1 2024-11-21T09:16:45.749090Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.749110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.749112Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.749151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5686 2024-11-21T09:16:45.759688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.759711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.761022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.805492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.812507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.816032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.838320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:45.857318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.868429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.960849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052695200319:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.960884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.999033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.005000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.017043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.023696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.031129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.038342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.046871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056990168119:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.046898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.046914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056990168124:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.047509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.051358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659056990168126:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 11014, MsgBus: 20965 2024-11-21T09:16:46.528491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659054283832903:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.528520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004950/r3tmp/tmpydxw9v/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11014, node 2 2024-11-21T09:16:46.545020Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:46.548445Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.548456Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.548457Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.548488Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20965 TClient is connected to server localhost:20965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.628646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.628672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.629673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.631348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.632704Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:46.640667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.662130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:46.692933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.704325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.857613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054283834430:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.857635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.862672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.869351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.878893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.884984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.892513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.898986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.908954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054283834940:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.908987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.908999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054283834945:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.909734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.913051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659054283834947:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:47.103435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.131703Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607178, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 29663, MsgBus: 29521 2024-11-21T09:16:45.614298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659050887230267:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.614356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00494b/r3tmp/tmpvlUYSv/pdisk_1.dat 2024-11-21T09:16:45.665997Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29663, node 1 2024-11-21T09:16:45.685865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.685875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.685876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.685918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29521 2024-11-21T09:16:45.712915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.712938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.714046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.745712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.748229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.753751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.773510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.797182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.806744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.928061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659050887231675:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.928087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.962496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.967926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.975082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.981755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.988944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.995903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.004704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659055182199475:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.004730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659055182199480:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.004742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.005227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.009028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659055182199482:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:46.186943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1207, MsgBus: 25969 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00494b/r3tmp/tmpK36zwW/pdisk_1.dat 2024-11-21T09:16:46.500592Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:46.502761Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1207, node 2 2024-11-21T09:16:46.508985Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.509004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.509006Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.509082Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25969 TClient is connected to server localhost:25969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.589159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.589190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.590225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.591314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.594847Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:46.605775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.616140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.634555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.647079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.825375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054508992628:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.825405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.830964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.838014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.850674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.864688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.879310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.893271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.908263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054508993130:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.908292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.908307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054508993135:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.909080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.913022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659054508993137:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:47.130086Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659058803960736:2459], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:47.130171Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQyMTMyMzctNzA0ZjhlMmItMmViOWJlMDctNWZiNTQ5OTc=, ActorId: [2:7439659058803960734:2458], ActorState: ExecuteState, TraceId: 01jd70504r0mceqsnd9qwb8yem, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 13002, MsgBus: 21906 2024-11-21T09:16:45.754389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659052987823603:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00492a/r3tmp/tmpRydnIF/pdisk_1.dat 2024-11-21T09:16:45.787632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:45.807681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13002, node 1 2024-11-21T09:16:45.825868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.825884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.825886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.825927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21906 2024-11-21T09:16:45.851388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.851422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.852476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.880443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.884693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.894870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.909623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.928644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.941486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.064726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659057282792297:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.064756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.087944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.095072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.108354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.163584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.218806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.227136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.235881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659057282792814:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.235903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.235911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659057282792819:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.236540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.240179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659057282792821:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 24827, MsgBus: 10972 2024-11-21T09:16:46.653926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00492a/r3tmp/tmpu7PfIC/pdisk_1.dat 2024-11-21T09:16:46.659499Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24827, node 2 2024-11-21T09:16:46.672992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.673004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.673006Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.673062Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10972 TClient is connected to server localhost:10972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.749844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.749871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.752173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.752603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.759580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.767350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.783515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
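The recurring "Resource pool default not found" warnings come from the workload service probing for the default pool before the TPoolCreatorActor has finished creating it; the "Transaction ... completed, doublechecking" retry is that creation being re-verified. A hedged YQL sketch of creating such a pool explicitly; the pool name matches the log, but the parameter names and values are assumptions for illustration, not taken from this run:

    -- Hypothetical explicit creation of the pool the workload service is probing for
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
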
2024-11-21T09:16:46.795111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.978877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659054394937260:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.978955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.983335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.989177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.997273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.003831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.011042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.018166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.027667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659058689905059:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.027695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.027742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659058689905064:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.028444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:47.031343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659058689905066:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 14407, MsgBus: 64499 2024-11-21T09:16:46.045212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659054917097127:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.045229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004925/r3tmp/tmpaRCddY/pdisk_1.dat 2024-11-21T09:16:46.098347Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14407, node 1 2024-11-21T09:16:46.113054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.113073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.113075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.113123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64499 2024-11-21T09:16:46.145247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.145287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.146448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.174302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.182886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.198217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.216080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.227954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.356854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659054917098665:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.356890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.392414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.398388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.409254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.416073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.423464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.437372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.445599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659054917099178:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.445623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.445658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659054917099183:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.446251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.450063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659054917099185:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 32472, MsgBus: 22397 2024-11-21T09:16:46.927425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659057592882664:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.927481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004925/r3tmp/tmpqRMCea/pdisk_1.dat 2024-11-21T09:16:46.938056Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32472, node 2 2024-11-21T09:16:46.949556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.949575Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.949578Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.949626Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22397 TClient is connected to server localhost:22397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:47.027893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:47.027927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:47.028927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:47.030637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.038445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.048110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:47.068308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.078934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.215227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061887851500:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.215250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.220569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.227582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.235538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.241909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.249031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.257575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.272106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061887852014:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.272126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.272237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061887852019:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.273004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:47.276401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659061887852021:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:47.458783Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659061887852312:2458], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2024-11-21T09:16:47.458883Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmJjNjM2NS1lMDY5ZDVlZi04YmUyNWUxNi02ZjVlMWI5Ng==, ActorId: [2:7439659061887852305:2454], ActorState: ExecuteState, TraceId: 01jd7050evbhkpctmb0qf9tfym, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 25238, MsgBus: 22182 2024-11-21T09:16:45.732159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659051868598246:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.732224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00492d/r3tmp/tmppHm3H6/pdisk_1.dat 2024-11-21T09:16:45.789154Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25238, node 1 2024-11-21T09:16:45.808254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.808270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.808272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.808315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22182 2024-11-21T09:16:45.832050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.832074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.833158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.865264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.871456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
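The GENERIC_ERROR above is the KqpYql::Discard case: DISCARD is a YQL construct that the YDB data-query compiler rejects with issue code 2008, so compilation fails before execution. A minimal sketch of a query that would be rejected this way (the table path is an illustrative assumption):

    -- Rejected at compile time: "DISCARD not supported in YDB queries, code: 2008"
    DISCARD SELECT * FROM `/Root/KeyValue`;
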
2024-11-21T09:16:45.893256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.910780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.920546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.063519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056163566932:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.063559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.097220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.103025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.115572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.129243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.136130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.143294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.151980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056163567443:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.152002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.152037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056163567448:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.152790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.156727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659056163567450:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:46.343912Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606387, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 29837, MsgBus: 24246 2024-11-21T09:16:46.611725Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659056523657359:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.611773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00492d/r3tmp/tmp1A6WjD/pdisk_1.dat 2024-11-21T09:16:46.628551Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29837, node 2 2024-11-21T09:16:46.639296Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.639307Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.639311Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.639348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24246 TClient is connected to server localhost:24246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.675889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.677583Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.685341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.714697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.714726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.715976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.744127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.757870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.766925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.912184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056523658897:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.912228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.917066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.924401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.934386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.941354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.948558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.955380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.963722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056523659411:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.963743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659056523659416:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.963746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.964306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.968575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659056523659418:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:47.202484Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607248, txId: 281474976710671] shutting down >> KqpYql::ColumnNameConflict >> KqpYql::EvaluateIf >> KqpScripting::QueryStats >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] >> KqpScripting::ScriptStats [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13438, MsgBus: 5312 2024-11-21T09:16:46.651920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659056840700064:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.651955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004922/r3tmp/tmpl7J7m2/pdisk_1.dat 2024-11-21T09:16:46.706593Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13438, node 1 2024-11-21T09:16:46.727704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.727714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.727731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.727767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5312 2024-11-21T09:16:46.752082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.752107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.753207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:46.782809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.949360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056840700662:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.949382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.980240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.040417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659061135668059:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.040449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.040506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659061135668064:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.041114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:47.045170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659061135668066:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 12662, MsgBus: 7612 2024-11-21T09:16:47.505825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659061719157852:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:47.506027Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004922/r3tmp/tmp9M5MTu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12662, node 2 2024-11-21T09:16:47.520500Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:47.528802Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:47.528819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:47.528821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:47.528864Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7612 TClient is connected to server localhost:7612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:47.606119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:47.606147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:47.607287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:47.607935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.775504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061719158450:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.775530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.777340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.785974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061719158549:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.785992Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.786057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659061719158554:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.786811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:47.794525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659061719158556:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpScripting::EndOfQueryCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 17607, MsgBus: 2197 2024-11-21T09:16:46.347656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659053978085523:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:46.347840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004924/r3tmp/tmpNj50mn/pdisk_1.dat 2024-11-21T09:16:46.397971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17607, node 1 2024-11-21T09:16:46.415339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.415350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.415351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.415380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2197 2024-11-21T09:16:46.448139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.448179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.449175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.484122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.494263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.510932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:46.534077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.591059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.662667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053978087078:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.662713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.698646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.705598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.717650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.732408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.745542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.758876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.768577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053978087583:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.768607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.768614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659053978087588:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.769311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.772545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659053978087590:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 12010, MsgBus: 14263 2024-11-21T09:16:47.251619Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659058868365615:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:47.251806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004924/r3tmp/tmpeVbXGK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12010, node 2 2024-11-21T09:16:47.266386Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:47.268572Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:47.268585Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:47.268587Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:47.268616Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14263 TClient is connected to server localhost:14263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:47.351789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:47.351826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:47.352885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:47.354079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.355990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.369723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:47.385961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.397237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.554253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659058868367161:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.554283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.559695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.566705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.578764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.585440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.592181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.599310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.615352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659058868367668:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.615378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659058868367673:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.615391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.616037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:47.619436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659058868367675:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:47.860435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.920550Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607962, txId: 281474976715674] shutting down 2024-11-21T09:16:47.987475Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180608032, txId: 281474976715678] shutting down 2024-11-21T09:16:48.026087Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180608024, txId: 281474976715682] shutting down |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpYql::BinaryJsonOffsetNormal >> KqpScripting::StreamScanQuery >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpPragma::OrderedColumns >> KqpYql::TableRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:15:49.186296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:15:49.186343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:15:49.186357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:15:49.186374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:15:49.186379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:15:49.186389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:15:49.186529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:15:49.198618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:15:49.198645Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.200934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:15:49.201066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:15:49.201135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:15:49.205504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:15:49.205649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:15:49.207078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.208494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.212681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.213029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.213042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.213071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:15:49.213078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.213084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:15:49.213124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:15:49.215250Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:15:49.235487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.236161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:15:49.236272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:15:49.236284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:15:49.237503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.237515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:15:49.237519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:15:49.237524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:15:49.238157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:15:49.238746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.238771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.238779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.240049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:15:49.240982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:15:49.241558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:15:49.241846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:15:49.241885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:15:49.241895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.241989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:15:49.241999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:15:49.242041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:15:49.242056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:15:49.242768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:15:49.242832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:15:49.242916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:15:49.242925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:15:49.242940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:15:49.242944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.242951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:15:49.242957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:15:49.242962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:15:49.242966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:15:49.242983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:15:49.242990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:15:49.242994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475587Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475616Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" took 32us result status StatusSuccess 2024-11-21T09:16:47.475679Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475735Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475762Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" took 28us result status StatusSuccess 2024-11-21T09:16:47.475855Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" PathDescription { Self { Name: "UserDefinedIndexByValue0CoveringValue1" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndexByValue0CoveringValue1" LocalPathId: 9 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 
SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475927Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:16:47.475955Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" took 31us result status StatusSuccess 2024-11-21T09:16:47.476009Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } 
Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor >> KqpScripting::UnsafeTimestampCast >> KqpScripting::StreamExecuteYqlScriptData >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> KqpYql::FromBytes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 25764, MsgBus: 14759 2024-11-21T09:16:47.621065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659059256952861:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:47.621120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491f/r3tmp/tmpSpXAAj/pdisk_1.dat 2024-11-21T09:16:47.678580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25764, node 1 2024-11-21T09:16:47.695340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:47.695353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:47.695355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:47.695405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14759 2024-11-21T09:16:47.721407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:47.721436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:47.722484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:47.742597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.752771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:47.768280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.787567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.796791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:47.922768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659059256954397:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.922795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:47.961763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.968512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.977334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.984268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:47.991197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.046068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.055704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063551922211:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.055728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063551922216:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.055729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.056378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.060009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659063551922218:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 27596, MsgBus: 62655 2024-11-21T09:16:48.492189Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659064895952903:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.492246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491f/r3tmp/tmp1Qsw73/pdisk_1.dat 2024-11-21T09:16:48.502687Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27596, node 2 2024-11-21T09:16:48.511508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.511522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.511523Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.511557Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62655 TClient is connected to server localhost:62655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.592790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.592817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.593839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:48.595030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.597858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.607989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.624037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.636398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.769900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659064895954438:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.769929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.775104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.780964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.789346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.796241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.851368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.859259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.867881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659064895954953:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.867900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.867938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659064895954958:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.868543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.872073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659064895954960:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> KqpYql::EvaluateFor [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpScripting::Pure [GOOD] >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> KqpYql::TableRange [GOOD] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpScripting::SelectNullType >> KqpYql::ColumnTypeMismatch [GOOD] >> KqpYql::UpdateBadType >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpYql::UuidPrimaryKey >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 2228, MsgBus: 32640 2024-11-21T09:16:48.121310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659064363510511:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.121327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491c/r3tmp/tmp3ISbiB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2228, node 1 2024-11-21T09:16:48.174706Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:48.180772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.180784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.180785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.180814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32640 TClient is connected to server localhost:32640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:48.221453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.221481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.222517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.248994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.258322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.271853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.288598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.299920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.405207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659064363512049:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.405235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.438566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.493522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.502193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.508925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.515709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.522967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.531467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659064363512565:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.531473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659064363512570:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.531489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.532010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.535823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659064363512572:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 30771, MsgBus: 21037 2024-11-21T09:16:48.999254Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659065635077721:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.999473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491c/r3tmp/tmppHXPdQ/pdisk_1.dat 2024-11-21T09:16:49.010805Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30771, node 2 2024-11-21T09:16:49.016670Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.016680Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.016681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.016716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21037 TClient is connected to server localhost:21037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.099510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.099534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.100739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.101715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.102826Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.104933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.119669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.134768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.191314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.305191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069930046561:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.305216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.310626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.316987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.328143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.335266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.342051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.349049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.360686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069930047055:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.360708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069930047060:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.360714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.361340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.369236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659069930047062:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 64533, MsgBus: 28488 2024-11-21T09:16:48.166213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659065228321112:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.166576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491a/r3tmp/tmpII4SgY/pdisk_1.dat 2024-11-21T09:16:48.229944Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64533, node 1 2024-11-21T09:16:48.247745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.247764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.247766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.247804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28488 2024-11-21T09:16:48.266192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.266220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.267267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.307051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.316145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.331526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.347637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.357613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.445342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065228322646:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.445373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.474864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.481672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.495631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.502188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.508976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.516108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.524969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065228323150:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.524983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065228323155:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.524992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.525572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.529096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659065228323157:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:48.812611Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659065228323561:2487] TxId: 281474976715674. Ctx: { TraceId: 01jd7051re0j1vpssextp3rj41, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcyYzA0OWQtMjYyZTQ3NjktNmIyNzI2OGItOWFmMjEzYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:48.815729Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659065228323571:2487] TxId: 281474976715675. Ctx: { TraceId: 01jd7051re0j1vpssextp3rj41, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcyYzA0OWQtMjYyZTQ3NjktNmIyNzI2OGItOWFmMjEzYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:48.816304Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180608858, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 16536, MsgBus: 17662 2024-11-21T09:16:49.063147Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069982366916:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.063164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491a/r3tmp/tmpMWaTMB/pdisk_1.dat 2024-11-21T09:16:49.073328Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16536, node 2 2024-11-21T09:16:49.083959Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.083974Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.083976Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.084018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17662 TClient is connected to server localhost:17662 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:49.163233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.163261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.164341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.166970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.167888Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.171871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.182120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.200517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.210190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.349326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069982368447:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.349348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.354648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.409477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.419528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.426055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.433074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.440620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.455906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069982368961:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.455930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.455959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069982368966:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.456482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.460524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659069982368968:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 2749, MsgBus: 16940 2024-11-21T09:16:48.907688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659065505944274:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.907707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004904/r3tmp/tmpyiaVzI/pdisk_1.dat 2024-11-21T09:16:48.972239Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2749, node 1 2024-11-21T09:16:48.989996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.990010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.990013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.990047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:49.008074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.008103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.009234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16940 TClient is connected to server localhost:16940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.052646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.063587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.126645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.145603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.156304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.207967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069800913097:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.207998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.245422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.252909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.308224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.363884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.377687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.391217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.399810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069800913615:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.399829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.399873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069800913620:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.400429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.404043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659069800913622:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 22577, MsgBus: 16366 2024-11-21T09:16:48.086350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659065403831923:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.086423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491d/r3tmp/tmpVJn0ir/pdisk_1.dat 2024-11-21T09:16:48.131743Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22577, node 1 2024-11-21T09:16:48.153274Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.153292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.153300Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.153339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16366 2024-11-21T09:16:48.185920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.185956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16366 2024-11-21T09:16:48.187046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.215072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.228724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.244993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
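The ":3:27: Error: RANGE is not supported on Kikimr clusters" diagnostic recorded above for KqpYql::TableRange is YQL's rejection of table-range expansion when the target is a YDB (Kikimr) cluster rather than YT. The log does not include the query text itself; the sketch below is only a hypothetical example of the kind of statement that produces this error, with an assumed directory path and table names:

-- Hypothetical YQL (not the test's actual query): RANGE() expands a span of tables
-- by name, which YDB tables do not support, so table intent determination fails
-- (code 1040) with "RANGE is not supported on Kikimr clusters".
SELECT * FROM RANGE(`/Root`, `Table1`, `Table3`);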
2024-11-21T09:16:48.263577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.273150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.376782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065403833248:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.376811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.410816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.417575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.425387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.432163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.439425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.445851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.455049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065403833754:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.455074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.455076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659065403833759:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.455710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.459341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659065403833761:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 14280, MsgBus: 6160 2024-11-21T09:16:48.964314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659064914538041:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00491d/r3tmp/tmpFiRgAI/pdisk_1.dat 2024-11-21T09:16:48.969438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:48.973680Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14280, node 2 2024-11-21T09:16:48.984683Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.984697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.984699Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.984737Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6160 TClient is connected to server localhost:6160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.065471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.065497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.066249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.066540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:49.067764Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.075785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.085379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
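The "Duplicate column: Value" error (code 1030, reported at KiCreateTable) recorded above is the diagnostic YQL's type-annotation pass emits when a CREATE TABLE statement declares the same column name twice. The actual statement used by the test is not present in the log; the following is a hypothetical YQL sketch, with an assumed table path, that would trigger the same diagnostic:

-- Hypothetical YQL (not taken from the test): "Value" is declared twice, so type
-- annotation fails at KiCreateTable with "Duplicate column: Value", code 1030.
CREATE TABLE `/Root/DuplicateColumnExample` (
    Key Uint64,
    Value String,
    Value Uint64,    -- second declaration of "Value" triggers the error
    PRIMARY KEY (Key)
);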
2024-11-21T09:16:49.103689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.163773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.262184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069209506730:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.262211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.266732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.321852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.328036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.335525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.342446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.350146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.365288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069209507243:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.365313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069209507248:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.365314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.365886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.369505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659069209507250:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.579218Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659069209507545:2459], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:49.579322Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmQ5NDQ5YTctZTExZWJkZDYtZGI4NmM0NzMtMjVhNDNkMWM=, ActorId: [2:7439659069209507537:2454], ActorState: ExecuteState, TraceId: 01jd7052h8f03y5g3zq1sy21tn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> KqpYql::Closure [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 17130, MsgBus: 20441 2024-11-21T09:16:48.636693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659064192215391:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.636975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004914/r3tmp/tmpVq4pDU/pdisk_1.dat 2024-11-21T09:16:48.685405Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17130, node 1 2024-11-21T09:16:48.703372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.703384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.703386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.703417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20441 2024-11-21T09:16:48.739356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.739393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.740681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.768142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.774121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
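The KiWriteTable failure above ("Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>", ending in "Failed to convert input columns types to scheme types, code: 2031") is what the compiler reports when the literal rows of a write do not match the target table's column types. The test's statement is not shown in the log; below is a hypothetical YQL sketch that would produce an error of this shape, assuming a table /Root/Test whose Value column is String:

-- Hypothetical YQL (assumes a table /Root/Test with columns Key Uint64, Value String):
-- the Uint64 literal supplied for Value cannot be converted to the String column,
-- so input-column conversion fails with code 2031.
UPSERT INTO `/Root/Test` (Key, Value) VALUES
    (1ul, 100ul);   -- 100ul is Uint64; the column expects a String value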
2024-11-21T09:16:48.789416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.806559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.818663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.932113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659064192216925:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.932145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.964909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.971894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.985362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.992603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.048340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.055399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.070825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659068487184738:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.070856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.070876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659068487184743:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.071319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.075084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659068487184745:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.260692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.318662Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609362, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 5691, MsgBus: 19478 2024-11-21T09:16:49.510577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659066498900851:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.510763Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004914/r3tmp/tmpyqS43X/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5691, node 2 2024-11-21T09:16:49.524143Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:49.526725Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.526744Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.526745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.526776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19478 TClient is connected to server localhost:19478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.610950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.610981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.612103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.613322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.623675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.632410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.649614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.659121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.803548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659066498902391:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.803577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.809576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.815771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.825317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.832186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.839264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.846120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.855164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659066498902903:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.855185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.855227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659066498902908:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.855832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.859114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659066498902910:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpYql::TableNameConflict >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> BasicUsage::SimpleHandlers [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpYql::UpdateBadType [GOOD] >> KqpScripting::SystemTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 31720, MsgBus: 64648 2024-11-21T09:16:48.734462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659065619190730:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.734808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490e/r3tmp/tmpel0VXv/pdisk_1.dat 2024-11-21T09:16:48.788635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31720, node 1 2024-11-21T09:16:48.806863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.806874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.806877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.806917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64648 2024-11-21T09:16:48.834956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.834982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.836032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.850727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.861151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.875582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.892706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.903439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.029066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069914159558:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.029102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.059607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.065389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.076113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.083834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.098026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.111210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.120683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069914160071:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.120719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.120738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069914160076:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.121342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.124697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659069914160078:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:49.384746Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609425, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 21500, MsgBus: 62787 2024-11-21T09:16:49.607551Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069459344306:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.607743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490e/r3tmp/tmpeDQcAa/pdisk_1.dat 2024-11-21T09:16:49.616663Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21500, node 2 2024-11-21T09:16:49.625807Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.625818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.625822Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.625873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62787 TClient is connected to server localhost:62787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.708465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.708499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.709535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.710795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.712403Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.717084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.728045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.747725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.757602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.919336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069459345853:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.919449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.925407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.932143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.986165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.041549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.096833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.105565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.121591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073754313682:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.121622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.121682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073754313687:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.122305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.126191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659073754313689:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 6395, MsgBus: 5535 2024-11-21T09:16:48.724077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659066093760708:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.724425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004912/r3tmp/tmpr7W4kH/pdisk_1.dat 2024-11-21T09:16:48.783474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6395, node 1 2024-11-21T09:16:48.801460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.801477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.801479Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.801521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5535 2024-11-21T09:16:48.824040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.824072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.825180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.850172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.859247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.921662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.939584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.997245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.026087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659070388729545:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.026113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.058389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.064484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.076114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.091113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.104455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.111392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.127467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659070388730045:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.127504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.127509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659070388730050:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.128166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.131991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659070388730052:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:49.384824Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609425, txId: 281474976710671] shutting down 2024-11-21T09:16:49.417034Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609460, txId: 281474976710673] shutting down 2024-11-21T09:16:49.477967Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609523, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 13613, MsgBus: 10337 2024-11-21T09:16:49.610367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069799265667:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.610545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004912/r3tmp/tmpBxINJc/pdisk_1.dat 2024-11-21T09:16:49.618287Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13613, node 2 2024-11-21T09:16:49.626925Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.626937Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.626939Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.626967Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10337 TClient is connected to server localhost:10337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.710857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.710881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.712439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.713494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.719388Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.725765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.734281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.753281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.763610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.913344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069799267198:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.913381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.917455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.924745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.938116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.952071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.958264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.965494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.974166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069799267710:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.974192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659069799267715:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.974193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.974750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.978313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659069799267717:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.223561Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610258, txId: 281474976715671] shutting down 2024-11-21T09:16:50.271763Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610314, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 9681, MsgBus: 25398 2024-11-21T09:16:48.549768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659063900929173:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.549985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004916/r3tmp/tmpoA9PIp/pdisk_1.dat 2024-11-21T09:16:48.599797Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9681, node 1 2024-11-21T09:16:48.614047Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.614059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.614060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.614092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25398 TClient is connected to server localhost:25398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:48.650168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.650209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.651245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.677789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.680089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:48.689782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.706199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.725995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.736823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.833149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063900930712:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.833174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.867789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.876266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.887432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.894715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.901415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.908476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.917535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063900931215:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.917556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.917581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063900931220:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.918105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.921377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659063900931222:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.116087Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTYwZjMzYjktYTg0ZDM1MTEtYjFiN2M4MWYtODg0OGE5MGI=, ActorId: [1:7439659068195898812:2454], ActorState: ExecuteState, TraceId: 01jd70522sc4rwt75xjpv89cmm, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.123738Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2JlMTM4ZmItNzBiOTA4NTctODE4NTQwYzktOTVkNTljZWQ=, ActorId: [1:7439659068195898823:2459], ActorState: ExecuteState, TraceId: 01jd70522x9cn4bgnvwnqw3c9s, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.129990Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTM4MTVlZWYtYjIwZDEyODMtOWUxNGM2YzEtYzcxZThmYjI=, ActorId: [1:7439659068195898842:2468], ActorState: ExecuteState, TraceId: 01jd705234cepc2msy1x6f3s2t, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.137045Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGIyZDZlMjQtNjc4YTZlOWMtY2RmOGMxMy1kNWFiNzA3NQ==, ActorId: [1:7439659068195898854:2474], ActorState: ExecuteState, TraceId: 01jd70523b0wg55qxda9y3x6rf, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.147380Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjVjNDM0ZjgtOTE5NWY5ZjQtYzZmZDBiZTEtYWExOTk5MA==, ActorId: [1:7439659068195898913:2483], ActorState: ExecuteState, TraceId: 01jd70523j4e75sg092r1jh2gb, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.152096Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609180, txId: 281474976715671] shutting down 2024-11-21T09:16:49.156148Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609180, txId: 281474976715672] shutting down 2024-11-21T09:16:49.160421Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGE5YjBmMmEtMjI4ODIyOGEtMWQ5NzM1NmUtNTRkMzg5YzY=, ActorId: [1:7439659068195899055:2510], ActorState: ExecuteState, TraceId: 01jd705240dbt1bbcejk04eaar, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.164581Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609201, txId: 281474976715675] shutting down 2024-11-21T09:16:49.172772Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNmOWUwNjktODZhMjE4OGUtOTMyZDJiNzItZWY2ZGJjMmQ=, ActorId: [1:7439659068195899161:2528], ActorState: ExecuteState, TraceId: 01jd70524cdgjdzhgyrj51kmz8, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.175075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609215, txId: 281474976715677] shutting down 2024-11-21T09:16:49.184970Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609229, txId: 281474976715680] shutting down 2024-11-21T09:16:49.185210Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609229, txId: 281474976715679] shutting down 2024-11-21T09:16:49.188689Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM0Y2JiY2UtNmM2ZGY1ZjYtOWU2NGEwZjgtMjc4MGVkYmQ=, ActorId: [1:7439659068195899264:2543], ActorState: ExecuteState, TraceId: 01jd70524w3mdr2asga85ds9gs, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.199451Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=NGQyNGRjZTUtM2E4ZDcwMmItODcyMjg0NGYtYzFkODM1Yw==, ActorId: [1:7439659068195899410:2570], ActorState: ExecuteState, TraceId: 01jd70525598ysaxp17hebjp16, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.209857Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609250, txId: 281474976715683] shutting down 2024-11-21T09:16:49.210257Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609250, txId: 281474976715684] shutting down 2024-11-21T09:16:49.213251Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTE2YzRjMDQtM2NkN2YyYWEtYTE1NWY1MmUtNTA4ZWMyZGQ=, ActorId: [1:7439659068195899438:2576], ActorState: ExecuteState, TraceId: 01jd70525j3m8r1m3ahk08g1wy, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.227371Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTNmNWI5ZDItMWMxZGViZmQtOTRiZTk4NDQtZmVhMDI5ZQ==, ActorId: [1:7439659068195899596:2603], ActorState: ExecuteState, TraceId: 01jd70525y4w4ms02f85ec8jmv, ... T09:16:49.273193Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzgxMTk4Y2ItN2Y1NmIxMTEtMTZjODc4NjAtNTIxZTRmYjc=, ActorId: [1:7439659068195899893:2654], ActorState: ExecuteState, TraceId: 01jd70527abbdnmexfrss7nbw0, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.282218Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609327, txId: 281474976715695] shutting down 2024-11-21T09:16:49.290575Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE1ZDg3ZWUtYjM4ZTYxMGEtN2JjNzc3ZGYtN2RjNmE3MTY=, ActorId: [1:7439659068195900007:2672], ActorState: ExecuteState, TraceId: 01jd70527vc5zx2r650k816ats, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.297253Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609341, txId: 281474976715697] shutting down 2024-11-21T09:16:49.309028Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwN2UxYjktZjAyZDE2NGItNDMxN2ExYmEtYjZjNGIwMGM=, ActorId: [1:7439659068195900122:2690], ActorState: ExecuteState, TraceId: 01jd70528c1by0g0gzqd1q0tqw, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.315776Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609362, txId: 281474976715699] shutting down 2024-11-21T09:16:49.328572Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjYwNmI4M2UtM2EyYmQ1MDUtZTIyYmIxYmUtNWQ5ODY0ZDc=, ActorId: [1:7439659068195900230:2708], ActorState: ExecuteState, TraceId: 01jd70528yaejz1hy58pcadk5p, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.334892Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609376, txId: 281474976715701] shutting down 2024-11-21T09:16:49.348966Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2U5MDk4ZGYtM2ViZTdjZDItZjI0NmMyNGQtMjRiNWNiMmU=, ActorId: [1:7439659068195900338:2726], ActorState: ExecuteState, TraceId: 01jd70529jfvzr0k2nknqdc1qc, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.355369Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609397, txId: 281474976715703] shutting down 2024-11-21T09:16:49.370427Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=YjZlNjRiZWQtNGI5OWQzMjEtZmZkM2NkNWYtNTkwMGFkMjQ=, ActorId: [1:7439659068195900455:2744], ActorState: ExecuteState, TraceId: 01jd7052a6cgsshvw1gjv7d47x, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.377817Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609418, txId: 281474976715705] shutting down 2024-11-21T09:16:49.392459Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmIxYTMxYmItNzk5MTI1YTQtM2MxZTNhNmQtOGRjNzE1OTg=, ActorId: [1:7439659068195900556:2762], ActorState: ExecuteState, TraceId: 01jd7052av9p4swzekpttqhb5e, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.394372Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609439, txId: 281474976715707] shutting down 2024-11-21T09:16:49.416035Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjI0YWQ5MS1mYzJhNTE5Mi04NmZmOTY3OC1mMjQwZjZmNQ==, ActorId: [1:7439659068195900741:2789], ActorState: ExecuteState, TraceId: 01jd7052bj990e6nnwbnd9zvmk, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.418801Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609460, txId: 281474976715709] shutting down 2024-11-21T09:16:49.440125Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609481, txId: 281474976715711] shutting down 2024-11-21T09:16:49.466156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQzNDU0NTktMjcwMWMzMTUtN2RhZWIwOWEtYzAzMjg3NzY=, ActorId: [1:7439659068195900983:2825], ActorState: ExecuteState, TraceId: 01jd7052d2ac1b18zpr8q16q8b, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.466358Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609509, txId: 281474976715713] shutting down 2024-11-21T09:16:49.488621Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609530, txId: 281474976715715] shutting down 2024-11-21T09:16:49.513249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609558, txId: 281474976715717] shutting down Trying to start YDB, gRPC: 32471, MsgBus: 22684 2024-11-21T09:16:49.855415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069393007482:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.855642Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004916/r3tmp/tmpb0eC0Z/pdisk_1.dat 2024-11-21T09:16:49.869794Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32471, node 2 2024-11-21T09:16:49.878171Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.878186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.878188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.878227Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22684 TClient is 
connected to server localhost:22684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.955799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.955844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.956929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.958096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.969322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.979271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.995551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:50.009021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.177180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073687976318:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.177201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.181361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.187975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.196713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.210905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.224467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.231530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.247016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073687976819:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073687976824:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.251382Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659073687976826:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::UuidPrimaryKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 13760, MsgBus: 1983 2024-11-21T09:16:48.869932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659065038478228:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.870003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004908/r3tmp/tmprrGF5s/pdisk_1.dat 2024-11-21T09:16:48.914696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13760, node 1 2024-11-21T09:16:48.932074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.932095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.932098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.932141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1983 2024-11-21T09:16:48.969872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.969899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.972787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.995647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.998360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.004003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.021192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.081834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.093945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.175949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069333446914:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.175975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.206011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.211169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.223368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.230224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.285166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.294143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.308854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069333447430:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.308876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.308881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069333447435:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.309560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.313612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659069333447437:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:49.495278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.538221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609579, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 26928, MsgBus: 21715 2024-11-21T09:16:49.744667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659067311741497:2261];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004908/r3tmp/tmpgUKsDw/pdisk_1.dat 2024-11-21T09:16:49.750053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:49.753925Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26928, node 2 2024-11-21T09:16:49.763829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.763843Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.763845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.763882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21715 TClient is connected to server localhost:21715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.844169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.844201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.845372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.846488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.847154Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.851072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.863705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.879269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.889808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.067007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071606710117:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.067040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.073339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.079629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.091464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.146118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.154012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.161446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.169742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071606710620:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.169765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.169781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071606710625:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.170448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.174216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659071606710627:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.345847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.408749Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610454, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 2457, MsgBus: 27725 2024-11-21T09:16:48.756723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659063021526931:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.756800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490c/r3tmp/tmpWUumu9/pdisk_1.dat 2024-11-21T09:16:48.809231Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2457, node 1 2024-11-21T09:16:48.821109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.821121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.821122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.821150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27725 2024-11-21T09:16:48.856815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.856846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:27725 2024-11-21T09:16:48.857995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.872755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.879135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.893207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.910494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.923988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.058533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067316495770:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.058571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.089293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.095422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.106130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.118623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.133522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.147348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.161926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067316496282:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.161954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.161990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067316496287:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.162634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.167805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659067316496289:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7235, MsgBus: 25749 2024-11-21T09:16:49.642730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659068312617330:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490c/r3tmp/tmpuMApeF/pdisk_1.dat 2024-11-21T09:16:49.645706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:49.652009Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7235, node 2 2024-11-21T09:16:49.662235Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.662250Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.662251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.662288Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25749 TClient is connected to server localhost:25749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.742082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.742110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.743201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.744358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.748636Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.750687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.759041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:49.776950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.786639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.960316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659068312618722:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.960338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.964898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.023625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.035966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.049917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.063447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.077836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.093159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659072607586523:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.093196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.093217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659072607586528:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.094089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.097386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659072607586530:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 14092, MsgBus: 4132 2024-11-21T09:16:48.832885Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659064289264065:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.832901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490a/r3tmp/tmp0Gn0SI/pdisk_1.dat 2024-11-21T09:16:48.882213Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14092, node 1 2024-11-21T09:16:48.898104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.898117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.898119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.898161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4132 2024-11-21T09:16:48.933093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.933124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:48.934177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.961675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.964219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:48.966003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:48.985296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.045209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.056068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.140638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659068584232900:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.140666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.173816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.182903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.195181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.201957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.209387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.216457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.231933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659068584233405:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.231962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.231978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659068584233410:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.232783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.236508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659068584233412:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21318, MsgBus: 9582 2024-11-21T09:16:49.709056Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659067496572632:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.709392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00490a/r3tmp/tmpbRWmLp/pdisk_1.dat 2024-11-21T09:16:49.718947Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21318, node 2 2024-11-21T09:16:49.729738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.729753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.729756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.729798Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9582 TClient is connected to server localhost:9582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.809117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.809165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.810237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.811913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.821068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.829543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.845377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.855136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.048613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071791541474:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.048648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.053324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.059928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.073414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.084489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.091493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.105426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.114111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071791541965:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.114135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.114140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659071791541970:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.114711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.118139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659071791541972:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 10899, MsgBus: 61242 2024-11-21T09:16:49.041907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659069780193556:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.042085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048fe/r3tmp/tmp9TfhOn/pdisk_1.dat 2024-11-21T09:16:49.099234Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10899, node 1 2024-11-21T09:16:49.117272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.117285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.117287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.117322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61242 2024-11-21T09:16:49.141904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.141930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.143014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.175521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.183031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.201572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.217925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.231641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.342002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069780195099:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.342029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.381395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.387234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.441973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.454563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.468490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.475210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.483796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069780195615:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.483819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.483819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659069780195620:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.484499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.488368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659069780195622:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.713141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609754, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 24642, MsgBus: 19943 2024-11-21T09:16:49.930577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069558187694:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.930593Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048fe/r3tmp/tmpqFDVQa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24642, node 2 2024-11-21T09:16:49.946257Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:49.948884Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.948897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.948899Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.948940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19943 TClient is connected to server localhost:19943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.030776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.030810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.031898Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:50.036785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.040722Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:50.044636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.100768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.119138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.129805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.210631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073853156529:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.210659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.215915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.222849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.231206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.238222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.245342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.251877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.260401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073853157020:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.260427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073853157025:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.260431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.261042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.266048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659073853157027:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.457209Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610503, txId: 281474976715671] shutting down 2024-11-21T09:16:50.471717Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610517, txId: 281474976715673] shutting down >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 23068, MsgBus: 2644 2024-11-21T09:16:49.906684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659070279700752:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.906792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048e9/r3tmp/tmpi7Fcw0/pdisk_1.dat 2024-11-21T09:16:49.958633Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23068, node 1 2024-11-21T09:16:49.977388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.977404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.977410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.977447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2644 2024-11-21T09:16:50.006157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.006186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.007200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.037751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.046253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.108883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.127995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.137590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.220617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659074574669444:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.220649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.258656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.264986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.272832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.280030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.334627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.343202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.351453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659074574669959:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.351472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.351493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659074574669964:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.351959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.355923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659074574669966:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2024-11-21T09:15:59.640443Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1732180559640433 2024-11-21T09:15:59.789201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658855963651619:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:15:59.789275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:15:59.800025Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658852619238015:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:15:59.800070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0032a5/r3tmp/tmp8VpQD9/pdisk_1.dat 2024-11-21T09:15:59.836753Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:15:59.848707Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:15:59.897958Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:15:59.902103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:15:59.902143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:15:59.908831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23956, node 1 2024-11-21T09:15:59.934705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:15:59.934743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:15:59.940528Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:15:59.941035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:15:59.969138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0032a5/r3tmp/yandexUTp6zZ.tmp 2024-11-21T09:15:59.969151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0032a5/r3tmp/yandexUTp6zZ.tmp 2024-11-21T09:15:59.969205Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0032a5/r3tmp/yandexUTp6zZ.tmp 2024-11-21T09:15:59.969246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:00.016463Z INFO: TTestServer started on Port 6602 GrpcPort 23956 TClient is connected to server localhost:6602 PQClient connected to localhost:23956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:00.046088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T09:16:00.190726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658856914205456:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.190763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.190889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439658856914205468:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:00.201254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T09:16:00.220583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439658856914205470:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T09:16:00.330722Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439658860258619688:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:00.330724Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439658856914205513:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:00.332309Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjU4ZDRlY2EtMWM0NTE2MjgtYjUwZTdmODQtZThkZDUwNzY=, ActorId: [2:7439658856914205454:2277], ActorState: ExecuteState, TraceId: 01jd703j9x9mhvy2d7zye39194, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:00.332788Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRlMmYxMGQtOTA0MDYzZC05NDllY2VmLTk0NjkzOTBi, ActorId: [1:7439658860258619646:2298], ActorState: ExecuteState, TraceId: 01jd703jan4qcwmh9p2gp9phsd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:00.333024Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:00.333018Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:00.358670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:00.431890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:00.458586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:23956", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T09:16:00.555552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd703jjy1fdag5nch4p86wcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJlZDY4NjItZjliMGM1MGItZTg5ZDZhLWNkY2JjYTg2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439658860258620071:2921] 2024-11-21T09:16:04.790434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439658855963651619:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.790486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:04.800005Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439658852619238015:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:04.800039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:16:06.617328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:23956 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23956 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lz ... 024-11-21T09:16:50.241833Z :INFO: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 161 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.241872Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Closing read session. 
Close timeout: 18446744073709.551615s 2024-11-21T09:16:50.241878Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T09:16:50.241868Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2054811199406502475_v1 grpc read done: success# 0, data# { } 2024-11-21T09:16:50.241883Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Counters: { Errors: 0 CurrentSessionLifetimeMs: 161 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.241876Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2054811199406502475_v1 grpc read failed 2024-11-21T09:16:50.241879Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2054811199406502475_v1 grpc closed 2024-11-21T09:16:50.241884Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2054811199406502475_v1 is DEAD 2024-11-21T09:16:50.241924Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0] Write session: close. Timeout = 0 ms 2024-11-21T09:16:50.241928Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0] Write session will now close 2024-11-21T09:16:50.241932Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0] Write session: aborting 2024-11-21T09:16:50.241955Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:16:50.241961Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0] Write session: destroy 2024-11-21T09:16:50.242026Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673460:2482] disconnected; active server actors: 1 2024-11-21T09:16:50.242038Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673460:2482] client user disconnected session shared/user_3_1_2054811199406502475_v1 2024-11-21T09:16:50.242047Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T09:16:50.242056Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2024-11-21T09:16:50.242067Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2024-11-21T09:16:50.242086Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000006s 2024-11-21T09:16:50.242114Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_70723718853492539_v1 grpc read done: success# 0, data# { } 2024-11-21T09:16:50.242115Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_70723718853492539_v1 grpc read failed 2024-11-21T09:16:50.242119Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_70723718853492539_v1 closed 2024-11-21T09:16:50.242145Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_7953664404073762163_v1 grpc read done: success# 0, data# { } 2024-11-21T09:16:50.242153Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_70723718853492539_v1 is DEAD 2024-11-21T09:16:50.242154Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7953664404073762163_v1 grpc read failed 2024-11-21T09:16:50.242158Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7953664404073762163_v1 grpc closed 2024-11-21T09:16:50.242166Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_7953664404073762163_v1 is DEAD 2024-11-21T09:16:50.242224Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673459:2483] disconnected; active server actors: 1 2024-11-21T09:16:50.242231Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673459:2483] client user disconnected session shared/user_3_2_70723718853492539_v1 2024-11-21T09:16:50.242234Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T09:16:50.242268Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0 grpc read done: success: 0 data: 2024-11-21T09:16:50.242270Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0 grpc read failed 2024-11-21T09:16:50.242274Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0 grpc closed 2024-11-21T09:16:50.242277Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3b1dc9ea-8302361c-e3dee9a0-cac2278_0 is DEAD 2024-11-21T09:16:50.242331Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:16:50.242345Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_7953664404073762163_v1 2024-11-21T09:16:50.242359Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439659072411673473:2494] destroyed 2024-11-21T09:16:50.242377Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_7953664404073762163_v1 2024-11-21T09:16:50.242394Z :INFO: [/Root] [/Root] [41e102b7-a6534eca-42e86cfe-4041ae96] Closing read session. 
Close timeout: 0.000000s 2024-11-21T09:16:50.242400Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:50.242405Z :INFO: [/Root] [/Root] [41e102b7-a6534eca-42e86cfe-4041ae96] Counters: { Errors: 0 CurrentSessionLifetimeMs: 162 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242409Z :INFO: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:50.242412Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:50.242414Z :INFO: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 162 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242417Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:50.242420Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T09:16:50.242424Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Counters: { Errors: 0 CurrentSessionLifetimeMs: 162 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242431Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:50.242434Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T09:16:50.242435Z :INFO: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Counters: { Errors: 0 CurrentSessionLifetimeMs: 162 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242451Z :NOTICE: [/Root] [/Root] [d6b61c1c-a64f5c58-2fed01c0-89a0d2c9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:16:50.242462Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:16:50.242475Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439659072411673500:2481] destroyed 2024-11-21T09:16:50.242482Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:16:50.242359Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2024-11-21T09:16:50.242369Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T09:16:50.242372Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000005s 2024-11-21T09:16:50.242378Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:16:50.242416Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673461:2484] disconnected; active server actors: 1 2024-11-21T09:16:50.242418Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439659072411673461:2484] client user disconnected session shared/user_3_3_7953664404073762163_v1 2024-11-21T09:16:50.242536Z :INFO: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:50.242540Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:50.242542Z :INFO: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 162 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242547Z :NOTICE: [/Root] [/Root] [2ae523d2-4aa2ee0a-3d604852-83aa06e5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:16:50.242589Z :INFO: [/Root] [/Root] [41e102b7-a6534eca-42e86cfe-4041ae96] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:50.242592Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:16:50.242594Z :INFO: [/Root] [/Root] [41e102b7-a6534eca-42e86cfe-4041ae96] Counters: { Errors: 0 CurrentSessionLifetimeMs: 163 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:50.242598Z :NOTICE: [/Root] [/Root] [41e102b7-a6534eca-42e86cfe-4041ae96] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> KqpPragma::Auth ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 6277, MsgBus: 22100 2024-11-21T09:16:50.032566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659071826604925:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:50.032778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048e2/r3tmp/tmp1KVJ6B/pdisk_1.dat 2024-11-21T09:16:50.092070Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6277, node 1 2024-11-21T09:16:50.104853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:50.104870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:50.104873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:50.104919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22100 2024-11-21T09:16:50.132910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.132954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.134075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.163371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.351024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659071826605523:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.351066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.374999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.435749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659071826605623:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.435774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.435835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659071826605628:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.436459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.440253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659071826605630:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T09:16:50.618247Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659071826605845:2363], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2024-11-21T09:16:50.618305Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTAyNDc2NDItYjUyMDVkNzAtOTQ4NTllMjMtNWJmNzlkMzM=, ActorId: [1:7439659071826605505:2297], ActorState: ExecuteState, TraceId: 01jd7053hs0rfm9jbf0bhmwpdx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 6844, MsgBus: 21878 2024-11-21T09:16:49.028790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659067447459819:2113];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.029102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004901/r3tmp/tmpSLSv7u/pdisk_1.dat 2024-11-21T09:16:49.080676Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6844, node 1 2024-11-21T09:16:49.100018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.100030Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.100032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.100068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21878 2024-11-21T09:16:49.127912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.127957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.129034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.162012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.171086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.186780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.209760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.218320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.331870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067447461300:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.331911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.366571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.372419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.384174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.391042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.398789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.412301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.420655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067447461806:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.420673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659067447461811:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.420680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.421170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.425140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659067447461813:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.612088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1634, MsgBus: 14377 2024-11-21T09:16:49.897368Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659068396978772:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.897512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004901/r3tmp/tmplg5lKt/pdisk_1.dat 2024-11-21T09:16:49.914001Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1634, node 2 2024-11-21T09:16:49.921681Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.921698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.921699Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.921752Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14377 TClient is connected to server localhost:14377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.997808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.997841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.998910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.999554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.009890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.022624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.045951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.056644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.182373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659072691947604:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.182442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.187527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.194584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.203047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.210681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.224322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.232488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.247071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659072691948117:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659072691948123:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.247713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.251663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659072691948125:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.456949Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610454, txId: 281474976715671] shutting down 2024-11-21T09:16:50.473166Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610470, txId: 281474976715673] shutting down 2024-11-21T09:16:50.565646Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610608, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 21209, MsgBus: 62879 2024-11-21T09:16:48.392227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659063317183944:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.392304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004917/r3tmp/tmpRWtgFV/pdisk_1.dat 2024-11-21T09:16:48.437857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21209, node 1 2024-11-21T09:16:48.457449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.457462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.457465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.457502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62879 TClient is connected to server localhost:62879 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:48.492653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.492685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:16:48.493711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:48.524703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.533676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.548115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.563330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.572521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.667316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063317185476:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.667339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.701899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.707878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.719221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.726161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.733308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.787919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:48.797901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063317185992:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.797925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.797928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659063317185997:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:48.798525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:48.802244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659063317185999:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.035633Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609061, txId: 281474976715671] shutting down 2024-11-21T09:16:49.036435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609061, txId: 281474976715672] shutting down 2024-11-21T09:16:49.040766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609082, txId: 281474976715676] shutting down 2024-11-21T09:16:49.040928Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609082, txId: 281474976715675] shutting down 2024-11-21T09:16:49.059073Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609103, txId: 281474976715680] shutting down 2024-11-21T09:16:49.059106Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609103, txId: 281474976715679] shutting down 2024-11-21T09:16:49.076412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609117, txId: 281474976715683] shutting down 2024-11-21T09:16:49.098589Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609131, txId: 281474976715685] shutting down 2024-11-21T09:16:49.112648Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609145, txId: 281474976715687] shutting down 2024-11-21T09:16:49.117785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609159, txId: 281474976715690] shutting down 2024-11-21T09:16:49.117898Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609159, txId: 281474976715689] shutting down 2024-11-21T09:16:49.139196Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609180, txId: 281474976715693] shutting down 2024-11-21T09:16:49.153960Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609194, txId: 281474976715695] shutting down 2024-11-21T09:16:49.170018Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609215, txId: 281474976715697] shutting down 2024-11-21T09:16:49.191878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609229, txId: 281474976715699] shutting down 2024-11-21T09:16:49.203795Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609243, txId: 281474976715701] shutting down 2024-11-21T09:16:49.222849Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609264, txId: 281474976715703] shutting down 2024-11-21T09:16:49.246914Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609285, txId: 281474976715705] shutting down 2024-11-21T09:16:49.259412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609306, txId: 281474976715707] shutting down 2024-11-21T09:16:49.285435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732180609327, txId: 281474976715709] shutting down 2024-11-21T09:16:49.310078Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609355, txId: 281474976715711] shutting down 2024-11-21T09:16:49.334762Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609376, txId: 281474976715713] shutting down 2024-11-21T09:16:49.359178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609404, txId: 281474976715715] shutting down 2024-11-21T09:16:49.383974Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609425, txId: 281474976715717] shutting down 2024-11-21T09:16:49.409353Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609453, txId: 281474976715719] shutting down Trying to start YDB, gRPC: 29115, MsgBus: 2023 2024-11-21T09:16:49.677344Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659069405473089:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.677364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.c ... 1-21T09:16:49.696985Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2023 TClient is connected to server localhost:2023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.777742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.777771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.778781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:49.781246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.782926Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:49.794064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:49.802275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.820665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.831056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.009094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073700441930:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.009115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.014589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.020046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.028519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.035355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.041994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.057529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.074198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073700442434:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.074228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659073700442439:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.074233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.074891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.083679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659073700442441:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.289779Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjI3MzQ5MDktMWZjMWQwMDMtZDQ5ZTRmMzEtMmJmYTBhNzQ=, ActorId: [2:7439659073700442731:2457], ActorState: ExecuteState, TraceId: 01jd70537bex7ct86n6d02nt06, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.293058Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODNmN2I0MTYtMTY1MzJiMmEtODE5YjZhZGItZmZjYzVkNDE=, ActorId: [2:7439659073700442744:2463], ActorState: ExecuteState, TraceId: 01jd70537e14epx68d236509aa, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.331254Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610356, txId: 281474976715672] shutting down 2024-11-21T09:16:50.331383Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610356, txId: 281474976715671] shutting down 2024-11-21T09:16:50.331522Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610356, txId: 281474976715673] shutting down 2024-11-21T09:16:50.341471Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610370, txId: 281474976715677] shutting down 2024-11-21T09:16:50.342562Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610370, txId: 281474976715678] shutting down 2024-11-21T09:16:50.356117Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610384, txId: 281474976715681] shutting down 2024-11-21T09:16:50.356900Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610384, txId: 281474976715682] shutting down 2024-11-21T09:16:50.367525Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610405, txId: 281474976715685] shutting down 2024-11-21T09:16:50.367840Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610405, txId: 281474976715686] shutting down 2024-11-21T09:16:50.376172Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610419, txId: 281474976715689] shutting down 2024-11-21T09:16:50.376403Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610419, txId: 281474976715690] shutting down 2024-11-21T09:16:50.397915Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610440, txId: 281474976715693] shutting down 2024-11-21T09:16:50.409066Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610454, txId: 281474976715695] shutting down 2024-11-21T09:16:50.425879Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610468, txId: 281474976715697] shutting down 2024-11-21T09:16:50.442942Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610482, txId: 281474976715699] shutting down 2024-11-21T09:16:50.454988Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610496, txId: 281474976715701] shutting down 2024-11-21T09:16:50.473799Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610517, txId: 281474976715703] shutting down 2024-11-21T09:16:50.493955Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610538, txId: 281474976715705] shutting down 2024-11-21T09:16:50.514804Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610559, txId: 281474976715707] shutting down 2024-11-21T09:16:50.538048Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610580, txId: 281474976715709] shutting down 2024-11-21T09:16:50.559144Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610601, txId: 281474976715711] shutting down 2024-11-21T09:16:50.580731Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610622, txId: 281474976715713] shutting down 2024-11-21T09:16:50.605644Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWM4N2U0NWEtZjcxY2IyZWItNzczMTY2NmUtMzZmOWRhMDQ=, ActorId: [2:7439659073700444934:2837], ActorState: ExecuteState, TraceId: 01jd7053gn8vswqxnkkz5m5tz4, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.607271Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610650, txId: 281474976715715] shutting down 2024-11-21T09:16:50.629274Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610671, txId: 281474976715717] shutting down 2024-11-21T09:16:50.656409Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610699, txId: 281474976715719] shutting down 2024-11-21T09:16:50.683173Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610727, txId: 281474976715721] shutting down 2024-11-21T09:16:50.711030Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610755, txId: 281474976715723] shutting down >> KqpYql::TableNameConflict [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 9979, MsgBus: 30551 2024-11-21T09:16:48.717151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659062485175752:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:48.717410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004910/r3tmp/tmpjnS77V/pdisk_1.dat 2024-11-21T09:16:48.764496Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9979, node 1 2024-11-21T09:16:48.782486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:48.782498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:48.782499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:48.782531Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:30551 2024-11-21T09:16:48.817404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:48.817433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:30551 2024-11-21T09:16:48.818554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:48.845896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.850000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.867726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.886246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:48.898258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.026717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659066780144585:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.026751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.067337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.075219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.084180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.090397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.096705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.104909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.119702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659066780145101:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.119731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659066780145106:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.119753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:49.120396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:49.124473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659066780145108:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:49.300978Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145392:2453] 2024-11-21T09:16:49.307620Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145405:2458] 2024-11-21T09:16:49.312036Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145415:2462] 2024-11-21T09:16:49.316707Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145427:2467] 2024-11-21T09:16:49.322615Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145445:2474] 2024-11-21T09:16:49.331565Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145465:2482] 2024-11-21T09:16:49.340694Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145487:2491] 2024-11-21T09:16:49.348618Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145501:2497] 2024-11-21T09:16:49.358668Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145519:2505] 2024-11-21T09:16:49.369602Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145539:2512] 2024-11-21T09:16:49.381669Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659066780145563:2521] 2024-11-21T09:16:49.382393Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659066780145602:2526] TxId: 281474976715672. Ctx: { TraceId: 01jd7052at6f2qcnetzr9k26c6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:49.383487Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659066780145611:2530], TxId: 281474976715672, task: 4. Ctx: { TraceId : 01jd7052at6f2qcnetzr9k26c6. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659066780145602:2526], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:49.383579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=, ActorId: [1:7439659066780145574:2526], ActorState: ExecuteState, TraceId: 01jd7052at6f2qcnetzr9k26c6, Create QueryResponse for error on request, msg: 2024-11-21T09:16:49.383686Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180609425, txId: 281474976715671] shutting down 2024-11-21T09:16:49.383763Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659066780145612:2531], TxId: 281474976715672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=. TraceId : 01jd7052at6f2qcnetzr9k26c6. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659066780145602:2526], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:49.383831Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659066780145607:2527], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=. TraceId : 01jd7052at6f2qcnetzr9k26c6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659066780145602:2526], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:49.383848Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659066780145608:2528], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jd7052at6f2qcnetzr9k26c6. SessionId : ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659066780145602:2526], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:49.383934Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659066780145609:2529], TxId: 281474976715672, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2VlZmNjMzEtOTBmNjBjYzItZmNiNjcwODAtMjUzMzdlMjA=. TraceId : 01jd7052at6f2qcnetzr9k26c6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659066780145602:2526], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:49.385566Z node 1 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not valid, tabletId: 72075186224037893, step: 1732180609425 2024-11-21T09:16:49.385693Z node 1 :TX_DATASHARD ERROR: TxId: 281474976715672. Snapshot is not vali ... not loaded TServer::EnableGrpc on GrpcPort 9252, node 2 2024-11-21T09:16:50.025271Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:50.025285Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:50.025287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:50.025325Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17922 TClient is connected to server localhost:17922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.104595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.104629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.105689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:50.108443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.109165Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:50.114189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.122414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.140640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.149845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.299711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659074693721729:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.299754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.305116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.312035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.366935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.378595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.385363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.399611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.415411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659074693722243:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.415437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.415445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659074693722248:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.416124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.419383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659074693722250:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.610457Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjVjOWQ2NzctNzlmNzQ2MzgtNTZkNzg2OTctNDBmN2NmMTY=, ActorId: [2:7439659074693722558:2463], ActorState: ExecuteState, TraceId: 01jd7053haan1e4xbcdv56q38w, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.611898Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQzODE5ODAtZjA2ZDExOTQtZjZlYzA5N2ItN2RhMDk4NDA=, ActorId: [2:7439659074693722572:2469], ActorState: ExecuteState, TraceId: 01jd7053he8dvtgkgr1kdx6j60, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.650937Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmNhMGI2OTctNjU0NzdhZTgtZTFjYTM1MjQtNWZmYzFiMQ==, ActorId: [2:7439659074693722671:2507], ActorState: ExecuteState, TraceId: 01jd7053jhf4esdhkq7rttweek, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.674444Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTRlZWFhOGMtYWEwMDBkMDYtNDNlYTUyMTYtODIyY2I2YWE=, ActorId: [2:7439659074693722750:2522], ActorState: ExecuteState, TraceId: 01jd7053k694vjcertqs8btnvr, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.701607Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDg0ODgyMDgtY2E1ODA5OWYtZTMwMGU3MGItZGY1M2M2NjY=, ActorId: [2:7439659074693722802:2540], ActorState: ExecuteState, TraceId: 01jd7053kz4arap1y5hn866n9r, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.713528Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610755, txId: 281474976715672] shutting down 2024-11-21T09:16:50.785459Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzkxNTE2NjYtZDQyYzg5M2EtYWI1MjU4ZjQtNDRkMzY2ZWM=, ActorId: [2:7439659074693723011:2591], ActorState: ExecuteState, TraceId: 01jd7053pe5z5e3pm42r1hyk0f, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.850451Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzhjYmZmM2MtNTkwODY5YjktZGU5YWU1MTItZTA0MTExNg==, ActorId: [2:7439659074693723155:2618], ActorState: ExecuteState, TraceId: 01jd7053rc58m3f7qwdbvf11fv, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.874036Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZkZDUwMmYtM2M5MzVlZjEtZjlkYzFlYTgtMzM5MjQ2ZTY=, ActorId: [2:7439659074693723239:2627], ActorState: ExecuteState, TraceId: 01jd7053s34sm9rpxkswbvcrj0, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.898903Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659074693723358:2640] TxId: 281474976715680. Ctx: { TraceId: 01jd7053st7fv6m2vjvc8qyexq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTQ4ZTI4YTktNGQxZjg1YTYtMzgzNzQwMmMtZjljODRmY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:50.898966Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659074693723368:2648], TxId: 281474976715680, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jd7053st7fv6m2vjvc8qyexq. SessionId : ydb://session/3?node_id=2&id=YTQ4ZTI4YTktNGQxZjg1YTYtMzgzNzQwMmMtZjljODRmY2Y=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659074693723358:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:50.899045Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTQ4ZTI4YTktNGQxZjg1YTYtMzgzNzQwMmMtZjljODRmY2Y=, ActorId: [2:7439659074693723324:2640], ActorState: ExecuteState, TraceId: 01jd7053st7fv6m2vjvc8qyexq, Create QueryResponse for error on request, msg: 2024-11-21T09:16:50.899153Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610944, txId: 281474976715679] shutting down 2024-11-21T09:16:50.899196Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659074693723363:2644], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YTQ4ZTI4YTktNGQxZjg1YTYtMzgzNzQwMmMtZjljODRmY2Y=. TraceId : 01jd7053st7fv6m2vjvc8qyexq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659074693723358:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:50.899271Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659074693723367:2647], TxId: 281474976715680, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=YTQ4ZTI4YTktNGQxZjg1YTYtMzgzNzQwMmMtZjljODRmY2Y=. TraceId : 01jd7053st7fv6m2vjvc8qyexq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659074693723358:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:50.921983Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610965, txId: 281474976715682] shutting down 2024-11-21T09:16:50.946961Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610993, txId: 281474976715684] shutting down |94.3%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 14158, MsgBus: 22484 2024-11-21T09:16:50.664403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659074552367812:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:50.664523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048e0/r3tmp/tmpagrL8l/pdisk_1.dat 2024-11-21T09:16:50.714461Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14158, node 1 2024-11-21T09:16:50.728946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:50.728968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:50.728971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:50.729022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22484 2024-11-21T09:16:50.764347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.764377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.765466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.794006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.797298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.810505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.827162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.836822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.945407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659074552369352:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.945450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.983178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.989948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.001266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.008125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.015212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.021935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.030403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659078847337164:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.030420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659078847337169:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.030422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.030887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:51.035285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659078847337171:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. >> KqpScripting::StreamDdlAndDml [GOOD] |94.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 2257, MsgBus: 25048 2024-11-21T09:16:49.855851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659069342159838:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.856062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ec/r3tmp/tmpC9jo8E/pdisk_1.dat 2024-11-21T09:16:49.922395Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2257, node 1 2024-11-21T09:16:49.935124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.935140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.935143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.935177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25048 2024-11-21T09:16:49.956098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.956117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.957262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.989473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.000011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.016113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.036424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.047323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.167060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073637128677:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.167086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.203690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.210335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.265922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.273302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.280278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.287156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.296000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073637129193:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.296023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.296037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073637129198:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.296714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.300482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659073637129200:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.492017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.533241Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610580, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 12770, MsgBus: 24935 2024-11-21T09:16:50.737250Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659072577522177:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:50.737537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ec/r3tmp/tmphZxnpo/pdisk_1.dat 2024-11-21T09:16:50.745432Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12770, node 2 2024-11-21T09:16:50.754404Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:50.754420Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:50.754422Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:50.754461Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24935 TClient is connected to server localhost:24935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:50.837787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:50.837822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:50.838928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:50.839629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.850932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:50.859382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.876617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.887220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.025732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659076872491011:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.025761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.030231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.035865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.042977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.050197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.056909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.064224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.072680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659076872491525:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.072710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.072712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659076872491530:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.073316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:51.077177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659076872491532:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:51.259709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.310274Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180611357, txId: 281474976715673] shutting down >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::SqlInJoin-EnableKqpDataQueryStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10141, MsgBus: 17239 2024-11-21T09:16:18.148058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658934896125334:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:18.148385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002802/r3tmp/tmpWhi0tD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10141, node 1 2024-11-21T09:16:18.223717Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:18.236964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:18.236977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:18.236979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:18.237022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:18.248550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:18.248583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:18.249718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17239 TClient is connected to server localhost:17239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:18.317396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.328497Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:18.331409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.355869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.377020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.394203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:18.493453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658934896126863:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.493496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.528799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.538225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.551927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.563979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.581175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.592127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.614755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658934896127377:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.614779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.614918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658934896127382:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:18.615668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:18.618918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439658934896127384:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:18.876136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.882711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.894448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.907278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.921343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.935294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.952151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.959626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.969269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:16:18.984401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15894, MsgBus: 12298 2024-11-21T09:16:19.467488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658940395476626:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:19.467551Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002802/r3tmp/tmpJC6beC/pdisk_1.dat 2024-11-21T09:16:19.481569Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15894, node 2 2024-11-21T09:16:19.493148Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:19.493164Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:19.493165Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:19.493211Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12298 TClient is connected to server localhost:12298 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success ... essage; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002802/r3tmp/tmpldUCHq/pdisk_1.dat 2024-11-21T09:16:50.976579Z node 29 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10156, node 29 2024-11-21T09:16:50.986622Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:50.986651Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:50.986652Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:50.986687Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13803 TClient is connected to server localhost:13803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:51.068802Z node 29 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:51.068851Z node 29 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:51.069865Z node 29 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:51.070512Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.073055Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:51.086029Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.102395Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.114959Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.288795Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7439659077952803285:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.288821Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.294576Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.300615Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.309109Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.316231Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.323784Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.337709Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.345778Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7439659077952803787:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.345816Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.345854Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [29:7439659077952803792:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.346492Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:51.351547Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [29:7439659077952803794:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:51.554126Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.609303Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.617615Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.631573Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.646165Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.659629Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.666547Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.673629Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.688056Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.702083Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"Tables":["PgComplexKey"],"PlanNodeId":9,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"TopBy":"","Name":"Top","Limit":"1001"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","ReadColumns":["Fk","Key","Value"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"PgComplexKey"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Top-TablePointLookup-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"TopSort","Limit":"1001","TopSortBy":""}],"Node Type":"TopSort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":7,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["PgKey"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No 
estimate","Table":"PgKey","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/PgComplexKey","reads":[{"columns":["Fk","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/PgKey","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"PgKey","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","ReadColumns":["Fk","Key","Value"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"PgComplexKey"}],"Node Type":"TablePointLookup"}],"Operators":[{"TopBy":"","Name":"Top","Limit":"1001"}],"Node Type":"Top"}],"Operators":[{"Name":"TopSort","Limit":"1001","TopSortBy":""}],"Node Type":"TopSort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> KqpStats::JoinNoStatsYql >> KqpParams::ImplicitParameterTypes >> KqpLimits::QueryReplySize >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpLimits::DatashardProgramSize >> KqpTypes::QuerySpecialTypes >> KqpExplain::PureExpr >> KqpQuery::RowsLimitServiceOverride >> KqpLimits::KqpMkqlMemoryLimitException >> KqpQuery::Now >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpQuery::UdfTerminate >> KqpTypes::UnsafeTimestampCastV0 >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpExplain::Explain >> KqpLimits::TooBigQuery >> KqpQuery::QueryCacheTtl >> KqpStats::MultiTxStatsFullYql >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 1532, MsgBus: 11847 2024-11-21T09:16:51.166618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659075445164190:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:51.166635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048dd/r3tmp/tmploFurD/pdisk_1.dat 2024-11-21T09:16:51.214913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1532, node 1 2024-11-21T09:16:51.233864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:51.233892Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:51.233894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:51.233938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11847 2024-11-21T09:16:51.266903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:51.266935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:51.268041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:51.295521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.301987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.318348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.336420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.347354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:51.463566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659075445165737:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.463591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.493154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.499480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.554231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.609124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.664956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.674588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:51.689682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659075445166272:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.689709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.689716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659075445166277:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:51.690429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:51.693186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659075445166279:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:51.853466Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659075445166572:2459], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2024-11-21T09:16:51.853539Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODk4NzEwOGItMzhlNWI1MTUtZTViMTdlODMtZjFhNDAwNjk=, ActorId: [1:7439659075445166564:2454], ActorState: ExecuteState, TraceId: 01jd7054ra41q73pbdgapefsw2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 29017, MsgBus: 23664 2024-11-21T09:16:52.032759Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659080897833352:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.033137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048dd/r3tmp/tmpC6uNGg/pdisk_1.dat 2024-11-21T09:16:52.042824Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29017, node 2 2024-11-21T09:16:52.052955Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.052968Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.052970Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.053002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23664 TClient is connected to server localhost:23664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:52.133204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.133242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.134293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.135534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.141946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:52.150798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.165448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.178152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.304329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659080897834885:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.304359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.308783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.315214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.323973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.331463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.338268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.345590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.354047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659080897835389:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.354072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659080897835394:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.354077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.354747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:52.358141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659080897835396:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:52.531138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.590388Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180612631, txId: 281474976715675] shutting down >> KqpQuery::DdlInDataQuery >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> KqpLimits::BigParameter |94.4%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::OutOfSpaceBulkUpsertFail |94.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectWhereInSubquery >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRanges >> KqpQuery::Now [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpParams::CheckCacheByAst >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpExplain::Explain [GOOD] >> KqpExplain::ComplexJoin >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 4859, MsgBus: 9446 2024-11-21T09:16:49.695856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659069588764074:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:49.696200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048fb/r3tmp/tmpwsLHAs/pdisk_1.dat 2024-11-21T09:16:49.746836Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4859, node 1 2024-11-21T09:16:49.763985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:49.763997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:49.763999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:49.764029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9446 2024-11-21T09:16:49.796070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:49.796094Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:49.797254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:49.824156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.832115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:49.894224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:49.913773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:49.924041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:50.006834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073883732905:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.006857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.043549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.049926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.056383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.062825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.069984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.077720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:50.085026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073883733406:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.085050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659073883733411:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.085052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:50.085606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:50.090196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659073883733413:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:50.291797Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659073883733757:2459] TxId: 281474976715672. Ctx: { TraceId: 01jd70536yd1tcfjjgr2n7j8v8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:51.270981Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439659073883733699:2453] 2024-11-21T09:16:51.271252Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659073883733757:2459] TxId: 281474976715672. Ctx: { TraceId: 01jd70536yd1tcfjjgr2n7j8v8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:51.272353Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733767:2467], TxId: 281474976715672, task: 5. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. CustomerSuppliedId : . TraceId : 01jd70536yd1tcfjjgr2n7j8v8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272535Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=, ActorId: [1:7439659073883733712:2459], ActorState: ExecuteState, TraceId: 01jd70536yd1tcfjjgr2n7j8v8, Create QueryResponse for error on request, msg: 2024-11-21T09:16:51.272582Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733768:2468], TxId: 281474976715672, task: 6. Ctx: { TraceId : 01jd70536yd1tcfjjgr2n7j8v8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272630Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733769:2469], TxId: 281474976715672, task: 7. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. TraceId : 01jd70536yd1tcfjjgr2n7j8v8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272652Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180610335, txId: 281474976715671] shutting down 2024-11-21T09:16:51.272671Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733770:2470], TxId: 281474976715672, task: 8. Ctx: { CustomerSuppliedId : . TraceId : 01jd70536yd1tcfjjgr2n7j8v8. SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272689Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733765:2465], TxId: 281474976715672, task: 3. Ctx: { TraceId : 01jd70536yd1tcfjjgr2n7j8v8. SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272780Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733766:2466], TxId: 281474976715672, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. TraceId : 01jd70536yd1tcfjjgr2n7j8v8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659073883733757:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:51.272820Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659073883733771:2471], TxId: 281474976715672, task: 9. Ctx: { TraceId : 01jd70536yd1tcfjjgr2n7j8v8. SessionId : ydb://session/3?node_id=1&id=ZDVkOTljOGUtNzU3OGUzNzItZTMyZjcyYzAtNTU3OTg0NjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort executio ... 16:53.204535Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439659087211016279:2616]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037897, actor_id: [2:7439659082916046917:2301] 2024-11-21T09:16:53.204545Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016279:2616]. TKqpScanFetcherActor: broken tablet for this request 72075186224037897, retries limit exceeded (0/20) 2024-11-21T09:16:53.221659Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439659087211016291:2617] 2024-11-21T09:16:53.270754Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659087211016556:2640] TxId: 281474976715677. Ctx: { TraceId: 01jd70563yacryb55hx7awp0fx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:53.270864Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=, ActorId: [2:7439659087211016522:2640], ActorState: ExecuteState, TraceId: 01jd70563yacryb55hx7awp0fx, Create QueryResponse for error on request, msg: 2024-11-21T09:16:53.270981Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613317, txId: 281474976715676] shutting down 2024-11-21T09:16:53.271213Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016560:2644], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd70563yacryb55hx7awp0fx. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016556:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.271356Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016564:2647], TxId: 281474976715677, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=. CustomerSuppliedId : . TraceId : 01jd70563yacryb55hx7awp0fx. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016556:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.271405Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016563:2646], TxId: 281474976715677, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=. TraceId : 01jd70563yacryb55hx7awp0fx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659087211016556:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.271450Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016565:2648], TxId: 281474976715677, task: 5. Ctx: { TraceId : 01jd70563yacryb55hx7awp0fx. SessionId : ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016556:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.271558Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016561:2645], TxId: 281474976715677, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmM5YjI4NTktMjI5ZWFkM2YtZDE0YjQ2YmQtNTZmMzcyYjk=. TraceId : 01jd70563yacryb55hx7awp0fx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016556:2640], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.271563Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439659087211016601:2058], tablet: [2:7439659082916046913:2297], scanId: 5, table: /Root/EightShard 2024-11-21T09:16:53.271792Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715677. Snapshot is not valid, tabletId: 72075186224037895, step: 1732180613317 2024-11-21T09:16:53.271892Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715677. Snapshot is not valid, tabletId: 72075186224037897, step: 1732180613317 2024-11-21T09:16:53.271929Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715677. Snapshot is not valid, tabletId: 72075186224037896, step: 1732180613317 2024-11-21T09:16:53.298645Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTk5NGFkOGUtNjVlNzRiOTItMWQ0OWEyZjQtMjY5ODc2Mw==, ActorId: [2:7439659087211016614:2654], ActorState: ExecuteState, TraceId: 01jd70564rewmtmq95yr2v4bfj, Create QueryResponse for error on request, msg: 2024-11-21T09:16:53.354379Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659087211016787:2676] TxId: 281474976715682. Ctx: { TraceId: 01jd70566e7tsj7nf89ys21cgw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:53.354473Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=, ActorId: [2:7439659087211016748:2676], ActorState: ExecuteState, TraceId: 01jd70566e7tsj7nf89ys21cgw, Create QueryResponse for error on request, msg: 2024-11-21T09:16:53.354573Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613394, txId: 281474976715681] shutting down 2024-11-21T09:16:53.355603Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016792:2680], TxId: 281474976715682, task: 1. Ctx: { TraceId : 01jd70566e7tsj7nf89ys21cgw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016787:2676], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.356298Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016794:2682], TxId: 281474976715682, task: 3. Ctx: { TraceId : 01jd70566e7tsj7nf89ys21cgw. SessionId : ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659087211016787:2676], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.356436Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016795:2683], TxId: 281474976715682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=. CustomerSuppliedId : . TraceId : 01jd70566e7tsj7nf89ys21cgw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016787:2676], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.356562Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016793:2681], TxId: 281474976715682, task: 2. Ctx: { TraceId : 01jd70566e7tsj7nf89ys21cgw. SessionId : ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659087211016787:2676], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.356771Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016796:2684], TxId: 281474976715682, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jd70566e7tsj7nf89ys21cgw. SessionId : ydb://session/3?node_id=2&id=NTczYzVmN2ItYjI3ZTA0MTQtODZkOWNjMTUtOGJhZDY1M2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016787:2676], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.357093Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439659087211016828:2061], tablet: [2:7439659082916046915:2299], scanId: 9, table: /Root/EightShard 2024-11-21T09:16:53.357106Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439659087211016824:2059], tablet: [2:7439659082916046914:2298], scanId: 11, table: /Root/EightShard 2024-11-21T09:16:53.357115Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439659087211016826:2060], tablet: [2:7439659082916046912:2296], scanId: 12, table: /Root/EightShard 2024-11-21T09:16:53.357129Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439659087211016830:2062], tablet: [2:7439659082916046916:2300], scanId: 10, table: /Root/EightShard 2024-11-21T09:16:53.383346Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659087211016904:2695] TxId: 281474976715685. Ctx: { TraceId: 01jd70567cb68p3wkcp92jd4j4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYyMmI5YjctNTkwMmFiOGMtYjdkZTQ3NGEtMzNhMzMzNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:16:53.383476Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODYyMmI5YjctNTkwMmFiOGMtYjdkZTQ3NGEtMzNhMzMzNTI=, ActorId: [2:7439659087211016871:2695], ActorState: ExecuteState, TraceId: 01jd70567cb68p3wkcp92jd4j4, Create QueryResponse for error on request, msg: 2024-11-21T09:16:53.383598Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613429, txId: 281474976715684] shutting down 2024-11-21T09:16:53.383636Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439659087211016822:2689] 2024-11-21T09:16:53.383708Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016910:2700], TxId: 281474976715685, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODYyMmI5YjctNTkwMmFiOGMtYjdkZTQ3NGEtMzNhMzMzNTI=. TraceId : 01jd70567cb68p3wkcp92jd4j4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016904:2695], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.383979Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016913:2702], TxId: 281474976715685, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=ODYyMmI5YjctNTkwMmFiOGMtYjdkZTQ3NGEtMzNhMzMzNTI=. TraceId : 01jd70567cb68p3wkcp92jd4j4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659087211016904:2695], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.383996Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659087211016914:2703], TxId: 281474976715685, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jd70567cb68p3wkcp92jd4j4. SessionId : ydb://session/3?node_id=2&id=ODYyMmI5YjctNTkwMmFiOGMtYjdkZTQ3NGEtMzNhMzMzNTI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659087211016904:2695], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.410572Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613450, txId: 281474976715687] shutting down 2024-11-21T09:16:53.438176Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613478, txId: 281474976715689] shutting down 2024-11-21T09:16:53.466205Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613506, txId: 281474976715691] shutting down >> KqpQuery::RewriteIfPresentToMap >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit >> KqpLimits::DatashardProgramSize [GOOD] >> KqpLimits::ComputeNodeMemoryLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 12374, MsgBus: 14012 2024-11-21T09:16:52.250675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659082838688093:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.250731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d7/r3tmp/tmpKDA6fH/pdisk_1.dat 2024-11-21T09:16:52.301746Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12374, node 1 2024-11-21T09:16:52.315821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.315832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.315833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.315867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14012 2024-11-21T09:16:52.350820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.350844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.352057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:52.378906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.384478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.397873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.417817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.427499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:52.515889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659082838689632:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.515919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.556281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.611230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.618129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.625689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.631712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.639178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:52.647680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659082838690148:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.647711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.647725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659082838690153:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:52.648312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:52.651941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659082838690155:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:52.937309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180612974, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 26870, MsgBus: 3512 2024-11-21T09:16:53.182010Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659084908959365:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.182041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d7/r3tmp/tmpOENpm8/pdisk_1.dat 2024-11-21T09:16:53.194644Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26870, node 2 2024-11-21T09:16:53.204744Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.204761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.204763Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.204801Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3512 TClient is connected to server localhost:3512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.282190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.282223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.283356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.285073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.291806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.348462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.364063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.375099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.486144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659084908960902:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.486170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.492461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.499117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.507118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.514265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.521625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.536873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.596126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659084908961418:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.596166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.596197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659084908961423:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.596922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.604988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659084908961425:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.788355Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613828, txId: 281474976715671] shutting down 2024-11-21T09:16:53.818893Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613863, txId: 281474976715673] shutting down 2024-11-21T09:16:53.846254Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613891, txId: 281474976715675] shutting down >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpTypes::SelectNull [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::RequestUnitForBadRequestExecute >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpStats::StatsProfile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] Test command err: Trying to start YDB, gRPC: 4945, MsgBus: 1476 2024-11-21T09:16:52.797175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659083570265126:2093];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.797316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bd1/r3tmp/tmp7jUjvG/pdisk_1.dat 2024-11-21T09:16:52.860486Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4945, node 1 2024-11-21T09:16:52.900459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.900491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.901340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.911027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.911043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.911044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.911084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1476 TClient is connected to server localhost:1476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.012651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.020611Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.032961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.105579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.128264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.140106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.160995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087865233912:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.161032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.310714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.347636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087865234426:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087865234431:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659087865234433:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.582187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 62868, MsgBus: 12352 2024-11-21T09:16:53.904484Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085063615881:2087];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.904510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bd1/r3tmp/tmpftP16f/pdisk_1.dat 2024-11-21T09:16:53.923404Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62868, node 2 2024-11-21T09:16:53.942892Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.942907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.942910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.942951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12352 TClient is connected to server localhost:12352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.008579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.008608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.008938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.009822Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:54.010189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.020370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.028818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.047374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.055962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.226871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089358584687:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.226896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.232623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.239863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.248810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.255973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.263007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.269911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.278473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089358585189:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.278498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089358585194:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.278499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.279083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.283454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089358585196:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.488929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.503383Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659089358585557:2465], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2024-11-21T09:16:54.503738Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Njk0ODJlN2MtNjgyZDQ4MjQtZjBhMmVlZmUtOTdhODViOGE=, ActorId: [2:7439659089358585481:2454], ActorState: ExecuteState, TraceId: 01jd7057b42nqps78s8qs1tbt1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RandomUuid >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> KqpQuery::QueryCache >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpStats::RequestUnitForExecute [GOOD] >> KqpExplain::Predicates [GOOD] >> KqpQuery::SelectCountAsteriskFromVar [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpStats::JoinStatsBasicScan [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForBadRequestExecute [GOOD] Test command err: Trying to start YDB, gRPC: 8252, MsgBus: 63830 2024-11-21T09:16:53.007616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659085998187200:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.007646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b97/r3tmp/tmparCNd1/pdisk_1.dat 2024-11-21T09:16:53.060982Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8252, node 1 2024-11-21T09:16:53.075061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.075072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.075073Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.075105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63830 TClient is connected to server localhost:63830 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:53.139602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.139625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.140012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.140671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:16:53.153415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.176525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.192295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.201934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.312720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085998188740:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.312748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.345197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.360866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.374516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.381418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.388125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.397084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085998189253:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.397104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.397129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085998189258:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.397866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.402086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659085998189260:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.630680Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659085998189592:2459] TxId: 281474976715672. Ctx: { TraceId: 01jd7056f638nrxn13vkab7msg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAzNDA4ODEtODI1OWE5ZTgtZTU3ZDI2OC1kZTc5ZjVl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:16:53.645216Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180613674, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 14403, MsgBus: 9686 2024-11-21T09:16:53.909050Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085291467035:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.909098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b97/r3tmp/tmp3DIZL2/pdisk_1.dat 2024-11-21T09:16:53.921944Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14403, node 2 2024-11-21T09:16:53.928808Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.928822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.928832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.928869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9686 TClient is connected to server localhost:9686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.966024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.967261Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.977099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.009393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.009413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.010481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.032550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.050375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.061500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.223505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089586435882:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.223535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.229176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.236004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.249463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.256109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.262832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.270147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.278533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089586436385:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.278557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.278579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089586436390:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.279097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.283206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089586436392:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.494138Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180614535, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 14465, MsgBus: 26075 2024-11-21T09:16:54.818170Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659090956931961:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.818182Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b97/r3tmp/tmpbdHtcd/pdisk_1.dat 2024-11-21T09:16:54.832196Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14465, node 3 2024-11-21T09:16:54.841285Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.841297Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.841299Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.841345Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26075 TClient is connected to server localhost:26075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.920760Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.920786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.921121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.922171Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.928560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.939821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.961360Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:54.973148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.138551Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095251900805:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.138572Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.144523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.151231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.159003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.166186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.173520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.188160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.202655Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095251901308:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.202674Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095251901313:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.202680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.203193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.208094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659095251901315:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.383519Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659095251901607:2459], status: GENERIC_ERROR, issues:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2024-11-21T09:16:55.383582Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWIxY2VkMGItNmYzNjdkNzktOGNkNmM3N2ItODI5OWU4MjA=, ActorId: [3:7439659095251901599:2454], ActorState: ExecuteState, TraceId: 01jd70586pfq70aph36nbwd2pt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: Trying to start YDB, gRPC: 26072, MsgBus: 17265 2024-11-21T09:16:52.856321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659082883731953:2157];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dbc/r3tmp/tmp6AmlgN/pdisk_1.dat 2024-11-21T09:16:52.877151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:52.903292Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26072, node 1 2024-11-21T09:16:52.918612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.918647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.919757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.924400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.924413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.924414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.924445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17265 TClient is connected to server localhost:17265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.021608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.027159Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.035433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.054992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.074380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.082357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.182023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087178700659:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.182045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.306936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.340686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.356282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087178701173:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.356317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.356813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087178701178:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.357509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.359351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659087178701180:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin 
(MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26936, MsgBus: 29626 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dbc/r3tmp/tmprFb193/pdisk_1.dat 2024-11-21T09:16:53.910822Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:53.911151Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26936, node 2 2024-11-21T09:16:53.924472Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.924483Z node 2 :NET_CLASSIFIER WARN: ... o unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.228140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.234970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.242198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.249394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.258056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659090675929909:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659090675929914:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.262224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659090675929916:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.442758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.481888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.495975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5733, MsgBus: 4403 2024-11-21T09:16:54.810025Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659088709123799:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.810055Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dbc/r3tmp/tmpG3IkTb/pdisk_1.dat 2024-11-21T09:16:54.823439Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5733, node 3 2024-11-21T09:16:54.833062Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.833077Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.833079Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.833119Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4403 TClient is connected to server localhost:4403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.910257Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.910290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.911660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.914232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
waiting... 2024-11-21T09:16:54.920777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.931575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.961880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.972027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.114508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093004092639:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.114538Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.119455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.126161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.138478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.145550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.159519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.173472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.181704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093004093152:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.181741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093004093157:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.181742Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.182320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.186609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659093004093159:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 18660, MsgBus: 29208 2024-11-21T09:16:52.825766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659083045880611:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.825936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c1e/r3tmp/tmp8F4BLF/pdisk_1.dat 2024-11-21T09:16:52.918609Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18660, node 1 2024-11-21T09:16:52.932342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.932360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.932361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.932388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29208 TClient is connected to server localhost:29208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:52.994071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.994093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.995087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.002087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:53.016798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.087065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.108956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.168238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.189922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087340849443:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.189950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.302660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.314173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.331263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.353359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.361854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087340849956:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.361882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.361957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087340849962:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.362729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.366455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659087340849964:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 16504, MsgBus: 1060 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c1e/r3tmp/tmpvwOc8K/pdisk_1.dat 2024-11-21T09:16:53.926196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:53.934790Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16504, node 2 2024-11-21T09:16:53.948469Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.948480Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.948481Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.948525Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1060 TClient is connected to server localhost:1060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.020597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.020623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.020967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.022886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:54.025142Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:54.039110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.049776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.074757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.085948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.238227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091572057398:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.238253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.244132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.250468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.255943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.269835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.277077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.284271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.292299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091572057893:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.292326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.292353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091572057898:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.292903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.297045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659091572057900:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 15661, MsgBus: 18019 2024-11-21T09:16:54.637526Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659090990846597:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.637734Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c1e/r3tmp/tmpVMYm6k/pdisk_1.dat 2024-11-21T09:16:54.649112Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15661, node 3 2024-11-21T09:16:54.659624Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.659639Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.659641Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.659676Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18019 TClient is connected to server localhost:18019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.740469Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.740501Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.740939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.741510Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.750302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:54.760831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.799794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.817570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.971514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659090990848142:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.971536Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.975970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.983086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.995739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.005937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.019662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.026513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.034951Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095285815943:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.034974Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.034977Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095285815948:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.035562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.039244Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659095285815950:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQuery::DictJoin [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::RowsLimit >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 6932, MsgBus: 21724 2024-11-21T09:16:52.764538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659080123474720:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bc1/r3tmp/tmp7D3XBP/pdisk_1.dat 2024-11-21T09:16:52.864442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.864468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.878260Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.886329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6932, node 1 2024-11-21T09:16:52.913140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.913152Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.913153Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.913181Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21724 TClient is connected to server localhost:21724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.003816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.020990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.086427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:53.107564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.116577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.154003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084418443339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.154037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.334050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.347117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.363798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084418443853:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.363826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.363842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084418443858:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.364491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.374105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084418443860:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.575488Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659084418444155:2459], status: GENERIC_ERROR, issues:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2024-11-21T09:16:53.575570Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjdhNzk0MDctMzhjMDBmYzUtMmVmM2Y3ZDctZTE3ZGVlYTI=, ActorId: [1:7439659084418444147:2454], ActorState: ExecuteState, TraceId: 01jd7056e6dzpzvvrd47nh1a09, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 9966, MsgBus: 3342 2024-11-21T09:16:53.847282Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085495025890:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bc1/r3tmp/tmpcZfJsE/pdisk_1.dat 2024-11-21T09:16:53.854774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 9966, node 2 2024-11-21T09:16:53.865133Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:53.868639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.868653Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.868654Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.868687Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3342 TClient is connected to server localhost:3342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.947185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.947213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.948365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.949012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.951619Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.957429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.973294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.995630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.011453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.163739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089789994581:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.163768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.169856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.176525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.186356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.192999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.200235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.207373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.216117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089789995086:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.216140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.216296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089789995091:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.216981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.220720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089789995093:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22360, MsgBus: 21951 2024-11-21T09:16:54.784559Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659090134157602:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.784577Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bc1/r3tmp/tmpOwASrg/pdisk_1.dat 2024-11-21T09:16:54.796932Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22360, node 3 2024-11-21T09:16:54.804192Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.804223Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.804225Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.804268Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21951 TClient is connected to server localhost:21951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.887257Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.887287Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.887544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.888342Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.899062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.907953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.928603Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.938437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.101646Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094429126441:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.101673Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.107010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.112682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.126494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.138394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.145792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.151921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.160601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094429126947:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.160627Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.160631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094429126952:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.161250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.165648Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659094429126954:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Consumed units: 18 Consumed units: 6 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] Test command err: Trying to start YDB, gRPC: 12513, MsgBus: 19383 2024-11-21T09:16:52.764612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659081195586504:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dc2/r3tmp/tmpR8kQSa/pdisk_1.dat 2024-11-21T09:16:52.863218Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.864419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.864429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.867283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12513, node 1 2024-11-21T09:16:52.913889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.913908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.913910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.913943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19383 TClient is connected to server localhost:19383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.031111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.034754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.037213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.052135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:53.069780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.085050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.225516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085490555125:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.225542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.303134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.313160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.354851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085490555637:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.354870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659085490555642:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.354878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.355432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.359378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659085490555644:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 19493, MsgBus: 8268 2024-11-21T09:16:53.851252Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085564264337:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.851278Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dc2/r3tmp/tmpSUgqPe/pdisk_1.dat 2024-11-21T09:16:53.875373Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19493, node 2 2024-11-21T09:16:53.916030Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.916049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.916050Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.916093Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8268 2024-11-21T09:16:53.951392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.951419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.952579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.967391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.968609Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.976507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.986811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.006334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:54.016310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.237860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089859233188:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.237904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.242591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.248718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.256223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.263003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.269833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.325664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.334908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089859233684:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.334937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089859233689:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.334936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.335504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.339457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089859233691:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9856, MsgBus: 19927 2024-11-21T09:16:54.759226Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659090948874719:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dc2/r3tmp/tmpqjgBEH/pdisk_1.dat 2024-11-21T09:16:54.762485Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:54.771058Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9856, node 3 2024-11-21T09:16:54.778904Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.778916Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.778917Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.778949Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19927 TClient is connected to server localhost:19927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.861803Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.861831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.862194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.862751Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.866921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.875276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.891299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.901425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.073390Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095243843416:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.073415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.078811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.086063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.097652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.110463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.117531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.131527Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.139913Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095243843916:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.139931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.139937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095243843921:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.140570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.144202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659095243843923:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 6508, MsgBus: 14071 2024-11-21T09:16:52.764741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659079875570047:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ce7/r3tmp/tmpRT47nJ/pdisk_1.dat 2024-11-21T09:16:52.854683Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.865945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.865976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6508, node 1 2024-11-21T09:16:52.867099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.912843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.912856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.912857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.912890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14071 TClient is connected to server localhost:14071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:53.004477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.007094Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.014480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.083825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.112173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.122651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.151972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084170538664:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.151999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.311031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.319138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.333420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.347687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084170539176:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084170539181:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.353812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084170539183:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"[{column0: 1,column1: 2,column2: 3},{column0: 4,column1: 5,column2: 6}]","Name":"Iterator"}],"Node Type":"ConstantExpr"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 22151, MsgBus: 1937 2024-11-21T09:16:53.842406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659087861259992:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.842427Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ce7/r3tmp/tmpg7w0tQ/pdisk_1.dat 2024-11-21T09:16:53.870734Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22151, node 2 2024-11-21T09:16:53.884425Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.884441Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.884443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.884482Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1937 TClient is connected to server localhost:1937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.942979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.943018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.944093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.944721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.946348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.955432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.969037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.991962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.008106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: EScheme ... 94046644480 2024-11-21T09:16:54.228366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.242502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.249347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.256463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.264535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659092156229344:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.264559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659092156229349:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.264567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.265158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.269518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659092156229351:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.469508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.515945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.526074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 28230, MsgBus: 31276 2024-11-21T09:16:54.764963Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659088986112676:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ce7/r3tmp/tmpxVyyHg/pdisk_1.dat 2024-11-21T09:16:54.768677Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:54.779156Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28230, node 3 2024-11-21T09:16:54.797776Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.797789Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.797790Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.797831Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31276 TClient is connected to server localhost:31276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.865065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.865104Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.866193Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.868346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.870609Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:54.875472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.885099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.902974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.915813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.064806Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093281081349:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.064830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.069237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.076760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.090317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.103771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.117367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.124601Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.139586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093281081860:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.139609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.139633Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659093281081865:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.140260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.144834Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659093281081867:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.320312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 20252, MsgBus: 8182 2024-11-21T09:16:52.764544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659081730335868:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001db0/r3tmp/tmp789G5z/pdisk_1.dat 2024-11-21T09:16:52.859482Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.872506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.872530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.876432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20252, node 1 2024-11-21T09:16:52.912345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.912356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.912357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.912389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8182 TClient is connected to server localhost:8182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.011850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.019383Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.026823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.052440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.113136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.122950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.185469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086025304502:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.185496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.311796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.347381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086025305017:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086025305022:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659086025305024:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20415, MsgBus: 4968 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001db0/r3tmp/tmprHL4tH/pdisk_1.dat 2024-11-21T09:16:53.860327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:53.864477Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20415, node 2 2024-11-21T09:16:53.871822Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.871835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.871837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.871880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4968 TClient is connected to server localhost:4968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.951318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.951363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.952627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.953085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.954397Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.959343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.973399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.006216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.018349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.166479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088219315483:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.166542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.170192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.176730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.186444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.193023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.200226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.207332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.215959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088219315985:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.215985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088219315990:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.215988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.216558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.220511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659088219315992:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20436, MsgBus: 29574 2024-11-21T09:16:54.772348Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659091607756568:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001db0/r3tmp/tmpS5K1aP/pdisk_1.dat 2024-11-21T09:16:54.785417Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:54.792002Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20436, node 3 2024-11-21T09:16:54.798626Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.798640Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.798641Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.798688Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29574 TClient is connected to server localhost:29574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.874409Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.874438Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.874787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.875507Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.886353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.899142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.921358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.931647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.105074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095902725259:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.105110Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.107481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.115156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.124718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.131711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.138379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.145644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.161216Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095902725759:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.161238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659095902725764:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.161246Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.161864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.165662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659095902725766:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |94.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 15893, MsgBus: 17559 2024-11-21T09:16:52.810939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659079438384006:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.811028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb3/r3tmp/tmpUz4wCR/pdisk_1.dat 2024-11-21T09:16:52.879826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15893, node 1 2024-11-21T09:16:52.912237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.912266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.914059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.914063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.914064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.914088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:52.914465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17559 TClient is connected to server localhost:17559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:53.024560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.027586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.035393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.104433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.135639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.147838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.175439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083733352706:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.175461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.305844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.319045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.340982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.355492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083733353221:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.355510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083733353226:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.355517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.356030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.359525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659083733353228:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:53.632155Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659083733353531:2462], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjJhMGY4NTMtY2JhOTM0ODAtZDkyNjNmZDAtNTFhZTllZmQ=. CustomerSuppliedId : . TraceId : 01jd7056e8cjwnqszvt0rfpexb. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2024-11-21T09:16:53.632499Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659083733353532:2463], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7056e8cjwnqszvt0rfpexb. SessionId : ydb://session/3?node_id=1&id=YjJhMGY4NTMtY2JhOTM0ODAtZDkyNjNmZDAtNTFhZTllZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659083733353527:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:16:53.633728Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjJhMGY4NTMtY2JhOTM0ODAtZDkyNjNmZDAtNTFhZTllZmQ=, ActorId: [1:7439659083733353512:2454], ActorState: ExecuteState, TraceId: 01jd7056e8cjwnqszvt0rfpexb, Create QueryResponse for error on request, msg:
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 2403, MsgBus: 27218 2024-11-21T09:16:53.894920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659086938679712:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.894937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb3/r3tmp/tmpRdi6Dv/pdisk_1.dat 2024-11-21T09:16:53.909966Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2403, node 2 2024-11-21T09:16:53.920966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.920980Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.920982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.921034Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27218 TClient is connected to server localhost:27218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.995178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.995209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:16:53.997273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.998463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.000501Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.008788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.018195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.037529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.047449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.204415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091233648553:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.204440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.209294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.215389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.228180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.235186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.242090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.249463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.258234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091233649056:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091233649061:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.258708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.262123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659091233649063:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1096, MsgBus: 8349 2024-11-21T09:16:54.802334Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659088032938620:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.802344Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb3/r3tmp/tmpbyYhIm/pdisk_1.dat 2024-11-21T09:16:54.812474Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1096, node 3 2024-11-21T09:16:54.822233Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.822246Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.822248Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.822278Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8349 TClient is connected to server localhost:8349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.902465Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.902508Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.903572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.905331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.908646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.923560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.944269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.957423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.110107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659092327907457:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.110129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.114320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.121646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.134465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.145401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.152794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.158962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.168669Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659092327907961:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.168698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.168727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659092327907966:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.169485Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.172574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659092327907968:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpLimits::ComputeActorMemoryAllocationFailure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 17024, MsgBus: 11956 2024-11-21T09:16:52.764830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659079761465361:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c69/r3tmp/tmp9nHDxJ/pdisk_1.dat 2024-11-21T09:16:52.865504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.865522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.867142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.868130Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17024, node 1 2024-11-21T09:16:52.916378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.916390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.916391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.916421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11956 TClient is connected to server localhost:11956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.010970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.019966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.086049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:53.104301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.115667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.149136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084056433979:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.149160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.303886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.311157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.319453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.333597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.348335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084056434493:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084056434498:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.357100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084056434500:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 19557, MsgBus: 14886 2024-11-21T09:16:53.843821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085629412746:2053];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c69/r3tmp/tmpfS8Vkx/pdisk_1.dat 2024-11-21T09:16:53.844097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:53.863085Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19557, node 2 2024-11-21T09:16:53.872384Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.872403Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.872404Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.872449Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14886 TClient is connected to server localhost:14886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.945468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.945499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.945844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.946486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.947106Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.952893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.963827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.985356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.995534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.201593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089924381599:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.201628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.207488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.215150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.230765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.242478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.297502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.305116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.313646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089924382093:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.313671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.313677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089924382098:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.314380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.318673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089924382100:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 18217, MsgBus: 15418 2024-11-21T09:16:54.769097Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659090577996997:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001c69/r3tmp/tmpmA6XHr/pdisk_1.dat 2024-11-21T09:16:54.770834Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:54.781168Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18217, node 3 2024-11-21T09:16:54.792307Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.792321Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.792323Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.792393Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15418 TClient is connected to server localhost:15418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.869027Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.869059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.870196Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.871389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.880476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.888558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.909172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:54.920131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.083075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094872965682:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.083124Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.088107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.095005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.103127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.157775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.165995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.173283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.181321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094872966187:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.181345Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094872966192:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.181353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.181874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.186402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659094872966194:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.633961Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180615466, txId: 281474976715671] shutting down >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheInvalidate >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> KqpExplain::ExplainStream >> TTicketParserTest::LoginEmptyTicketBad >> KqpLimits::WaitCAsStateOnAbort >> KqpStats::SysViewClientLost >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::PreparedQueryInvalidate >> KqpLimits::ReplySizeExceeded [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpStats::MultiTxStatsFullExpYql >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpExplain::LimitOffset >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> KqpParams::RowsList >> KqpLimits::DatashardReplySize [GOOD] >> KqpQuery::QueryCacheTtl [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> KqpExplain::ExplainStream [GOOD] >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpExplain::FewEffects >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpExplain::ExplainDataQuery >> KqpQuery::Pure >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpExplain::LimitOffset [GOOD] >> KqpQuery::Pure [GOOD] >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpStats::MultiTxStatsFullExpScan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 4294, MsgBus: 26363 2024-11-21T09:16:53.222000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659084579272279:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.222702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b93/r3tmp/tmpXwC73D/pdisk_1.dat 2024-11-21T09:16:53.288959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4294, node 1 2024-11-21T09:16:53.294930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.294942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.294944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.294991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26363 2024-11-21T09:16:53.321896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.321928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.323309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.362483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.372573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.434087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.452796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:53.464404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.531651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084579273738:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.531691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.557977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.566403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.624390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.634369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.647812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.661706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.677130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084579274254:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.677153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084579274259:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.677157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.677789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.681334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084579274261:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.894868Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659084579274553:2459], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2024-11-21T09:16:53.895226Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2U3NjcyZGYtNTc0NDY4ZjQtNzczZThmODktZjYwNzQzNjg=, ActorId: [1:7439659084579274545:2454], ActorState: ExecuteState, TraceId: 01jd7056r2ag0g3er3gvanbpnr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2024-11-21T09:16:53.900111Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659084579274566:2462], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2024-11-21T09:16:53.900567Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2U3NjcyZGYtNTc0NDY4ZjQtNzczZThmODktZjYwNzQzNjg=, ActorId: [1:7439659084579274545:2454], ActorState: ExecuteState, TraceId: 01jd7056r875nag0ddqmf129x2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2024-11-21T09:16:53.906248Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659084579274576:2466], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2024-11-21T09:16:53.906342Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2U3NjcyZGYtNTc0NDY4ZjQtNzczZThmODktZjYwNzQzNjg=, ActorId: [1:7439659084579274545:2454], ActorState: ExecuteState, TraceId: 01jd7056re7jm93zyfp16a0z3h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 6078, MsgBus: 3835 2024-11-21T09:16:54.107960Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659088735284215:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.108306Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b93/r3tmp/tmpR9GKkY/pdisk_1.dat 2024-11-21T09:16:54.121674Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6078, node 2 2024-11-21T09:16:54.131532Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.131545Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.131547Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.131592Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3835 TClient is connected to server localhost:3835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.208423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.208454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-1 ... 9:16:54.209545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.210747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.218221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.229725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.247189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.256954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.431678Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088735285757:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.431735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.434174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.442922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.452796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.459377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.473835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.488032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.504254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088735286258:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.504281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.504376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088735286263:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.504965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.507105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659088735286265:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 19662, MsgBus: 61238 2024-11-21T09:16:55.023749Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659094505434454:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.023782Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b93/r3tmp/tmpJXHHQD/pdisk_1.dat 2024-11-21T09:16:55.032737Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19662, node 3 2024-11-21T09:16:55.044352Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.044367Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.044368Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.044404Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61238 TClient is connected to server localhost:61238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.126426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.126462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.126709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.127467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.127879Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:55.132928Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.141819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:55.160785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.168807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.373151Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094505436014:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.373196Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.377771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.384233Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.390496Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.397336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.452291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.460729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.468613Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094505436506:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.468644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.468707Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094505436511:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.469276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.473453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659094505436513:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpParams::DefaultParameterValue >> KqpExplain::MergeConnection >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpExplain::FewEffects [GOOD] >> KqpExplain::FullOuterJoin >> KqpParams::RowsList [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> KqpQuery::CurrentUtcTimestamp >> TOlapReboots::CreateStandaloneTable [GOOD] >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::ExplainDataQuery [GOOD] >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> TOlapReboots::CreateStore [GOOD] >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal+QueryService >> KqpExplain::ExplainDataQueryWithParams >> KqpExplain::IdxFullscan >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpParams::Decimal-QueryService >> KqpQuery::OltpCreateAsSelect_Disable >> KqpQuery::QueryCancelWrite >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpExplain::FullOuterJoin [GOOD] >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinNoStatsScan >> KqpParams::Decimal-QueryService [GOOD] >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpQuery::CreateAsSelect_BadCases ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 3025, MsgBus: 28400 2024-11-21T09:16:52.814607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659082660914897:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.814659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb4/r3tmp/tmplXatiO/pdisk_1.dat 2024-11-21T09:16:52.888931Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3025, node 1 2024-11-21T09:16:52.912168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.912184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.912185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.912232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:52.916131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.916160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.917202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28400 TClient is connected to server localhost:28400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:53.033042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.035187Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.042267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.107180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.124093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.137835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.164487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086955883599:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.164514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.293451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.310698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.373325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.382827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086955884118:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.382857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.382885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086955884123:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.383544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.387241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659086955884125:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 19104, MsgBus: 65232 2024-11-21T09:16:53.892072Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659087352630618:2149];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bb4/r3tmp/tmpyPlyno/pdisk_1.dat 2024-11-21T09:16:53.897827Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:53.911344Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19104, node 2 2024-11-21T09:16:53.921784Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.921794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.921795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.921823Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65232 TClient is connected to server localhost:65232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.994436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.994461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.994796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.995401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:53.996640Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:54.209471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091647598397:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.209495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.209504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659091647598424:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.210014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.211645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659091647598426:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:16:54.306585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 8;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:55.602790Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:55.602813Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:55.602835Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:55.602856Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:55.602874Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:55.602897Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:55.602919Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:55.602941Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[3:7439659095181595938:2444];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:16:55.603022Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:16:55.603032Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:16:55.603042Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:16:55.603049Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:16:55.603060Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:16:55.603067Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:16:55.603073Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:16:55.603082Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:16:55.603088Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:16:55.603091Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:16:55.603095Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:16:55.603102Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:55.603143Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:55.603152Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:55.603163Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:55.603166Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:16:55.603173Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:55.603176Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:55.603186Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:55.603193Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2024-11-21T09:16:55.603200Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:55.603202Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:16:55.603442Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:16:55.603457Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:16:55.603470Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:16:55.603479Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:16:55.603495Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:16:55.603505Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:16:55.603515Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:16:55.603526Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:16:55.603535Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:16:55.603545Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:16:55.603552Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:16:55.603561Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:16:55.603599Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:16:55.603611Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:16:55.603626Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:16:55.603635Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:16:55.603647Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:16:55.603657Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:16:55.603673Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:16:55.603681Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:16:55.603692Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:16:55.603700Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 11295, MsgBus: 10229 2024-11-21T09:16:52.788690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659079334610099:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.788774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dbd/r3tmp/tmpb4XoDg/pdisk_1.dat 2024-11-21T09:16:52.856414Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11295, node 1 2024-11-21T09:16:52.891521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.891553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.892664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.912412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.912425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.912427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.912462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10229 TClient is connected to server localhost:10229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.002650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.014416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.084510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.148429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.155411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.182526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083629578806:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.182547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.296840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.306730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.348645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083629579323:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659083629579328:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.349401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659083629579330:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 18298, MsgBus: 27337 2024-11-21T09:16:53.868277Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659087843842703:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.868438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001dbd/r3tmp/tmp3kP6mB/pdisk_1.dat 2024-11-21T09:16:53.881186Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18298, node 2 2024-11-21T09:16:53.888095Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.888108Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.888109Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.888158Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27337 TClient is connected to server localhost:27337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.968703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.968728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.969866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.970940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.976808Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.979945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.990910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.007953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.063809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.165889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659092138811532:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.165913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.171485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... EBUG: [0x5503a08a200] received request Name# TopicService/AlterTopic ok# false data# peer# 2024-11-21T09:16:57.250750Z node 5 :GRPC_SERVER DEBUG: [0x5503f95c200] received request Name# TopicService/DropTopic ok# false data# peer# 2024-11-21T09:16:57.250785Z node 5 :GRPC_SERVER DEBUG: [0x5503f953e00] received request Name# Coordination/CreateNode ok# false data# peer# 2024-11-21T09:16:57.250805Z node 5 :GRPC_SERVER DEBUG: [0x5503f948a00] received request Name# Coordination/AlterNode ok# false data# peer# 2024-11-21T09:16:57.250818Z node 5 :GRPC_SERVER DEBUG: [0x5503f94f600] received request Name# Coordination/DropNode ok# false data# peer# 2024-11-21T09:16:57.250831Z node 5 :GRPC_SERVER DEBUG: [0x5503f97de00] received request Name# Coordination/DescribeNode ok# false data# peer# 2024-11-21T09:16:57.250854Z node 5 :GRPC_SERVER DEBUG: [0x5503f97b400] received request Name# CreateDatabase ok# false data# peer# 2024-11-21T09:16:57.250861Z node 5 :GRPC_SERVER DEBUG: [0x5503f94c600] received request Name# GetDatabaseStatus ok# false data# peer# 2024-11-21T09:16:57.250888Z node 5 :GRPC_SERVER DEBUG: [0x5503f94e400] received request Name# AlterDatabase ok# false data# peer# 2024-11-21T09:16:57.250891Z node 5 :GRPC_SERVER DEBUG: [0x5503a090e00] received request Name# ListDatabases ok# false data# peer# 2024-11-21T09:16:57.250922Z node 5 :GRPC_SERVER DEBUG: [0x5503a09c200] received request Name# RemoveDatabase ok# false data# peer# 2024-11-21T09:16:57.250925Z node 5 :GRPC_SERVER DEBUG: [0x5503f952c00] received request Name# DescribeDatabaseOptions ok# false data# peer# 2024-11-21T09:16:57.250949Z node 5 :GRPC_SERVER DEBUG: [0x5503f952000] received request Name# GetScaleRecommendation ok# false data# peer# 2024-11-21T09:16:57.250968Z node 5 :GRPC_SERVER DEBUG: [0x5503a09ec00] received request Name# ListEndpoints ok# false data# peer# 2024-11-21T09:16:57.250981Z node 5 :GRPC_SERVER DEBUG: [0x5503a097a00] received request Name# WhoAmI ok# false data# peer# 2024-11-21T09:16:57.250993Z node 5 :GRPC_SERVER DEBUG: [0x5503f949600] received request Name# NodeRegistration ok# false data# peer# 2024-11-21T09:16:57.251010Z node 5 :GRPC_SERVER DEBUG: [0x5503f950800] received request Name# Scan ok# false data# peer# 2024-11-21T09:16:57.251027Z node 5 :GRPC_SERVER DEBUG: [0x5503a085400] received request Name# GetShardLocations ok# false data# peer# 2024-11-21T09:16:57.251040Z node 5 :GRPC_SERVER DEBUG: [0x5503f950200] received request Name# DescribeTable ok# false data# peer# 2024-11-21T09:16:57.251061Z node 5 :GRPC_SERVER DEBUG: [0x5503f94c000] received request Name# CreateSnapshot ok# false data# peer# 2024-11-21T09:16:57.251067Z node 5 :GRPC_SERVER DEBUG: [0x5503f953800] received request Name# RefreshSnapshot ok# false data# peer# 2024-11-21T09:16:57.251099Z node 5 :GRPC_SERVER DEBUG: [0x5503a08cc00] received request Name# DiscardSnapshot ok# false data# peer# 2024-11-21T09:16:57.251103Z node 5 :GRPC_SERVER DEBUG: [0x5503f953200] received request Name# List ok# false data# peer# 2024-11-21T09:16:57.251126Z node 5 :GRPC_SERVER DEBUG: [0x5503f975a00] received request Name# RateLimiter/CreateResource ok# false data# peer# 2024-11-21T09:16:57.251137Z node 5 :GRPC_SERVER DEBUG: [0x5503a08ea00] received request Name# RateLimiter/AlterResource ok# false data# peer# 2024-11-21T09:16:57.251153Z node 5 :GRPC_SERVER DEBUG: 
[0x5503a087200] received request Name# RateLimiter/DropResource ok# false data# peer# 2024-11-21T09:16:57.251172Z node 5 :GRPC_SERVER DEBUG: [0x5503f94de00] received request Name# RateLimiter/ListResources ok# false data# peer# 2024-11-21T09:16:57.251182Z node 5 :GRPC_SERVER DEBUG: [0x5503f940c00] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2024-11-21T09:16:57.251199Z node 5 :GRPC_SERVER DEBUG: [0x5503a080600] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2024-11-21T09:16:57.251209Z node 5 :GRPC_SERVER DEBUG: [0x5503a09f800] received request Name# CreateStream ok# false data# peer# 2024-11-21T09:16:57.251232Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a3400] received request Name# ListStreams ok# false data# peer# 2024-11-21T09:16:57.251236Z node 5 :GRPC_SERVER DEBUG: [0x5503f949c00] received request Name# DeleteStream ok# false data# peer# 2024-11-21T09:16:57.251267Z node 5 :GRPC_SERVER DEBUG: [0x5503f973600] received request Name# DescribeStream ok# false data# peer# 2024-11-21T09:16:57.251267Z node 5 :GRPC_SERVER DEBUG: [0x5503f96dc00] received request Name# ListShards ok# false data# peer# 2024-11-21T09:16:57.251295Z node 5 :GRPC_SERVER DEBUG: [0x5503f945400] received request Name# SetWriteQuota ok# false data# peer# 2024-11-21T09:16:57.251330Z node 5 :GRPC_SERVER DEBUG: [0x5503f94cc00] received request Name# PutRecord ok# false data# peer# 2024-11-21T09:16:57.251332Z node 5 :GRPC_SERVER DEBUG: [0x5503f942400] received request Name# UpdateStream ok# false data# peer# 2024-11-21T09:16:57.251360Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a2e00] received request Name# PutRecords ok# false data# peer# 2024-11-21T09:16:57.251371Z node 5 :GRPC_SERVER DEBUG: [0x5503a09f200] received request Name# GetRecords ok# false data# peer# 2024-11-21T09:16:57.251388Z node 5 :GRPC_SERVER DEBUG: [0x5503f97f600] received request Name# GetShardIterator ok# false data# peer# 2024-11-21T09:16:57.251413Z node 5 :GRPC_SERVER DEBUG: [0x5503f965800] received request Name# SubscribeToShard ok# false data# peer# 2024-11-21T09:16:57.251416Z node 5 :GRPC_SERVER DEBUG: [0x5503f96f400] received request Name# DescribeLimits ok# false data# peer# 2024-11-21T09:16:57.251438Z node 5 :GRPC_SERVER DEBUG: [0x5503f95bc00] received request Name# DescribeStreamSummary ok# false data# peer# 2024-11-21T09:16:57.251447Z node 5 :GRPC_SERVER DEBUG: [0x5503f952600] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T09:16:57.251464Z node 5 :GRPC_SERVER DEBUG: [0x5503f94f000] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T09:16:57.251477Z node 5 :GRPC_SERVER DEBUG: [0x5503f94b400] received request Name# UpdateShardCount ok# false data# peer# 2024-11-21T09:16:57.251502Z node 5 :GRPC_SERVER DEBUG: [0x5503f94ea00] received request Name# UpdateStreamMode ok# false data# peer# 2024-11-21T09:16:57.251526Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a2200] received request Name# DeregisterStreamConsumer ok# false data# peer# 2024-11-21T09:16:57.251527Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a0a00] received request Name# RegisterStreamConsumer ok# false data# peer# 2024-11-21T09:16:57.251550Z node 5 :GRPC_SERVER DEBUG: [0x5503a084e00] received request Name# DescribeStreamConsumer ok# false data# peer# 2024-11-21T09:16:57.251558Z node 5 :GRPC_SERVER DEBUG: [0x5503f94ae00] received request Name# ListStreamConsumers ok# false data# peer# 2024-11-21T09:16:57.251574Z node 5 :GRPC_SERVER DEBUG: [0x5503f959800] received request Name# 
AddTagsToStream ok# false data# peer# 2024-11-21T09:16:57.251590Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a0400] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2024-11-21T09:16:57.251603Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a1000] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2024-11-21T09:16:57.251621Z node 5 :GRPC_SERVER DEBUG: [0x5503a0a1600] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T09:16:57.251636Z node 5 :GRPC_SERVER DEBUG: [0x5503f969400] received request Name# MergeShards ok# false data# peer# 2024-11-21T09:16:57.251652Z node 5 :GRPC_SERVER DEBUG: [0x5503f94d800] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T09:16:57.251663Z node 5 :GRPC_SERVER DEBUG: [0x5503a081800] received request Name# SplitShard ok# false data# peer# 2024-11-21T09:16:57.251684Z node 5 :GRPC_SERVER DEBUG: [0x5503f95f200] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T09:16:57.251693Z node 5 :GRPC_SERVER DEBUG: [0x5503a096e00] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T09:16:57.251724Z node 5 :GRPC_SERVER DEBUG: [0x5503f97d800] received request Name# SelfCheck ok# false data# peer# 2024-11-21T09:16:57.251730Z node 5 :GRPC_SERVER DEBUG: [0x5503a095600] received request Name# NodeCheck ok# false data# peer# 2024-11-21T09:16:57.251762Z node 5 :GRPC_SERVER DEBUG: [0x5503f971800] received request Name# CreateSession ok# false data# peer# 2024-11-21T09:16:57.251768Z node 5 :GRPC_SERVER DEBUG: [0x5503f962800] received request Name# DeleteSession ok# false data# peer# 2024-11-21T09:16:57.251801Z node 5 :GRPC_SERVER DEBUG: [0x5503a09c800] received request Name# AttachSession ok# false data# peer# 2024-11-21T09:16:57.251801Z node 5 :GRPC_SERVER DEBUG: [0x5503f97a200] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T09:16:57.251831Z node 5 :GRPC_SERVER DEBUG: [0x5503f974e00] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T09:16:57.251837Z node 5 :GRPC_SERVER DEBUG: [0x5503a09ce00] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T09:16:57.251861Z node 5 :GRPC_SERVER DEBUG: [0x5503f944800] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T09:16:57.251873Z node 5 :GRPC_SERVER DEBUG: [0x5503f978a00] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T09:16:57.251891Z node 5 :GRPC_SERVER DEBUG: [0x5503f96e800] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T09:16:57.251905Z node 5 :GRPC_SERVER DEBUG: [0x5503f96a600] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T09:16:57.251917Z node 5 :GRPC_SERVER DEBUG: [0x5503f979600] received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T09:16:57.251939Z node 5 :GRPC_SERVER DEBUG: [0x5503f96a000] received request Name# RestartTablet ok# false data# peer# 2024-11-21T09:16:57.251945Z node 5 :GRPC_SERVER DEBUG: [0x5503f977200] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T09:16:57.251970Z node 5 :GRPC_SERVER DEBUG: [0x5503f96b800] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T09:16:57.251972Z node 5 :GRPC_SERVER DEBUG: [0x5503f970000] received request Name# DropLogStore ok# false data# peer# 2024-11-21T09:16:57.252001Z node 5 :GRPC_SERVER DEBUG: [0x5503f964000] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T09:16:57.252002Z node 5 :GRPC_SERVER DEBUG: 
[0x5503f97e400] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T09:16:57.252026Z node 5 :GRPC_SERVER DEBUG: [0x5503f972400] received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T09:16:57.252034Z node 5 :GRPC_SERVER DEBUG: [0x5503f97d200] received request Name# DropLogTable ok# false data# peer# 2024-11-21T09:16:57.252057Z node 5 :GRPC_SERVER DEBUG: [0x5503f973000] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T09:16:57.252073Z node 5 :GRPC_SERVER DEBUG: [0x5503f977e00] received request Name# Login ok# false data# peer# 2024-11-21T09:16:57.252099Z node 5 :GRPC_SERVER DEBUG: [0x5503f961600] received request Name# DescribeReplication ok# false data# peer# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 32127, MsgBus: 22058 2024-11-21T09:16:55.275848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659096096754751:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.276259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b47/r3tmp/tmpc4Xl2H/pdisk_1.dat 2024-11-21T09:16:55.326853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32127, node 1 2024-11-21T09:16:55.343283Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.343300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.343302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.343338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22058 2024-11-21T09:16:55.376546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.376581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.377716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:55.407120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.413235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.427275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.444727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.454669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.578252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659096096756311:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.578280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.610950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.666147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.677780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.684900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.691099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.698320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.707371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659096096756829:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.707401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.707460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659096096756834:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.708374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.711111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659096096756836:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":null,"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Aggregate"}],"Node Type":"Aggregate","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1732180615960,"Host":"ghrun-qcxhsi27zq","OutputRows":1,"StartTimeMs":1732180615960,"IngressRows":3,"ComputeTimeUs":12,"NodeId":1,"OutputChannels":[{"ChannelId":2,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":278,"TaskId":2,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":355},{"Tasks":[{"FinishTimeMs":1732180615960,"Host":"ghrun-qcxhsi27zq","OutputRows":1,"StartTimeMs":1732180615960,"IngressRows":3,"ComputeTimeUs":11,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":401,"TaskId":1,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":456}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3}},"Name":"4","Push":{"WaitTimeUs":{"Count":2,"Sum":688,"Max":405,"Min":283},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576},"Tasks":2,"OutputRows":{"Count":2,"Sum":2,"Max":1,"Min":1},"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732180615960,"WaitInputTimeUs":{"Count":2,"Sum":679,"Max":401,"Min":278},"OutputBytes":{"Count":2,"Sum":6,"Max":3,"Min":3},"CpuTimeUs":{"Count":2,"Sum":397,"Max":203,"Min":194},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"WaitTimeUs":{"Count":2,"Sum":692,"Max":407,"Min":285},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node 
Type":"Aggregate-Limit","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":6,"FinishTimeMs":1732180615960,"Host":"ghrun-qcxhsi27zq","ResultRows":1,"ResultBytes":3,"OutputRows":1,"StartTimeMs":1732180615960,"InputRows":2,"ComputeTimeUs":49,"InputChannels":[{"WaitTimeUs":177,"ChannelId":1,"Rows":1,"SrcStageId":0,"Bytes":3},{"ChannelId":2,"Rows":1,"SrcStageId":0,"Bytes":3}],"NodeId":1,"OutputChannels":[{"ChannelId":3,"Rows":1,"DstStageId":0,"Bytes":3}],"WaitInputTimeUs":147,"TaskId":3,"OutputBytes":3}],"PeakMemoryUsageBytes":131072,"CpuTimeUs":355}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":3,"Max":3,"Min":3}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":191,"Max":191,"Min":191},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Tasks":1,"ResultBytes":{"Count":1,"Sum":3,"Max":3,"Min":3},"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":0,"BaseTimeMs":1732180615960,"WaitInputTimeUs":{"Count":1,"Sum":147,"Max":147,"Min":147},"OutputBytes":{"Count":1,"Sum":3,"Max":3,"Min":3},"CpuTimeUs":{"Count":1,"Sum":249,"Max":249,"Min":249},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3}},"Name":"2","Push":{"WaitTimeUs":{"Count":1,"Sum":177,"Max":177,"Min":177},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":38186,"CpuTimeUs":37382},"ProcessCpuTimeUs":71,"TotalDurationUs":40966,"ResourcePoolId":"default" ... node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.470706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.492941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659103591724751:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.492972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.492978Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659103591724756:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.493701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.496574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659103591724758:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":8,"Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","Stats":{"UseLlvm":"undefined","DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"BaseTimeMs":1732180617679,"CpuTimeUs":{"Count":1,"Sum":200,"Max":200,"Min":200},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"11","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":1310,"Max":1310,"Min":1310},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":0,"BaseTimeMs":1732180617679,"WaitInputTimeUs":{"Count":1,"Sum":1291,"Max":1291,"Min":1291},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":186,"Max":186,"Min":186},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":1113,"Max":1113,"Min":1113},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":1274,"Max":1274,"Min":1274},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":2,"StageDurationUs":0,"BaseTimeMs":1732180617679,"WaitInputTimeUs":{"Count":1,"Sum":1238,"Max":1238,"Min":1238},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":95,"Max":95,"Min":95},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"9","Push":{"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"WaitTimeUs":{"Count":1,"Sum":1265,"Max":1265,"Min":1265},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":240,"Max":240,"Min":240},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732180617678,"WaitInputTimeUs":{"Count":1,"Sum":233,"Max":233,"Min":233},"CpuTimeUs":{"Count":1,"Sum":133,"Max":133,"Min":133},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"WaitTimeUs":{"Count":1,"Sum":231,"Max":231,"Min":231},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":395,"Max":395,"Min":395},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":0,"BaseTimeMs":1732180617678,"WaitInputTimeUs":{"Count":1,"Sum":360,"Max":360,"Min":360},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":154,"Max":154,"Min":154},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"2","Push":{"WaitTimeUs":{"Count":1,"Sum":403,"Max":403,"Min":403},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}}}]}}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":27650,"CpuTimeUs":26997},"ProcessCpuTimeUs":144,"TotalDurationUs":32989,"ResourcePoolId":"default","QueuedTimeUs":168},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No 
estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-Cpu":0.186,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-Cpu":0.281,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63911, MsgBus: 19460 2024-11-21T09:16:57.274671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659103303584818:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.274721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a19/r3tmp/tmpHXwHWn/pdisk_1.dat 2024-11-21T09:16:57.330444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63911, node 1 2024-11-21T09:16:57.340500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.340517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.340519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.340559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19460 TClient is connected to server localhost:19460 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:57.376002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.376040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:16:57.377203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.386482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:57.396697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.470076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.490011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.502178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.568990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103303586239:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.569032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.595953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.602163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.608943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.616501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.623377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.630475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.639271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103303586730:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.639311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103303586735:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.639316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.640004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.643634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659103303586737:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:57.869310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180617909, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 24404, MsgBus: 22911 2024-11-21T09:16:58.137655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659105514440856:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.137850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a19/r3tmp/tmpFIViQn/pdisk_1.dat 2024-11-21T09:16:58.145380Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24404, node 2 2024-11-21T09:16:58.154643Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.154656Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.154659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.154712Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22911 TClient is connected to server localhost:22911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.237710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.237741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.238900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.239627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.243407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.253978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.276756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.289276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.418843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659105514442385:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.418883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.423805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.430148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.442426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.449380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.463588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.477490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.485702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659105514442899:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.485726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.485734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659105514442904:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.486324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.490517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659105514442906:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:58.697144Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180618735, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 29705, MsgBus: 62498 2024-11-21T09:16:58.833822Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659105796936889:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.834060Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a19/r3tmp/tmpOi5mxQ/pdisk_1.dat 2024-11-21T09:16:58.842922Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29705, node 3 2024-11-21T09:16:58.852730Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.852749Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.852750Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.852809Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62498 TClient is connected to server localhost:62498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.934005Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.934044Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.935219Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.935938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.939132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.950952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.970167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.979414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.137998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110091905726:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.138044Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.143333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.199016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.212698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.219370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.226259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.233061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.242287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110091906233:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.242312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110091906238:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.242315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.242909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.246155Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659110091906240:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 21622, MsgBus: 24764 2024-11-21T09:16:56.908874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659098292180185:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:56.909127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a86/r3tmp/tmpeDmLSJ/pdisk_1.dat 2024-11-21T09:16:56.962200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21622, node 1 2024-11-21T09:16:56.973377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:56.973388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:56.973390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:56.973422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24764 2024-11-21T09:16:57.010026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.010072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:24764 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:57.011214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.042141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.051989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.113241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:57.132432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.143455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.212093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102587149029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.212132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.242630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.252497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.259354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.273646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.287708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.301499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.317597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102587149534:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.317624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.317676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102587149539:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.318351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.321040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659102587149541:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin 
(MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26775, MsgBus: 15750 2024-11-21T09:16:57.771188Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659102311806695:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.771228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a86/r3tmp/tmpwyJ5Sw/pdisk_1.dat 2024-11-21T09:16:57.779990Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26775, node 2 2024-11-21T09:16:57.789054Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T0 ... :"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, 
+∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} Trying to start YDB, gRPC: 24055, MsgBus: 18739 2024-11-21T09:16:58.480201Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659106625424798:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.480433Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a86/r3tmp/tmpIUPzmb/pdisk_1.dat 2024-11-21T09:16:58.489376Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24055, node 3 2024-11-21T09:16:58.501054Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.501070Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.501071Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.501119Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18739 TClient is connected to server localhost:18739 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.581883Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.581912Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.582702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.582933Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:58.589500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.598635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.617188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.626727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.782843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659106625426342:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.782865Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.786946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.794159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.806566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.813341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.820026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.827404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.843282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659106625426848:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.843325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.843352Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659106625426853:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.844034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.847545Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659106625426855:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:59.053242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.087640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.096400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 24249, MsgBus: 12629 2024-11-21T09:16:52.838893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659080374994365:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.839038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bef/r3tmp/tmpVJXlQZ/pdisk_1.dat 2024-11-21T09:16:52.912478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24249, node 1 2024-11-21T09:16:52.931495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.931507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.931509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.931542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12629 2024-11-21T09:16:52.990623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.990651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:12629 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:16:52.995781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:16:53.010892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.012779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.014355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.081980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.102209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.111508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.255557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084669963208:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.255609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.293778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.348986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084669963722:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.349040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084669963727:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.349042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.349566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084669963729:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:57.839145Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659080374994365:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.839200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4074, MsgBus: 9010 2024-11-21T09:16:57.917651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659103184551966:2062];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bef/r3tmp/tmpejqq4D/pdisk_1.dat 2024-11-21T09:16:57.923502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:57.928127Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4074, node 2 2024-11-21T09:16:57.936389Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.936403Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.936405Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.936451Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9010 TClient is connected to server localhost:9010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.018988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.019014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.019816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.020038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:58.031403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.039802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.058291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.067182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.224836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659107479520790:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... ssionId: ydb://session/3?node_id=2&id=NjFiZWZkMy0yYjEzZmI3ZS1iZDc0MTgxNi1iNGU3NzMwZA==, ActorId: [2:7439659107479521980:2587], ActorState: ExecuteState, TraceId: 01jd705bep2t8979hec457n0sj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:58.717632Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522004:3635], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.717646Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522004:3635], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.718043Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659107479522001:2598], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:58.718132Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODEzNGZjY2MtYWJiNWZiMzktNWI3MDJmMmEtOGYwOWY4N2E=, ActorId: [2:7439659107479521997:2596], ActorState: ExecuteState, TraceId: 01jd705bev77yd3s5cas87tmhf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:58.719216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.726554Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522037:3653], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.726576Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522037:3653], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.726856Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659107479522030:2608], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:58.726928Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWE5MDIzMDgtZTY2MTAyYWMtZDE2OTU3ZTMtZDVhNmRlODA=, ActorId: [2:7439659107479522022:2606], ActorState: ExecuteState, TraceId: 01jd705bf4923g3e1jyh14xe0z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:58.732859Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522055:3658], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.732876Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522055:3658], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.733370Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659107479522051:2617], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:58.733488Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDA3YTRjYTktYjk0NGI4M2ItYTE3MjdhMDgtZjZlY2Y0ZWQ=, ActorId: [2:7439659107479522047:2615], ActorState: ExecuteState, TraceId: 01jd705bfa5pza5rbqrjwg03hx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:58.739928Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522075:3665], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.739943Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439659107479522075:3665], for# user0@builtin, access# DescribeSchema 2024-11-21T09:16:58.740359Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659107479522072:2626], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:58.740458Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDg5YzRlN2QtZWMyODc3ODYtMTIwYmU0NjUtMTYyMGVmNWM=, ActorId: [2:7439659107479522068:2624], ActorState: ExecuteState, TraceId: 01jd705bfh36jfq6t8rryye8tj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15388, MsgBus: 1220 2024-11-21T09:16:59.000919Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659110029189165:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:59.000961Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bef/r3tmp/tmp3gvOvq/pdisk_1.dat 2024-11-21T09:16:59.010839Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15388, node 3 2024-11-21T09:16:59.021178Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:59.021190Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:59.021192Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:59.021229Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1220 TClient is connected to server localhost:1220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:59.102858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.102879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.103720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.103954Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:59.106733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:59.114846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.131350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.142719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.276490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110029190702:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.276513Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.281261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.286914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.296277Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.303172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.310614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.324326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.332763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110029191216:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.332780Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110029191221:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.332788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.333459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.337162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659110029191223:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 7465, MsgBus: 11865 2024-11-21T09:16:57.176935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659104371037638:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.177109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a5d/r3tmp/tmpQWJh25/pdisk_1.dat 2024-11-21T09:16:57.232137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7465, node 1 2024-11-21T09:16:57.243063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.243073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.243076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.243129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11865 2024-11-21T09:16:57.278520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.278549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.279656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.306648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.314071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.376723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:57.441037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.451598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.484760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104371039191:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.484794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.521870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.530154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.539880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.553567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.560076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.567805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.576044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104371039683:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.576083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104371039688:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.576107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.576846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.580458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659104371039690:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:57.776017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18937, MsgBus: 9027 2024-11-21T09:16:58.031203Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659107551701552:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.031370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a5d/r3tmp/tmps3uQqa/pdisk_1.dat 2024-11-21T09:16:58.040111Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18937, node 2 2024-11-21T09:16:58.049640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.049655Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.049657Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.049697Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9027 TClient is connected to server localhost:9027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.131535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.131573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.132659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.133910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.310826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659107551702146:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.310853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659107551702155:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.310861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.311519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.312987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659107551702160:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:16:58.369696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.399521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8504, MsgBus: 22467 2024-11-21T09:16:58.722306Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659109153158565:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.722327Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a5d/r3tmp/tmpQXU8k7/pdisk_1.dat 2024-11-21T09:16:58.731529Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8504, node 3 2024-11-21T09:16:58.742559Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.742575Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.742577Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.742623Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22467 TClient is connected to server localhost:22467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.822686Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.822722Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.823797Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.824467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.026406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659113448126439:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.026429Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.026476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659113448126466:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.027192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.029043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659113448126468:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:16:59.130656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.155212Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659113448126652:2324], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2024-11-21T09:16:59.155291Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGM4ZjAwZGUtZWQ1ZjRiMTgtZTc1NDRkOWYtMzJlZTdmYTY=, ActorId: [3:7439659113448126650:2323], ActorState: ExecuteState, TraceId: 01jd705bwe4max15znpq0542q8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpParams::Decimal+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStandaloneTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.474254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.474273Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.476252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.476327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.476350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2024-11-21T09:16:41.481709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487809Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.504005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.504369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.504423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.505455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.505471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.505475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.506070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.506424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.506440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.506930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.507281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.507831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.508035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.508158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.508202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.508573Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.508613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.508675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.508687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.508690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.508697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.508702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.508710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.508714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.508717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1002 at step: 5000003 2024-11-21T09:16:58.605238Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:58.605260Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 292057778277 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:58.605268Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId#1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2024-11-21T09:16:58.605362Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2024-11-21T09:16:58.605402Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:58.605416Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:16:58.607166Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:58.607178Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:58.607230Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:58.607256Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:58.607259Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T09:16:58.607263Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T09:16:58.607303Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.607311Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:58.607319Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:16:58.607482Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.607498Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.607503Z node 68 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:16:58.607508Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:58.607515Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:58.607693Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.607706Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.607710Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:16:58.607714Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:16:58.607718Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:58.607730Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T09:16:58.608084Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:16:58.608478Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.608781Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.619776Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T09:16:58.619802Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:58.619843Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T09:16:58.619853Z node 68 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:16:58.619964Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2024-11-21T09:16:58.619969Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:58.619983Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2024-11-21T09:16:58.620758Z node 68 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.620878Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.620902Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.620911Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:16:58.620924Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:16:58.620927Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:16:58.620931Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T09:16:58.620945Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:360:2340] message: TxId: 1002 2024-11-21T09:16:58.620950Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:16:58.620955Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:16:58.620958Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:16:58.620984Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:58.621480Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:16:58.621496Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:361:2341] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:16:58.621613Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:58.621682Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 77us result status StatusSuccess 2024-11-21T09:16:58.621809Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2024-11-21T09:16:34.130804Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.130810Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.130813Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.130895Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.132049Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.132090Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132176Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.132321Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132355Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.132392Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.132414Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:16:34.132549Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132552Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132554Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.132592Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:16:34.132709Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.132742Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132777Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.132823Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.132843Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.132876Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.132889Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:16:34.133163Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.133166Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.133169Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.133222Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.133331Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.133382Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.133425Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.133679Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.133761Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.133805Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.133813Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:16:34.134019Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.134022Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.134025Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.134080Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.134166Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.134186Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.134257Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.135869Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.137214Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.137273Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T09:16:34.137287Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:16:34.137521Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.137525Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.137545Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.137597Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.137696Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.137735Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.137766Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.137831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.137849Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.137875Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.137885Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:16:34.138024Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138028Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138031Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.138077Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.138205Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.138263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138298Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.138357Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138378Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.138400Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.138405Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:16:34.138611Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138614Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.138616Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.138686Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:16:34.139085Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.139119Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.139160Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.139357Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.139417Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.139456Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.139463Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:16:34.139654Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.139658Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.139660Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.139716Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:16:34.139865Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:16:34.139920Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.139952Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:16:34.140623Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.140655Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:16:34.140666Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:16:34.140671Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:16:34.163333Z :ReadSession INFO: Random seed for debugging is 1732180594163326 2024-11-21T09:16:34.278721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659004942130917:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.278754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:34.286426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659004206701523:2262];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.315312Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/bui ... 
423-d88e7c1d-c7087ec8-de95dd9c_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 1 } 2024-11-21T09:16:48.236182Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0] Write session: acknoledged message 1 2024-11-21T09:16:48.236278Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1732180608234 CreateTimestampMS: 1732180608233 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 2 } 2024-11-21T09:16:48.236314Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 4 from offset3 2024-11-21T09:16:48.236326Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 95b0bc29-2e723b84-769e3110-290de9dd has messages 1 2024-11-21T09:16:48.236361Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 read done: guid# 95b0bc29-2e723b84-769e3110-290de9dd, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2024-11-21T09:16:48.236378Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 response to read: guid# 95b0bc29-2e723b84-769e3110-290de9dd 2024-11-21T09:16:48.236506Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 Process answer. Aval parts: 0 2024-11-21T09:16:48.236678Z :DEBUG: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:48.236725Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T09:16:48.236749Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T09:16:48.233000Z WriteTime: 2024-11-21T09:16:48.234000Z Ip: "ipv6:[::1]:55512" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55512" } } } 2024-11-21T09:16:48.236834Z :DEBUG: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] [dc1] Commit offsets [2, 3). 
Partition stream id: 1 2024-11-21T09:16:48.236852Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T09:16:48.236881Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 got read request: guid# fc0ae517-88cf07ea-18459fa9-d5852de5 2024-11-21T09:16:48.236941Z :DEBUG: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:16:48.237086Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2024-11-21T09:16:48.237172Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2024-11-21T09:16:48.237292Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:16:48.237306Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:16:48.237342Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_15978456524826038295_v1 2024-11-21T09:16:48.237378Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:16:48.238448Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:16:48.238465Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:16:48.238465Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T09:16:48.238541Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2024-11-21T09:16:48.238564Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2024-11-21T09:16:48.238582Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2024-11-21T09:16:48.238782Z :DEBUG: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2024-11-21T09:16:48.333526Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0] Write session will now close 2024-11-21T09:16:48.333569Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0] Write session: aborting 
2024-11-21T09:16:48.333762Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:16:48.333772Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0] Write session: destroy 2024-11-21T09:16:48.333953Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0 grpc read done: success: 0 data: 2024-11-21T09:16:48.333980Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0 grpc read failed 2024-11-21T09:16:48.333987Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0 grpc closed 2024-11-21T09:16:48.333993Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|15724423-d88e7c1d-c7087ec8-de95dd9c_0 is DEAD 2024-11-21T09:16:48.334261Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:16:48.334409Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:16:48.334434Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659065071675749:2605] destroyed 2024-11-21T09:16:48.334447Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:16:49.364094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:16:49.364114Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:50.995940Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:16:51.007693Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T09:16:55.996189Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:16:58.236583Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T09:16:58.334048Z :INFO: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] Closing read session. Close timeout: 0.000000s 2024-11-21T09:16:58.334089Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T09:16:58.334104Z :INFO: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16333 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:16:58.334144Z :NOTICE: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:16:58.334167Z :DEBUG: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] [dc1] Abort session to cluster 2024-11-21T09:16:58.334548Z :NOTICE: [/Root] [/Root] [73a5b0d0-fdfee1c-ba36661e-c51127e4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:16:58.334752Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 grpc read done: success# 0, data# { } 2024-11-21T09:16:58.334773Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 grpc read failed 2024-11-21T09:16:58.334781Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 grpc closed 2024-11-21T09:16:58.334801Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15978456524826038295_v1 is DEAD 2024-11-21T09:16:58.335099Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659039301871448:2502] disconnected; active server actors: 1 2024-11-21T09:16:58.335108Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659039301871448:2502] client user disconnected session shared/user_1_1_15978456524826038295_v1 2024-11-21T09:16:58.335511Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:16:58.335529Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_15978456524826038295_v1 2024-11-21T09:16:58.335548Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659039301871451:2505] destroyed 2024-11-21T09:16:58.335571Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_15978456524826038295_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.469390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.469416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.469421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.469425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.469438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.469441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.469449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.469517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.480447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.480464Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.482503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.482591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.482619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.485126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.485194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.485292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.485520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.486105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487708Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.504989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.505057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505104Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.505160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.505168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.505862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.505874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.505879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.506344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.506857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.506890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.507547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.508025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.508070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.508273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2024-11-21T09:16:41.508298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.508388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.508431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.508895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.508934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.509006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509024Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.509033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.509035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.509039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.509043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.509046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.509048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.509058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.509061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.509064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
EBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1002 at step: 5000003 2024-11-21T09:16:58.589664Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:58.589687Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 292057778277 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:58.589695Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId#1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2024-11-21T09:16:58.589750Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2024-11-21T09:16:58.589784Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:58.589796Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:16:58.590399Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:58.590409Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:58.590466Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:16:58.590496Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:58.590502Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T09:16:58.590507Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T09:16:58.590554Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.590560Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId#1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:16:58.590568Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId#1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:16:58.590720Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.590734Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 
PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.590742Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:16:58.590747Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:16:58.590752Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:16:58.590916Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.590928Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.590932Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:16:58.590936Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:16:58.590940Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:16:58.590950Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T09:16:58.591327Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:16:58.592957Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.593230Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:16:58.604048Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T09:16:58.604075Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:58.604104Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T09:16:58.604119Z node 68 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:16:58.604274Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2024-11-21T09:16:58.604281Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:16:58.604297Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, 
message: Origin: 72075186233409546 TxId: 1002 2024-11-21T09:16:58.605256Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.605420Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.605451Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:16:58.605462Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:16:58.605482Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:16:58.605487Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:16:58.605495Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T09:16:58.605514Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:360:2340] message: TxId: 1002 2024-11-21T09:16:58.605522Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:16:58.605530Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:16:58.605535Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:16:58.605601Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:16:58.606115Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:16:58.606131Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:361:2341] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:16:58.606251Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:16:58.606324Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 80us result status StatusSuccess 2024-11-21T09:16:58.606469Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 14178, MsgBus: 5935 2024-11-21T09:16:55.612285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659096302511059:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.612311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b1b/r3tmp/tmpEPBvNn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14178, node 1 2024-11-21T09:16:55.680328Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:55.682465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.682475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.682477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.682510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5935 2024-11-21T09:16:55.713159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.713189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.714279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.746521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.758662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.772844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.791230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.800821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.908495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659096302512405:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.908530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.939384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.947006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.958070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.971791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.985523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.992373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.004980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659100597480205:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.005022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.005029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659100597480210:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.005708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:56.012835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659100597480212:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20936, MsgBus: 16611 2024-11-21T09:16:56.507854Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659100709577590:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:56.508150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b1b/r3tmp/tmpcDP0ng/pdisk_1.dat 2024-11-21T09:16:56.518911Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20936, node 2 2024-11-21T09:16:56.525678Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:56.525690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:56.525692Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:56.525741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16611 TClient is connected to server localhost:16611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:56.609719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:56.609763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:56.610511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.612193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:56.612617Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.622792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.634276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:56.653643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.665396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.876681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659100709579156:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.876710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.881660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.888803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.902512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.909324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.924151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.938224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.953798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659100709579646:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.953852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.953874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659100709579651:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.954578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:56.957054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659100709579653:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:57.157828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10473, MsgBus: 29912 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b1b/r3tmp/tmpEwG6BT/pdisk_1.dat 2024-11-21T09:16:57.428356Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 10473, node 3 2024-11-21T09:16:57.432449Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.435327Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.435343Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.435345Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.435392Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29912 TClient is connected to server localhost:29912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.514912Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.514943Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.516032Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:57.517783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.524178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.533825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:57.551830Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.561752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.776165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659103743433888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.776201Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.781936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.789949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.798560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.805504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.860810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.868326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.876350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659103743434383:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.876376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.876413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659103743434388:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.877025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.881467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659103743434390:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::RowsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 1265, MsgBus: 26310 2024-11-21T09:16:54.437324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659092019015652:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.437450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b73/r3tmp/tmpMOF27W/pdisk_1.dat 2024-11-21T09:16:54.493618Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1265, node 1 2024-11-21T09:16:54.509614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.509627Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.509629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.509660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26310 2024-11-21T09:16:54.536884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.536911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.537991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.553404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.563953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.626011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:54.642378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.652474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.741762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659092019017048:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.741789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.775013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.781959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.797795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.809696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.864157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.872794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.880605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659092019017563:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.880627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.880647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659092019017568:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.881249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.885633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659092019017570:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 11607, MsgBus: 25385 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b73/r3tmp/tmpe8FHkv/pdisk_1.dat 2024-11-21T09:16:55.303158Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:55.303289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 11607, node 2 2024-11-21T09:16:55.313095Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.313110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.313112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.313150Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25385 TClient is connected to server localhost:25385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.396536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.396568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.396866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.397600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.399562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.411719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.429139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
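An editorial note on the recurring WorkloadService lines above: TPoolFetcherActor reports NOT_FOUND for the default resource pool, an ESchemeOpCreateResourcePool suboperation is proposed, and TPoolCreatorActor then schedules a retry once the transaction completes ("doublechecking"), i.e. a create-if-missing loop followed by a recheck. The sketch below is only an editorial paraphrase of that control flow under that reading of the log; none of the names in it are real YDB identifiers.

    #include <cstdio>

    // Illustrative only: the create-if-missing / recheck loop suggested by the
    // WorkloadService log lines. All names here are assumptions, not YDB code.
    enum class EFetchStatus { Found, NotFound };

    EFetchStatus FetchDefaultPool() { return EFetchStatus::NotFound; }  // first attempt fails
    void ProposeCreateDefaultPool() { std::puts("propose ESchemeOpCreateResourcePool"); }
    bool CreateTxCompleted() { return true; }

    int main() {
        if (FetchDefaultPool() == EFetchStatus::NotFound) {
            ProposeCreateDefaultPool();
            if (CreateTxCompleted()) {
                // The real service logs "Scheduled retry for error: ... doublechecking"
                // and fetches the pool again after the create transaction lands.
                std::puts("schedule retry: doublecheck that the pool is now visible");
            }
        }
        return 0;
    }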
2024-11-21T09:16:55.440395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.607740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659094960271116:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.607780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.612096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.618105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.628407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.635287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.642045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.649310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.658404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659094960271617:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.658437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.658443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659094960271622:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.659004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.662490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659094960271624:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 29955, MsgBus: 26475 2024-11-21T09:16:55.984072Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659095367780573:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.984345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b73/r3tmp/tmpJzE8VL/pdisk_1.dat 2024-11-21T09:16:55.995583Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29955, node 3 2024-11-21T09:16:56.004715Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:56.004730Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:56.004732Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:56.004786Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26475 TClient is connected to server localhost:26475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:56.084193Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:56.084241Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:56.085315Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:56.087056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.095567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.104981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:56.126309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.137800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.302424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659099662749412:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.302449Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.308935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.315665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.328854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.343138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.357087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.371058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.386189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659099662749915:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.386221Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.386224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659099662749920:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.386976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:56.390386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659099662749922:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 6069, MsgBus: 61384 2024-11-21T09:16:52.764665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659082788761894:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d4d/r3tmp/tmpeBXBFt/pdisk_1.dat 2024-11-21T09:16:52.860050Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.863789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.863820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6069, node 1 2024-11-21T09:16:52.865645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:52.915753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.915774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.915776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.915808Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61384 TClient is connected to server localhost:61384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.017100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.027169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:16:53.092545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.112311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:53.122789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.166026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087083730533:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.166060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.293855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.305207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.325365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.340647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.354392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087083731047:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.354409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.354463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087083731052:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.354969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.359316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659087083731054:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.571622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.157335Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjNhMzk4ZDEtNmE2NGE5OTMtOTNlMmU5NTEtNGVhODFlMDU=, ActorId: [1:7439659091378699487:2560], ActorState: ExecuteState, TraceId: 01jd7056x29e5zatdneytaa6bv, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001703 > 50331648), code: 2013 Trying to start YDB, gRPC: 11819, MsgBus: 6034 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d4d/r3tmp/tmpzWBPim/pdisk_1.dat 2024-11-21T09:16:54.499397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:54.505459Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11819, node 2 2024-11-21T09:16:54.525830Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.525842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.525843Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.525881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6034 TClient is connected to server localhost:6034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.595450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.595481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.596611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.598340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.603171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.614177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.631874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
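For context on the KqpLimits::ReplySizeExceeded failure above: the message reports a result of 80001703 bytes against a cap of 50331648 bytes, and 50331648 is exactly 48 MiB (48 * 1024 * 1024). The snippet below is a minimal, self-contained sketch of that kind of size gate using the two figures from the log; the constant and function names are editorial assumptions, not the actual YDB implementation.

    #include <cstdint>
    #include <cstdio>

    // Illustrative only: a reply-size gate matching the numbers in the log.
    // kResultSizeLimitBytes mirrors the 50331648 (48 MiB) figure; the name is
    // an assumption, not a YDB identifier.
    constexpr std::uint64_t kResultSizeLimitBytes = 48ull * 1024 * 1024;  // 50331648

    bool ReplyFitsLimit(std::uint64_t replySizeBytes) {
        return replySizeBytes <= kResultSizeLimitBytes;
    }

    int main() {
        const std::uint64_t observed = 80001703;  // size reported by the failing query
        if (!ReplyFitsLimit(observed)) {
            std::printf("Query result size limit exceeded. (%llu > %llu)\n",
                        static_cast<unsigned long long>(observed),
                        static_cast<unsigned long long>(kResultSizeLimitBytes));
        }
        return 0;
    }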
2024-11-21T09:16:54.642473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.809067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088546104525:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.809089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.813626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.872618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.879093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.886570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.893013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.900999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.917014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088546105028:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.917040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.917056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088546105033:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.917733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.920592Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659088546105035:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.090137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 28029, MsgBus: 9088 2024-11-21T09:16:55.406116Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659094044233219:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.406348Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d4d/r3tmp/tmpEKtU3f/pdisk_1.dat 2024-11-21T09:16:55.415134Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28029, node 3 2024-11-21T09:16:55.424086Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.424101Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.424103Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.424147Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9088 TClient is connected to server localhost:9088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.506294Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.506330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.507367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.508125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.509133Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:55.512008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:55.524569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.543479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.553098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.744722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094044234772:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.744749Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.750619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.758060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.768465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.782800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.789407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.803833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.812017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094044235263:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.812043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094044235268:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.812049Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.812678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.816493Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659094044235270:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.986206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.744965Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2FmM2ZmN2ItYzNjNzA2MmYtNzdiMDQ2M2UtMjg3ZDk0N2M=, ActorId: [3:7439659094044235554:2454], ActorState: ExecuteState, TraceId: 01jd7059ffbpf2dzdqg36jjq9y, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 25097, MsgBus: 61080 2024-11-21T09:16:52.766609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659082426478878:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.766625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ba1/r3tmp/tmp6OPcCn/pdisk_1.dat 2024-11-21T09:16:52.857353Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.870520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.870539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.874917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25097, node 1 2024-11-21T09:16:52.914486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.914495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.914496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.914514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61080 TClient is connected to server localhost:61080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.014078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.024325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.089114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.110553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.120434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.158380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086721447723:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.158418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.310626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086721448237:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.352868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.352888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086721448242:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.353439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.359239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659086721448244:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:53.574853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.158592Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659091016416717:2560] TxId: 281474976710672. Ctx: { TraceId: 01jd7056zk8tcnv08415y1xd6c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTg5YzZiZGQtZGM5NmMxODYtNjhlYjY0YjctNTA4OTBjODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2024-11-21T09:16:54.160094Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTg5YzZiZGQtZGM5NmMxODYtNjhlYjY0YjctNTA4OTBjODY=, ActorId: [1:7439659091016416699:2560], ActorState: ExecuteState, TraceId: 01jd7056zk8tcnv08415y1xd6c, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 14900, MsgBus: 29298 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ba1/r3tmp/tmpvQfInF/pdisk_1.dat 2024-11-21T09:16:54.507954Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:54.508001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14900, node 2 2024-11-21T09:16:54.518444Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.518455Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.518458Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.518486Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29298 TClient is connected to server localhost:29298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.599958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.599994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.600250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.601233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.605786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.613959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.632811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
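The PRECONDITION_FAILED entries above come from a test that lowers the per-query memory limit to 1024 bytes, so the executer rejects the request as soon as it needs more than that at WaitResolveState. A minimal sketch of such a guard follows; the names and the exact check are assumptions chosen for illustration and do not correspond to YDB internals.

    #include <cstdint>
    #include <cstdio>

    // Illustrative only: reject a request whose memory demand exceeds a
    // deliberately tiny per-query limit, as in the log above.
    constexpr std::uint64_t kQueryMemoryLimitBytes = 1024;  // value reported by the log

    bool AdmitQuery(std::uint64_t requiredBytes) {
        if (requiredBytes > kQueryMemoryLimitBytes) {
            std::printf("Memory limit exception, current limit is %llu bytes.\n",
                        static_cast<unsigned long long>(kQueryMemoryLimitBytes));
            return false;  // caller would answer with PRECONDITION_FAILED
        }
        return true;
    }

    int main() {
        AdmitQuery(4096);  // any realistic request dwarfs a 1 KiB budget
        return 0;
    }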
2024-11-21T09:16:54.642512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.824149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088211022707:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.824188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.826911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.833740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.845237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.859450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.872604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.879197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.889237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088211023209:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.889268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.889320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659088211023214:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.889949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.892262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659088211023216:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.057580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.092019Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2FmNWRjYTMtNWYwMGM0MzctMzdiZmEwNDgtNmEwNzRkNTU=, ActorId: [2:7439659092505991103:2479], ActorState: ExecuteState, TraceId: 01jd7057x9b0vehh5m9yjtyx4k, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:915: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:915: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 7688, MsgBus: 14521 2024-11-21T09:16:55.428046Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659094859188966:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.428090Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001ba1/r3tmp/tmpTeG250/pdisk_1.dat 2024-11-21T09:16:55.440413Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7688, node 3 2024-11-21T09:16:55.450477Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.450492Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.450494Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.450551Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14521 TClient is connected to server localhost:14521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.528366Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.528416Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.529453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.532255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.538642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.547407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:55.565965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.576148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.719063Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094859190502:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.719105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.723576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.729793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.740935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.746905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.754203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.761478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.770087Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094859191014:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.770111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.770236Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659094859191019:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.770940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.774517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659094859191021:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:55.981336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.343244Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWI2ZjA2YjItZTM0ZGY2OWYtNzFmNDNmNGItZDhkMzIzNWY=, ActorId: [3:7439659103449127464:2681], ActorState: ExecuteState, TraceId: 01jd7059vec4gywgsevkm2va8c, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003976 > 50331648), code: 2013 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 30043, MsgBus: 10763 2024-11-21T09:16:57.374250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659102243123150:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.374308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001987/r3tmp/tmp4lMQ6c/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30043, node 1 2024-11-21T09:16:57.433763Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.437230Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.437242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.437243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.437272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10763 2024-11-21T09:16:57.474957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.474981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.476078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.495964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.504148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.563826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
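The "Query result size limit exceeded. (200003976 > 50331648), code: 2013" entry at the top of the block above compares the produced result size with the per-query cap: 50331648 bytes is exactly 48 MiB (48 * 1024 * 1024), while the result was 200003976 bytes, roughly 190.7 MiB, about four times the limit. A small sketch that decodes such "(actual > limit)" pairs into MiB; the regex only assumes the message shape seen here:

    import re

    MSG = "Query result size limit exceeded. (200003976 > 50331648), code: 2013"

    def decode(msg: str) -> str:
        m = re.search(r"\((\d+) > (\d+)\)", msg)
        if not m:
            return "no size pair found"
        actual, limit = (int(g) for g in m.groups())
        mib = 1024 * 1024
        return (f"result {actual / mib:.1f} MiB exceeds limit {limit / mib:.1f} MiB "
                f"({actual / limit:.1f}x)")

    print(decode(MSG))  # result 190.7 MiB exceeds limit 48.0 MiB (4.0x)
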
2024-11-21T09:16:57.580918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.591187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.665561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102243124698:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.665591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.690194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.697199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.707080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.761619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.770154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.777317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.786298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102243125205:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.786316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102243125210:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.786327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.787165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.790734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659102243125212:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 2116, MsgBus: 17059 2024-11-21T09:16:58.262284Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659108615906586:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.262567Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001987/r3tmp/tmp66ntns/pdisk_1.dat 2024-11-21T09:16:58.275514Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2116, node 2 2024-11-21T09:16:58.284248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.284269Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.284271Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.284325Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17059 TClient is connected to server localhost:17059 WaitRootIsUp 'Root'... 
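The JSON blob above is the explain plan printed by the KqpExplain tests: a tree of nodes linked through "Plans", each carrying a "Node Type" and optional "Operators", plus a top-level "tables" section that flattens the reads (here the FullScan over /Root/EightShard). A minimal sketch that walks such a plan and flags full scans, assuming the JSON has been saved to plan.json; the key names are taken from the plan text above, everything else is illustrative:

    import json

    def walk(node, depth=0):
        """Recursively print the 'Node Type' of each node in a KQP explain plan subtree."""
        if not isinstance(node, dict):
            return
        node_type = node.get("Node Type")
        if node_type:
            print("  " * depth + node_type)
        for child in node.get("Plans", []):
            walk(child, depth + 1)

    with open("plan.json", encoding="utf-8") as f:
        plan = json.load(f)

    walk(plan["Plan"])

    # The flattened read summary sits next to the plan tree.
    for table in plan.get("tables", []):
        for read in table.get("reads", []):
            if read.get("type") == "FullScan":
                print(f"full scan: {table['name']} columns={read.get('columns')}")
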
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.362321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.362348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-1 ... lId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 14553, MsgBus: 3441 2024-11-21T09:16:58.946672Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659106531102055:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.946850Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001987/r3tmp/tmpi4YfcI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14553, node 3 2024-11-21T09:16:58.962051Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:58.964024Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.964045Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.964047Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.964095Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3441 TClient is connected to server localhost:3441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:59.046930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.046972Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.047996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:59.048671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.058978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.067482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.086723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.096696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.265077Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110826070897:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.265112Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.268981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.274965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.281879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.288963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.296326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.302695Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.312044Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110826071397:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.312075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.312094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110826071402:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.312689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.316838Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659110826071404:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:59.472159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.501046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.507259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["id"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["id"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> 
TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2024-11-21T09:16:25.218713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658964769177645:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.218887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3e/r3tmp/tmpZM2pii/pdisk_1.dat 2024-11-21T09:16:25.287592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21551, node 1 2024-11-21T09:16:25.340189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.340223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.340225Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.340254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:25.349655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.349683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.350765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.425458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.477155Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:25.485422Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****4EhQ (4AEE5A52) () has now valid token of user1 2024-11-21T09:16:25.697858Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439658964148674082:2124];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:25.697988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3e/r3tmp/tmpDqhl00/pdisk_1.dat 2024-11-21T09:16:25.709944Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26049, node 2 2024-11-21T09:16:25.725536Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:25.725554Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:25.725558Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:25.725605Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:25.797691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:25.797722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:25.798783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:25.800544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:25.801716Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:25.952902Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:25.957316Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****k30A (26EB6AA0) () has now valid token of user1 2024-11-21T09:16:26.195069Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439658969763042231:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:26.195220Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3e/r3tmp/tmpHo8VGm/pdisk_1.dat 2024-11-21T09:16:26.204976Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12959, node 3 2024-11-21T09:16:26.220837Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:26.220853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:26.220854Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:26.220900Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:26.297381Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:26.297419Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:16:26.297868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:26.298383Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:26.371741Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:26.375165Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****wYiw (62B7E661) () has now valid token of user1 2024-11-21T09:16:26.375316Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:30.197095Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661) 2024-11-21T09:16:30.197161Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****wYiw (62B7E661) () has now valid token of user1 2024-11-21T09:16:31.195493Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439658969763042231:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:31.195537Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:16:35.199331Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661) 2024-11-21T09:16:35.199382Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****wYiw (62B7E661) () has now valid token of user1 2024-11-21T09:16:36.375585Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:39.201121Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661) 2024-11-21T09:16:39.201160Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****wYiw (62B7E661) () has now valid token of user1 2024-11-21T09:16:41.203541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:16:41.203553Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:43.202918Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661) 2024-11-21T09:16:43.202979Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****wYiw (62B7E661) () has now valid token of user1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3e/r3tmp/tmp9YNejB/pdisk_1.dat 2024-11-21T09:16:46.573223Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:46.574660Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 29121, node 4 2024-11-21T09:16:46.584283Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.584294Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.584295Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.584334Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15360 WaitRootIsUp 'Root'... 
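The TICKET_PARSER entries above trace the login-ticket lifecycle exercised by TTicketParserTest: tokens are logged masked (eyJh****wYiw), each "Refreshing ticket" is followed by a fresh "has now valid token" entry, and on node 4 a later refresh downgrades the ticket to the permanent "User not found" error. From the node 3 timestamps the refreshes land roughly every 4-5 seconds. A small sketch that recovers those intervals from such lines; the sample strings are copied from the log, the parsing itself is only illustrative:

    from datetime import datetime

    LINES = [
        "2024-11-21T09:16:30.197095Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661)",
        "2024-11-21T09:16:35.199331Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661)",
        "2024-11-21T09:16:39.201121Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661)",
        "2024-11-21T09:16:43.202918Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wYiw (62B7E661)",
    ]

    def refresh_intervals(lines):
        # Timestamps look like 2024-11-21T09:16:30.197095Z; drop the trailing 'Z' for fromisoformat.
        stamps = [datetime.fromisoformat(line.split()[0].rstrip("Z"))
                  for line in lines if "Refreshing ticket" in line]
        return [(b - a).total_seconds() for a, b in zip(stamps, stamps[1:])]

    print(refresh_intervals(LINES))  # approximately [5.0, 4.0, 4.0]
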
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.662233Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.662280Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:46.663459Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:46.664745Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.670190Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:46.724637Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:46.728760Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****IXyA (5C9ABB11) () has now valid token of user1 2024-11-21T09:16:46.728895Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:50.564474Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****IXyA (5C9ABB11) 2024-11-21T09:16:50.564499Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****IXyA (5C9ABB11) () has now permanent error message 'User not found' 2024-11-21T09:16:53.566249Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****IXyA (5C9ABB11) 2024-11-21T09:16:56.887963Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659097428272550:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:56.888193Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003a3e/r3tmp/tmpQiukFm/pdisk_1.dat 2024-11-21T09:16:56.900128Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13229, node 5 2024-11-21T09:16:56.915272Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:56.915289Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:56.915293Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:56.915346Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:56.990875Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:56.990907Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:56.991300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.991750Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:57.031992Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T09:16:57.037901Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 13829, MsgBus: 9289 2024-11-21T09:16:57.197453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659104477559935:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.197513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4e/r3tmp/tmpXXh9dN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13829, node 1 2024-11-21T09:16:57.255917Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.260255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.260264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.260266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.260298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9289 2024-11-21T09:16:57.297133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.297177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.298262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9289 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.332890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.338102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.400093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:57.417770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.427689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.542201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104477561471:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.542234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.575318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.582169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.595077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.602345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.656952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.665368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.673931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104477561986:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.673953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659104477561991:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.673957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.674542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.678239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659104477561993:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 3646, MsgBus: 17331 2024-11-21T09:16:58.065421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659105692584486:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.065579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4e/r3tmp/tmpZTKJyL/pdisk_1.dat 2024-11-21T09:16:58.073306Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3646, node 2 2024-11-21T09:16:58.082731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.082746Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.082747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.082780Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17331 TClient is connected to server localhost:17331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.165940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.165972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.167105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.167797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.177050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.187175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.203414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.214059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.360179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659105692586022:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.360240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.364016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.419276Z node 2 :FLAT_TX_SCHEMESHARD W ... bleRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle","KeyColumns":["Value"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":12}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":14}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Value"],"scan_by":["Key (20, 120]"],"type":"Scan"},{"columns":["Value"],"scan_by":["Key [10, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [10, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"},{"PlanNodeId":12,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_4","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_0","PlanNodeType":"ResultSet"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":24,"Plans":[{"PlanNodeId":26,"Plans":[{"PlanNodeId":27,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (20, 120]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node 
Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":28,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":37,"Plans":[{"PlanNodeId":38,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":39,"Plans":[{"PlanNodeId":40,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_1","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9155, MsgBus: 62749 2024-11-21T09:16:58.973670Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659107479201690:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.973971Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4e/r3tmp/tmpY4gGA6/pdisk_1.dat 2024-11-21T09:16:58.983424Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9155, node 3 2024-11-21T09:16:58.993291Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.993328Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.993331Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.993374Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62749 TClient is connected to server localhost:62749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:59.074233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.074259Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.075325Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:59.076630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.080477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.094420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.111437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.121900Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.262966Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659111774170524:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.262999Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.266795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.321649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.331491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.338119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.344929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.352297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.360336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659111774171039:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.360366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659111774171044:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.360367Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.360972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.365298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659111774171046:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27512, MsgBus: 2653 2024-11-21T09:16:57.322239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659103307117027:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.322291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00199e/r3tmp/tmp34iUXy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27512, node 1 2024-11-21T09:16:57.393819Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.398686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.398698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.398700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.398730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2653 2024-11-21T09:16:57.423490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.423508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.424420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.457369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.460950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:57.471385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
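
The repeated "Resource pool default not found or you don't have access permissions" warnings in these test logs appear during server bootstrap: the workload service's TPoolFetcherActor looks up the default pool before it has been created, the schemeshard then logs an ESchemeOpCreateResourcePool suboperation, and the TPoolCreatorActor logs a scheduled retry while doublechecking the create transaction. For reference, a minimal sketch of the pool DDL, assuming YDB's CREATE RESOURCE POOL syntax and with setting names that are illustrative assumptions rather than values taken from this log:

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,   -- assumed setting name
        QUEUE_SIZE = 100               -- assumed setting name
    );
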
2024-11-21T09:16:57.534062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:57.551990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.564966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.648860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103307118421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.648915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.654502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.659818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.672955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.678528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.686214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.693368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.701687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103307118913:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.701709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.701712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659103307118918:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.702290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.706200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659103307118920:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1513, MsgBus: 64321 2024-11-21T09:16:58.190585Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659107602753174:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.190623Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00199e/r3tmp/tmpHDN51Q/pdisk_1.dat 2024-11-21T09:16:58.197498Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1513, node 2 2024-11-21T09:16:58.206644Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.206664Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.206666Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.206725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64321 TClient is connected to server localhost:64321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.291182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.291206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.292344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.292649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.304833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.314284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.332954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.345519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.508528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659107602754721:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.508564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.512781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... , DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.619027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659107602755231:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.619035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.619715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.623250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659107602755233:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 7929, MsgBus: 6767 2024-11-21T09:16:59.084220Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659112035159278:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:59.084251Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00199e/r3tmp/tmpOOmMjy/pdisk_1.dat 2024-11-21T09:16:59.094808Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7929, node 3 2024-11-21T09:16:59.102906Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:59.102920Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:59.102923Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:59.102969Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6767 TClient is connected to server localhost:6767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:59.184616Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.184644Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.185882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:59.186455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.192426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.200631Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:59.221385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.232031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.368397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659112035160821:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.368428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.372302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.378356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.387171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.393985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.448884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.457387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.465935Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659112035161337:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.465956Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.465963Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659112035161342:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.466648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.470556Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659112035161344:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:59.659825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.746592Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659112035161784:2485], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T09:16:59.746707Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg3OTc4N2YtMjY0ZjEzODctZmI4MDEzMmItYmY3Yzk1YzA=, ActorId: [3:7439659112035161628:2454], ActorState: ExecuteState, TraceId: 01jd705cez4xc3f9qbrq9r5e02, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:59.758523Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg3OTc4N2YtMjY0ZjEzODctZmI4MDEzMmItYmY3Yzk1YzA=, ActorId: [3:7439659112035161628:2454], ActorState: ExecuteState, TraceId: 01jd705cf376je8nrw8dz65hzw, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T09:16:59.762090Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659112035161810:2491], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:59.762199Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg3OTc4N2YtMjY0ZjEzODctZmI4MDEzMmItYmY3Yzk1YzA=, ActorId: [3:7439659112035161628:2454], ActorState: ExecuteState, TraceId: 01jd705cffbykchhc5nf8yv089, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:59.764646Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659112035161819:2495], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:59.764724Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg3OTc4N2YtMjY0ZjEzODctZmI4MDEzMmItYmY3Yzk1YzA=, ActorId: [3:7439659112035161628:2454], ActorState: ExecuteState, TraceId: 01jd705cfkexkja6sxny856s4k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23441, MsgBus: 4376 2024-11-21T09:16:57.396585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659101141197862:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.396606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001984/r3tmp/tmpIqxMHg/pdisk_1.dat 2024-11-21T09:16:57.471485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23441, node 1 2024-11-21T09:16:57.483337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.483349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.483351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.483380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:57.498009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.498047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.499098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4376 TClient is connected to server localhost:4376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.542495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
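
The KqpParams::Decimal failures above are YQL compile errors of two kinds: an out-of-range precision in a parameter declaration ("Invalid decimal precision: 99") and an attempt to write Decimal(35,10) values into a Decimal(22,9) column ("Failed to convert 'Value22' ... code: 2031"). The queries themselves are not included in the log; a minimal YQL sketch that would trigger both messages, using only the Key/Value22/Value35 column names and decimal types from the errors and an assumed table path, looks like:

    -- Hypothetical reconstruction; column names and decimal types come from the log, everything else is assumed.
    DECLARE $bad AS Decimal(99, 9);    -- precision above the supported maximum -> "Invalid decimal precision: 99"
    SELECT $bad;

    UPSERT INTO `/Root/DecimalTest` (Key, Value22, Value35) VALUES            -- `/Root/DecimalTest` is an assumed path
        (1, Decimal("1.1", 35, 10), Decimal("1.1", 35, 10));                  -- Decimal(35,10) into the Decimal(22,9) column Value22 -> conversion error, code 2031
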
2024-11-21T09:16:57.550496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.612167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.629099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.638805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.714299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101141199406:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.714328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.746119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.752956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.763439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.769812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.824995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.833453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.849675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101141199922:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.849698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101141199927:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.849718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.850408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.853282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659101141199929:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 18435, MsgBus: 26732 2024-11-21T09:16:58.263924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659106452608174:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.264146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001984/r3tmp/tmpGsI1oN/pdisk_1.dat 2024-11-21T09:16:58.275565Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18435, node 2 2024-11-21T09:16:58.284043Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.284059Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.284062Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.284106Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26732 TClient is connected to server localhost:26732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.364509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.364538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.365446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.366630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.369134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.380185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.397557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.408303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.595166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659106452609725:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.595228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.599950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.607240Z node 2 :FLAT_TX_SCHEMESHARD ... atabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.647567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659106452610223:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.647574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.648316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.651450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659106452610225:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6529, MsgBus: 28955 2024-11-21T09:16:59.156154Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659110071265439:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:59.156171Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001984/r3tmp/tmpaDX1Jn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6529, node 3 2024-11-21T09:16:59.172379Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:59.174989Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:59.175003Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:59.175006Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:59.175063Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28955 TClient is connected to server localhost:28955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:59.256305Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.256330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.257405Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:59.258614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.262619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.276427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:59.297500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.306053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.436711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110071266975:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.436745Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.442125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.448760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.457342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.471758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.527017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.534536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.550105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110071267490:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.550134Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.550137Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659110071267495:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.550679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:59.554552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659110071267497:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:59.719808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.795409Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659110071267933:2490], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T09:16:59.795489Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGI1NmM3MGMtZWJlNjIwM2MtMmQ4M2JmMjEtYjU1ZWI3NGI=, ActorId: [3:7439659110071267931:2489], ActorState: ExecuteState, TraceId: 01jd705cgg0vqxxrn7gc5nnax8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:59.804913Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmRhMzJlMWMtYjk4ZDhlYjMtYjY1OWQ0MmMtOWM4NDg2Mzc=, ActorId: [3:7439659110071267944:2492], ActorState: ExecuteState, TraceId: 01jd705cgm4y2mgvjampxky0nr, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T09:16:59.808546Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659110071267965:2498], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:59.808621Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGRkNGMwNDctY2YzN2FkNmUtYjlmMTI4MTctY2FmMzBmM2M=, ActorId: [3:7439659110071267963:2497], ActorState: ExecuteState, TraceId: 01jd705cgxb8z23c0ye50tbj99, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T09:16:59.811832Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659110071267976:2503], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T09:16:59.811890Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODBjYjFlYzYtZWEzMGYxNjAtZjJhMGUyOWItZGFjOTY1YzU=, ActorId: [3:7439659110071267974:2502], ActorState: ExecuteState, TraceId: 01jd705ch120cy8xa1251xs5ft, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects |94.4%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> KqpExplain::PrecomputeRange >> KqpQuery::QueryTimeout >> KqpQuery::GenericQueryNoRowsLimit >> KqpLimits::QSReplySizeEnsureMemoryLimits >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead >> KqpParams::MissingParameter >> KqpQuery::TryToUpdateNonExistentColumn >> KqpQuery::QueryResultsTruncated >> KqpQuery::QueryClientTimeout >> KqpQuery::YqlSyntaxV0 >> KqpExplain::SqlIn >> KqpLimits::ManyPartitions >> KqpTypes::Time64Columns+EnableTableDatetime64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 26072, MsgBus: 23123 2024-11-21T09:16:57.609366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659102062658386:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.609380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00196b/r3tmp/tmpww8XF1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26072, node 1 2024-11-21T09:16:57.665522Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.666529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.666541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.666543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.666583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23123 TClient is connected to server localhost:23123 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:57.711324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.711354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.712487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:57.742022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.753871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.769325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.787794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.799062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.905813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659102062659943:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.905839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.931828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.938334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.993120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.001527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.056633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.064136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.072997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659106357627756:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.073045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659106357627761:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.073051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.073672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:58.077086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659106357627763:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 24084, MsgBus: 22048 2024-11-21T09:16:58.517241Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659105733076118:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.517279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00196b/r3tmp/tmpwvwEql/pdisk_1.dat 2024-11-21T09:16:58.530153Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24084, node 2 2024-11-21T09:16:58.537366Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:58.537382Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:58.537384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:58.537428Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22048 TClient is connected to server localhost:22048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:58.617554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:58.617589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:58.618720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:58.620465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.630690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.639696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.658334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.667110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.879611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659105733077672:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.879635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.884810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:58.892272Z node 2 :FLAT_TX_SCHEMESH ... ma::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:00.356852Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:00.356858Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:00.356873Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:17:00.356879Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:17:00.498842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.511822Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038008 not found 2024-11-21T09:17:00.513228Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038018 not found 2024-11-21T09:17:00.513244Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037999 not found 2024-11-21T09:17:00.513246Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038022 not found 2024-11-21T09:17:00.513248Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038007 not found 2024-11-21T09:17:00.513249Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037991 not found 2024-11-21T09:17:00.513251Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038016 not found 2024-11-21T09:17:00.513253Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038005 not found 2024-11-21T09:17:00.513254Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038024 not found 2024-11-21T09:17:00.513256Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038003 not found 2024-11-21T09:17:00.513258Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038001 not found 2024-11-21T09:17:00.513259Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037997 not found 2024-11-21T09:17:00.513261Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037995 not found 
2024-11-21T09:17:00.513262Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038020 not found 2024-11-21T09:17:00.513264Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038012 not found 2024-11-21T09:17:00.513266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038014 not found 2024-11-21T09:17:00.513267Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037993 not found 2024-11-21T09:17:00.513269Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038027 not found 2024-11-21T09:17:00.513270Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037983 not found 2024-11-21T09:17:00.513272Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037968 not found 2024-11-21T09:17:00.513274Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038010 not found 2024-11-21T09:17:00.513275Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037970 not found 2024-11-21T09:17:00.513277Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038025 not found 2024-11-21T09:17:00.513278Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037972 not found 2024-11-21T09:17:00.513280Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037987 not found 2024-11-21T09:17:00.513281Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037989 not found 2024-11-21T09:17:00.513283Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037966 not found 2024-11-21T09:17:00.515644Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037979 not found 2024-11-21T09:17:00.519996Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038023 not found 2024-11-21T09:17:00.524457Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037985 not found 2024-11-21T09:17:00.524472Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037977 not found 2024-11-21T09:17:00.524474Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038006 not found 2024-11-21T09:17:00.524476Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038019 not found 2024-11-21T09:17:00.524480Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038002 not found 2024-11-21T09:17:00.524481Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038004 not found 2024-11-21T09:17:00.524483Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038017 not found 
2024-11-21T09:17:00.524485Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038000 not found 2024-11-21T09:17:00.524487Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037998 not found 2024-11-21T09:17:00.524676Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037981 not found 2024-11-21T09:17:00.526020Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2024-11-21T09:17:00.526034Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038021 not found 2024-11-21T09:17:00.526810Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2024-11-21T09:17:00.528280Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037980 not found 2024-11-21T09:17:00.529107Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037975 not found 2024-11-21T09:17:00.529121Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038026 not found 2024-11-21T09:17:00.529123Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037971 not found 2024-11-21T09:17:00.529125Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037973 not found 2024-11-21T09:17:00.529126Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037969 not found 2024-11-21T09:17:00.529129Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037992 not found 2024-11-21T09:17:00.529134Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 not found 2024-11-21T09:17:00.529137Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037967 not found 2024-11-21T09:17:00.529139Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2024-11-21T09:17:00.529142Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037988 not found 2024-11-21T09:17:00.529143Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037996 not found 2024-11-21T09:17:00.529145Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2024-11-21T09:17:00.529148Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038028 not found 2024-11-21T09:17:00.529149Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038015 not found 2024-11-21T09:17:00.529152Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037994 not found 2024-11-21T09:17:00.529153Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038009 not found 
2024-11-21T09:17:00.529155Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038013 not found 2024-11-21T09:17:00.529577Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038011 not found 2024-11-21T09:17:00.529587Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037974 not found 2024-11-21T09:17:00.529589Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037976 not found 2024-11-21T09:17:00.529591Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037978 not found 2024-11-21T09:17:00.570020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:1, at schemeshard: 72057594046644480 2024-11-21T09:17:00.597962Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmRlZjU2NDktYjZmZmZlNWMtMmE1YTBlMC0yNzBmZmI2Yg==, ActorId: [3:7439659117153619602:3810], ActorState: ExecuteState, TraceId: 01jd705d8h6znx6skzky8sdy7n, Create QueryResponse for error on request, msg: 2024-11-21T09:17:00.633049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:1, at schemeshard: 72057594046644480 2024-11-21T09:17:00.822749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:1, at schemeshard: 72057594046644480 >> KqpStats::DeferredEffects [GOOD] >> KqpStats::DataQueryWithEffects >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> KqpParams::MissingParameter [GOOD] >> KqpParams::ParameterTypes >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryExplain >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> KqpStats::DataQueryWithEffects [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> 
test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::QueryStats >> KqpQuery::YqlTableSample [GOOD] >> KqpStats::DataQueryMulti >> KqpExplain::SortStage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects [GOOD] Test command err: Trying to start YDB, gRPC: 7172, MsgBus: 21716 2024-11-21T09:16:59.651736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659113011922735:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:59.652039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001928/r3tmp/tmphuAkHA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7172, node 1 2024-11-21T09:16:59.708766Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:59.713829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:59.713843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:59.713845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:59.713875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21716 TClient is connected to server localhost:21716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:59.753181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:59.753213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:59.754333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:16:59.782813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.788720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.850973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.867785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.877667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:59.941734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659113011924285:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.941766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:59.970243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.976625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.990079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.003625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.059028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.066330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.075721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659117306892096:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.075744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.075783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659117306892101:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.076528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:00.079067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659117306892103:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:00.336757Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659117306892439:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd705czf59jz8b8e6c4jfx7g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwMGMxNGMtOGZkNzU0NGYtZDY0MzhmMGUtZDE0ODU2MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:17:00.455688Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659117306892527:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd705czf59jz8b8e6c4jfx7g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwMGMxNGMtOGZkNzU0NGYtZDY0MzhmMGUtZDE0ODU2MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:17:00.456683Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180620380, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19432, MsgBus: 22210 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001928/r3tmp/tmp9uy7JJ/pdisk_1.dat 2024-11-21T09:17:00.735805Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659114740024604:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:00.740446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:00.745770Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19432, node 2 2024-11-21T09:17:00.756480Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:00.756495Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:00.756499Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:00.756536Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22210 TClient is connected to server localhost:22210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:17:00.836570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:00.836599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:00.837766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:00.837983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.839391Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:00.843736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:00.856662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:00.876025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:00.894950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.022988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659119034993271:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.023054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.025611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.031815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.039803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.046006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.053180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.060804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.068473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659119034993774:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.068481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659119034993779:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.068489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.069029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.073359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659119034993781:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 10217, MsgBus: 61404 2024-11-21T09:17:01.667014Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659120016745892:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.667231Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001928/r3tmp/tmpx5toCe/pdisk_1.dat 2024-11-21T09:17:01.677768Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10217, node 3 2024-11-21T09:17:01.687712Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.687728Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.687730Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.687776Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61404 TClient is connected to server localhost:61404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.736562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.745769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.767349Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.767390Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.768539Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:01.802113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
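The code 1108 warning above comes from YQL's SqlIn check: when IN is used with nullable arguments (a nullable left operand or NULL/nullable items in the list), the legacy behaviour differs from ANSI SQL's three-valued logic, so the compiler suggests the pragma named in the message. A short hedged sketch of how that pragma is applied; the table path and values below are illustrative, not taken from the test.

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- With the pragma enabled, IN over nullable items follows ANSI semantics:
-- for example 1 IN (2, 3, NULL) evaluates to NULL rather than false,
-- and the code 1108 warning is no longer reported for queries like this one.
SELECT * FROM `/Root/KeyValue` WHERE Key IN (1, 2, NULL);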
2024-11-21T09:17:01.821559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.831606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.963728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659120016747449:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.963782Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.969587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.976762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.984839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.991293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.998345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.005354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.017312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659124311715249:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.017338Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.017344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659124311715254:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.018157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.025579Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659124311715256:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MultiJoinCteLinks |94.4%| [TA] $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 3520, MsgBus: 11489 2024-11-21T09:17:01.438754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659119010970991:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.438787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00172a/r3tmp/tmprNLmxt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3520, node 1 2024-11-21T09:17:01.490077Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:01.493759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.493770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.493771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.493801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11489 TClient is connected to server localhost:11489 2024-11-21T09:17:01.539357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.539381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:17:01.540319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.563239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.568622Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.754128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119010971593:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.754154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.776349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.837336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119010971693:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.837373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119010971698:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.837376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.838124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.843078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659119010971700:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 12243, MsgBus: 31356 2024-11-21T09:17:02.103495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659126482978374:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.103624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00172a/r3tmp/tmp39LqOk/pdisk_1.dat 2024-11-21T09:17:02.114884Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12243, node 2 2024-11-21T09:17:02.126546Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.126560Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.126562Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.126600Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31356 TClient is connected to server localhost:31356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.203759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.203794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.204842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.206668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.207704Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:02.424752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659126482978969:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.424777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> KqpParams::ParameterTypes [GOOD] >> KqpParams::InvalidJson >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs >> TraverseDatashard::TraverseOneTable >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> KqpStats::DataQueryMulti [GOOD] >> KqpQuery::QueryStats [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] |94.5%| [TA] {RESULT} $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 14973, MsgBus: 4225 2024-11-21T09:17:01.424524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659122207999312:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.424670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001744/r3tmp/tmpc3212l/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14973, node 1 2024-11-21T09:17:01.481627Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:01.486246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.486259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.486261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.486292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4225 TClient is connected to server localhost:4225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:17:01.524651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.524678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.525802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:17:01.550650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.564567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.632323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.657641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.668882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.706212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659122208000713:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.706249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.728818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.734379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.746146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.753348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.760218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.766986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.775756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659122208001203:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.775776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.775799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659122208001208:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.776450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.781458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659122208001210:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:01.948759Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659122208001517:2463], status: GENERIC_ERROR, issues:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... 2024-11-21T09:17:01.948840Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcxYWRiOTItMTYxODdlMDEtY2MxNTVjNzgtZTBmOTBlZDg=, ActorId: [1:7439659122208001489:2454], ActorState: ExecuteState, TraceId: 01jd705ekv63cdv6aapqdnvw43, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... Trying to start YDB, gRPC: 10609, MsgBus: 17381 2024-11-21T09:17:02.085729Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659122612304687:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.085888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001744/r3tmp/tmpXweXZ7/pdisk_1.dat 2024-11-21T09:17:02.094133Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10609, node 2 2024-11-21T09:17:02.103513Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.103525Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.103527Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.103566Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17381 TClient is connected to server localhost:17381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.186154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.186180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.187318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.187942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.192467Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:02.199231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.208138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.226442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.237721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.374838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122612306221:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.374861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.380684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.388159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.397679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.405740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.419303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.432502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.441077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122612306733:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.441089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122612306738:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.441096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.441743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.445707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659122612306740:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.616911Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659122612307032:2459], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2024-11-21T09:17:02.617023Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWZlOWFlNDktNjhmYTdkMTktNzcyNzc0MDktNjhiYjcyOGI=, ActorId: [2:7439659122612307024:2454], ActorState: ExecuteState, TraceId: 01jd705f8nbv3e8c9ybm2q4fc6, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 29696, MsgBus: 9455 2024-11-21T09:17:02.769420Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659125794015877:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.769565Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001744/r3tmp/tmp9KWlEm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29696, node 3 2024-11-21T09:17:02.785985Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:02.786347Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.786357Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.786358Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.786396Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9455 TClient is connected to server localhost:9455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.869785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.869817Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.870842Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.871515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.883542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.894970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.919554Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.932696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.058757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130088984701:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.058781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.063575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.071109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.083433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.090087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.097524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.104711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.120751Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130088985206:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.120781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.120788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130088985211:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.121606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.124481Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659130088985213:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpExplain::MultiJoinCteLinks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 16820, MsgBus: 21170 2024-11-21T09:17:01.457465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659119809208030:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.457480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016da/r3tmp/tmpHvLxkl/pdisk_1.dat 2024-11-21T09:17:01.517241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 1 2024-11-21T09:17:01.532268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.532279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.532281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.532315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 2024-11-21T09:17:01.558217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.558240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.559191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.594817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.597591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.605037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.822963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119809212657:2603], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.822973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119809212649:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.822995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.823583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.825221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659119809212663:2604], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 16820, MsgBus: 21171 2024-11-21T09:17:02.168430Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659124104175333:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.168621Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016da/r3tmp/tmp7baZ5w/pdisk_1.dat 2024-11-21T09:17:02.179042Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 2 2024-11-21T09:17:02.190663Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.190676Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.190677Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.190719Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.270187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.270217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.270951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.271403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:02.297602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.520479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659124104180005:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.520499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.520498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659124104180015:2604], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.521234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.523386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659124104180019:2605], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } query_phases { duration_us: 8145 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 6735 affected_shards: 100 } compilation { duration_us: 9246 cpu_time_us: 8657 } process_cpu_time_us: 97 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":4,\"Sum\":19000,\"Max\":5000,\"Min\":4000},\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":19000,\"Max\":5000,\"Min\":4000}},\"Name\":\"4\",\"Push\":{\"LastMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":19000,\"Max\":5000,\"Min\":4000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":20770,\"Max\":5557,\"Min\":4744},\"WaitPeriods\":{\"Count\":4,\"Sum\":5,\"Max\":2,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576},\"Tasks\":4,\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"StageDurationUs\":6000,\"BaseTimeMs\":1732180622614,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":20088,\"Max\":5422,\"Min\":4590},\"OutputBytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"CpuTimeUs\":{\"Count\":4,\"Sum\":1029,\"Max\":345,\"Min\":188},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":19000,\"Max\":5000,\"Min\":4000}},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":20,\"Max\":6,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":19000,\"Max\":5000,\"Min\":4000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":20736,\"Max\":5572,\"Min\":4845},\"WaitPeriods\":{\"Count\":4,\"Sum\":7,\"Max\":3,\"Min\":1}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key 
(Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":21,\"Max\":21,\"Min\":21},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"ActiveMessageMs\":{\"Count\":1,\"Max\":6,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":7695,\"Max\":7695,\"Min\":7695},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":5000,\"Max\":5000,\"Min\":5000}},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveMessageMs\":{\"Count\":1,\"Max\":6,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":5000,\"Max\":5000,\"Min\":5000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":5073,\"Max\":5073,\"Min\":5073},\"WaitPeriods\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"DurationUs\":{\"Count\":1,\"Sum\":6000,\"Max\":6000,\"Min\":6000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7695,\"Max\":7695,\"Min\":7695},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":6000,\"BaseTimeMs\":1732180622614,\"WaitInputTimeUs\":{\"Count\":1,\"Sum\":4762,\"Max\":4762,\"Min\":4762},\"OutputBytes\":{\"Count\":1,\"Sum\":7695,\"Max\":7695,\"Min\":7695},\"CpuTimeUs\":{\"Count\":1,\"Sum\":993,\"Max\":993,\"Min\":993},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":6000,\"Max\":6000,\"Min\":6000}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":6000,\"Max\":6000,\"Min\":6000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1242,\"Max\":1242,\"Min\":1242},\"WaitPeriods\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":9246,\"CpuTimeUs\":8657},\"ProcessCpuTimeUs\":97,\"TotalDurationUs\":101094,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":82916},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No 
estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'353) \'(\'\"_id\" \'\"f20dee95-338d9eff-8b6956a5-c82bcb4e\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'365) \'(\'\"_id\" \'\"47544e74-df12accb-fc5765a9-e0baf15b\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 101094 total_cpu_time_us: 15489 Trying to start YDB, gRPC: 16820, MsgBus: 21172 2024-11-21T09:17:02.921767Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659124104175487:2191];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016da/r3tmp/tmpoyARB1/pdisk_1.dat 2024-11-21T09:17:02.926045Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:02.933421Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 3 2024-11-21T09:17:02.947108Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.947127Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.947129Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.947180Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.021261Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.021297Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.022437Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.023642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.034543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.307085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128399147335:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.307106Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128399147327:2604], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.307165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.307734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.310380Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659128399147341:2608], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats [GOOD] Test command err: Trying to start YDB, gRPC: 18756, MsgBus: 63375 2024-11-21T09:17:01.228304Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659118549644573:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.228325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0018fd/r3tmp/tmpTAUNeO/pdisk_1.dat 2024-11-21T09:17:01.283047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18756, node 1 2024-11-21T09:17:01.293936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.293951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.293953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.293982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63375 2024-11-21T09:17:01.329452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.329479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.330541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.350092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.361500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:17:01.379751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.406627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.419302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.522722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118549646126:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.522756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.555567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.565013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.580597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.593632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.614158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.630821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.643548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118549646630:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.643574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.643688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118549646635:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.644297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.647234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659118549646637:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:01.897847Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659118549646951:2463], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWVjMWJjN2UtNjc4YWVlZWItNTNlY2FiMTktYjUzN2Y0Yzk=. CustomerSuppliedId : . TraceId : 01jd705egpc8vr9hzwwypp6422. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659118549646945:2454], status: TIMEOUT, reason: {
: Error: Terminate execution } 2024-11-21T09:17:01.898114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVjMWJjN2UtNjc4YWVlZWItNTNlY2FiMTktYjUzN2Y0Yzk=, ActorId: [1:7439659118549646931:2454], ActorState: ExecuteState, TraceId: 01jd705egpc8vr9hzwwypp6422, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 37ms exceeded, terminating after 37ms Trying to start YDB, gRPC: 14864, MsgBus: 9311 2024-11-21T09:17:02.117782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659123056093823:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.117803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0018fd/r3tmp/tmpyCXWZJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14864, node 2 2024-11-21T09:17:02.133112Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:02.135846Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.135857Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.135859Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.135894Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9311 TClient is connected to server localhost:9311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.218259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.218284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.219388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.220049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.224488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.232096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.240590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.257896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.268747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.407759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659123056095371:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.407792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.413134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.419774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.432370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.439064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.446553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.453567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.469693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659123056095874:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.469725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.469789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659123056095879:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.470497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.473796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659123056095881:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 63341, MsgBus: 18460 2024-11-21T09:17:02.810489Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659124333717367:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.810650Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0018fd/r3tmp/tmp7Rv6c0/pdisk_1.dat 2024-11-21T09:17:02.822680Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63341, node 3 2024-11-21T09:17:02.830014Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.830031Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.830032Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.830068Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18460 TClient is connected to server localhost:18460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.911014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.911046Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.912145Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.913406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.916864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.931012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.949353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.964072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.123252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128628686198:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.123280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.128356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.134599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.146566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.153274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.159983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.167582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.182902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128628686704:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.182912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128628686709:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.182920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.183574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.187294Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659128628686711:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } query_phases { duration_us: 1344 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 1061 affected_shards: 1 } query_phases { duration_us: 4249 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 680 affected_shards: 2 } compilation { duration_us: 21370 cpu_time_us: 20751 } process_cpu_time_us: 195 total_duration_us: 27723 total_cpu_time_us: 22687 >> KqpQuery::QueryFromSqs [GOOD] >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled >> KqpParams::InvalidJson [GOOD] >> KqpQuery::NoEvaluate [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7674, MsgBus: 5353 2024-11-21T09:17:01.322888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659121064551150:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.322917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017cc/r3tmp/tmpe8qFQ8/pdisk_1.dat 2024-11-21T09:17:01.385784Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7674, node 1 2024-11-21T09:17:01.398038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.398050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.398051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.398085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5353 2024-11-21T09:17:01.428594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.428642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.432591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.481580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.484493Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.487319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.502826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.521313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.531837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.662056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121064552703:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.662130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.669396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.677070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.683988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.690519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.697319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.704802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.713084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121064553207:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.713112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.713157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121064553212:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.713818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.717404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659121064553214:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:01.883534Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659121064553506:2459], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2024-11-21T09:17:01.883639Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTIyZmIyNDEtNjNkNmY3YjctMzk3NjY4NzEtZGQ1MDQ0M2I=, ActorId: [1:7439659121064553498:2454], ActorState: ExecuteState, TraceId: 01jd705ehq0cf5f8yk34db003g, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 10111, MsgBus: 11291 2024-11-21T09:17:02.014177Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659122265159343:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.014261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017cc/r3tmp/tmp5rIMWo/pdisk_1.dat 2024-11-21T09:17:02.024446Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10111, node 2 2024-11-21T09:17:02.035132Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.035146Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.035147Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.035191Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11291 TClient is connected to server localhost:11291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.115912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.115947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.116365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.116981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:17:02.117980Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:02.329561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122265159948:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.329595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.338834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.358976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.389111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122265161212:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.389134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.389138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122265161217:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.389819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.391348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659122265161219:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 9640, MsgBus: 6137 2024-11-21T09:17:02.906356Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659125760723759:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.906581Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017cc/r3tmp/tmpO7uv3c/pdisk_1.dat 2024-11-21T09:17:02.918786Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9640, node 3 2024-11-21T09:17:02.932980Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.933006Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.933008Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.933051Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6137 TClient is connected to server localhost:6137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.009397Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.009426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.009854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.010645Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:03.011837Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:03.218653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130055691660:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.218679Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.222379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.247760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.274239Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130055692930:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.274266Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130055692935:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.274272Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.275048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.279116Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659130055692937:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } >> KqpLimits::AffectedShardsLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 20826, MsgBus: 18763 2024-11-21T09:17:01.374928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659119412456470:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.375082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017c1/r3tmp/tmprKIi1P/pdisk_1.dat 2024-11-21T09:17:01.444819Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20826, node 1 2024-11-21T09:17:01.456119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.456131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.456133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.456156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18763 2024-11-21T09:17:01.476197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.476254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.477841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.508837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.510935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.515812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.542535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.565224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.576795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.704346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119412458015:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.704377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.739383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.745421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.753775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.767307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.774425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.781667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.797588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119412458526:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.797618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119412458531:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.797622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.798248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.801242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659119412458533:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.005216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64618, MsgBus: 17620 2024-11-21T09:17:02.247287Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659126364645854:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.247346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017c1/r3tmp/tmpJOlSSp/pdisk_1.dat 2024-11-21T09:17:02.257202Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64618, node 2 2024-11-21T09:17:02.268311Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.268327Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.268329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.268366Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17620 TClient is connected to server localhost:17620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.349747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.349792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.350257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.350861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:02.351946Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:02.355953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.367796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.386760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.396551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.577187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659126364647408:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... ORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659126364647909:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.629988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659126364647914:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.630000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.630626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.634560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659126364647916:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '643) '('"_id" '"840b5bdf-b999d556-13165674-18afc679") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '656) '('"_id" '"befba1e6-b330bddd-9f54e2b9-a981175")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","Inputs":[],"E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29269, MsgBus: 15865 2024-11-21T09:17:03.161561Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659129960721824:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:03.161838Z node 3 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017c1/r3tmp/tmp3KnBAm/pdisk_1.dat 2024-11-21T09:17:03.173202Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29269, node 3 2024-11-21T09:17:03.184580Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.184595Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.184597Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.184640Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15865 TClient is connected to server localhost:15865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.264793Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.264825Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.265191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.266601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.273168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.285464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.305271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.316744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:03.465574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129960723365:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.465608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.471561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.478561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.489118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.496443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.503288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.510590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.519133Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129960723877:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.519153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129960723882:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.519159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.519824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.523628Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659129960723884:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:03.712333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 16700, MsgBus: 27563 2024-11-21T09:17:01.238132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659121861771764:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.238445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017ee/r3tmp/tmpzBeugc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16700, node 1 2024-11-21T09:17:01.305332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.305346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.305347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.305377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:01.305543Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27563 2024-11-21T09:17:01.338296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.338328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.339291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.368629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.375623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.397257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.420955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.431775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.595382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121861773306:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.595539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.602545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.612642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.620054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.635147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.649426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.662540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.670631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121861773817:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.670664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121861773822:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.670676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.671413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.675544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659121861773824:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 16833, MsgBus: 6866 2024-11-21T09:17:02.115668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659123670026349:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.115723Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017ee/r3tmp/tmpWjIuR2/pdisk_1.dat 2024-11-21T09:17:02.127264Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16833, node 2 2024-11-21T09:17:02.144387Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.144403Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.144405Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.144448Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6866 TClient is connected to server localhost:6866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.216294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.216325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.217123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.217368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:02.219439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... Join (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 5259, MsgBus: 10648 2024-11-21T09:17:03.039949Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659127662415089:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017ee/r3tmp/tmp8JgW5U/pdisk_1.dat 2024-11-21T09:17:03.047129Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:03.051217Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5259, node 3 2024-11-21T09:17:03.060875Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.060889Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.060892Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.060938Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10648 TClient is connected to server localhost:10648 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.138892Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.138925Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.139994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.141231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.148135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.157284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.176333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.186954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.371547Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659127662416478:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.371585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.375397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.382318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.391407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.405784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.419973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.434456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.453057Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659127662416979:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.453084Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659127662416984:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.453090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.453863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.460899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659127662416986:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":5}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":4}],"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"},{"Inputs":[{"InternalOperatorId":6}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"}],"Node Type":"Limit-InnerJoin (MapJoin)-Filter-TablePointLookup-ConstantExpr-Filter-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"Aggregate","CTE Name":"precompute_0_0"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"}],"Node Type":"TablePointLookup"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Filter"},{"PlanNodeId":16,"Plans":[{"PlanNodeId":22,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> AnalyzeColumnshard::AnalyzeStatus ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::NoEvaluate [GOOD] Test command err: Trying to start YDB, gRPC: 5240, MsgBus: 64150 2024-11-21T09:17:01.240499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659121082779105:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.240672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001811/r3tmp/tmpwlLnTj/pdisk_1.dat 2024-11-21T09:17:01.292617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5240, node 1 2024-11-21T09:17:01.301228Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.301239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.301240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.301263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64150 TClient is connected to server localhost:64150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:17:01.342225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.342255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.343333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.370797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.373103Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.382200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.397975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.421166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.432419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.521607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121082780657:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.521640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.575300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.581160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.593193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.606614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.622761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.635101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.692574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121082781175:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.692603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.692642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659121082781180:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.693290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.696315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659121082781182:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 63212, MsgBus: 15705 2024-11-21T09:17:02.123855Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659124642149133:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.127005Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001811/r3tmp/tmpH3ZY7q/pdisk_1.dat 2024-11-21T09:17:02.137708Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63212, node 2 2024-11-21T09:17:02.141976Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.141993Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.141995Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.142037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15705 TClient is connected to server localhost:15705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.223825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.223857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.224637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.224912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:02.230013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.239319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:02.256286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.268622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.434921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659124642150523:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.434952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.439853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.446224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.453629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.459931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.467518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.524255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.539624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659124642151028:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.539650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.539724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659124642151033:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.540532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.543127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659124642151035:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.707096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 3184, MsgBus: 4398 2024-11-21T09:17:03.229428Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659129130970304:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:03.229449Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001811/r3tmp/tmprg8vOe/pdisk_1.dat 2024-11-21T09:17:03.246126Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3184, node 3 2024-11-21T09:17:03.253364Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.253379Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.253381Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.253419Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4398 TClient is connected to server localhost:4398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.329785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.329826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.330906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.332254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.334473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:03.346431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.369088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.380395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.548637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129130971851:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.548670Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.554084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.561164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.573626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.581088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.594711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.608397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.616707Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129130972351:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.616737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.616744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659129130972356:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.617359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.621463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659129130972358:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:03.792231Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659129130972651:2459], status: UNSUPPORTED, issues:
: Error: Default error
:7:24: Error: EVALUATE IF is not supported in YDB queries., code: 2030 2024-11-21T09:17:03.792349Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmEzMTViNGUtNDYyYWU3NDItN2UzZmY5MGItOTA5YThiNTk=, ActorId: [3:7439659129130972643:2454], ActorState: ExecuteState, TraceId: 01jd705gdcepezmxgb5880zpt4, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2024-11-21T09:17:03.795861Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659129130972655:2461], status: UNSUPPORTED, issues:
: Error: Default error
:4:28: Error: EVALUATE is not supported in YDB queries., code: 2030 2024-11-21T09:17:03.796016Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmEzMTViNGUtNDYyYWU3NDItN2UzZmY5MGItOTA5YThiNTk=, ActorId: [3:7439659129130972643:2454], ActorState: ExecuteState, TraceId: 01jd705gdh6m1fr0kmd1bqg0md, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2024-11-21T09:17:03.808568Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659129130972663:2465], status: UNSUPPORTED, issues:
: Error: Default error
:8:78: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2024-11-21T09:17:03.808858Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmEzMTViNGUtNDYyYWU3NDItN2UzZmY5MGItOTA5YThiNTk=, ActorId: [3:7439659129130972643:2454], ActorState: ExecuteState, TraceId: 01jd705gdyfh8mq2seg1wmajd4, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 22144, MsgBus: 30180 2024-11-21T09:17:01.341810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659118261152155:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.341837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001758/r3tmp/tmp0XLaWM/pdisk_1.dat 2024-11-21T09:17:01.400782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22144, node 1 2024-11-21T09:17:01.416378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.416391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.416392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.416432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30180 2024-11-21T09:17:01.442976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.443006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.443783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.483368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.488613Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.491610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.507309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.522592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.531902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.630292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118261153698:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.630323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.656453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.661944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.669017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.676365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.733578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.746433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.754819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118261154215:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.754846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.754877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118261154220:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.755579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.759392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659118261154222:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:01.959548Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzJlZTM4MWItOTI5MmZlMi1kNTA2OThmOS0xNjJjZWViMg==, ActorId: [1:7439659118261154508:2454], ActorState: ExecuteState, TraceId: 01jd705ekx5mmqj5bfmf2jqwvc, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 1170, MsgBus: 12442 2024-11-21T09:17:02.218199Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659122828508269:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.218354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001758/r3tmp/tmpBt2nNL/pdisk_1.dat 2024-11-21T09:17:02.228746Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1170, node 2 2024-11-21T09:17:02.236087Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.236101Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.236103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.236136Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12442 TClient is connected to server localhost:12442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.318621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.318652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.319759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:02.320422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.332606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:17:02.340832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.357260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.367865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.520451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122828509809:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.520520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.525181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.531336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.537227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.544189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.551205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.558268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.567194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122828510313:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.567219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.567225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659122828510318:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.567925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.571353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659122828510320:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 10940, MsgBus: 21886 2024-11-21T09:17:03.143831Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659130441467811:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:03.143852Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001758/r3tmp/tmpqRgnKe/pdisk_1.dat 2024-11-21T09:17:03.155148Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10940, node 3 2024-11-21T09:17:03.167443Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.167457Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.167459Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.167493Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21886 TClient is connected to server localhost:21886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.243839Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.243890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.245053Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.246710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.249551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.259056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:03.278368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.293605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.478971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130441469353:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.479002Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.485093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.494619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.503005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.511334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.524549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.538977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.546396Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130441469855:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.546422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.546426Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130441469860:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.547044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.551510Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659130441469862:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:03.721842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.741861Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTdmZDAwNy00NjAwM2MyZi1lNTZiMTk3Yy1jNWQxYTVj, ActorId: [3:7439659130441470146:2454], ActorState: ExecuteState, TraceId: 01jd705gbk0f9cz8tmkxk6ryg3, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: Invalid Json value ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 14295, MsgBus: 19831 2024-11-21T09:17:01.439476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659120506872813:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.439714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001715/r3tmp/tmpfieNkA/pdisk_1.dat 2024-11-21T09:17:01.504223Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14295, node 1 2024-11-21T09:17:01.511832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.511841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.511843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.511866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19831 2024-11-21T09:17:01.540358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.540382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.541438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.576221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.587955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.603805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.620851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.636496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.758567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120506874352:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.758605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.790153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.797114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.809459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.864945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.872391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.879077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.887520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120506874867:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.887542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120506874872:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.887548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.888167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.892317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659120506874874:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.055512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.089734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.097556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key [1, 4)","Key [42, 42]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 5423, MsgBus: 32527 2024-11-21T09:17:02.329498Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659123366500403:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.329680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001715/r3tmp/tmpsKKhv1/pdisk_1.dat 2024-11-21T09:17:02.345032Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5423, node 2 2024-11-21T09:17:02.350374Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.350387Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.350389Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.350426Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32527 TClient is connected to server localhost:32527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.429777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.429807Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.430861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0 ... id=RestoreV1Chunks_V2; 2024-11-21T09:17:03.008094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:03.008097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:03.008107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:17:03.008110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 
+∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 21691, MsgBus: 27604 2024-11-21T09:17:03.280062Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659128969236777:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:03.280283Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001715/r3tmp/tmpvPDCLz/pdisk_1.dat 2024-11-21T09:17:03.292893Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21691, node 3 2024-11-21T09:17:03.303669Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.303692Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.303694Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.303742Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27604 TClient is connected to server localhost:27604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:17:03.383059Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.383089Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.383449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.384132Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:17:03.385048Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:03.390297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.402210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.463140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.474337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.602962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128969238321:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.602990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.607086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.614338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.622429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.629097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.636548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.653505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.666835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128969238833:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.666865Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659128969238838:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.666875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.667711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.670163Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659128969238840:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:03.875089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.900740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.995802Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7439659128969240798:2579] TxId: 281474976715674. Ctx: { TraceId: 01jd705gjv3bh09c7wwsr04s78, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NkYzI3YTQtMTRiYzAxYTYtYjYyOThiN2YtYWJiNzA2OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2024-11-21T09:17:03.996824Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2NkYzI3YTQtMTRiYzAxYTYtYjYyOThiN2YtYWJiNzA2OWE=, ActorId: [3:7439659128969240631:2579], ActorState: ExecuteState, TraceId: 01jd705gjv3bh09c7wwsr04s78, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 >> AnalyzeColumnshard::Analyze >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> KqpService::PatternCache [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> TraverseDatashard::TraverseTwoTables >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> KqpExplain::ReadTableRangesFullScan [GOOD] >> AnalyzeColumnshard::AnalyzeServerless |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService [GOOD] >> KqpLimits::CancelAfterRwTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRangesFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 2813, MsgBus: 10065 2024-11-21T09:17:02.856617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659126046073624:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.856797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001665/r3tmp/tmpCpHxQi/pdisk_1.dat 2024-11-21T09:17:02.926323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2813, node 1 2024-11-21T09:17:02.936418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.936430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.936432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.936476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10065 2024-11-21T09:17:02.957805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:02.957837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:02.959114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:02.997676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.004414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.066827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.085921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.098463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.187622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659130341042464:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.187652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.221744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.229008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.284350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.293386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.307620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.314484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.330218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659130341042980:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.330257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.330347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659130341042985:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:03.331067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:03.334523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659130341042987:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 17696, MsgBus: 15642 2024-11-21T09:17:03.749047Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659127072520921:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:03.749210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001665/r3tmp/tmpQ1RL92/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17696, node 2 2024-11-21T09:17:03.763550Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:03.768776Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:03.768796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:03.768798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:03.768839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15642 TClient is connected to server localhost:15642 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:03.849226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:03.849263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:03.850339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:03.851662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.855270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:03.869118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... 
estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":21,"Plans":[{"PlanNodeId":25,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"SortBy":"row.Key","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":4},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 24511, MsgBus: 3269 2024-11-21T09:17:04.644557Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659130935250382:2126];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001665/r3tmp/tmpbnNj9Y/pdisk_1.dat 2024-11-21T09:17:04.647606Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:04.651579Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24511, node 3 2024-11-21T09:17:04.660887Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.660900Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.660901Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.660932Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3269 TClient is connected to server localhost:3269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:04.746211Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.746243Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.746544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.747217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:04.757871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.768498Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:04.791243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.809237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.934076Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130935251840:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.934101Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.940571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.947344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.959663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.973230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.979814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.986967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.995542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130935252345:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.995563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.995571Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659130935252350:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.996065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:05.000395Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659130935252352:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:05.184753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> TraverseDatashard::TraverseTwoTablesServerless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 2426, 
MsgBus: 10107 2024-11-21T09:16:52.764725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659080590879261:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.764750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d14/r3tmp/tmpxzlEIu/pdisk_1.dat 2024-11-21T09:16:52.859851Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:52.864187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.864222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.865353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2426, node 1 2024-11-21T09:16:52.916647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.916660Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.916662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.916689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10107 TClient is connected to server localhost:10107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.016683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.020658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:53.044989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.074534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.133607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.148379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.175414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084885847888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.175432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.288115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.293880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.304598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.311559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.345428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084885848402:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.345464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.345626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084885848407:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.347224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084885848409:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:53.570643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.154827Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659084885849118:2479] TxId: 281474976715672. Ctx: { TraceId: 01jd7056ky0deky8ajaf25mv9j, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkYzRlZjMtNzg0NTc5MjUtODQ0ZjIzNjgtNzg1OGFjNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442498 > 50331648 2024-11-21T09:16:54.156071Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDBkYzRlZjMtNzg0NTc5MjUtODQ0ZjIzNjgtNzg1OGFjNWI=, ActorId: [1:7439659084885849007:2479], ActorState: ExecuteState, TraceId: 01jd7056ky0deky8ajaf25mv9j, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442498 > 50331648), code: 200509 Trying to start YDB, gRPC: 26458, MsgBus: 5873 2024-11-21T09:16:54.455855Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659089120124707:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.456019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d14/r3tmp/tmpkJEC0E/pdisk_1.dat 2024-11-21T09:16:54.467551Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26458, node 2 2024-11-21T09:16:54.476416Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:54.476431Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:54.476432Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:54.476473Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5873 TClient is connected to server localhost:5873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:54.558490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:54.558527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:54.558870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.559533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:54.561589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.573217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.592057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.766182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.774469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.781413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.797077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.809489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.828745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089120126776:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.828787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.828920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659089120126781:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.829790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.836624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659089120126783:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:59.457141Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659089120124707:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:59.457196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:04.356356Z node 2 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7057vz6r43ya59r438xcj1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThhYTNhMmUtN2RiMWM2YmMtZDJjNmQzMTMtNGY0Y2ZlMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2024-11-21T09:17:04.356589Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OThhYTNhMmUtN2RiMWM2YmMtZDJjNmQzMTMtNGY0Y2ZlMWM=, ActorId: [2:7439659093415094374:2454], ActorState: ExecuteState, TraceId: 01jd7057vz6r43ya59r438xcj1, Create QueryResponse for error on request, msg:
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 21478, MsgBus: 6704 2024-11-21T09:17:04.675206Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659131534323161:2069];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001d14/r3tmp/tmptrjfKd/pdisk_1.dat 2024-11-21T09:17:04.678858Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 21478, node 3 2024-11-21T09:17:04.692002Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:04.694495Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.694509Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.694512Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.694555Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6704 TClient is connected to server localhost:6704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:04.774937Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.774990Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.776052Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:04.777852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.783615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.792162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.809499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
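
Reader's aid (not part of the test output above): the two failures recorded earlier in this test's log are both limit checks — the executer aborts because a serialized task program of 100442498 bytes exceeds the 50331648-byte datashard program limit ("Datashard program size limit exceeded (100442498 > 50331648), code: 200509"), and a later query fails in TKqpLiteralExecuter with "Memory limit exceeded, code: 2029". The short sketch below only converts the byte counts quoted by the log into MiB for scale; reading the 50331648-byte limit as 48 MiB is an inference from the raw numbers, not something the log states.

```python
# Reader's aid: put the byte counts reported by the failed queries into scale.
LIMIT_BYTES = 50_331_648    # "... size is too big: 100442498 > 50331648"
TASK_BYTES = 100_442_498    # serialized program size reported by the log

MIB = 1024 * 1024
print(f"limit: {LIMIT_BYTES / MIB:.1f} MiB")  # 48.0 MiB
print(f"task:  {TASK_BYTES / MIB:.1f} MiB")   # ~95.8 MiB, roughly twice the limit
```
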
2024-11-21T09:17:04.821057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:04.970419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659131534324700:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.970441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.973232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.978585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.986907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.993996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.049130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.057532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.066007Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659135829292510:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:05.066033Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659135829292515:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:05.066036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:05.066591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:05.070159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659135829292517:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:05.241465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.866778Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjFkMmQxOTYtNTM4MjIxZTgtNzAyNjg5MzEtNDgyNDIwMzQ=, ActorId: [3:7439659135829292802:2454], ActorState: ExecuteState, TraceId: 01jd705jc77a1dmrhgkmce6ke7, Create QueryResponse for error on request, msg: |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse >> GroupWriteTest::WriteHardRateDispatcher [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 14932131298965292735 2024-11-21T09:16:42.794376Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T09:16:42.797109Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T09:16:42.797126Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T09:16:42.797488Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T09:16:42.806572Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:16:42.806996Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T09:16:47.182359Z 6 00h01m04.907035s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:5:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 7492 2024-11-21T09:16:50.626192Z 5 00h01m06.134286s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:4:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 14332 2024-11-21T09:16:54.679549Z 2 00h01m07.358815s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:1:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 17297 2024-11-21T09:17:04.985936Z 8 00h01m09.476696s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:7:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 31223 2024-11-21T09:17:06.696040Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:17:06.696062Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 
IsMonitored# 1} 2024-11-21T09:17:06.696068Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T09:17:06.696071Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T09:17:06.733191Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2024-11-21T09:17:06.733222Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> AnalyzeColumnshard::AnalyzeSameOperationId >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType |94.5%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerLegacy |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2024-11-21T09:17:03.709985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:03.710035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:03.710044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0026df/r3tmp/tmpVlbnVL/pdisk_1.dat 2024-11-21T09:17:03.830087Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6657, node 1 2024-11-21T09:17:04.003168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.003186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.003190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.003271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:04.012691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.089569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.089602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.101239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5687 2024-11-21T09:17:04.509924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.321478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.321512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.355134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:05.356120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.406846Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:05.415820Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:05.415843Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:05.424563Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:05.424718Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:05.424737Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:05.424742Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:05.424746Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:05.424750Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
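
Reader's aid (not part of the test output above): the warning "Resource pool default not found or you don't have access permissions" recurs throughout this log; in each cluster of occurrences it is followed a few entries later by an ESchemeOpCreateResourcePool operation and a "Transaction ... completed, doublechecking" retry, which suggests the default pool is created on first use during test bootstrap and the earlier NOT_FOUND fetches are transient. For navigating a log of this size, a minimal tally script can help; the sketch below is illustrative only and uses just the Python standard library — "ya_test.log" is a placeholder file name, not a path referenced anywhere in this output.

```python
# Illustrative reader's aid: count the recurring warnings/errors in a saved
# copy of this log. "ya_test.log" is a placeholder, not a path from the log.
import re
from collections import Counter

PATTERNS = {
    "resource pool default not found": re.compile(r"Resource pool default not found"),
    "doublechecking retry":            re.compile(r"completed, doublechecking"),
    "memory limit exceeded":           re.compile(r"Memory limit exceeded"),
}

counts = Counter()
with open("ya_test.log", encoding="utf-8", errors="replace") as log:
    for line in log:
        for name, pattern in PATTERNS.items():
            if pattern.search(line):
                counts[name] += 1

for name, count in counts.most_common():
    print(f"{count:6d}  {name}")
```
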
2024-11-21T09:17:05.424754Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:05.424759Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:05.424854Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:05.599967Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.599993Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.601010Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:05.602585Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:05.602700Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:05.603345Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:05.607691Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:05.607707Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:05.607716Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:05.609538Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.609568Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.610925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:05.612698Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:05.612733Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:05.615391Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:05.627319Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.649341Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:05.771843Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:05.939783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:06.734764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.734799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.761439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:06.938117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.938177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.938686Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:06.938731Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:06.938744Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T09:17:06.938769Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T09:17:06.938964Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T09:17:06.939039Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T09:17:06.939075Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:17:06.939084Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:17:06.939136Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:17:06.939163Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T09:17:06.950592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.950651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.950743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.952814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:17:07.104557Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:17:07.104584Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:17:07.177284Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T09:17:07.425036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:17:07.524582Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:07.524630Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:07.524635Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3177], StatRequests.size() = 1 2024-11-21T09:17:07.581313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd705kfj3wshnf1rbn3dwzg3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUxZWMyOTktNWEyMTNiMDktZGQ1YmE2ZmEtNzRjMmEzMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:07.610595Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2637:2994]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:07.611061Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:07.611069Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:07.611143Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:07.611153Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:17:07.611161Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:07.613685Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T09:17:07.614890Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> KqpParams::BadParameterType [GOOD] >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::BadParameterType [GOOD] Test command err: Trying to start YDB, gRPC: 15904, MsgBus: 30032 2024-11-21T09:16:57.570972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:16:57.571370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:16:57.571400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4c/r3tmp/tmp8e98KR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15904, node 1 TClient is connected to server localhost:30032 TClient is connected to server localhost:30032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.732560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.732581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.732584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.732647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:57.795182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.795219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.795509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.797122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:57.911308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.115493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.395496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:58.624454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.940888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1727:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.940943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:58.944783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.146351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.399648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.622635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:59.878003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.116613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:00.425924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2298:3791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.425971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.426034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2303:3796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:00.427272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:00.602673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2305:3798], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:00.870955Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2605:4015] TxId: 281474976715671. Ctx: { TraceId: 01jd705dj576shkv7z1wv09rcr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM1NzQxNGEtYzQ5NjY1ZTktZTc2ODViNDMtNWQ5ZTJjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T09:17:00.872447Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2613:4056], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTM1NzQxNGEtYzQ5NjY1ZTktZTc2ODViNDMtNWQ5ZTJjNQ==. CustomerSuppliedId : . TraceId : 01jd705dj576shkv7z1wv09rcr. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:00.872679Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2611:4054], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTM1NzQxNGEtYzQ5NjY1ZTktZTc2ODViNDMtNWQ5ZTJjNQ==. CustomerSuppliedId : . TraceId : 01jd705dj576shkv7z1wv09rcr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:00.872743Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2612:4055], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jd705dj576shkv7z1wv09rcr. SessionId : ydb://session/3?node_id=1&id=OTM1NzQxNGEtYzQ5NjY1ZTktZTc2ODViNDMtNWQ5ZTJjNQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:00.873070Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTM1NzQxNGEtYzQ5NjY1ZTktZTc2ODViNDMtNWQ5ZTJjNQ==, ActorId: [1:2569:4015], ActorState: ExecuteState, TraceId: 01jd705dj576shkv7z1wv09rcr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9741, MsgBus: 29299 2024-11-21T09:17:01.428892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:01.428962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:17:01.429020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4c/r3tmp/tmpK423ds/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9741, node 2 TClient is connected to server localhost:29299 TClient is connected to server localhost:29299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.559022Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.559039Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.559043Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.559136Z node 2 :NET_CLASSIFIER ERROR: got bad distributable config ... 09:17:03.459825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.706085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:03.949636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.243642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2293:3787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.243686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.243743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2298:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:04.244796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:04.412648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2300:3794], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:04.635031Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:2602:4028] TxId: 281474976715671. Ctx: { TraceId: 01jd705h7s6q7at8cpyrnsk8hx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQxN2E2NzEtMzYxMTQ5M2MtNmYzNDZhYzYtZmZjOWZmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T09:17:04.635238Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2610:4054], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZWQxN2E2NzEtMzYxMTQ5M2MtNmYzNDZhYzYtZmZjOWZmODI=. TraceId : 01jd705h7s6q7at8cpyrnsk8hx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2602:4028], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:04.635303Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2608:4052], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWQxN2E2NzEtMzYxMTQ5M2MtNmYzNDZhYzYtZmZjOWZmODI=. TraceId : 01jd705h7s6q7at8cpyrnsk8hx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2602:4028], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:04.635331Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2609:4053], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWQxN2E2NzEtMzYxMTQ5M2MtNmYzNDZhYzYtZmZjOWZmODI=. TraceId : 01jd705h7s6q7at8cpyrnsk8hx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2602:4028], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:07.184196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:17:07.184240Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.495135Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQxN2E2NzEtMzYxMTQ5M2MtNmYzNDZhYzYtZmZjOWZmODI=, ActorId: [2:2581:4028], ActorState: ExecuteState, TraceId: 01jd705h7s6q7at8cpyrnsk8hx, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5115, MsgBus: 12480 2024-11-21T09:17:07.664658Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659145744505180:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:07.664866Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a4c/r3tmp/tmpuU6xj8/pdisk_1.dat 2024-11-21T09:17:07.674335Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5115, node 3 2024-11-21T09:17:07.684176Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:07.684189Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:07.684190Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:07.684249Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12480 TClient is connected to server localhost:12480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:07.767136Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.767173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.767494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.768116Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.778912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:07.786461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.802591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.812646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.951615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659145744506738:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.951641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.956002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.011197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.017978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.025243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.032466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.039586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.048046Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150039474550:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.048070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150039474555:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.048081Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.048714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:08.052766Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659150039474557:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:08.224123Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWMyZjE2NjAtMWE5ODYzYjAtNDhkZjQxNzAtMjE1MzM3MDU=, ActorId: [3:7439659150039474840:2454], ActorState: ExecuteState, TraceId: 01jd705mqmdg5rhkgpjwvf8cdd, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $group type mismatch, expected: { Kind: Data Data { Scheme: 2 } }, actual: Type (Data), schemeType: Int32, schemeTypeId: 1 |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TraverseDatashard::TraverseTwoTables [GOOD] >> KqpTypes::DyNumberCompare [GOOD] >> KqpQuery::QueryCancelWriteImmediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2024-11-21T09:17:05.542430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:05.542468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:05.542477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002593/r3tmp/tmpM4C3Pc/pdisk_1.dat 2024-11-21T09:17:05.614018Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7152, node 1 2024-11-21T09:17:05.707512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:05.707530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:05.707534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:05.707625Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:05.714065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.790007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.790051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.801871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30071 2024-11-21T09:17:06.203252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.967212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.967234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.000060Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.001088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.048086Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.056301Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.056326Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.062206Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.062311Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.062325Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.062328Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.062332Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.062337Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.062340Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.062345Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.062440Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.238861Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.238904Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.240191Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:07.242511Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:07.242636Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:07.243405Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:07.248781Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.248804Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.248816Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:07.251060Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.251091Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.252536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.254236Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.254271Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.257118Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.269376Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.291400Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.404402Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:07.560153Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:08.288336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.288364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.291296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.467409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2432:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.467442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.467798Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2437:3073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:08.467831Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:08.467838Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2439:3075] 2024-11-21T09:17:08.467854Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2439:3075] 2024-11-21T09:17:08.467993Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2440:2944] 2024-11-21T09:17:08.468049Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2439:3075], server id = [2:2440:2944], tablet id = 72075186224037897, status = OK 2024-11-21T09:17:08.468086Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2440:2944], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:17:08.468092Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:17:08.468126Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:17:08.468134Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2437:3073], StatRequests.size() = 1 2024-11-21T09:17:08.469922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2444:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.469941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.470002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2449:3084], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.471655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:17:08.623683Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:17:08.623713Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:17:08.695672Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2439:3075], schemeshard count = 1 2024-11-21T09:17:08.951835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2451:3086], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:17:09.040570Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3175]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:09.040606Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:09.040611Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3175], StatRequests.size() = 1 2024-11-21T09:17:09.048347Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd705mzhdsfffa83ycra7zhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE1YWQ3NzktZjg3NGE1Yi01ODA4MmE2NS05OWMzNDQ4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:09.071382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037889 2024-11-21T09:17:09.199333Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2925:3241]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:09.199386Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:17:09.199393Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2925:3241], StatRequests.size() = 1 2024-11-21T09:17:09.202132Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:2934:3250]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:09.202188Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T09:17:09.202195Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:2934:3250], StatRequests.size() = 1 2024-11-21T09:17:09.208356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd705npd21p69vhtq6ebw8fz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRlYjhkNWItZjU1NWM5MzYtMzdlNjVkMzQtMmUxMjAxMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:17:09.279532Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2983:3207]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:09.279965Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:09.279971Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:09.280016Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:09.280022Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:17:09.280027Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:09.281414Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T09:17:09.281483Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T09:17:09.281569Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3008:3220]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:09.281987Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:09.281993Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:09.282033Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:09.282037Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:17:09.282042Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:09.282366Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T09:17:09.282398Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::DyNumberCompare [GOOD] Test command err: Trying to start YDB, gRPC: 30224, MsgBus: 23659 2024-11-21T09:16:57.114249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659101166680351:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.114457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a80/r3tmp/tmpeH9mFq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30224, node 1 2024-11-21T09:16:57.174017Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:57.176788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:57.176799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:57.176800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:57.176842Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23659 TClient is connected to server localhost:23659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:16:57.215065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:57.215090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:57.216284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:57.222955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.233193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.247263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:57.267179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.275569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.413797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101166681900:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.413822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.443175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.498156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.511810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.519104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.574076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.581667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:57.596941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101166682421:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.596981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.597037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659101166682431:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:57.597701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:57.601692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659101166682433:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:57.805974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.114450Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659101166680351:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.114509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:02.657654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180622654, txId: 281474976710672] shutting down 2024-11-21T09:17:02.737523Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659122641520392:2603] TxId: 281474976710675. Ctx: { TraceId: 01jd705fa2ft5ggb6cprwf9k6x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM5OTZhN2MtM2YyMDJmMTEtMzljNjg4YjctYzUzNzYz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:17:02.738929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzM5OTZhN2MtM2YyMDJmMTEtMzljNjg4YjctYzUzNzYz, ActorId: [1:7439659122641520345:2603], ActorState: ExecuteState, TraceId: 01jd705fa2ft5ggb6cprwf9k6x, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.739033Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180622746, txId: 281474976710674] shutting down 2024-11-21T09:17:02.739059Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T09:17:02.741334Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122641520397:2607], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzM5OTZhN2MtM2YyMDJmMTEtMzljNjg4YjctYzUzNzYz. CustomerSuppliedId : . TraceId : 01jd705fa2ft5ggb6cprwf9k6x. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659122641520392:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.742157Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122641520399:2608], TxId: 281474976710675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzM5OTZhN2MtM2YyMDJmMTEtMzljNjg4YjctYzUzNzYz. CustomerSuppliedId : . TraceId : 01jd705fa2ft5ggb6cprwf9k6x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659122641520392:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.774271Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037919, step: 1732180622746 2024-11-21T09:17:02.774370Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037920, step: 1732180622746 2024-11-21T09:17:02.774400Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037921, step: 1732180622746 2024-11-21T09:17:02.774469Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122641520401:2610], TxId: 281474976710675, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzM5OTZhN2MtM2YyMDJmMTEtMzljNjg4YjctYzUzNzYz. CustomerSuppliedId : . TraceId : 01jd705fa2ft5ggb6cprwf9k6x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659122641520392:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.775186Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439659122641520405:2614]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/LargeTable' scheme changed., code: 2028 , tablet id: 72075186224037919, actor_id: [1:7439659101166682804:2459] 2024-11-21T09:17:02.775215Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439659122641520406:2615]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/LargeTable' scheme changed., code: 2028 , tablet id: 72075186224037920, actor_id: [1:7439659101166682807:2461] 2024-11-21T09:17:02.775252Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439659122641520403:2612]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/LargeTable' scheme changed., code: 2028 , tablet id: 72075186224037921, actor_id: [1:7439659101166682790:2457] 2024-11-21T09:17:02.775400Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037922, step: 1732180622746 2024-11-21T09:17:02.775483Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:743965912264152040 ... , suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.320311Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:09.325937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.334426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.352125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.362938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.450962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659156280704493:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.450991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.456126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.461538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.466939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.473721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.481034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.487968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.496227Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659156280704984:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.496254Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659156280704989:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.496254Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.496840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:09.501296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659156280704991:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:09.682795Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659156280705290:2462], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T09:17:09.682863Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTZiNTA2YmYtY2I4NWQ4MDktNjc4NjljZjMtNDAzNmI5ZWU=, ActorId: [3:7439659156280705277:2454], ActorState: ExecuteState, TraceId: 01jd705p5h0z6recxcecwfex45, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T09:17:09.685552Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659156280705298:2464], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T09:17:09.685625Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTZiNTA2YmYtY2I4NWQ4MDktNjc4NjljZjMtNDAzNmI5ZWU=, ActorId: [3:7439659156280705277:2454], ActorState: ExecuteState, TraceId: 01jd705p5k0dnn1vgy0zaed7yx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T09:17:09.687670Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439659156280705306:2466], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2024-11-21T09:17:09.687722Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTZiNTA2YmYtY2I4NWQ4MDktNjc4NjljZjMtNDAzNmI5ZWU=, ActorId: [3:7439659156280705277:2454], ActorState: ExecuteState, TraceId: 01jd705p5pdeck5z3nfdqfmyy9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 11926, MsgBus: 24008 2024-11-21T09:17:01.456468Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659120582700798:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.456571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00177e/r3tmp/tmpBNW2SY/pdisk_1.dat 2024-11-21T09:17:01.503637Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11926, node 1 2024-11-21T09:17:01.519395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.519407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.519408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.519443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24008 2024-11-21T09:17:01.556781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.556806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.560784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.583009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.589785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:01.596447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.660448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.680947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.690927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.758810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120582702191:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.758845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.791250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.797760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.809614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.816569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.871665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.879718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.887664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120582702706:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.887685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.887693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659120582702711:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.888363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.892312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659120582702713:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.080948Z node 1 :GRPC_SERVER DEBUG: [0x57f37f750800] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39210 2024-11-21T09:17:02.080977Z node 1 :GRPC_SERVER DEBUG: [0x57f37ac07200] created request Name# ExecuteDataQuery 2024-11-21T09:17:02.081024Z node 1 :GRPC_SERVER DEBUG: [0x57f37f750800] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39210 database# /Root 2024-11-21T09:17:02.081079Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jd705er1ah4mdbgbz6bn71kt, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39210, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 3.009136s
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11926 2024-11-21T09:17:05.080951Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659124877670309:2454] TxId: 281474976715671. Ctx: { TraceId: 01jd705er1ah4mdbgbz6bn71kt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:17:05.081145Z node 1 :GRPC_SERVER DEBUG: [0x57f37f750800] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:39210 2024-11-21T09:17:05.081284Z node 1 :GRPC_SERVER DEBUG: [0x57f37f750800] finished request Name# ExecuteDataQuery ok# false peer# unknown 2024-11-21T09:17:05.081317Z node 1 :GRPC_SERVER DEBUG: [0x57f37ac07200] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39210 2024-11-21T09:17:05.081331Z node 1 :GRPC_SERVER DEBUG: [0x57f37ac13800] created request Name# ExecuteDataQuery 2024-11-21T09:17:05.081358Z node 1 :GRPC_SERVER DEBUG: [0x57f37ac07200] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39210 database# /Root 2024-11-21T09:17:05.082411Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659124877670315:2463], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jd705er1ah4mdbgbz6bn71kt. SessionId : ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659124877670309:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:05.082594Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659124877670316:2464], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTAzNzQzY2MtOTIyNWQ2ZTItOGFlOWE5OGQtZmY5ZGFjN2E=. TraceId : 01jd705er1ah4mdbgbz6bn71kt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659124877670309:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:05.082618Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jd705hns484sn2z09as0ywh4, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39210, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:17:05.082902Z ... eamConsumers ok# false data# peer# 2024-11-21T09:17:08.952372Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76f400] received request Name# AddTagsToStream ok# false data# peer# 2024-11-21T09:17:08.952374Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76e800] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2024-11-21T09:17:08.952395Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76dc00] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2024-11-21T09:17:08.952400Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76d000] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T09:17:08.952420Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76c400] received request Name# MergeShards ok# false data# peer# 2024-11-21T09:17:08.952423Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76b800] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T09:17:08.952443Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76a000] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T09:17:08.952443Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76ac00] received request Name# SplitShard ok# false data# peer# 2024-11-21T09:17:08.952463Z node 2 :GRPC_SERVER DEBUG: [0x57f37f769400] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T09:17:08.952468Z node 2 :GRPC_SERVER DEBUG: [0x57f37f775a00] received request Name# SelfCheck ok# false data# peer# 2024-11-21T09:17:08.952485Z node 2 :GRPC_SERVER DEBUG: [0x57f37f775400] received request Name# NodeCheck ok# false data# peer# 2024-11-21T09:17:08.952486Z node 2 :GRPC_SERVER DEBUG: [0x57f37f770c00] received request Name# CreateSession ok# false data# peer# 2024-11-21T09:17:08.952507Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76ee00] received request Name# AttachSession ok# false data# peer# 2024-11-21T09:17:08.952507Z node 2 :GRPC_SERVER DEBUG: [0x57f37f770000] received request Name# DeleteSession ok# false data# peer# 2024-11-21T09:17:08.952527Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76e200] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T09:17:08.952531Z node 2 :GRPC_SERVER DEBUG: [0x57f37f76d600] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T09:17:08.952554Z node 2 :GRPC_SERVER DEBUG: [0x57f37f779000] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T09:17:08.952559Z node 2 :GRPC_SERVER DEBUG: [0x57f37f773600] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T09:17:08.952581Z node 2 :GRPC_SERVER DEBUG: [0x57f37f771e00] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T09:17:08.952587Z node 2 :GRPC_SERVER DEBUG: [0x57f37f771200] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T09:17:08.952610Z node 2 :GRPC_SERVER DEBUG: [0x57f37f75bc00] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T09:17:08.952613Z node 2 :GRPC_SERVER DEBUG: [0x57f37f765e00] received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T09:17:08.952631Z node 2 :GRPC_SERVER DEBUG: [0x57f37f765800] received request Name# RestartTablet ok# false data# peer# 2024-11-21T09:17:08.952650Z node 2 :GRPC_SERVER DEBUG: 
[0x57f37f764c00] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T09:17:08.952651Z node 2 :GRPC_SERVER DEBUG: [0x57f37f764000] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T09:17:08.952668Z node 2 :GRPC_SERVER DEBUG: [0x57f37f763400] received request Name# DropLogStore ok# false data# peer# 2024-11-21T09:17:08.952688Z node 2 :GRPC_SERVER DEBUG: [0x57f37f762e00] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T09:17:08.952691Z node 2 :GRPC_SERVER DEBUG: [0x57f37f761c00] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T09:17:08.952709Z node 2 :GRPC_SERVER DEBUG: [0x57f37f762200] received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T09:17:08.952722Z node 2 :GRPC_SERVER DEBUG: [0x57f37f760a00] received request Name# DropLogTable ok# false data# peer# 2024-11-21T09:17:08.952730Z node 2 :GRPC_SERVER DEBUG: [0x57f37f768800] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T09:17:08.952758Z node 2 :GRPC_SERVER DEBUG: [0x57f37f767000] received request Name# Login ok# false data# peer# 2024-11-21T09:17:08.952761Z node 2 :GRPC_SERVER DEBUG: [0x57f37f766a00] received request Name# DescribeReplication ok# false data# peer# Trying to start YDB, gRPC: 23656, MsgBus: 24936 2024-11-21T09:17:09.231803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659153795066681:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:09.232030Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00177e/r3tmp/tmpctxOlh/pdisk_1.dat 2024-11-21T09:17:09.240198Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23656, node 3 2024-11-21T09:17:09.249561Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:09.249583Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:09.249584Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:09.249625Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24936 TClient is connected to server localhost:24936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:17:09.333836Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:09.333860Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:09.334146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.334900Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:09.345121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.354126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.372805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.384429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:09.554471Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659153795068237:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.554501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.559410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.565067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.571967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.579247Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.586503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.600470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.615289Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659153795068739:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.615308Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659153795068744:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.615315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.615815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:09.620271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659153795068746:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TOlapReboots::CreateTable [GOOD] >> AnalyzeColumnshard::AnalyzeTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.473671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.473693Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.475975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.476067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.476107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.503584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.504373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.504422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.505128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.505141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.505145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2024-11-21T09:16:41.505539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.505977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.506001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.506574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.506998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.507840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.508074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.508192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.508258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.508753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:16:41.508797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.508879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508885Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.508895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.508900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.508910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.508918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.508929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.508935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.508940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... RD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T09:17:10.321815Z node 104 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:10.321824Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:17:10.321874Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:17:10.321902Z node 104 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:10.321907Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [104:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:17:10.321912Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [104:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T09:17:10.321995Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:10.322002Z node 104 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:17:10.322010Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:17:10.322102Z node 104 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.322111Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.322115Z node 104 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:17:10.322120Z node 104 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:17:10.322125Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:17:10.322398Z node 104 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.322409Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.322413Z node 104 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:17:10.322417Z node 104 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:17:10.322421Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:17:10.322430Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:17:10.322572Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:10.322587Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697639 2024-11-21T09:17:10.322609Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2024-11-21T09:17:10.322812Z node 104 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2024-11-21T09:17:10.322836Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:10.322847Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2024-11-21T09:17:10.322942Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.323185Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:10.323409Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 
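Transaction 1003 being completed here is the CreateColumnTable operation that TOlapReboots::CreateTable drives directly through schemeshard; the resulting schema (timestamp Timestamp NOT NULL, data Utf8, key column timestamp) appears in the DescribeScheme output further below. For orientation only, a hedged sketch of the user-facing equivalent via the C++ SDK scheme-query API follows; the unit test does not use this path, the table path is a placeholder, and a table placed inside an existing OlapStore may require tablestore-specific DDL rather than the standalone column table shown here.

```cpp
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

// Illustration only, not the mechanism used by
// ydb/core/tx/schemeshard/ut_olap_reboots (which proposes schemeshard
// transactions directly). Path and types mirror the describe result below.
void CreateColumnTable(NYdb::NTable::TTableClient& client) {
    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
        return session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/ColumnTable` (
                timestamp Timestamp NOT NULL,
                data Utf8,
                PRIMARY KEY (timestamp)
            ) WITH (STORE = COLUMN);
        )").GetValueSync();
    });
    if (!status.IsSuccess()) {
        // GetIssues() carries the detailed error text (e.g. access problems).
        Cerr << status.GetIssues().ToString() << Endl;
    }
}
```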
2024-11-21T09:17:10.334343Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T09:17:10.334367Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:10.334392Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T09:17:10.334414Z node 104 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T09:17:10.334507Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T09:17:10.334512Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:10.334522Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:17:10.335456Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:10.335539Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:10.335563Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:10.335572Z node 104 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:17:10.335592Z node 104 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:17:10.335597Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:10.335603Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:17:10.335617Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [104:362:2342] message: TxId: 1003 2024-11-21T09:17:10.335628Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:10.335634Z node 104 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:17:10.335638Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:17:10.335677Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:17:10.336187Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:17:10.336198Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [104:417:2396] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:17:10.336320Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:10.336376Z node 104 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 66us result status StatusSuccess 2024-11-21T09:17:10.336468Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2024-11-21T09:17:03.710005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:03.710045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:03.710054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0026f8/r3tmp/tmpgJMbqB/pdisk_1.dat 2024-11-21T09:17:03.830314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12985, node 1 2024-11-21T09:17:04.003164Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.003180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.003182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.003243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:04.010441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.086900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.086937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.098816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25996 2024-11-21T09:17:04.509899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.306543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.306583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.341708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:05.342549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.398774Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:05.409038Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:05.409070Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:05.416621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:05.416773Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:05.416794Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:05.416800Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:05.416806Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:05.416813Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:05.416818Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:05.416825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:05.416943Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:05.596035Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.596071Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.597392Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:05.599563Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:05.599674Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:05.600484Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:05.605278Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:05.605292Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:05.605300Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:05.607084Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.607115Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.608263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:05.609797Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:05.609829Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:05.612496Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:05.624794Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.647192Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:05.770282Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:05.927188Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:06.734823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.734853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.761481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:06.789538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:06.789622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:06.789663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:06.789679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:06.789692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:06.789705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:06.789717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:06.789730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:06.789743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:06.789757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:06.789769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:06.789781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:06.794282Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:06.794306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:06.794342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:06.794351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:06.794371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:06.794378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:06.794389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:06.794396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:06.794408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:06.794413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:06.794421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:06.794428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:06.794494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:06.794504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:06.794523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:06.794531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:06.794545Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:06.794551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:06.794570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:06.794576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:06.794591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:17:06.794597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:17:07.788445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2440:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.788503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.789606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.430836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2534:3109], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.430876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.431704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000012s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TOlapReboots::CreateDropTable [GOOD] >> TOlapReboots::CreateDropStore >> KqpCost::Range >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TOlapReboots::DropTableThenStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2024-11-21T09:17:05.519272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:05.519316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:05.519328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00259f/r3tmp/tmp2IshdY/pdisk_1.dat 2024-11-21T09:17:05.598876Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11793, node 1 2024-11-21T09:17:05.697081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:05.697106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:05.697111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:05.697213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:05.705143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.781979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.782021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.793877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1356 2024-11-21T09:17:06.195684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.992944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.992979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.026238Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.027258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.083476Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.093232Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.093262Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.099533Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.099688Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.099709Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.099715Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.099721Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.099727Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.099732Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.099739Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.099831Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.275785Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.275837Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.277015Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:07.278926Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:07.279031Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:07.279798Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T09:17:07.284584Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.284598Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.284607Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T09:17:07.286640Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.286673Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.288151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.289872Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.289905Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.292762Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.304957Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.327125Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.446944Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:07.603501Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:17:08.329912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.847684Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:08.948748Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T09:17:08.948771Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:08.948782Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:08.948954Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2903] 2024-11-21T09:17:08.949005Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2903], schemeshard id = 72075186224037899 2024-11-21T09:17:09.656607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.971965Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:10.155749Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T09:17:10.155770Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T09:17:10.155779Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2976:3108], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T09:17:10.156227Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2985:3113] 2024-11-21T09:17:10.156299Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2985:3113], schemeshard id = 72075186224037905 2024-11-21T09:17:10.867098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3102:3358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.867161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.870491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T09:17:10.972745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3396:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.972793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.974632Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3401:3409]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:10.974705Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:10.974758Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T09:17:10.974765Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3404:3412] 2024-11-21T09:17:10.974783Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3404:3412] 2024-11-21T09:17:10.974959Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3405:3340] 2024-11-21T09:17:10.975013Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3404:3412], server id = [2:3405:3340], tablet id = 72075186224037897, status = OK 2024-11-21T09:17:10.975071Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3405:3340], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:17:10.975079Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:17:10.975136Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:17:10.975161Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3401:3409], StatRequests.size() = 1 2024-11-21T09:17:10.977300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3409:3416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.977337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.977451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3414:3421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.978886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T09:17:11.172367Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:17:11.172399Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:17:11.267523Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3404:3412], schemeshard count = 1 2024-11-21T09:17:11.513675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3416:3423], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2024-11-21T09:17:11.570723Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3563:3514]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.570772Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:11.570777Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3563:3514], StatRequests.size() = 1 2024-11-21T09:17:11.578132Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd705qdw1ar20vzpq9f9svtz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE0ZTdlZWEtYWFhNGY4MjktZmNmYTZhMS04NGE1ZjEyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:11.611622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2024-11-21T09:17:11.763094Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3903:3578]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.763145Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:17:11.763269Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2024-11-21T09:17:11.763277Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:17:11.763330Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:17:11.763337Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3903:3578], StatRequests.size() = 1 2024-11-21T09:17:11.766872Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3912:3587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.766946Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T09:17:11.766954Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3912:3587], StatRequests.size() = 1 2024-11-21T09:17:11.775604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd705r6h200m2tj09nh4qz6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZlNGExOWQtNjA0MWU3YTMtODY5ZGY5ODYtNjQwZTk3OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:17:11.792839Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3952:3598]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.793382Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:11.793391Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:11.793434Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:11.793439Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T09:17:11.793445Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:11.794689Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T09:17:11.794755Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T09:17:11.794807Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3977:3611]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.795206Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:11.795211Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:11.795254Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:11.795258Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T09:17:11.795262Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:11.795485Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T09:17:11.795514Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2024-11-21T09:17:07.043519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:07.043555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:07.043564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0023b6/r3tmp/tmp9LeSuP/pdisk_1.dat 2024-11-21T09:17:07.119118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23024, node 1 2024-11-21T09:17:07.214970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:07.214990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:07.214995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:07.215082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:07.221425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.297499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.297528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.308638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1904 2024-11-21T09:17:07.705709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.451696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.451722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.484467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:08.485298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.542711Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:08.552968Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:08.553011Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:08.560509Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:08.560732Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:08.560758Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:08.560764Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:08.560771Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:08.560777Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:08.560783Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:08.560790Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:08.560926Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:08.734431Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.734463Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.735301Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T09:17:08.736790Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T09:17:08.737029Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T09:17:08.737595Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T09:17:08.743124Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:08.743145Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:08.743154Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T09:17:08.744131Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.744157Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.746641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:08.748127Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:08.748156Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:08.750958Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:08.762620Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.784481Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:08.883237Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:09.059552Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:17:09.801221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:10.252572Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:10.338112Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T09:17:10.338130Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:10.338139Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2490:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:10.338341Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2491:2903] 2024-11-21T09:17:10.338371Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2491:2903], schemeshard id = 72075186224037899 2024-11-21T09:17:11.037452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2622:3195], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.037493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.040925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T09:17:11.153823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2918:3243], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.154988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.155375Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2923:3247]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.155396Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:11.155427Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T09:17:11.155433Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2926:3250] 2024-11-21T09:17:11.155449Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2926:3250] 2024-11-21T09:17:11.155577Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2927:3137] 2024-11-21T09:17:11.155648Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2926:3250], server id = [2:2927:3137], tablet id = 72075186224037897, status = OK 2024-11-21T09:17:11.155685Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2927:3137], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:17:11.155692Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:17:11.155736Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:17:11.155741Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2923:3247], StatRequests.size() = 1 2024-11-21T09:17:11.159834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2931:3254], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.159876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.159981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2936:3259], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.161229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T09:17:11.249820Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:17:11.249852Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:17:11.301982Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2926:3250], schemeshard count = 1 2024-11-21T09:17:11.607977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2938:3261], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T09:17:11.792348Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3084:3347]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.792419Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:11.792428Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3084:3347], StatRequests.size() = 1 2024-11-21T09:17:11.803028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd705qkh0z2smdwhkvkf23eg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGFkZjRiNi0yNzMyYWFkZi1kODVlYTc3Zi0xOTBiOTJhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:11.827664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2024-11-21T09:17:11.960530Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3396:3408]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.960590Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:17:11.960598Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3396:3408], StatRequests.size() = 1 2024-11-21T09:17:11.964258Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3405:3417]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:11.964313Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T09:17:11.964320Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3405:3417], StatRequests.size() = 1 2024-11-21T09:17:11.972490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd705rcp4bs884mwf9evanbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZGQ4ZmItZDhjNGU5ZWQtMWJlNmE5NTMtNjEwNzg0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:17:12.011410Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3444:3382]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:12.012070Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:12.012086Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:12.012139Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:17:12.012149Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T09:17:12.012157Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:12.013924Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T09:17:12.013990Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T09:17:12.014067Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3469:3395]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:17:12.014551Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:12.014563Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:17:12.014641Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:17:12.014649Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T09:17:12.014655Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:17:12.015028Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T09:17:12.015062Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpLimits::QueryExecTimeout [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropTableThenStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.473599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.473618Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.475449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.475557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.475592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.479977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T09:16:41.487808Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.506492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.506555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.506673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.506681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.507410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.507421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.507426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.507858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.508308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.509384Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.509431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.509607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.509711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.509718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.509745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.509755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.510183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.510221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.510286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.510300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.510305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.510310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.510315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.510319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.510323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.510332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.510337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.510341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T09:17:12.432139Z node 85 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:12.432147Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:12.432172Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:17:12.432192Z node 85 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:12.432196Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [85:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T09:17:12.432200Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [85:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T09:17:12.432272Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:12.432280Z node 85 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#1005:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T09:17:12.432286Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:17:12.432328Z node 85 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:12.432353Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:12.432357Z node 85 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:12.432362Z node 85 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:17:12.432366Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:17:12.432449Z node 85 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:12.432459Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 
2024-11-21T09:17:12.432462Z node 85 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:12.432469Z node 85 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:17:12.432473Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:17:12.432481Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:17:12.432789Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:12.432812Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 0, tablet: 72075186233409546 2024-11-21T09:17:12.432966Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T09:17:12.432974Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:12.432987Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T09:17:12.433020Z node 85 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 130 2024-11-21T09:17:12.433294Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:17:12.433316Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:17:12.433585Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:12.433609Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:12.433615Z node 85 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:12.433628Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:17:12.433653Z node 85 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:17:12.433657Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:12.433662Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T09:17:12.433666Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:12.433670Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:17:12.433673Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:17:12.433693Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:17:12.434240Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 
72057594046678944 2024-11-21T09:17:12.434314Z node 85 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:17:12.434397Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:12.434552Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2024-11-21T09:17:12.435492Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:17:12.435501Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:17:12.435513Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:12.436249Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:17:12.436266Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:17:12.436349Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1005 2024-11-21T09:17:12.436384Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:17:12.436389Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:17:12.436447Z node 85 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:17:12.436461Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:17:12.436465Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [85:541:2519] TestWaitNotification: OK eventTxId 1005 2024-11-21T09:17:12.436522Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:12.436546Z node 85 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 32us result status StatusPathDoesNotExist 2024-11-21T09:17:12.436578Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true 
CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:17:12.436635Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:12.436648Z node 85 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 15us result status StatusPathDoesNotExist 2024-11-21T09:17:12.436664Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpCost::Range [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> TOlapReboots::CreateDropStandaloneTable [GOOD] >> TOlapReboots::AlterTtlSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::PatternCache [GOOD] Test command err: forced failure at ydb/core/kqp/ut/service/kqp_service_ut.cpp:70, virtual void NKikimr::NKqp::NTestSuiteKqpService::TTestCaseCloseSessionsWithLoad::Execute_(NUnitTest::TTestContext &): Fast fail to avoid 10 min time waste, https://github.com/ydb-platform/ydb/issues/5349 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1548ED99) NKikimr::NKqp::NTestSuiteKqpService::TTestCaseCloseSessionsWithLoad::Execute_(NUnitTest::TTestContext&)+307 (0x1520A563) NKikimr::NKqp::NTestSuiteKqpService::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x152112E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15490D4E) NKikimr::NKqp::NTestSuiteKqpService::TCurrentTest::Execute()+428 (0x15210CAC) NUnitTest::TTestFactory::Execute()+803 (0x154914C3) NUnitTest::RunMain(int, char**)+3005 (0x154A46DD) ??+0 (0x7F72E511DD90) __libc_start_main+128 (0x7F72E511DE40) _start+41 (0x14401029) Trying to start YDB, gRPC: 3462, MsgBus: 20370 2024-11-21T09:16:32.876282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658996329795581:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:32.876367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a31/r3tmp/tmptDx5l4/pdisk_1.dat 2024-11-21T09:16:32.939104Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3462, node 1 
2024-11-21T09:16:32.949548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:32.949558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:32.949559Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:32.949585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20370 2024-11-21T09:16:32.975415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:32.975451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:32.976477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:32.996427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.170652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763380:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763387:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763355:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763383:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763381:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763384:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763382:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763405:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763415:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763416:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763428:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.170950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659000624763429:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.171490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T09:16:33.174025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763440:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763425:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763441:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763399:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763398:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763400:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763442:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763401:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763402:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T09:16:33.174088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659000624763397:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } Trying to start YDB, gRPC: 64246, MsgBus: 7117 2024-11-21T09:16:37.813558Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659018453462399:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:37.813797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a31/r3tmp/tmpRyLo19/pdisk_1.dat 2024-11-21T09:16:37.825778Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64246, node 2 2024-11-21T09:16:37.840494Z node ... 1-21T09:16:52.564722Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439659081783319356:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } Trying to start YDB, gRPC: 23022, MsgBus: 18309 2024-11-21T09:16:55.091683Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439659096246883255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:55.092042Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a31/r3tmp/tmpVBzEFB/pdisk_1.dat 2024-11-21T09:16:55.103662Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23022, node 6 2024-11-21T09:16:55.123520Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.123532Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.123534Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.123572Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18309 TClient is connected to server localhost:18309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.192705Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.192743Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.193786Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.194491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.410428Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883890:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410441Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883887:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410457Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883886:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410462Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883870:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410538Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410773Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883929:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883926:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410878Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883934:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410918Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883959:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410925Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883946:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.410931Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883960:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883979:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411127Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883977:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411146Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411191Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:16:55.411213Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883999:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411230Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439659096246883995:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.411241Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.413282Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246884006:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413290Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883958:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883896:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413301Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883966:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883897:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413308Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883991:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413313Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883967:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413314Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883947:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413319Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883895:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:16:55.413335Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439659096246883965:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:17:00.092275Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439659096246883255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:00.092309Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 19733, MsgBus: 30184 2024-11-21T09:17:11.968378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659163155986641:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:11.968449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004576/r3tmp/tmpCVjOCf/pdisk_1.dat 2024-11-21T09:17:12.053682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19733, node 1 2024-11-21T09:17:12.067951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:12.067982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:12.069064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:12.105037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:12.105048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:12.105049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:12.105080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30184 TClient is connected to server localhost:30184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:12.194713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:12.208040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.274643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.284043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.295625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.334679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167450955334:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.334710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.464413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.469536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.524230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.579830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.589649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.604169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.611779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167450955854:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.611805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.611807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167450955859:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.612500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:12.616351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659167450955861:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TOlapReboots::CreateMultipleTables [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 24828, MsgBus: 13080 2024-11-21T09:17:01.294424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659118268642965:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.294681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d0/r3tmp/tmpYa9EC7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24828, node 1 2024-11-21T09:17:01.357266Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:01.361260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.361274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.361275Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.361311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13080 2024-11-21T09:17:01.395646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.395681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.396791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.408592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.422386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:17:01.448561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.467277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.480083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.610769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118268644513:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.610797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.644931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.651555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.662004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.669019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.676754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.690292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.698701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118268645024:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.698724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.698752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659118268645029:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.699398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.703202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659118268645031:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:01.894452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:02.068290Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659122563613030:2487] TxId: 281474976710672. Ctx: { TraceId: 01jd705epg70r5jm9bfvpg0vt1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxNGVhMGUtNzNjNmFhNWYtM2MzMjJiNWItZTJhMWQ0ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. } 2024-11-21T09:17:02.069544Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122563613042:2501], TxId: 281474976710672, task: 5. Ctx: { TraceId : 01jd705epg70r5jm9bfvpg0vt1. SessionId : ydb://session/3?node_id=1&id=M2UxNGVhMGUtNzNjNmFhNWYtM2MzMjJiNWItZTJhMWQ0ODM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659122563613030:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.070790Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122563613037:2497], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2UxNGVhMGUtNzNjNmFhNWYtM2MzMjJiNWItZTJhMWQ0ODM=. TraceId : 01jd705epg70r5jm9bfvpg0vt1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659122563613030:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.071677Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659122563613040:2499], TxId: 281474976710672, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2UxNGVhMGUtNzNjNmFhNWYtM2MzMjJiNWItZTJhMWQ0ODM=. TraceId : 01jd705epg70r5jm9bfvpg0vt1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439659122563613030:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.071939Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2UxNGVhMGUtNzNjNmFhNWYtM2MzMjJiNWItZTJhMWQ0ODM=, ActorId: [1:7439659122563613012:2487], ActorState: ExecuteState, TraceId: 01jd705epg70r5jm9bfvpg0vt1, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 28852, MsgBus: 6818 2024-11-21T09:17:02.395972Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659123477995366:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:02.396173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d0/r3tmp/tmpf47C43/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28852, node 2 2024-11-21T09:17:02.420803Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:02.420976Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:02.420984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:02.420986Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:02.421043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6818 TClient is connected to server localhost:6818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { ... elf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.789495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:02.798011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659123477997413:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.798038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.798068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659123477997418:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:02.798777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:02.802368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659123477997420:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:02.986302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.396168Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659123477995366:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:07.396224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:08.162583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg0ZWJhMzctNjM2MTdmNi1kYjRjNDVjZC05ZTc0ZWVhMw==, ActorId: [2:7439659149247802593:2589], ActorState: ExecuteState, TraceId: 01jd705mjtbz9yfv4j81zsw4j2, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 98ms exceeded, terminating after 101ms 2024-11-21T09:17:08.266565Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659149247802687:2589] TxId: 281474976715674. Ctx: { TraceId: 01jd705mp504rmjm4aqcd6xpjt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTg0ZWJhMzctNjM2MTdmNi1kYjRjNDVjZC05ZTc0ZWVhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 100ms during execution } ] 2024-11-21T09:17:08.266655Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659149247802702:2619], TxId: 281474976715674, task: 9. Ctx: { TraceId : 01jd705mp504rmjm4aqcd6xpjt. SessionId : ydb://session/3?node_id=2&id=OTg0ZWJhMzctNjM2MTdmNi1kYjRjNDVjZC05ZTc0ZWVhMw==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439659149247802687:2589], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:08.328985Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659149247802700:2617], TxId: 281474976715674, task: 7. Ctx: { TraceId : 01jd705mp504rmjm4aqcd6xpjt. SessionId : ydb://session/3?node_id=2&id=OTg0ZWJhMzctNjM2MTdmNi1kYjRjNDVjZC05ZTc0ZWVhMw==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439659149247802687:2589], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:08.329391Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg0ZWJhMzctNjM2MTdmNi1kYjRjNDVjZC05ZTc0ZWVhMw==, ActorId: [2:7439659149247802593:2589], ActorState: ExecuteState, TraceId: 01jd705mp504rmjm4aqcd6xpjt, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 100ms during execution Trying to start YDB, gRPC: 18751, MsgBus: 29904 2024-11-21T09:17:08.571085Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659150063252511:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:08.571120Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d0/r3tmp/tmpnlw9t2/pdisk_1.dat 2024-11-21T09:17:08.580880Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18751, node 3 2024-11-21T09:17:08.591231Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:08.591245Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:08.591247Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:08.591286Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29904 TClient is connected to server localhost:29904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:08.673197Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.673325Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.673553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:08.674221Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.685782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:08.693633Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:08.709780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:08.718993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:08.870671Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150063254050:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.870705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.875679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.882383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.893114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.900004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.907052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.913643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.922435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150063254562:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.922454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.922476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150063254567:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.922982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:08.927291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659150063254569:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=ZWQzOGNlMDEtZmRhNTZjZGMtODAyNjMwNTEtNjIzNzZlYTI= |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 14744, MsgBus: 5319 2024-11-21T09:17:12.251624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659167894924763:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:12.251851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00456c/r3tmp/tmpDPKRll/pdisk_1.dat 2024-11-21T09:17:12.295600Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14744, node 1 2024-11-21T09:17:12.305180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:12.305197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:12.305199Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:12.305227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5319 TClient is connected to server localhost:5319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:12.346334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.352509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:12.352544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:12.353611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:12.357885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:12.417625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.432134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.443056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:12.497232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167894926301:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.497264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.517968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.522674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.533040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.539723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.547764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.553861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:12.565909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167894926793:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.565934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659167894926798:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.565936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.567165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:12.574594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659167894926800:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:12.818053Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2024-11-21T09:17:12.839324Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7439659167894927122:2454] 2024-11-21T09:17:12.839346Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7439659167894927108:2454] 2024-11-21T09:17:12.839673Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2024-11-21T09:17:12.839724Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2024-11-21T09:17:12.840958Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2024-11-21T09:17:12.842028Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1732180632889 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7439659167894925209 RawX2: 4294969512 } } Step: 1732180632889 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2024-11-21T09:17:12.842043Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T09:17:12.842067Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2024-11-21T09:17:12.842072Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:17:12.842079Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732180632889:281474976710671] in PlanQueue unit at 72075186224037914 2024-11-21T09:17:12.842122Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1732180632889:281474976710671 keys extracted: 0 2024-11-21T09:17:12.842189Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:17:12.842949Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1732180632889 txid# 281474976710671} 2024-11-21T09:17:12.842962Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1732180632889} 2024-11-21T09:17:12.842973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2024-11-21T09:17:12.842983Z node 1 :TX_DATASHARD DEBUG: Complete [1732180632889 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7439659167894927124:3484], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T09:17:12.842995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T09:17:12.843010Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1732180632889:281474976710671 created 2024-11-21T09:17:12.843082Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2024-11-21T09:17:12.843109Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2024-11-21T09:17:12.843117Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2024-11-21T09:17:12.843166Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. 
Resolved key sets: 1 2024-11-21T09:17:12.843201Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:17:12.843212Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2024-11-21T09:17:12.843231Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. Shard resolve complete, resolved shards: 1 2024-11-21T09:17:12.843242Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { ... 8:2461]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T09:17:12.864082Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:12.864090Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:12.864097Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T09:17:12.864108Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2024-11-21T09:17:12.864122Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7439659167894927138:2461] 2024-11-21T09:17:12.864136Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:12.864195Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. 
SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:12.864222Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:12.864226Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T09:17:12.864229Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T09:17:12.864232Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2024-11-21T09:17:12.864236Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:12.864243Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:12.864246Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2024-11-21T09:17:12.864248Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927138:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T09:17:12.864273Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2024-11-21T09:17:12.864313Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:17:12.864371Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T09:17:12.864433Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7439659167894927108:2454], seqNo: 1, nRows: 1 2024-11-21T09:17:12.864481Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439659167894927138:2461], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2730 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 2088 FinishTimeMs: 1732180632864 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 35 BuildCpuTimeUs: 2053 WaitInputTimeUs: 470 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732180632861 } MaxMemoryUsage: 1048576 } 2024-11-21T09:17:12.864491Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439659167894927138:2461] 2024-11-21T09:17:12.864503Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439659167894927140:2462], 2024-11-21T09:17:12.865701Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7439659167894927141:2462] 2024-11-21T09:17:12.865738Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:12.865751Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:12.865759Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T09:17:12.865763Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. 
Tasks execution finished 2024-11-21T09:17:12.865767Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659167894927140:2462], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd705s7b1hk5v0h4by0ymg9y. SessionId : ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T09:17:12.865800Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2024-11-21T09:17:12.865833Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:17:12.865851Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439659167894927140:2462], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 563 DurationUs: 3000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 199 FinishTimeMs: 1732180632865 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 149 WaitInputTimeUs: 1615 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732180632862 } MaxMemoryUsage: 1048576 } 2024-11-21T09:17:12.865866Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439659167894927140:2462] 2024-11-21T09:17:12.865893Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:17:12.865900Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T09:17:12.865909Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659167894927133:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705s7b1hk5v0h4by0ymg9y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIyYjczMGQtZWEyMzNmYzMtN2JhOTA0NTEtZWI5NmM5M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003293s ReadRows: 1 ReadBytes: 20 ru: 2 rate limiter was not found force flag: 1 2024-11-21T09:17:12.866119Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180632889, txId: 281474976710671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.475301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.475320Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.477416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.477530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.477563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487915Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.506858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
} } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.506918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.507023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.507030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.507731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.507742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.507746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.508180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.508675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.509272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.509644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.509687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.509830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.509931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.509939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.509964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.509977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.510410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.510449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.510510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.510525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.510529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.510534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.510538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.510542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.510545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.510555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.510560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.510563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
r: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:13.347783Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:13.347786Z node 89 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:17:13.347790Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T09:17:13.347793Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:17:13.347800Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:17:13.348071Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:13.348085Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:5 msg type: 268697639 2024-11-21T09:17:13.348098Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2024-11-21T09:17:13.348317Z node 89 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2024-11-21T09:17:13.348341Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:13.348353Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2024-11-21T09:17:13.348489Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:13.348625Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:13.348688Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:13.359435Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000005 2024-11-21T09:17:13.359453Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:13.359471Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000005 2024-11-21T09:17:13.359479Z node 89 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000005 2024-11-21T09:17:13.359540Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 
72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T09:17:13.359543Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:13.359551Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:17:13.360135Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:13.360164Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:13.360180Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:13.360187Z node 89 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:17:13.360218Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:17:13.360223Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:13.360230Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:17:13.360243Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [89:357:2337] message: TxId: 1003 2024-11-21T09:17:13.360250Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:13.360255Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:17:13.360260Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:17:13.360299Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:17:13.360652Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:17:13.360665Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [89:422:2401] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:17:13.360775Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:13.360842Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 73us result status StatusSuccess 2024-11-21T09:17:13.360963Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:13.361115Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:13.361141Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 29us result status StatusSuccess 2024-11-21T09:17:13.361174Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.6%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::ScanScriptingRangeFullScan+SourceRead |94.6%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpCost::ScanQueryRangeFullScan-SourceRead |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> test_public_api.py::TestBadSession::test_simple >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 11609, MsgBus: 23642 2024-11-21T09:17:14.189610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659175178056739:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:14.189662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004549/r3tmp/tmpi4CzSZ/pdisk_1.dat 2024-11-21T09:17:14.229377Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11609, node 1 2024-11-21T09:17:14.239910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:14.239925Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:14.239926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:14.239958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23642 TClient is connected to server localhost:23642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:14.289907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:14.289933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:14.290985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:14.311903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.318175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.380294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.400009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.410693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.435778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659175178058141:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.435805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.459159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.465085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.471958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.478815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.485941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.493268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.501233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659175178058632:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.501254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659175178058637:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.501262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.501837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:14.506245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659175178058639:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:14.694014Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659175178058955:2459] TxId: 281474976715672. Ctx: { TraceId: 01jd705v1f8st77bt4na9kqcea, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRmNzQyNDgtNzM5MzFkZWItOThjYjIwZC1hMzA2NDg3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:17:14.696863Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180634737, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 25181, MsgBus: 6802 2024-11-21T09:17:14.164288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659176407463532:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:14.164363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00454a/r3tmp/tmpgsQlrD/pdisk_1.dat 2024-11-21T09:17:14.213940Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25181, node 1 2024-11-21T09:17:14.226080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:14.226095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:14.226097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:14.226129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6802 TClient is connected to server localhost:6802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:17:14.265670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:14.265698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:14.266833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:17:14.293707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.301262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.317217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.331522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.339795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.419528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176407464931:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.419554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.445618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.451434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.458120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.464623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.471964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.479070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.487128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176407465424:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.487149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176407465429:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.487154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.487701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:14.492201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659176407465431:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup >> KqpCost::ScanScriptingRangeFullScan-SourceRead |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 29869, MsgBus: 14339 2024-11-21T09:17:14.276083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659176405582114:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:14.276133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004546/r3tmp/tmpOrYNlh/pdisk_1.dat 2024-11-21T09:17:14.322025Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29869, node 1 2024-11-21T09:17:14.329658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:14.329681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:14.329683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:14.329728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14339 TClient is connected to server localhost:14339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:14.373813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:14.376595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:14.376626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:14.377795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:14.386432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.449252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.464281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.473493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:14.545971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176405583517:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.545996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.571074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.577705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.632155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.639879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.647081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.654069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:14.662933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176405584033:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.662966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.662973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659176405584038:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:14.663577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:14.667203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659176405584040:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:14.831507Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2024-11-21T09:17:14.844762Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7439659176405584340:2454] 2024-11-21T09:17:14.844778Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7439659176405584326:2454] 2024-11-21T09:17:14.845165Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2024-11-21T09:17:14.845199Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2024-11-21T09:17:14.846109Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2024-11-21T09:17:14.847105Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1732180634891 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7439659176405582432 RawX2: 4294969517 } } Step: 1732180634891 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2024-11-21T09:17:14.847117Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T09:17:14.847144Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2024-11-21T09:17:14.847147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:17:14.847152Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732180634891:281474976710671] in PlanQueue unit at 72075186224037914 2024-11-21T09:17:14.847182Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1732180634891:281474976710671 keys extracted: 0 2024-11-21T09:17:14.847237Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:17:14.848144Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1732180634891 txid# 281474976710671} 2024-11-21T09:17:14.848157Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1732180634891} 2024-11-21T09:17:14.848174Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2024-11-21T09:17:14.848188Z node 1 :TX_DATASHARD DEBUG: Complete [1732180634891 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7439659176405584342:3489], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T09:17:14.848192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T09:17:14.848262Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1732180634891:281474976710671 created 2024-11-21T09:17:14.848317Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2024-11-21T09:17:14.848345Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T09:17:14.848357Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2024-11-21T09:17:14.848428Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. 
Resolved key sets: 1 2024-11-21T09:17:14.848462Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:17:14.848469Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2024-11-21T09:17:14.848491Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. Shard resolve complete, resolved shards: 1 2024-11-21T09:17:14.848496Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: ... BUG: SelfId: [1:7439659176405584358:2463]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2024-11-21T09:17:14.850869Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:383;event=wait_all_scanner_finished;scans=0; 2024-11-21T09:17:14.850876Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584358:2463]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2024-11-21T09:17:14.850904Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2024-11-21T09:17:14.850908Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:177 :TEvFetcherFinished: [1:7439659176405584358:2463] 2024-11-21T09:17:14.850911Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584355:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. TraceId : 01jd705v6d63zaxp4bczx6km3e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:14.850917Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T09:17:14.850918Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7439659176405584355:2461] 2024-11-21T09:17:14.850920Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584355:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. TraceId : 01jd705v6d63zaxp4bczx6km3e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. 
Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:14.850922Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T09:17:14.850925Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:14.850929Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584355:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. TraceId : 01jd705v6d63zaxp4bczx6km3e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:14.850930Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2024-11-21T09:17:14.850932Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584355:2461], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. TraceId : 01jd705v6d63zaxp4bczx6km3e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T09:17:14.850951Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2024-11-21T09:17:14.850967Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:14.850973Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:14.850975Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T09:17:14.850977Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T09:17:14.850978Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:17:14.850998Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439659176405584355:2461], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 546 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 211 FinishTimeMs: 1732180634850 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 29 BuildCpuTimeUs: 182 WaitInputTimeUs: 453 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732180634849 } MaxMemoryUsage: 1048576 } 2024-11-21T09:17:14.851006Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439659176405584355:2461] 2024-11-21T09:17:14.851014Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439659176405584356:2462], 2024-11-21T09:17:14.851030Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T09:17:14.851048Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7439659176405584326:2454], seqNo: 1, nRows: 1 2024-11-21T09:17:14.851221Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7439659176405584359:2462] 2024-11-21T09:17:14.851234Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T09:17:14.851241Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T09:17:14.851250Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T09:17:14.851251Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. 
Tasks execution finished 2024-11-21T09:17:14.851254Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439659176405584356:2462], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd705v6d63zaxp4bczx6km3e. SessionId : ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T09:17:14.851264Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2024-11-21T09:17:14.851275Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439659176405584356:2462], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1336 DurationUs: 2000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 144 FinishTimeMs: 1732180634851 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 37 BuildCpuTimeUs: 107 WaitInputTimeUs: 1616 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732180634849 } MaxMemoryUsage: 1048576 } 2024-11-21T09:17:14.851277Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439659176405584356:2462] 2024-11-21T09:17:14.851279Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:17:14.851293Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:17:14.851299Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T09:17:14.851303Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439659176405584351:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd705v6d63zaxp4bczx6km3e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIwZTJkZjUtMWRjYjNlNGMtZTUzY2E2MTUtZTQ3NTdmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001882s ReadRows: 3 ReadBytes: 96 ru: 3 rate limiter was not found force flag: 1 2024-11-21T09:17:14.851416Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180634891, txId: 281474976710671] shutting down |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::RangeFullScan |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15316, MsgBus: 29464 2024-11-21T09:17:15.350678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659178223265556:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:15.350694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044fa/r3tmp/tmpGCzn7i/pdisk_1.dat 2024-11-21T09:17:15.406260Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15316, node 1 2024-11-21T09:17:15.414743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:15.414758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:15.414759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:15.414784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29464 TClient is connected to server localhost:29464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:17:15.451663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:15.451692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:15.452751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:15.456126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.468898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.483279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.498136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.508887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.621775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178223267094:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.621801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.649350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.655080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.661981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.668670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.676086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.684425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.698497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178223267608:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.698516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178223267613:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.698525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.699004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:15.703356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659178223267615:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 29286, MsgBus: 18271 2024-11-21T09:17:15.349986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659178299738445:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:15.350006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ff/r3tmp/tmpFaz4Ag/pdisk_1.dat 2024-11-21T09:17:15.399535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29286, node 1 2024-11-21T09:17:15.413123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:15.413143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:15.413145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:15.413182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18271 TClient is connected to server localhost:18271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:17:15.450199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:15.450225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:15.451354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:15.460169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.469515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.530320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:15.545553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.558433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.614487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178299739976:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.614517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.637764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.643280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.655054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.662049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.668660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.676410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:15.684879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178299740468:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.684910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.684934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659178299740473:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:15.685564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:15.689216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659178299740475:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:15.860529Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659178299740792:2459] TxId: 281474976710672. Ctx: { TraceId: 01jd705w612zd2hakw8rjt45es, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJlZDQ3ZTUtMmJlMzAwM2MtNDhjYmEyZGQtZGZlN2I5NWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:17:15.863186Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180635906, txId: 281474976710671] shutting down >> KqpCost::RangeFullScan [GOOD] >> KqpLimits::TooBigQuery [GOOD] >> KqpLimits::TooBigKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::RangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 27610, MsgBus: 12031 2024-11-21T09:17:15.717043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659182043319507:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:15.717465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0044ec/r3tmp/tmp5Sm9xy/pdisk_1.dat 2024-11-21T09:17:15.760168Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27610, node 1 2024-11-21T09:17:15.772504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:15.772519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:15.772521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:15.772553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12031 TClient is connected to server localhost:12031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:17:15.817944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:15.817971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:15.819180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:15.843928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.847032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.907782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.923385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:15.934251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:16.002297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659182043321054:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:16.002373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:16.008337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.014525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.069675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.125399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.131639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.138230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:16.147115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659186338288868:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:16.147137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659186338288873:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:16.147146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:16.147789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:16.151555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659186338288875:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } query_phases { duration_us: 1140 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1100 affected_shards: 1 } compilation { duration_us: 18658 cpu_time_us: 18005 } process_cpu_time_us: 84 total_duration_us: 20692 total_cpu_time_us: 19189 >> KqpLimits::TooBigKey [GOOD] >> KqpLimits::TooBigColumn >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] Test command err: Trying to start YDB, gRPC: 5058, MsgBus: 1951 2024-11-21T09:16:53.467368Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659086846861272:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.467624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b82/r3tmp/tmp8OmzCb/pdisk_1.dat 2024-11-21T09:16:53.532043Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5058, node 1 2024-11-21T09:16:53.546210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.546223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.546224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.546260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1951 2024-11-21T09:16:53.566799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.566825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.567946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1951 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.606761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.613934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.677964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.695805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.706635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.778234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086846862831:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.778279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.810978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.819029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.830152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.844145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.859152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.871391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.886427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086846863335:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.886447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.886494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659086846863340:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.887250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.891714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659086846863342:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.115287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25691, MsgBus: 63449 2024-11-21T09:16:54.994044Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659088060401370:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:54.994283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b82/r3tmp/tmpRU98w2/pdisk_1.dat 2024-11-21T09:16:55.004955Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25691, node 2 2024-11-21T09:16:55.014907Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:55.014928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:55.014931Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:55.014981Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63449 TClient is connected to server localhost:63449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:55.094113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:55.094141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:55.095162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:55.096923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.099520Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:55.109593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:55.118034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:16:55.133940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:16:55.146332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:55.326951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659092355370205:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:55.326996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_ ... 2024-11-21T09:17:07.821070Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28476, node 3 2024-11-21T09:17:07.829626Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:07.829639Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:07.829641Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:07.829689Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15733 TClient is connected to server localhost:15733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:07.910532Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.910642Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.911646Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.912832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.916771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.927783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.944588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:07.956023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:08.072058Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150774713195:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.072083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.075043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.130369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.136954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.144073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.151343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.158215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.166553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150774713711:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.166576Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659150774713716:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.166585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.167170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:08.171474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659150774713718:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:08.334855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.370155Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mwgf12mst42teskzzcf, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.373217Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mwj4tq5qsn2pznpseq7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.376785Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mwnbx1w98mqjqr6c9e3, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.381397Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mwsfr5kqke733pq3q75, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.390129Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mwyf10eak9m9cyqdz6r, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.397305Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mx7e5wfmgny6t85peaz, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.410099Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705mxjakb359pavg3dvp3s, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.427500Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705my16ep6hyqj2ma6ztf7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.444920Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705myg34qy69b45h511m8n, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.458750Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705myx64sgt2yg96xn7zm1, Create QueryResponse for error on request, msg: 2024-11-21T09:17:08.545751Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659150774714310:2454] TxId: 281474976715681. 
Ctx: { TraceId: 01jd705n1eejqmmhfwqyyn4vps, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 19ms } {
: Error: Cancelling after 19ms during execution } ] 2024-11-21T09:17:08.545838Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659150774714315:2539], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=. TraceId : 01jd705n1eejqmmhfwqyyn4vps. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439659150774714310:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:08.545850Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659150774714314:2538], TxId: 281474976715681, task: 1. Ctx: { TraceId : 01jd705n1eejqmmhfwqyyn4vps. SessionId : ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659150774714310:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:08.546079Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzAwMzYxNjQtZTczMmI0MmUtZWY0ZTU0OWQtMjJiNjIwZGY=, ActorId: [3:7439659150774714000:2454], ActorState: ExecuteState, TraceId: 01jd705n1eejqmmhfwqyyn4vps, Create QueryResponse for error on request, msg: 2024-11-21T09:17:12.810731Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439659146479744356:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:12.810764Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> KqpLimits::TooBigColumn [GOOD] >> YdbIndexTable::OnlineBuild >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> YdbIndexTable::MultiShardTableOneUniqIndex >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> YdbIndexTable::MultiShardTableOneIndex >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |94.7%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn [GOOD] Test command err: Trying to start YDB, gRPC: 62121, MsgBus: 2030 2024-11-21T09:16:52.826455Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659080003563247:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.826510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bf8/r3tmp/tmphYM1rC/pdisk_1.dat 2024-11-21T09:16:52.881629Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62121, node 1 2024-11-21T09:16:52.909643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.909654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.909656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.909695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:52.959788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:52.959823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:52.960783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2030 TClient is connected to server localhost:2030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.013542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.023572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.097512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:53.119794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.137643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.202561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084298531933:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.202588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.287891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.294031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.305403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.318873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.332165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.339712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.348710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084298532438:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659084298532443:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.348734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.349351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:53.352714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659084298532445:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:53.575609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.826269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659080003563247:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:57.826312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:07.876700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:17:07.876719Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:16.580798Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659183082781396:2479] TxId: 281474976710672. Ctx: { TraceId: 01jd7056jn3sdyjtr0x6cpf8rc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFkYzYxOTMtYWQ1MjZjNDgtNTc2M2M0MjQtNjljOGI1MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 50663458 > 50331648 2024-11-21T09:17:16.583109Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjFkYzYxOTMtYWQ1MjZjNDgtNTc2M2M0MjQtNjljOGI1MTE=, ActorId: [1:7439659084298533041:2479], ActorState: ExecuteState, TraceId: 01jd7056jn3sdyjtr0x6cpf8rc, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (50663458 > 50331648), code: 200509 Trying to start YDB, gRPC: 61003, MsgBus: 13364 2024-11-21T09:17:16.863927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659183732909296:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:16.864168Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bf8/r3tmp/tmpTwyzTb/pdisk_1.dat 2024-11-21T09:17:16.874891Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61003, node 2 2024-11-21T09:17:16.881223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:16.881238Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:16.881239Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:16.881269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13364 TClient is connected to server localhost:13364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:16.964327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:16.964379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:16.965457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:16.966023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:16.975349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at ... 
ose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.243773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.251118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.259715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659188027878644:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:17.259756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:17.259761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659188027878649:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:17.260420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:17.264326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659188027878651:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:17.438882Z node 2 :TX_DATASHARD ERROR: Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2024-11-21T09:17:17.438944Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2024-11-21T09:17:17.439017Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659188027878950:2454] TxId: 281474976715671. Ctx: { TraceId: 01jd705xq34hsjyk40y2yatkfk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjJmZGYzYTEtYWI2M2Y0MGUtNzA0MzAyYjMtODY5NGRiOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2024-11-21T09:17:17.440194Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjJmZGYzYTEtYWI2M2Y0MGUtNzA0MzAyYjMtODY5NGRiOWU=, ActorId: [2:7439659188027878930:2454], ActorState: ExecuteState, TraceId: 01jd705xq34hsjyk40y2yatkfk, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 Trying to start YDB, gRPC: 15028, MsgBus: 11236 2024-11-21T09:17:17.729525Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659190760285747:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:17.729542Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001bf8/r3tmp/tmpU6vj00/pdisk_1.dat 2024-11-21T09:17:17.738009Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15028, node 3 2024-11-21T09:17:17.746428Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:17.746440Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:17.746442Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:17.746471Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11236 TClient is connected to server localhost:11236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:17.830127Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:17.830156Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:17.831220Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:17.831943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:17.840756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:17.848480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:17.861809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:17.874994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:17.969454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659190760287276:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:17.969477Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:17.973167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.978799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.985868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.992614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:17.999615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.006941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.014816Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659195055255065:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.014838Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659195055255070:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.014840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.015258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:18.020094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659195055255072:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:18.280656Z node 3 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2024-11-21T09:17:18.280728Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2024-11-21T09:17:18.280870Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659195055255390:2454] TxId: 281474976715671. Ctx: { TraceId: 01jd705yf268rax6v3ktnjg3na, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGNkOWQxNTktYjNkNzRlODAtZDQwYjhjYzYtMzExNGVkY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2024-11-21T09:17:18.281022Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGNkOWQxNTktYjNkNzRlODAtZDQwYjhjYzYtMzExNGVkY2M=, ActorId: [3:7439659195055255348:2454], ActorState: ExecuteState, TraceId: 01jd705yf268rax6v3ktnjg3na, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> TOlapReboots::DropMultipleTables [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.474589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.474614Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.476843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.476960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.477004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480084Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.505349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.505415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.505506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.505511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.506218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:16:41.506226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.506230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.506234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.506593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.507083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.507106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.507703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.508115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.508168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.508357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.508462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.508500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.509054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.509092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.509159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.509175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.509179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.509185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.509190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.509194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.509198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.509209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.509214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.509218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
eason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:17:18.797493Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2024-11-21T09:17:18.797986Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:18.797994Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:17:18.798050Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:17:18.798074Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:18.798077Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T09:17:18.798080Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2024-11-21T09:17:18.798174Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:18.798180Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId#1005:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T09:17:18.798189Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId#1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:17:18.798255Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.798266Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.798273Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:18.798279Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:17:18.798284Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:17:18.798335Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.798342Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.798345Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:18.798349Z node 83 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 11 2024-11-21T09:17:18.798352Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:17:18.798360Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:17:18.798703Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:18.798939Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.798953Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:17:18.809919Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T09:17:18.809943Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:18.809966Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T09:17:18.809973Z node 83 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T09:17:18.810049Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T09:17:18.810054Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:18.810065Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T09:17:18.810076Z node 83 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T09:17:18.810639Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:18.810977Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:18.811010Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:18.811018Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId#1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:18.811043Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:17:18.811060Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:17:18.811065Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:18.811073Z node 83 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T09:17:18.811106Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [83:358:2338] message: TxId: 1005 2024-11-21T09:17:18.811113Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:18.811118Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:17:18.811122Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:17:18.811150Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:17:18.811243Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:17:18.811251Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:17:18.811264Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:17:18.811835Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:17:18.811850Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:523:2501] 2024-11-21T09:17:18.811941Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T09:17:18.812037Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:18.812078Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 50us result status StatusPathDoesNotExist 2024-11-21T09:17:18.812115Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:17:18.812180Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T09:17:18.812194Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 15us result status StatusPathDoesNotExist 2024-11-21T09:17:18.812225Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2024-11-21T09:17:18.506618Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:17:18.510387Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpgyqpDa//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:18.511335Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpgyqpDa//pdisk0.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:17:18.511520Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:18.511946Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:17:18.511965Z node 1 :BS_NODE DEBUG: 
{NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:18.512071Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:17:18.512078Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:18.512157Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:17:18.512164Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:18.512281Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:17:18.512301Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 1040187392 2024-11-21T09:17:18.512444Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 31 PipeClientId# [1:36:2074] ControllerId# 72057594037932033 2024-11-21T09:17:18.512449Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:17:18.512507Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:17:18.512576Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:17:18.516345Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:17:18.516503Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:17:18.517648Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:18.517683Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 1040187392 2024-11-21T09:17:18.517818Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 31 
PipeClientId# [2:82:2059] ControllerId# 72057594037932033 2024-11-21T09:17:18.517823Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:17:18.517835Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:17:18.517873Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:17:18.521359Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:17:18.521566Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:17:18.521637Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:17:18.522438Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.522451Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:18.522510Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.522515Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:18.559732Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.559751Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:17:18.560177Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:17:18.560270Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.560275Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:17:18.560285Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:17:18.566413Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:17:18.566460Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:17:18.567510Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.568336Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:18.568421Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 
PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:18.568459Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:17:18.587446Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpgyqpDa//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:18.587611Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:17:18.587751Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:17:18.587758Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:18.587781Z node 1 :BS_NODE DEBUG: {NWDC18@dis ... 
1 PDiskId: 0 VSlotId: 1 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:17:18.928049Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.928057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } NodeId: 1 PDiskId: 0 VSlotId: 3 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:17:18.928070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } NodeId: 1 PDiskId: 0 VSlotId: 2 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:17:18.928088Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 0 } Success: true } 2024-11-21T09:17:18.928097Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 1 } 2024-11-21T09:17:18.928171Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 0 } } 2024-11-21T09:17:18.928271Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.933393Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.933434Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 1 } Success: true } 2024-11-21T09:17:18.933443Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 2 } 2024-11-21T09:17:18.933506Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.933513Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 1 } } 2024-11-21T09:17:18.933567Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.935555Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.935616Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 2 } Success: true } 2024-11-21T09:17:18.935629Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 3 } 2024-11-21T09:17:18.935702Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.935717Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 2 } } 2024-11-21T09:17:18.935788Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.938495Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.938541Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 3 } 
Success: true } 2024-11-21T09:17:18.938626Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:18.938644Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 3 } } 2024-11-21T09:17:18.957442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:17:18.957630Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpvVyVch/new_pdisk.dat" PDiskGuid: 4349374367343715763 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } } InstanceId: "a26b8ac-e2e71422-5fab3f18-262544c4" AvailDomain: 31 } 2024-11-21T09:17:18.957648Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpvVyVch/new_pdisk.dat" PDiskGuid: 4349374367343715763 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } } 2024-11-21T09:17:18.957673Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/jptk/001f7b/r3tmp/tmpvVyVch/new_pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:17:18.957848Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T09:17:19.048586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2024-11-21T09:17:19.079330Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "a26b8ac-e2e71422-5fab3f18-262544c4" AvailDomain: 31 } 2024-11-21T09:17:19.079387Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { 
NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2024-11-21T09:17:19.079454Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 4349374367343715763 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:19.079663Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 4349374367343715763 2024-11-21T09:17:19.220831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 4349374367343715763 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T09:17:19.221085Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:17:19.223567Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } } 2024-11-21T09:17:19.230459Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:19.232001Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 4349374367343715763 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:17:19.232144Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:19.232168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 4349374367343715763 Status: READY OnlyPhantomsRemain: false } } Sending TEvPut 2024-11-21T09:17:19.232247Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2024-11-21T09:17:19.232253Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 3187671040 Sending TEvGet Sending TEvVGet Sending TEvPut 2024-11-21T09:17:19.236368Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2024-11-21T09:17:19.236380Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 3187671040 2024-11-21T09:17:19.236398Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 3187671040 2024-11-21T09:17:19.236510Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 3187671040 2024-11-21T09:17:19.236593Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:20:2050] Cookie# 0 Recipient# [1:430:2376] RecipientRewrite# [1:389:2346] Request# {NodeID: 2 GroupIDs: 3187671040 } StopGivingGroups# false 
2024-11-21T09:17:19.236616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 3187671040 } 2024-11-21T09:17:19.263663Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2024-11-21T09:17:19.263714Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 4349374367343715763 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2024-11-21T09:17:19.264154Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 3187671040 Sending TEvGet |94.7%| [TA] $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup >> TPQTest::TestMessageNo >> TPQTabletTests::Multiple_PQTablets >> TPQTabletTests::Partition_Send_Predicate_With_False >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters >> TPartitionTests::CorrectRange_Commit >> TPQTabletTests::UpdateConfig_2 >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TPartitionTests::CommitOffsetRanges >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPQTabletTests::Multiple_PQTablets [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPartitionTests::TestNonConflictingActsBatchOk >> TPQTabletTests::ProposeTx_Missing_Operations >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTest::DirectReadBadSessionOrPipe >> TPQTest::TestPartitionTotalQuota |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] 
>> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPartitionTests::ChangeConfig >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPartitionTests::ChangeConfig [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 128 Create chunk: 0.000013s Read by index: 0.000008s Iterate: 0.000005s Size: 252 Create chunk: 0.000024s Read by index: 0.000008s Iterate: 0.000007s Size: 1887 Create chunk: 0.000018s Read by index: 0.000023s Iterate: 0.000008s Size: 1658 Create chunk: 0.000023s Read by index: 0.000022s Iterate: 0.000008s Size: 1889 Create chunk: 0.000018s Read by index: 0.000019s Iterate: 0.000008s Size: 1660 Create chunk: 0.000023s Read by index: 0.000022s Iterate: 0.000022s Size: 2407 Create chunk: 0.000032s Read by index: 0.000024s Iterate: 0.000013s Size: 2061 Create chunk: 0.000046s Read by index: 0.000039s Iterate: 0.000014s >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPartitionTests::ConflictingTxIsAborted >> TPQTabletTests::One_Tablet_For_All_Partitions >> TPartitionTests::ConflictingActsInSeveralBatches >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> KqpLimits::CancelAfterRwTx [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPartitionTests::ConflictingTxProceedAfterRollback >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestLowWatermark >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx [GOOD] Test command err: Trying to start YDB, gRPC: 22876, MsgBus: 29345 2024-11-21T09:16:56.379872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659097980081409:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:56.380173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001af9/r3tmp/tmpN1GgB0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22876, node 1 2024-11-21T09:16:56.449823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:56.449848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:56.449850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:56.449893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:56.450528Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29345 2024-11-21T09:16:56.481156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:56.481187Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:56.482206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:56.512838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.515727Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:56.520694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.583763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.605568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.615443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:56.702753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659097980082974:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.702795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.731291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.738155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.748064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.755004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.762605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.776445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:56.785884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659097980083477:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.785908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.785960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659097980083482:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:56.786590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:56.789257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659097980083484:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:56.973499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:57.003990Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715672;task_id=1;memory=8388608; 2024-11-21T09:16:57.004006Z node 1 :KQP_COMPUTE WARN: TxId: 281474976715672, task: 1. [Mem] memory 8388608 NOT granted 2024-11-21T09:17:01.380372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659097980081409:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.380455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:04.668135Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659102275051401:2488], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MThkNmVmMTQtYTc0NmMwODUtYTZiNmY3ODMtYTRmNDFmMmY=. CustomerSuppliedId : . TraceId : 01jd7059rvdfn11s6jrhpeqqcb. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-qcxhsi27zq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715672, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 8MiB, tx largest successful memory allocation: 8MiB, tx largest failed memory allocation: 8MiB, tx total execution units: 2, started at: 2024-11-21T09:16:57.002339Z } TxMaxAllocationBacktrace: 0. /-S/ydb/core/kqp/rm_service/kqp_rm_service.h:251: Allocated @ 0x20A61B64 1. /-S/ydb/core/kqp/rm_service/kqp_rm_service.cpp:335: AllocateResources @ 0x20A5F39B 2. /-S/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp:36: AllocateExtraQuota @ 0x21DB21CA 3. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor.h:296: ?? @ 0x21DB1FBE 4. /-S/ydb/library/yql/dq/actors/compute/dq_compute_memory_quota.h:144: RequestExtraMemory @ 0x21328A88 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x14513C26 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x14513C26 7. /-S/yql/essentials/minikql/aligned_page_pool.cpp:592: TryIncreaseLimit @ 0x14513C26 8. /-S/yql/essentials/minikql/aligned_page_pool.cpp:372: GetPage @ 0x14513C26 9. /-S/yql/essentials/minikql/mkql_alloc.cpp:193: MKQLAllocSlow @ 0x14524584 10. /-S/yql/essentials/minikql/mkql_alloc.h:343: MKQLAllocFastWithSize @ 0x209AF341 11. /-S/yql/essentials/minikql/mkql_alloc.h:462: AllocateOn, NKikimr::NMiniKQL::TMemoryUsageInfo *, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x209AF341 12. /-S/yql/essentials/minikql/computation/mkql_computation_node_holders.h:841: Create, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x209AF341 13. /-S/ydb/library/yql/dq/runtime/dq_input_producer.cpp:747: CreateInputUnionValue @ 0x209AEBFE 14. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:150: DqBuildInputValue @ 0x2099BD19 15. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:590: Prepare @ 0x2099E1DF 16. /-S/ydb/library/yql/dq/actors/compute/dq_sync_compute_actor_base.h:219: PrepareTaskRunner @ 0x21DB7C84 17. /-S/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp:80: DoBootstrap @ 0x21DB6B1A 18. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor_impl.h:149: Bootstrap @ 0x21DC886A 19. /-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x136B05E8 20. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() @ 0x136B4186 21. /-S/ydb/library/actors/core/executor_thread.cpp:479: ProcessExecutorPool @ 0x136B3C32 22. /-S/ydb/library/actors/core/executor_thread.cp ... 24], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.248992Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.249022Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659142055926159:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.249702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:06.253408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659142055926161:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:06.432695Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705jzz35aaejvr9de7n5km, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.435767Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k01dsfjej552q25mm47, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.439364Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k0410eqwn21jbmbxrpm, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.443968Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k08cftfgqw7hmjg808y, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.450141Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k0ca9dtb3atsne35c86, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.458773Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659142055926463:2454] TxId: 281474976715671. Ctx: { TraceId: 01jd705k0k2qdxket08aftmyfv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 6ms } {
: Error: Cancelling after 7ms during execution } ] 2024-11-21T09:17:06.458855Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926474:2471], TxId: 281474976715671, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. TraceId : 01jd705k0k2qdxket08aftmyfv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.458948Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926475:2472], TxId: 281474976715671, task: 5. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CustomerSuppliedId : . TraceId : 01jd705k0k2qdxket08aftmyfv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.458962Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926471:2468], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. TraceId : 01jd705k0k2qdxket08aftmyfv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.458983Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926476:2473], TxId: 281474976715671, task: 6. Ctx: { TraceId : 01jd705k0k2qdxket08aftmyfv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459039Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926472:2469], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jd705k0k2qdxket08aftmyfv. SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459085Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926473:2470], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. TraceId : 01jd705k0k2qdxket08aftmyfv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459162Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926477:2474], TxId: 281474976715671, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CustomerSuppliedId : . TraceId : 01jd705k0k2qdxket08aftmyfv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459204Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926478:2475], TxId: 281474976715671, task: 8. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CustomerSuppliedId : . TraceId : 01jd705k0k2qdxket08aftmyfv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459252Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659142055926479:2476], TxId: 281474976715671, task: 9. Ctx: { TraceId : 01jd705k0k2qdxket08aftmyfv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659142055926463:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:06.459546Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k0k2qdxket08aftmyfv, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.468099Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k0w7m86ga5nss8zck7s, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.477276Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k156arb1vq88r4cv533, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.496724Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k1pfpbr2s4w6jmv2r98, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.524373Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k2gcspaxv94jnez4r0x, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.551506Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k3927t184nahgtd3a3r, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.583774Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k476a4m7xg0jjwa6hwk, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.610918Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k508jaaxbh47se2jxwt, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.644219Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k5z01t3rnf7kpz3j8jt, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.679529Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659142055926914:2454] TxId: 281474976715687. Ctx: { TraceId: 01jd705k70939x711ajjqf2rp7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 22ms } {
: Error: Cancelling after 22ms during execution } ] 2024-11-21T09:17:06.679626Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705k70939x711ajjqf2rp7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:06.812651Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDJiYTM3ZmMtYmU0ZDczNjYtYTUyNjg1NDktMzFjODU2Yzk=, ActorId: [3:7439659142055926444:2454], ActorState: ExecuteState, TraceId: 01jd705kb1and1mff7tfpg6p29, Create QueryResponse for error on request, msg: 2024-11-21T09:17:10.897314Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439659137760956820:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:10.897339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:20.907445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:17:20.907481Z node 3 :IMPORT WARN: Table profiles were not loaded >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::Huge_ProposeTransacton >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPQTest::TestWaitInOwners >> TPartitionTests::GetPartitionWriteInfoError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2024-11-21T09:17:20.702435Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:17:20.703099Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpJGr2fX//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:20.703164Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpJGr2fX//pdisk0.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:17:20.703307Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 
PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:20.703523Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:17:20.703536Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:20.703632Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:17:20.703638Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:20.703714Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:17:20.703720Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:20.703807Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:17:20.703814Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 1040187392 2024-11-21T09:17:20.703947Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 31 PipeClientId# [1:36:2074] ControllerId# 72057594037932033 2024-11-21T09:17:20.703952Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:17:20.703976Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:17:20.704050Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:17:20.707584Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:17:20.707753Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:17:20.708371Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 
VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:20.708409Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 1040187392 2024-11-21T09:17:20.708550Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 31 PipeClientId# [2:82:2059] ControllerId# 72057594037932033 2024-11-21T09:17:20.708554Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:17:20.708566Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:17:20.708602Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:17:20.709541Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:17:20.709724Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:17:20.709791Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:17:20.709827Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.709831Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:20.709870Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.709873Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:20.741781Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.741802Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:17:20.742316Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:17:20.742400Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.742405Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:17:20.742415Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:17:20.742993Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:17:20.743011Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:17:20.743144Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.743199Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:17:20.743249Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 
VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:20.743276Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:17:20.751348Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpJGr2fX//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2024-11-21T09:17:20.751438Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:17:20.751554Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:17:20.751562Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:17:20.751582Z node 1 :BS_NODE DEBUG: {NWDC18@dis ... 
LLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 2 } } 2024-11-21T09:17:21.024017Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:21.025199Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:21.025227Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 3 } Success: true } 2024-11-21T09:17:21.025266Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:21.025279Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 0 VSlotId: 3 } } 2024-11-21T09:17:21.040804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:17:21.041019Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpFFIzrP/new_pdisk.dat" PDiskGuid: 1321631664736476827 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } } InstanceId: "8f33cc52-69ae12d2-c1516772-7d80d46f" AvailDomain: 31 } 2024-11-21T09:17:21.041042Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpFFIzrP/new_pdisk.dat" PDiskGuid: 1321631664736476827 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } } 2024-11-21T09:17:21.041065Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpFFIzrP/new_pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:17:21.041292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T09:17:21.043958Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } EncryptionMode: 1 } } Command { QueryBaseConfig { } } } 2024-11-21T09:17:21.207774Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "8f33cc52-69ae12d2-c1516772-7d80d46f" 
AvailDomain: 31 } 2024-11-21T09:17:21.207821Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2024-11-21T09:17:21.207872Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 1321631664736476827 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:17:21.208045Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 1321631664736476827 2024-11-21T09:17:21.360350Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 1321631664736476827 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T09:17:21.360947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:17:21.363735Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } } 2024-11-21T09:17:21.373814Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:21.375619Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 1321631664736476827 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:17:21.375830Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:17:21.375863Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 1321631664736476827 Status: READY OnlyPhantomsRemain: false } } Sending TEvPut 2024-11-21T09:17:21.375930Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2024-11-21T09:17:21.375937Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 3187671040 2024-11-21T09:17:21.376165Z node 1 :BS_NODE DEBUG: {NW68@node_warden_group.cpp:84} ConfigureLocalProxy propose GroupId# 3187671040 MainKey# {Id# '/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt' Version# 1} 2024-11-21T09:17:21.376250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK11@propose_group_key.cpp:119} 
Handle TEvControllerProposeGroupKey Request# {NodeId: 1 GroupId: 3187671040 LifeCyclePhase: 1 MainKeyId: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt" EncryptedGroupKey: "|V,\375L\'SYa-]\305\232\356\253\241\376\277\243KX\247I\242\007k\301\207\242`\0217f\017\352\223" MainKeyVersion: 1 GroupKeyNonce: 3187671040 } 2024-11-21T09:17:21.376259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK07@propose_group_key.cpp:82} TTxProposeGroupKey Execute 2024-11-21T09:17:21.403956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK08@propose_group_key.cpp:96} TTxProposeGroupKey Complete 2024-11-21T09:17:21.404036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:389:2346] Cookie# 0 Recipient# [1:389:2346] RecipientRewrite# [1:389:2346] Request# {NodeID: 1 GroupIDs: 3187671040 } StopGivingGroups# false 2024-11-21T09:17:21.404053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 3187671040 } 2024-11-21T09:17:21.404132Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt" EncryptedGroupKey: "|V,\375L\'SYa-]\305\232\356\253\241\376\277\243KX\247I\242\007k\301\207\242`\0217f\017\352\223" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2024-11-21T09:17:21.404153Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt" EncryptedGroupKey: "|V,\375L\'SYa-]\305\232\356\253\241\376\277\243KX\247I\242\007k\301\207\242`\0217f\017\352\223" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } Sending TEvGet Sending TEvVGet Sending TEvPut 2024-11-21T09:17:21.408168Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2024-11-21T09:17:21.408183Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 3187671040 2024-11-21T09:17:21.408189Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 3187671040 2024-11-21T09:17:21.408265Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 3187671040 2024-11-21T09:17:21.408366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:20:2050] Cookie# 0 Recipient# [1:430:2376] RecipientRewrite# [1:389:2346] Request# {NodeID: 2 GroupIDs: 3187671040 } StopGivingGroups# false 2024-11-21T09:17:21.408395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 3187671040 } 2024-11-21T09:17:21.408500Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { 
Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt" EncryptedGroupKey: "|V,\375L\'SYa-]\305\232\356\253\241\376\277\243KX\247I\242\007k\301\207\242`\0217f\017\352\223" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2024-11-21T09:17:21.408520Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 1321631664736476827 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/001f4c/r3tmp/tmpQHrbNQ//key.txt" EncryptedGroupKey: "|V,\375L\'SYa-]\305\232\356\253\241\376\277\243KX\247I\242\007k\301\207\242`\0217f\017\352\223" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } 2024-11-21T09:17:21.408950Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 3187671040 Sending TEvGet >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPQTest::TestUserInfoCompatibility >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TDataShardLocksTest::UseLocksCache [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] Test command err: 2024-11-21T09:17:20.975053Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T09:17:20.977565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T09:17:20.977668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T09:17:20.977682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T09:17:20.977687Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T09:17:20.977691Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T09:17:20.977704Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977710Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:20.977715Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:20.984137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.984161Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2024-11-21T09:17:20.984177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:17:20.986337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.987152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:20.987169Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.987375Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.987419Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T09:17:20.987466Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:20.987536Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2024-11-21T09:17:20.987711Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T09:17:20.987716Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2024-11-21T09:17:20.987721Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:20.987853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T09:17:20.987858Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T09:17:20.987893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.987911Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:20.987961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.988403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:20.988455Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.988461Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2024-11-21T09:17:20.988689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.988695Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:225:2226], now have 1 active actors on pipe 2024-11-21T09:17:20.988840Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T09:17:20.988848Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T09:17:20.988861Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T09:17:20.988866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:20.988873Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T09:17:20.988896Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.988909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.989565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:20.989574Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T09:17:20.989578Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T09:17:20.990291Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T09:17:20.990300Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T09:17:20.990304Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T09:17:20.990309Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T09:17:20.990331Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.990341Z node 1 
:PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.990922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:20.990931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2024-11-21T09:17:20.990935Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2024-11-21T09:17:20.990938Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T09:17:20.990943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T09:17:20.990957Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T09:17:20.990969Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T09:17:20.990979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 2 2024-11-21T09:17:20.991001Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2024-11-21T09:17:20.991006Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T09:17:20.991010Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T09:17:20.991013Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T09:17:20.991017Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T09:17:20.991020Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T09:17:20.991042Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 175 RawX2 ... 
56 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } 2024-11-21T09:17:21.946597Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.946730Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [5:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.946759Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T09:17:21.946819Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:21.946854Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:183:2196] 2024-11-21T09:17:21.946982Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T09:17:21.946987Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:183:2196] 2024-11-21T09:17:21.946995Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:21.947047Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2024-11-21T09:17:21.947051Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2024-11-21T09:17:21.947069Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.947087Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:21.947130Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.947558Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:21.947623Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:21.947628Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:190:2201], now have 1 active actors on pipe 2024-11-21T09:17:21.947867Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:21.947875Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:195:2205], now have 1 active actors on pipe 2024-11-21T09:17:21.947884Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T09:17:21.947887Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T09:17:21.947892Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] partition {0, {0, 3}, 100000} for WriteId {0, 3} 2024-11-21T09:17:21.947907Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvSubscribeLock for WriteId {0, 3} 2024-11-21T09:17:21.947938Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.948545Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:21.948611Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitConfigStep 2024-11-21T09:17:21.948665Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitInternalFieldsStep 2024-11-21T09:17:21.948692Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] bootstrapping {0, {0, 3}, 100000} [5:202:2211] 2024-11-21T09:17:21.948797Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.948942Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitMetaStep 2024-11-21T09:17:21.948963Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitInfoRangeStep 2024-11-21T09:17:21.949004Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDataRangeStep 2024-11-21T09:17:21.949020Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDataStep 2024-11-21T09:17:21.949024Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Completed. 
2024-11-21T09:17:21.949028Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {0, 3}, 100000} generation 2 [5:202:2211] 2024-11-21T09:17:21.949033Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] SYNC INIT topic topic partitition {0, {0, 3}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:21.949085Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit request with generation 5 2024-11-21T09:17:21.949089Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit with generation 5 done 2024-11-21T09:17:21.949106Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:21.949125Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:21.949157Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId {0, {0, 3}, 100000} 2024-11-21T09:17:21.949177Z node 5 :PERSQUEUE INFO: new Cookie -=[ 0wn3r ]=-|4241ad69-6e1c4297-fc715588-31979c49_0 generated for partition {0, {0, 3}, 100000} topic 'topic' owner -=[ 0wn3r ]=- Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.949529Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:21.949555Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {0, 3}, 100000} 2024-11-21T09:17:21.949569Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T09:17:21.949629Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:17:21.949633Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server disconnected, pipe [5:195:2205] destroyed 2024-11-21T09:17:21.949638Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::DropOwner. 
2024-11-21T09:17:21.949654Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:21.949658Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:218:2221], now have 1 active actors on pipe 2024-11-21T09:17:21.949690Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 2 Data { Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Immediate: false WriteId { NodeId: 0 KeyId: 3 } } 2024-11-21T09:17:21.949693Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PartitionId {0, {0, 3}, 100000} for WriteId {0, 3} 2024-11-21T09:17:21.949697Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 has WriteId {0, 3} 2024-11-21T09:17:21.949699Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T09:17:21.949707Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State UNKNOWN 2024-11-21T09:17:21.949710Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:21.949713Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARING 2024-11-21T09:17:21.949730Z node 5 :PERSQUEUE DEBUG: [TxId: 2] save tx TxId: 2 State: PREPARED MinStep: 233 MaxStep: 30233 Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } WriteId { NodeId: 0 KeyId: 3 } Partitions { } 2024-11-21T09:17:21.949740Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.950600Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:21.950615Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State PREPARING 2024-11-21T09:17:21.950618Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.951623Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:21.951634Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:233:2235], now have 1 active actors on pipe 2024-11-21T09:17:21.951647Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T09:17:21.951653Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T09:17:21.951659Z node 5 :PERSQUEUE WARN: tablet 72057594037927937 topic 'topic error: it is forbidden to write after a commit 2024-11-21T09:17:21.951670Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error it is forbidden to write after a commit 2024-11-21T09:17:21.951675Z node 5 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: it is forbidden to write after a commit |94.7%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::TestAccountReadQuota ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2024-11-21T09:17:18.744270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:18.744704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:18.744726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0030e8/r3tmp/tmp9yxdoV/pdisk_1.dat 2024-11-21T09:17:18.889573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.906233Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:18.948525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.948555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.959073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:19.063492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.084619Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:19.084789Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:19.084853Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T09:17:19.084886Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:19.092282Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:19.092313Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:19.092597Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:19.092681Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:640:2542] 2024-11-21T09:17:19.092718Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:19.093509Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:19.093564Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:19.093586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:17:19.093708Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:17:19.093723Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:17:19.093728Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2024-11-21T09:17:19.093800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:17:19.096305Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:17:19.096352Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:17:19.097437Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2024-11-21T09:17:19.097442Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:17:19.097446Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T09:17:19.097450Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:17:19.097539Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:17:19.097545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:17:19.097635Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:17:19.097649Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:17:19.097680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:17:19.097685Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:17:19.097691Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:17:19.097695Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:17:19.097698Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:17:19.097701Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:17:19.097705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:17:19.097810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:19.097815Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:19.097821Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:631:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T09:17:19.097840Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T09:17:19.097844Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:17:19.097868Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:17:19.097980Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:17:19.097998Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:17:19.098020Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:17:19.098030Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:17:19.098033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:17:19.098054Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:17:19.098057Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:17:19.098093Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:17:19.098095Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:17:19.098098Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:17:19.098100Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:17:19.098109Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:17:19.098111Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:17:19.098112Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:17:19.098114Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:17:19.098118Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:17:19.098146Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:19.098165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:17:19.098243Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T09:17:19.098249Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T09:17:19.098252Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T09:17:19.098274Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:17:19.098278Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T09:17:19.098286Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:17:19.098296Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2024-11-21T09:17:19.098298Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T09:17:19.098301Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T09:17:19.098303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T09:17:19.098413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:640:2542], Recipient [1:640:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:17:19.098418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:17:19.098466Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T09:17:19.098473Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T09:17:19.098492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:661:2555], Recipient [1:640:2542]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:19.098494Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:19.098498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:632:2537], serverId# [1:661:2555], sessionId# [0:0:0] 2024-11-21T09:17:19.098508Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T09:17:19.098511Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:17:19.098513Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2024-11-21T09:17:19.098516Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T09:17:19.098518Z node 1 :TX_DATASHARD TRACE: Uni ... 8 is DelayComplete 2024-11-21T09:17:22.116949Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:17:22.116953Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T09:17:22.116957Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2024-11-21T09:17:22.116970Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2024-11-21T09:17:22.116973Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T09:17:22.116977Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2024-11-21T09:17:22.127362Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:17:22.127395Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2024-11-21T09:17:22.127429Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:898:2691], exec latency: 9 ms, propose latency: 9 ms 2024-11-21T09:17:22.127447Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T09:17:22.127454Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:17:22.127461Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:17:22.127465Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2024-11-21T09:17:22.127472Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T09:17:22.127495Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2024-11-21T09:17:22.127671Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:938:2748], Recipient [2:639:2541]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T09:17:22.127681Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:17:22.127699Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2024-11-21T09:17:22.128136Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:639:2541]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T09:17:22.148387Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7062ag2sdkfej616p1wvqf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI1ZThhZGEtYTQwNjkwNzEtOTg2YTNmNGYtODc5YTc1NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:22.149232Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T09:17:22.149287Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T09:17:22.149310Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T09:17:22.149332Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T09:17:22.149337Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T09:17:22.149342Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T09:17:22.149346Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T09:17:22.149363Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T09:17:22.149369Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T09:17:22.149373Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T09:17:22.149377Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T09:17:22.149380Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T09:17:22.149398Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T09:17:22.149464Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T09:17:22.149473Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 Complete read# {[2:979:2774], 0} after executionsCount# 1 2024-11-21T09:17:22.149481Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T09:17:22.149498Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} finished in read 2024-11-21T09:17:22.149510Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T09:17:22.149513Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T09:17:22.149517Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T09:17:22.149521Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2024-11-21T09:17:22.149533Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T09:17:22.149535Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T09:17:22.149539Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T09:17:22.149544Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T09:17:22.149570Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T09:17:22.149818Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T09:17:22.149829Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T09:17:22.149889Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2024-11-21T09:17:22.149903Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T09:17:22.149910Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2024-11-21T09:17:22.149918Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T09:17:22.149922Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2024-11-21T09:17:22.149925Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T09:17:22.149929Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T09:17:22.149937Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2024-11-21T09:17:22.149943Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T09:17:22.149946Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T09:17:22.149949Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 
2024-11-21T09:17:22.149952Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2024-11-21T09:17:22.149963Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2024-11-21T09:17:22.149985Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T09:17:22.149989Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:979:2774], 1} after executionsCount# 1 2024-11-21T09:17:22.149994Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} sends rowCount# 2, bytes# 48, quota rows left# 997, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T09:17:22.150002Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} finished in read 2024-11-21T09:17:22.150008Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T09:17:22.150011Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T09:17:22.150014Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T09:17:22.150017Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2024-11-21T09:17:22.150023Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T09:17:22.150026Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T09:17:22.150029Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2024-11-21T09:17:22.150032Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T09:17:22.150044Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T09:17:22.150108Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2024-11-21T09:17:22.150112Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> TPartitionTests::TooManyImmediateTxs >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestTimeRetention |94.7%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPartitionTests::CorrectRange_Rollback >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPQTest::TestReadRuleVersions >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] >> TPartitionTests::CorrectRange_Rollback [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> TPQTabletTests::DropTablet_And_Tx >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPartitionTests::TestTxBatchInFederation >> TPQTabletTests::DropTablet >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::DropTablet_Before_Write ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.963612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.963634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.963639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.963644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.963651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.963655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.963663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.963756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.972873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.972891Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.975097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.975215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.975255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.978074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.978162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.978254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.978470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.979245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.979514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.979525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.979537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.979543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.979549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.979588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.980942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.997768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.997837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.997898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.997957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2024-11-21T09:16:41.997965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.998591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.998614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.998661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.998671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.998675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.998680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.999067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.999077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.999082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.999405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.999412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.999419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.999425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.000018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:42.000402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:42.000446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:42.000623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:42.000647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, 
at schemeshard: 72057594046678944 2024-11-21T09:16:42.000654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.000716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:42.000724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:42.000753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:42.000766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:42.001178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:42.001189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:42.001216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:42.001221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:42.001283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:42.001291Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:42.001300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:42.001305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.001310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:42.001315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:42.001320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:42.001324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:42.001333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:42.001338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:42.001342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
letionResult 2024-11-21T09:17:23.510174Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [118:414:2383] 2024-11-21T09:17:23.510331Z node 118 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:17:23.510345Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:17:23.510350Z node 118 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:17:23.510355Z node 118 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:17:23.510360Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:17:23.510444Z node 118 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:17:23.510453Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:17:23.510456Z node 118 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:17:23.510460Z node 118 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:17:23.510463Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:17:23.510471Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T09:17:23.510516Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2024-11-21T09:17:23.511030Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:17:23.511078Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:17:23.521946Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1002 MinStep: 0 Step: 5000004 2024-11-21T09:17:23.521969Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2024-11-21T09:17:23.521993Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1002 MinStep: 0 Step: 5000004 2024-11-21T09:17:23.522004Z node 118 :FLAT_TX_SCHEMESHARD INFO: Unexpected 
message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1002 MinStep: 0 Step: 5000004 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:17:23.522098Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 2024-11-21T09:17:23.522103Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2024-11-21T09:17:23.522113Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 2024-11-21T09:17:23.522613Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:17:23.522879Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:17:23.522905Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:17:23.522912Z node 118 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:17:23.522928Z node 118 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:17:23.522932Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:17:23.522939Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T09:17:23.522951Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [118:413:2382] message: TxId: 1002 2024-11-21T09:17:23.522958Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:17:23.522964Z node 118 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:17:23.522968Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:17:23.523013Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:17:23.523473Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:17:23.523489Z node 118 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [118:414:2383] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:17:23.523595Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:23.523656Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 67us result status StatusSuccess 2024-11-21T09:17:23.523781Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:23.523909Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:23.523936Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 29us result status StatusSuccess 2024-11-21T09:17:23.523983Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409547 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestPartitionedBlobFails >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] >> TPartitionTests::ConflictingCommitFails >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] Test command err: 2024-11-21T09:17:23.780041Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T09:17:23.781300Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T09:17:23.781373Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T09:17:23.781392Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T09:17:23.781396Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T09:17:23.781400Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T09:17:23.781408Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.781415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:23.781420Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.784074Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.784094Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T09:17:23.784109Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:17:23.785961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.786757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:23.786779Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.787112Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.787150Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.787160Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T09:17:23.787236Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:23.787309Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T09:17:23.787467Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T09:17:23.787472Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T09:17:23.787477Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:23.787553Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T09:17:23.787557Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T09:17:23.787582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.787599Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:23.787646Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T09:17:23.787660Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T09:17:23.787763Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 
2024-11-21T09:17:23.787769Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T09:17:23.787773Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:23.787837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T09:17:23.787841Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T09:17:23.787850Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.787862Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:23.787898Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:17:23.787926Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.788399Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:23.788419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:23.788462Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.788466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T09:17:23.788706Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.788712Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T09:17:23.788872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2024-11-21T09:17:23.788884Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T09:17:23.788899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T09:17:23.788902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:23.788906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T09:17:23.788924Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 
2024-11-21T09:17:23.788936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T09:17:23.788963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.789971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:23.789981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T09:17:23.789984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T09:17:23.790065Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: true } 2024-11-21T09:17:23.790072Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 invalid PQ tablet state (EDropped) 2024-11-21T09:17:23.790077Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2024-11-21T09:17:23.790507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T09:17:23.790518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T09:17:23.790522Z node 1 :PERSQUEUE DEBUG: ... tate: StateInit] bootstrapping 0 [5:308:2294] 2024-11-21T09:17:24.762098Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:24.762327Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitMetaStep 2024-11-21T09:17:24.762376Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInfoRangeStep 2024-11-21T09:17:24.762481Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataRangeStep 2024-11-21T09:17:24.762512Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataStep 2024-11-21T09:17:24.762518Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T09:17:24.762525Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2024-11-21T09:17:24.762533Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:24.762553Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:24.762595Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2024-11-21T09:17:24.762598Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T09:17:24.762601Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T09:17:24.762623Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T09:17:24.762634Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2024-11-21T09:17:24.762679Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:17:24.762702Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 0 2024-11-21T09:17:24.762706Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2024-11-21T09:17:24.762711Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T09:17:24.762714Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T09:17:24.762718Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T09:17:24.762722Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T09:17:24.762772Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { Partition { PartitionId: 0 } } 2024-11-21T09:17:24.762791Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:24.763654Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:24.763665Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 
2024-11-21T09:17:24.763669Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T09:17:24.763691Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2024-11-21T09:17:24.763695Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:24.764419Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:24.764430Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:332:2311], now have 1 active actors on pipe 2024-11-21T09:17:24.764461Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T09:17:24.764471Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T09:17:24.764478Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T09:17:24.764482Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2024-11-21T09:17:24.764485Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T09:17:24.764496Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T09:17:24.764499Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T09:17:24.764508Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2024-11-21T09:17:24.764518Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T09:17:24.764543Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2024-11-21T09:17:24.764547Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2024-11-21T09:17:24.764550Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2024-11-21T09:17:24.764589Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:24.765357Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:24.765397Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T09:17:24.765404Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T09:17:24.765407Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T09:17:24.765414Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T09:17:24.765458Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2024-11-21T09:17:24.765466Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:24.765473Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T09:17:24.765477Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T09:17:24.765514Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { Partition { PartitionId: 0 } } 2024-11-21T09:17:24.765545Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:24.766454Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:24.766467Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T09:17:24.766471Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T09:17:24.766479Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 
TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T09:17:24.766484Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T09:17:24.766489Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T09:17:24.766492Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T09:17:24.766495Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T09:17:24.766499Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2024-11-21T09:17:24.766504Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2024-11-21T09:17:24.766509Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2024-11-21T09:17:24.766518Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:24.768333Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:24.768347Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2024-11-21T09:17:24.768350Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:24.768353Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TFetchRequestTests::HappyWay >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TPartitionTests::DataTxCalcPredicateError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] Test command err: 2024-11-21T09:17:20.977685Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977714Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.982812Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.983108Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: 
"I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:21.220194Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.220239Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.224108Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.224403Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2024-11-21T09:17:21.476818Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.476839Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:21.711715Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# 
billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.711734Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:21.715419Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.715702Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait kv request Wait tx committed for tx 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2024-11-21T09:17:23.364239Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.364262Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:23.367324Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.367610Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 
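The PERSQUEUE trace above for TxId 67890 walks the tablet's distributed-transaction states in order: WAIT_RS while the tablet waits for TEvReadSet from its single sender, EXECUTING once all participant predicates are in, EXECUTED after every partition reports TEvTxCommitDone, then WAIT_RS_ACKS and finally DELETING once the read-set acks arrive and the tx key is removed. The sketch below is a deliberately simplified, self-contained model of that progression; it is not YDB's TPersQueue code, and the counters and names are taken only from the log records above.

// Minimal sketch of the observed state progression for TxId 67890.
// Assumption: one read-set sender and one partition, as in the log.
#include <cstdio>
#include <cstdint>

enum class ETxState { Prepared, Planned, WaitRs, Executing, Executed, WaitRsAcks, Deleting };

struct TTxSketch {
    uint64_t TxId = 0;
    ETxState State = ETxState::Prepared;
    int PredicatesExpected = 1;   // "Wait TEvTxProcessing::TEvReadSet from 1 senders"
    int PredicatesReceived = 0;   // "HaveParticipantsDecision 0" / "1"
    int PartitionsExpected = 1;   // "Received 0, Expected 1"
    int PartitionsDone = 0;

    void OnPlanStep()        { State = ETxState::WaitRs; }                                              // NewState WAIT_RS
    void OnReadSet()         { if (++PredicatesReceived == PredicatesExpected) State = ETxState::Executing; } // NewState EXECUTING
    void OnTxCommitDone()    { if (++PartitionsDone == PartitionsExpected) State = ETxState::Executed; }      // NewState EXECUTED
    void OnReadSetAcksSent() { State = ETxState::WaitRsAcks; }                                          // NewState WAIT_RS_ACKS
    void OnAllAcksReceived() { State = ETxState::Deleting; }                                            // NewState DELETING
};

int main() {
    TTxSketch tx;
    tx.TxId = 67890;
    tx.OnPlanStep();          // TEvTxProcessing::TEvPlanStep handled
    tx.OnReadSet();           // TEvReadSet from tablet 22222
    tx.OnTxCommitDone();      // TEvTxCommitDone from partition 0
    tx.OnReadSetAcksSent();   // SendEvReadSetAckToSenders
    tx.OnAllAcksReceived();   // HaveAllRecipientsReceive 1
    std::printf("tx %llu final state: %d\n",
                static_cast<unsigned long long>(tx.TxId),
                static_cast<int>(tx.State));
    return 0;
}

Under this reading, "Received 0, Expected 1" and "Received 1, Expected 1" in the log are the partition counters, and "HaveParticipantsDecision 0/1" is the predicate counter; the real tablet additionally persists the tx record through TEvKeyValue (WRITE_TX_COOKIE) at each transition.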
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait kv request Got batch complete: 1 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 3 Wait tx committed for tx 4 Create distr tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait batch completion Wait kv request Wait immediate tx complete 8 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 8 Wait immediate tx complete 9 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 9 >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2024-11-21T09:17:20.975439Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T09:17:20.977561Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T09:17:20.977659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T09:17:20.977675Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T09:17:20.977679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T09:17:20.977685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T09:17:20.977690Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977696Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:20.977699Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:20.984200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.984240Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2024-11-21T09:17:20.984252Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:17:20.985887Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.986730Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:20.986748Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.986982Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.987026Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T09:17:20.987091Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:20.987178Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2024-11-21T09:17:20.987333Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T09:17:20.987341Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2024-11-21T09:17:20.987347Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:20.987471Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T09:17:20.987478Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T09:17:20.987530Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.987554Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:20.987617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.988090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:20.988151Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.988157Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2024-11-21T09:17:20.988479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.988488Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:225:2226], now have 1 active actors on pipe 2024-11-21T09:17:20.988659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T09:17:20.988668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T09:17:20.988683Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T09:17:20.988686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:20.988693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T09:17:20.988717Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.988729Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.989450Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:20.989463Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T09:17:20.989466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T09:17:20.989524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T09:17:20.989530Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T09:17:20.989537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2024-11-21T09:17:20.989541Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:20.989544Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2024-11-21T09:17:20.989563Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 137 MaxStep: 30137 
PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.989574Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.990348Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:20.990360Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARING 2024-11-21T09:17:20.990363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.991931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67891 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T09:17:20.991944Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARED 2024-11-21T09:17:20.991948Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PLANNING 2024-11-21T09:17:20.991951Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67891 2024-11-21T09:17:20.991971Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PLANNED MinStep: 137 MaxStep: 30137 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.991979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T09:17:20.992023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 200 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.992683Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKey ... 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2205 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:26.658948Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1200 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:26.659244Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-66 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:26.659254Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1377 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:26.666480Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:26.685039Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:26.794942Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 32222, MsgBus: 23566 2024-11-21T09:17:18.491931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659194060214174:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:18.491997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e6a/r3tmp/tmpTlqDFM/pdisk_1.dat 2024-11-21T09:17:18.575262Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32222, node 1 2024-11-21T09:17:18.591654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.591685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.592700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:18.628960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:18.628973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:18.628975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:18.629018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:23566 TClient is connected to server localhost:23566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:18.711831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.721395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.787080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.797908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.805185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.834640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659194060215495:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.834673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.960634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.965845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.972971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.980084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.987353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.993798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.005371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198355183306:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198355183311:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.006804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:19.014301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659198355183313:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:19.227569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.305092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd705zhy4d76zsdc9bhxv41s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3OTA5ZGUtNDU5MGQxZDItODExMzExNTQtYTAyNDNhZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.305136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd705zhyfzamhk2x5hv8ag4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk0YzAwODEtODA0ODBiMjQtZjE0MzkxNmUtZGQ1OTk1MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.305202Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd705zhy7ra4ck35njf7a4vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA2MDIxY2UtODJiMTY3NjktNGMxNzczY2QtNzJjMDU0NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.309750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jd705zhy4152jjdfbv5dx1n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJiNjBhZjItMzg3MGI4MWMtNTc4NDU2ZDYtMTI4YmQxM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.310359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jd705zhy2k44s7ba36k0rry0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWM5MDAwNTgtZWU3ZWVkMWQtZmQ5ZTAwMzktNTEwOWRiY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.310487Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jd705zhycjgfq7nk03p1yt5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MwNmY3MmItMjA4MjA0ZWItMmU4MzExNTYtNTk0Mjc0NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.311805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jd705zhycgbbakd7fnqwrx1g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRkZDY2MDQtZGE4OGM1OGItOTU1ZTM1N2MtNTM0NzE3MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.311891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jd705zhy1t9wetfprdpez05y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUwNmRjNjQtNWM5YjQxZDgtMzczNjVhODEtZGU1NTYxNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.312136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jd705zhy4d76zsdc9bhxv41s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3OTA5ZGUtNDU5MGQxZDItODExMzExNTQtYTAyNDNhZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. 
Ctx: { TraceId: 01jd705zhyfzamhk2x5hv8ag4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk0YzAwODEtODA0ODBiMjQtZjE0MzkxNmUtZGQ1OTk1MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710685. Ctx: { TraceId: 01jd705zhy4152jjdfbv5dx1n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJiNjBhZjItMzg3MGI4MWMtNTc4NDU2ZDYtMTI4YmQxM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jd705zhy1z4abmg4dtzrpxkd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThhZWVjODEtNmNmN2IyNmYtZDYzZTg5MDItMzJkZDRhYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710687. Ctx: { TraceId: 01jd705zhycjgfq7nk03p1yt5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MwNmY3MmItMjA4MjA0ZWItMmU4MzExNTYtNTk0Mjc0NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jd705zhy7ra4ck35njf7a4vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA2MDI ... abase: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.734356Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721618. Ctx: { TraceId: 01jd7067sk05t26a2r93b1xejj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.734481Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721619. Ctx: { TraceId: 01jd7067skfk0dn2g84jwdvx7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIyYjJiMzctYTNiM2ZmZmItODEyZjQxZmEtZTJjNDU1MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.734629Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721620. Ctx: { TraceId: 01jd7067sk95p4txwmqdb4a9wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg1OWEyYmMtNTk1YTliMTUtZjRlMzEzOTUtOTU1ODExYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.735032Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721621. Ctx: { TraceId: 01jd7067skfk0dn2g84jwdvx7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIyYjJiMzctYTNiM2ZmZmItODEyZjQxZmEtZTJjNDU1MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.735160Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721622. Ctx: { TraceId: 01jd7067sk05t26a2r93b1xejj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.735209Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721623. 
Ctx: { TraceId: 01jd7067sn32r14rxdtyhdpaa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.735336Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721624. Ctx: { TraceId: 01jd7067sk95p4txwmqdb4a9wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg1OWEyYmMtNTk1YTliMTUtZjRlMzEzOTUtOTU1ODExYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.736648Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721625. Ctx: { TraceId: 01jd7067sq00qa9psmd4yp1brw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MWQwYTgtM2NkZWEyN2MtMWZkZWM0MjAtYjY0OGFkOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.737423Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721627. Ctx: { TraceId: 01jd7067sq00qa9psmd4yp1brw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MWQwYTgtM2NkZWEyN2MtMWZkZWM0MjAtYjY0OGFkOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.737893Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721630. Ctx: { TraceId: 01jd7067sq00qa9psmd4yp1brw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MWQwYTgtM2NkZWEyN2MtMWZkZWM0MjAtYjY0OGFkOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.738067Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721626. Ctx: { TraceId: 01jd7067srb1q0j39mfyvyjayg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.738072Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721628. Ctx: { TraceId: 01jd7067srakv40mzxj74zddrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGQ2M2U3ODktOGI2YjhkODItYmE2M2Q3OTktOTY0NmY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.738141Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721629. Ctx: { TraceId: 01jd7067ssbfv9xmgbgyk20b2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.739060Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721631. Ctx: { TraceId: 01jd7067srb1q0j39mfyvyjayg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.739329Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jd7067srakv40mzxj74zddrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGQ2M2U3ODktOGI2YjhkODItYmE2M2Q3OTktOTY0NmY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.739384Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. Ctx: { TraceId: 01jd7067srb1q0j39mfyvyjayg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:17:27.739798Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jd7067ssbfv9xmgbgyk20b2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.739886Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jd7067srb1q0j39mfyvyjayg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.739973Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jd7067srakv40mzxj74zddrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGQ2M2U3ODktOGI2YjhkODItYmE2M2Q3OTktOTY0NmY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.740478Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jd7067ssbfv9xmgbgyk20b2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.740731Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jd7067ssbfv9xmgbgyk20b2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.741237Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jd7067sv5tgva4k8adz4bzd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.741724Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jd7067sv5tgva4k8adz4bzd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.742059Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jd7067sv5tgva4k8adz4bzd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.742196Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jd7067sx0v1tyt7nnyzzyp2j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg1OWEyYmMtNTk1YTliMTUtZjRlMzEzOTUtOTU1ODExYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.742281Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jd7067sxcqcsqhk02e7vdty7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MWQwYTgtM2NkZWEyN2MtMWZkZWM0MjAtYjY0OGFkOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.742904Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. 
Ctx: { TraceId: 01jd7067sv5tgva4k8adz4bzd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWJlZTM1ZTMtZjFkOTRkMzMtNDY4NTNkMmItNTQxZGVlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.743400Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jd7067sxcqcsqhk02e7vdty7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4MWQwYTgtM2NkZWEyN2MtMWZkZWM0MjAtYjY0OGFkOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.743401Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jd7067sx0v1tyt7nnyzzyp2j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg1OWEyYmMtNTk1YTliMTUtZjRlMzEzOTUtOTU1ODExYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.743432Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jd7067sy12rtaj4bt0pm95m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:27.744138Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jd7067sx0v1tyt7nnyzzyp2j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg1OWEyYmMtNTk1YTliMTUtZjRlMzEzOTUtOTU1ODExYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.744328Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jd7067sy0wkn5jwn33cqj2ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.744400Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jd7067sy12rtaj4bt0pm95m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmQ5MDE4NDUtZTM5NzQyN2ItNGI2MmYwNzctZDdmYjJjNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2024-11-21T09:17:27.745529Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jd7067sy0wkn5jwn33cqj2ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:27.745876Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jd7067sy0wkn5jwn33cqj2ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkOTAyNi1iZjMxYzY0Zi01ZWUxNDItYTRkZTJiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> TPartitionTests::DataTxCalcPredicateError [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:28.007477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:28.007510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:28.007531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:28.007536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:28.008152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:28.008158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:28.008168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:28.008278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:28.018636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:28.018656Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:28.020912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:28.021437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:28.021474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:28.022660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:28.022817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:28.024226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.024337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:28.025114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.029017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:28.029034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.029073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:28.029083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:28.029089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:28.029107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.030768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:28.045245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:28.045990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.046050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:28.046113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:28.046120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.046863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.046886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:28.046933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.046941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:28.046952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:28.046956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:28.047361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.047373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:28.047378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:28.047754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.047765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.047770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.048452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.049015Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:28.049459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:28.050309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:28.051031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.051056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:28.051062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.051109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:28.051114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.051139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:28.051149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:28.051550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:28.051556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:28.051588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.051592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:28.051650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.051656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:28.051665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:28.051669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.051675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:28.051679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.051684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:28.051687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:28.051698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:28.051702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:28.051704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:28.051929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:28.051941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:28.051945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:28.051950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:28.051954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:28.051965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... serAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:17:28.149792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:17:28.149801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 
72057594046678944 2024-11-21T09:17:28.149816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:17:28.149854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.149981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:17:28.150497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:28.150675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:511:2448], Recipient [1:511:2448]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:28.150682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:28.150916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:28.150921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.151146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:28.151165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:28.151175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:28.151179Z 
node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:28.151638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:546:2448], Recipient [1:511:2448]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:28.151649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:28.151652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:511:2448] sender: [1:567:2058] recipient: [1:15:2062] 2024-11-21T09:17:28.192522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:566:2492], Recipient [1:511:2448]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:17:28.192538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:28.192559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:28.192610Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 41us result status StatusSuccess 2024-11-21T09:17:28.192717Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:28.192786Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:568:2493], Recipient [1:511:2448]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2024-11-21T09:17:28.192791Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T09:17:28.192797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2024-11-21T09:17:28.192803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T09:17:28.192812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2024-11-21T09:17:28.192844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:569:2494], Recipient [1:511:2448]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:17:28.192847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:28.192851Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:28.192864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 12us result status StatusSuccess 2024-11-21T09:17:28.192895Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::DataTxCalcPredicateOrder >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TPQRBDescribes::PartitionLocations [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> KqpScripting::StreamExecuteYqlScriptScanScalar >> 
TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2024-11-21T09:17:23.005763Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.005785Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:23.009955Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.010230Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Create distr tx with id = 0 and act no: 1 Got batch complete: 1001 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000000" Value: "\010\001\020\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\350\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\350\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\352\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\352\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2024-11-21T09:17:23.241783Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.241801Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.244513Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.244822Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [2:177:2192] 2024-11-21T09:17:23.244872Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2024-11-21T09:17:23.244884Z node 2 :PERSQUEUE INFO: new Cookie owner1|73ba571b-ba5f3947-2f5ba3f7-928f5c23_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.488900Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.488923Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:23.491814Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.491874Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T09:17:23.491913Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] 2024-11-21T09:17:23.492068Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.492092Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitMetaStep 2024-11-21T09:17:23.492103Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInfoRangeStep 2024-11-21T09:17:23.492159Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataRangeStep 2024-11-21T09:17:23.492179Z node 3 :PERSQUEUE DEBUG: Got data topic Root/PQ/rt3.dc1--account--topic partition 1 offset 0 count 10 size 0 so 0 eo 10 d0000000001_00000000000000000000_00000_0000000010_00000 2024-11-21T09:17:23.492185Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataStep 2024-11-21T09:17:23.492188Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Completed. 
2024-11-21T09:17:23.492191Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] 2024-11-21T09:17:23.492197Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 10 Head Offset 10 PartNo 0 PackedSize 0 count 0 nextOffset 10 batches 0 SYNC INIT DATA KEY: d0000000001_00000000000000000000_00000_0000000010_00000 size 0 2024-11-21T09:17:23.492254Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:23.492260Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 send read request for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T09:17:23.492281Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T09:17:23.492294Z node 3 :PERSQUEUE INFO: new Cookie owner1|7c7ab311-2d25abe1-77eec34a-dfb1cdaf_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:23.492338Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 offset 3 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 3 2024-11-21T09:17:23.492350Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 added 1 blobs, size 0 count 7 last offset 4 2024-11-21T09:17:23.492358Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Reading cookie 0. Send blob request. Send disk status response with cookie: 0 2024-11-21T09:17:23.492393Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:23.492409Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2024-11-21T09:17:23.492448Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2024-11-21T09:17:23.492463Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2024-11-21T09:17:23.492476Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2024-11-21T09:17:23.492501Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T09:17:23.492541Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 10 endOffset 10 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.523118Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T09:17:23.523170Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T09:17:23.523194Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.808122Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2024-11-21T09:17:23.828415Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2024-11-21T09:17:23.828462Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 2 2024-11-21T09:17:23.828504Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2024-11-21T09:17:23.828534Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2024-11-21T09:1 ... TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 17 Wait batch completion Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait kv request Wait tx committed for tx 0 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 >> TOlapReboots::DropMultipleStandaloneTables [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2024-11-21T09:17:21.009314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659205259522115:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:21.009390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:21.015282Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659204910110694:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:21.015434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:21.037476Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0013a8/r3tmp/tmpcmMJ2G/pdisk_1.dat 2024-11-21T09:17:21.038583Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:17:21.077654Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 65307, node 1 2024-11-21T09:17:21.109413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:21.109442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:21.111138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:21.122340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0013a8/r3tmp/yandexnZA5zS.tmp 2024-11-21T09:17:21.122352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0013a8/r3tmp/yandexnZA5zS.tmp 2024-11-21T09:17:21.122416Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0013a8/r3tmp/yandexnZA5zS.tmp 2024-11-21T09:17:21.122449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:21.139046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:21.139075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:21.140603Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:21.140898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:21.166893Z INFO: TTestServer started on Port 25215 GrpcPort 65307 TClient is connected to server localhost:25215 PQClient connected to localhost:65307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:21.195328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:21.209448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:17:21.308878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659204910111045:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:21.308878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659204910111056:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:21.308906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:21.310057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T09:17:21.313879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659204910111059:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T09:17:21.444351Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659204910111100:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:21.444489Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODZkNWIwMjYtODhlZjMxNmEtYmJhZmY2ZjItMjVlODc4Njg=, ActorId: [2:7439659204910111043:2280], ActorState: ExecuteState, TraceId: 01jd7061gwebrsr8tmph1kq2zp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:21.444383Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659205259522961:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:21.455176Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:21.455595Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmVkNWE1MzgtMTA0YmYwYmQtNWZlMDMxMjgtZThmZjBkYzc=, ActorId: [1:7439659205259522920:2300], ActorState: ExecuteState, TraceId: 01jd7061j046e2a9g0mb6kbsvw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:21.455796Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:21.466498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:21.525181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:21.594320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:17:21.671152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd7061t7afvwn7kegea0c7vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ1MDg4YmEtYmJjNWViYjQtNDFhY2U4ZGQtOGExMGE3ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439659205259523381:3030] 2024-11-21T09:17:26.009087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659205259522115:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:26.009118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:26.015738Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659204910110694:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:26.015768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 5 partitions CallPersQueueGRPC request to localhost:65307 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2024-11-21T09:17:27.646573Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:65307 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } ... count--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:29.162364Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-3 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:29.162382Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:29.162436Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:29.162928Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:29.163072Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:29.163077Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [3:338:2315], now have 1 active actors on pipe 2024-11-21T09:17:29.163098Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T09:17:29.163102Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T09:17:29.163106Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T09:17:29.163111Z node 3 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:29.295293Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2024-11-21T09:17:29.295361Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 1 2024-11-21T09:17:29.295370Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2024-11-21T09:17:29.295377Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 2/2 2024-11-21T09:17:29.295384Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T09:17:29.295389Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2024-11-21T09:17:29.295395Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T09:17:29.295513Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 12884904078 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2024-11-21T09:17:29.295541Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:29.296794Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:29.296812Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T09:17:29.296816Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T09:17:29.296831Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2024-11-21T09:17:29.296836Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T09:17:29.296854Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T09:17:29.296858Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2024-11-21T09:17:29.296884Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T09:17:29.296918Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2024-11-21T09:17:29.296922Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2024-11-21T09:17:29.296926Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2024-11-21T09:17:29.296969Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T09:17:29.297033Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:17:29.297102Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:29.297850Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:29.297887Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T09:17:29.297891Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T09:17:29.297893Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2024-11-21T09:17:29.298189Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:29.298205Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2024-11-21T09:17:29.298208Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T09:17:29.298210Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2024-11-21T09:17:29.298216Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T09:17:29.298269Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2024-11-21T09:17:29.298275Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:29.298282Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T09:17:29.298284Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T09:17:29.298317Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } 
BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 12884904078 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2024-11-21T09:17:29.298339Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:29.298881Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:29.298890Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T09:17:29.298898Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T09:17:29.298905Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T09:17:29.298910Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T09:17:29.298915Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T09:17:29.298918Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T09:17:29.298920Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T09:17:29.298924Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2024-11-21T09:17:29.298927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2024-11-21T09:17:29.298930Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2024-11-21T09:17:29.298935Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:29.300448Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T09:17:29.300459Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2024-11-21T09:17:29.300462Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:29.300464Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.474214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.474230Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.476149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.476268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.476301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:16:41.486618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.503237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.504369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.504423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.504429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.505241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.505251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.505255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.505810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.505844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.506298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.506315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, 
at tablet 72057594046678944 2024-11-21T09:16:41.506320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.506863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.507290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.507843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.508071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.508192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.508242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.508257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.508742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.508796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.508877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.508895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.508899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T09:16:41.508905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.508910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.508914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.508918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.508930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.508936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.508940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 107 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:17:29.507617Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:17:29.507730Z node 107 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:17:29.507739Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:17:29.507741Z node 107 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:17:29.507743Z node 107 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T09:17:29.507746Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:17:29.507751Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T09:17:29.507789Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T09:17:29.507893Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.507951Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:17:29.508883Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2024-11-21T09:17:29.508974Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:17:29.508994Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:17:29.509005Z 
node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:17:29.509586Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:17:29.509632Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:17:29.509905Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:17:29.509917Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:17:29.509933Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T09:17:29.520689Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 2024-11-21T09:17:29.520707Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T09:17:29.520724Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 2024-11-21T09:17:29.520733Z node 107 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:17:29.520802Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2024-11-21T09:17:29.520805Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T09:17:29.520811Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2024-11-21T09:17:29.520818Z node 107 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 130 2024-11-21T09:17:29.521429Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.521482Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.521499Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.521509Z node 107 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:29.521529Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:17:29.521549Z node 107 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:17:29.521553Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:17:29.521558Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T09:17:29.521570Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [107:356:2336] message: TxId: 1004 2024-11-21T09:17:29.521574Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:17:29.521579Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:17:29.521582Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:17:29.521609Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:17:29.521968Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:17:29.521991Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:17:29.521996Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [107:519:2486] 2024-11-21T09:17:29.522085Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:17:29.522136Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:29.522192Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:17:29.523080Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:17:29.523094Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:17:29.523109Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:29.523778Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:17:29.523788Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:17:29.523814Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2024-11-21T09:17:29.523886Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:29.523912Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 35us result status StatusPathDoesNotExist 2024-11-21T09:17:29.523938Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' 
(id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:17:29.523986Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:29.523994Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 8us result status StatusPathDoesNotExist 2024-11-21T09:17:29.524002Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 27522, MsgBus: 21889 2024-11-21T09:16:45.985206Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659051916494751:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.985232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004927/r3tmp/tmpj495u0/pdisk_1.dat 2024-11-21T09:16:46.035468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27522, node 1 2024-11-21T09:16:46.052276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:46.052288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:46.052303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:46.052340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21889 2024-11-21T09:16:46.085090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:46.085112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting TClient is connected to server localhost2024-11-21T09:16:46.086205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected :21889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:46.114285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.118144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.180087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.200266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.211059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:46.279994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056211463586:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.280023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.308742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.314782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.325351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.331524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.339019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.346594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:46.354557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056211464081:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.354581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.354584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659056211464086:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:46.355402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:46.359357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659056211464088:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:46.593633Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606632, txId: 281474976715672] shutting down 2024-11-21T09:16:46.638705Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606681, txId: 281474976715675] shutting down 2024-11-21T09:16:46.679761Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606723, txId: 281474976715678] shutting down 2024-11-21T09:16:46.720791Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606765, txId: 281474976715681] shutting down 2024-11-21T09:16:46.763147Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606807, txId: 281474976715684] shutting down 2024-11-21T09:16:46.798168Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606842, txId: 281474976715687] shutting down 2024-11-21T09:16:46.837232Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606877, txId: 281474976715690] shutting down 2024-11-21T09:16:46.875386Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606919, txId: 281474976715693] shutting down 2024-11-21T09:16:46.918221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606961, txId: 281474976715696] shutting down 2024-11-21T09:16:46.959194Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607003, txId: 281474976715699] shutting down 2024-11-21T09:16:46.997708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607038, txId: 281474976715702] shutting down 2024-11-21T09:16:47.031775Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607073, txId: 281474976715705] shutting down 2024-11-21T09:16:47.067854Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607108, txId: 281474976715708] shutting down 2024-11-21T09:16:47.104109Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607150, txId: 281474976715711] shutting down 2024-11-21T09:16:47.143422Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607185, txId: 281474976715714] shutting down 2024-11-21T09:16:47.178802Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607220, txId: 281474976715717] shutting down 2024-11-21T09:16:47.216504Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607262, txId: 281474976715720] shutting down 2024-11-21T09:16:47.255159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607297, txId: 281474976715723] shutting down 2024-11-21T09:16:47.294190Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607339, txId: 281474976715726] shutting down 2024-11-21T09:16:47.332843Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732180607374, txId: 281474976715729] shutting down 2024-11-21T09:16:47.371175Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607416, txId: 281474976715732] shutting down 2024-11-21T09:16:47.408297Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607451, txId: 281474976715735] shutting down 2024-11-21T09:16:47.449622Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607493, txId: 281474976715738] shutting down 2024-11-21T09:16:47.493637Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607535, txId: 281474976715741] shutting down 2024-11-21T09:16:47.558152Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607598, txId: 281474976715744] shutting down 2024-11-21T09:16:47.596929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607640, txId: 281474976715747] shutting down 2024-11-21T09:16:47.634932Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607675, txId: 281474976715750] shut ... T09:17:27.601361Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647638, txId: 281474976718594] shutting down 2024-11-21T09:17:27.645883Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647687, txId: 281474976718597] shutting down 2024-11-21T09:17:27.690975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647729, txId: 281474976718600] shutting down 2024-11-21T09:17:27.733028Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647771, txId: 281474976718603] shutting down 2024-11-21T09:17:27.774932Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647813, txId: 281474976718606] shutting down 2024-11-21T09:17:27.819184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647862, txId: 281474976718609] shutting down 2024-11-21T09:17:27.862185Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647904, txId: 281474976718612] shutting down 2024-11-21T09:17:27.905076Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647946, txId: 281474976718615] shutting down 2024-11-21T09:17:27.947639Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180647988, txId: 281474976718618] shutting down 2024-11-21T09:17:27.991755Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648030, txId: 281474976718621] shutting down 2024-11-21T09:17:28.035517Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648079, txId: 281474976718624] shutting down 2024-11-21T09:17:28.080942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648121, txId: 281474976718627] shutting down 2024-11-21T09:17:28.123070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648163, txId: 
281474976718630] shutting down 2024-11-21T09:17:28.164008Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648205, txId: 281474976718633] shutting down 2024-11-21T09:17:28.204058Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648247, txId: 281474976718636] shutting down 2024-11-21T09:17:28.245960Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648289, txId: 281474976718639] shutting down 2024-11-21T09:17:28.287977Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648331, txId: 281474976718642] shutting down 2024-11-21T09:17:28.327815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648366, txId: 281474976718645] shutting down 2024-11-21T09:17:28.376565Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648415, txId: 281474976718648] shutting down 2024-11-21T09:17:28.421969Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648464, txId: 281474976718651] shutting down 2024-11-21T09:17:28.471644Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648513, txId: 281474976718654] shutting down 2024-11-21T09:17:28.522465Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648562, txId: 281474976718657] shutting down 2024-11-21T09:17:28.567244Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648604, txId: 281474976718660] shutting down 2024-11-21T09:17:28.617360Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648653, txId: 281474976718663] shutting down 2024-11-21T09:17:28.668867Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648702, txId: 281474976718666] shutting down 2024-11-21T09:17:28.719440Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180648758, txId: 281474976718669] shutting down Trying to start YDB, gRPC: 11219, MsgBus: 18316 2024-11-21T09:17:29.124766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659239338401829:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:29.124995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004927/r3tmp/tmpV91lsL/pdisk_1.dat 2024-11-21T09:17:29.136811Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11219, node 2 2024-11-21T09:17:29.142523Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:29.142535Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:29.142537Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:29.142571Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18316 TClient is connected to server 
localhost:18316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:29.225219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:29.225252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:29.226375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:29.227071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:29.230886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:29.243105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:29.261744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:29.270844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:29.401936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659239338403364:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:29.401974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:29.406047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.412469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.424338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.431381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.445535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.452295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:29.461417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659239338403877:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:29.461426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659239338403882:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:29.461440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:29.462144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:29.465328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659239338403884:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:29.719185Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180649759, txId: 281474976715671] shutting down >> TPartitionTests::ConflictingCommitFails [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> test_public_api.py::TestBadSession::test_simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:28.672272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:28.672297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:28.672302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:28.672311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:28.672324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:28.672328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:28.672337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:28.672414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:28.683314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:28.683336Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:28.686376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:28.687152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:28.687195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:28.688688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:28.688867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:28.688955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.689047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:28.689968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:17:28.690239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:28.690249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.690285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:28.690292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:28.690298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:28.690310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.691559Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:28.708719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:28.708798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.708856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:28.708919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:28.708927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.709785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.709811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:28.709855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.709864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:28.709869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:28.709874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:28.710227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.710236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:28.710241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:28.710547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T09:17:28.710553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.710558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.710564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.711193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:28.711562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:28.711608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:28.711804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:28.711836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:28.711846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.711897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:28.711903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:28.711928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:28.711939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:28.712380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:28.712388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:28.712427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:28.712432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:28.712504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:28.712510Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T09:17:28.712520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:28.712524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.712530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:28.712535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:28.712539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:28.712543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:28.712553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:28.712558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:28.712562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:28.712852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:28.712864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:28.712868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:28.712873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:28.712878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:28.712891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:29.944431Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T09:17:29.944461Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2024-11-21T09:17:29.944529Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2024-11-21T09:17:29.944610Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:29.944630Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:29.944644Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:29.944738Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2024-11-21T09:17:29.944759Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2024-11-21T09:17:29.944795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2024-11-21T09:17:29.955970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:17:29.966263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:29.966322Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 79us result status StatusSuccess 2024-11-21T09:17:29.966468Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.435126Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T09:17:30.435164Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2024-11-21T09:17:30.435257Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2024-11-21T09:17:30.435350Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:30.435373Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:30.435390Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, 
Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T09:17:30.435538Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2024-11-21T09:17:30.435562Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T09:17:30.435617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2024-11-21T09:17:30.446754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:17:30.457073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.457173Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 117us result status StatusSuccess 2024-11-21T09:17:30.457333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.488098Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:30.488186Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 102us result status StatusSuccess 2024-11-21T09:17:30.488359Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:29.182553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:29.182576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.182581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:29.182587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:29.182597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:29.182600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:29.182606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.182688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:29.193092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:29.193114Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:29.196250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:29.197042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:29.197089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:29.198527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:29.198800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:29.198896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.198999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:29.200020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.200343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.200359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.200398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:29.200407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:29.200413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:29.200427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.201844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:29.215917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:29.215984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.216046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:29.216110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:29.216118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T09:17:29.217030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.217058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:29.217103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.217113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:29.217117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:29.217122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:29.217532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.217542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:29.217546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:29.218052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.218069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.218075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.218082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.218670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:29.219113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:29.219167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:29.219372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.219401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:29.219410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.219463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T09:17:29.219469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.219495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:29.219509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:29.219957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.219966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:29.220008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.220013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:29.220084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.220090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:29.220100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:29.220105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.220111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:29.220115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.220119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:29.220123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:29.220134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:29.220139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:29.220143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:29.220458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.220475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.220479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:29.220484Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:29.220489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T09:17:29.220504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... de: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.442085Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T09:17:30.442118Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2024-11-21T09:17:30.442338Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2024-11-21T09:17:30.442355Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T09:17:30.442395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2024-11-21T09:17:30.453784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:17:30.464064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.464160Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 99us result status StatusSuccess 2024-11-21T09:17:30.464305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T09:17:30.933127Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T09:17:30.933160Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2024-11-21T09:17:30.933429Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2024-11-21T09:17:30.933457Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T09:17:30.933518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2024-11-21T09:17:30.944889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:17:30.955204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.955309Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 118us result status StatusSuccess 2024-11-21T09:17:30.955482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.986308Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:30.986406Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 114us result status 
StatusSuccess 2024-11-21T09:17:30.986535Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.986671Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:569:2499] connected; active server actors: 1 2024-11-21T09:17:30.989989Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2024-11-21T09:17:30.990055Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2024-11-21T09:17:30.990483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.990526Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 53us result status StatusSuccess 2024-11-21T09:17:30.990643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: 
METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.990678Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2024-11-21T09:17:30.990718Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2024-11-21T09:17:31.011653Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:616:2534] connected; active server actors: 1 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2024-11-21T09:17:21.003266Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.003288Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.006692Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.006924Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: 
"\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:21.254176Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.254204Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.258744Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send change config 2024-11-21T09:17:21.259199Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:21.495404Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.495429Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:21.498482Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.498671Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for 
topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:176:2191] 2024-11-21T09:17:21.498707Z node 3 :PERSQUEUE INFO: new Cookie src1|4448ea07-55de6862-1dfc8d34-ef486307_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T09:17:21.498753Z node 3 :PERSQUEUE INFO: new Cookie src4|45400c26-c3adf8d5-9f80cfbe-d84a1e_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup 
to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_U ... 
NC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 2 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Wait for no tx committed Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with 
id = 4 and act no: 5 Created Tx with id 7 as act# 7 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait kv request Got batch complete: 1 Wait batch completion Wait kv request Create distr tx with id = 8 and act no: 9 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait kv request Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2024-11-21T09:17:30.612447Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:30.612468Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:30.615099Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:30.615449Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Wait batch completion Wait kv request Wait tx committed for tx 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait for no tx committed Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] Test command err: 2024-11-21T09:17:23.341035Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.341065Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.345079Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.345326Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.584130Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.584154Z node 2 
:PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.586727Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.586904Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:23.827609Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.827635Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:23.830964Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.831200Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait first predicate result Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait second predicate result Got batch complete: 1 Send disk status response with cookie: 0 2024-11-21T09:17:26.256504Z node 3 :PERSQUEUE INFO: new Cookie owner1|60ef839b-d31c9ec2-ae45ff9f-2c47a997_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Got batch complete: 1 Send disk status response with cookie: 0 Wait third predicate result Create distr tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSyste ... 
7594037927937] doesn't have tx writes info 2024-11-21T09:17:27.489380Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:27.489578Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:27.489635Z node 4 :PERSQUEUE INFO: new Cookie SourceId|68cf92fd-a5f968e0-f44a75e2-8a888f8_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId Got batch complete: 1 Wait write response Wait kv request Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait second predicate result Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T09:17:28.924660Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:28.924680Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:28.927943Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:28.928180Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Send disk status response with cookie: 0 Wait tx committed for tx 0 Wait tx committed for tx 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2024-11-21T09:17:20.977632Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977657Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.982832Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.983254Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2024-11-21T09:17:21.209964Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.209985Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:21.213418Z node 2 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.213724Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:176:2191] 2024-11-21T09:17:21.213771Z node 2 :PERSQUEUE INFO: new Cookie src3|cee62855-49a1bb31-4005f860-bde319ee_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src3 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T09:17:21.213855Z node 2 :PERSQUEUE INFO: new Cookie src4|5c3b156-c6da0d29-d95e9dfa-63935f24_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUN ... 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with 
cookie: 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 2024-11-21T09:17:29.124789Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:29.124813Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:29.128445Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:29.128742Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got batch complete: 1 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:31.591338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:31.591363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:31.591367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:31.591376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:31.591390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:31.591393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:31.591400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:31.591470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:31.599296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:31.599315Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:31.602081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:31.602793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T09:17:31.602843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:31.604410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:31.604612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:31.604684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:31.604751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:31.605728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:31.605977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:31.605984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:31.606015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:31.606020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:31.606024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:31.606035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.607312Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:31.618864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:31.618937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.618996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:31.619052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:31.619058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.619844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:31.619874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:31.619918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.619926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:31.619929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:31.619933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:31.620444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.620459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:31.620464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:31.620911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.620920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.620926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:31.620934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:31.621510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:31.621914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:31.621964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:31.622130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:31.622153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:31.622160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:31.622209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:31.622215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:31.622247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:31.622259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:31.622701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T09:17:31.622707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:31.622759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:31.622762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:31.622825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:31.622840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:31.622849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:31.622852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:31.622856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:31.622860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:31.622863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:31.622865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:31.622872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:31.622877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:31.622879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:31.623104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:31.623114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:31.623118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:31.623121Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:31.623124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:31.623134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:17:31.740771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T09:17:31.740774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:17:31.740838Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T09:17:31.740858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:856:2740], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T09:17:31.740862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T09:17:31.740867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2024-11-21T09:17:31.740884Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:215:2215], Recipient [1:282:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2024-11-21T09:17:31.740889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T09:17:31.740895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:17:31.740948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:215:2215], Recipient [1:282:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2024-11-21T09:17:31.740954Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T09:17:31.740960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:17:31.741306Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2024-11-21T09:17:31.741351Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2024-11-21T09:17:31.741469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:31.741978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:17:31.741984Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:31.742001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:17:31.742020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:17:31.742023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:31.742281Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:17:31.742298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:17:31.742319Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:856:2740], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:17:31.742324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:17:31.742328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T09:17:31.742400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T09:17:31.742406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T09:17:31.742461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:872:2756], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:31.742466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:31.742469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:17:31.742492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:495:2436], Recipient [1:282:2270]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2024-11-21T09:17:31.742497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:17:31.742506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T09:17:31.742523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:17:31.742527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:870:2754] 2024-11-21T09:17:31.742545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:872:2756], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:17:31.742551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:17:31.742555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2024-11-21T09:17:31.742636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:873:2757], Recipient [1:282:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:17:31.742641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:31.742651Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:31.742686Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 35us result status StatusSuccess 
2024-11-21T09:17:31.742789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:31.742875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:874:2758], Recipient [1:282:2270]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2024-11-21T09:17:31.742881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T09:17:31.742886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2024-11-21T09:17:31.742891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T09:17:31.742949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:875:2759], Recipient [1:282:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:17:31.742953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:31.742961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:31.743267Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 12us result status StatusSuccess 2024-11-21T09:17:31.743328Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 22528, MsgBus: 4856 2024-11-21T09:17:18.511314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659192355956455:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:18.511372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e46/r3tmp/tmpc9O8OP/pdisk_1.dat 2024-11-21T09:17:18.574872Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22528, node 1 2024-11-21T09:17:18.612752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.612784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.613778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:18.629170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:18.629182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:18.629183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:18.629209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4856 TClient is connected to server localhost:4856 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:18.711319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.720407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.787032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.796426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.805285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.834546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659192355957995:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.834570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.960566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.965682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.972476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.979967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.987226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.993548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.005293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659196650925805:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659196650925810:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.006725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:19.014224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659196650925812:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:19.236032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.304925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd705zhx4tetcmyjwjama4af, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiY2RkMjgtOWQ1MTJlZmYtOTQ5Y2U1YTgtM2E5NzFlMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.304926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd705zhx70f65475w7yfd75y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZkOWU0M2YtZTY1OWZkOWYtNjcxY2RiOWMtNTViYzY4MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.305430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd705zhxbn2gvhbjnqzbsxbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNmMWE4YTYtZDNhZjcwYmUtNDYwZWZmZjYtOTRlYTM0YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.307739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd705zhx98aa5ceqzhca3a7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBlMWQ0OTEtZGY5NGE2Yi0xZjVhN2VlOC1jM2IwODY0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.308636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd705zhx8gfe3sxk24j0xfjs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NjZjAyY2EtYjgyZGQ0NDctYTc0MDMzNDktMzI0OWU4OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.310549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd705zhxc2c9eqpgecsgc41f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ3M2YzNDktMmQzOGVhOTYtZjVlN2JmMDktNWE2MjQ3Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.310671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd705zhx7fw0pcpf0beg6v9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFlMjkzODUtYmMzNzRlNmYtMjgwNmI4MDEtZmU4NDEyYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.310751Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd705zhxa3jeyjcs99b4tx0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTdiYmFhM2YtN2UzMTI2YzItNGY4ODUwMTItM2Y4MmZkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.311667Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd705zhx9a6qvx237pz2x3sc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE3ZWI5Zi01YzE0YzM4OS1lMDk2NmQ0MS1jOTYxZmM3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.311821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. 
Ctx: { TraceId: 01jd705zhx981ftast3wvz2j5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjNjMDkzZDUtYWYzZWU0NzctNWUzZmU5NTItNGYyY2UzN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.312047Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd705zhx4tetcmyjwjama4af, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiY2RkMjgtOWQ1MTJlZmYtOTQ5Y2U1YTgtM2E5NzFlMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.312112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd705zhx70f65475w7yfd75y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZkOWU0M2YtZTY1OWZkOWYtNjcxY2RiOWMtNTViYzY4MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.312718Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd705zhx8gfe3sxk24j0xfjs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NjZjAyY2EtYjgyZGQ0NDctYTc0MDMzNDktMzI0OWU4OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd705zhxbn2gvhbjnqzbsxbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNmMWE4YT ... sion/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.703036Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721508. Ctx: { TraceId: 01jd706cmxd14n1d2pzb44sx9h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.703173Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721509. Ctx: { TraceId: 01jd706cmwf4m28psvbcxc9qt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.703398Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721507. Ctx: { TraceId: 01jd706cmx6bfd12yt09rqah9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGZjODAxMWQtM2NjMWQ1ZWEtZDY1ZWRjZTktNjJlYjQ3N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.703795Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jd706cmwf4m28psvbcxc9qt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.704296Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721511. Ctx: { TraceId: 01jd706cmwf4m28psvbcxc9qt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.704607Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721512. 
Ctx: { TraceId: 01jd706cmx6bfd12yt09rqah9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGZjODAxMWQtM2NjMWQ1ZWEtZDY1ZWRjZTktNjJlYjQ3N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.705083Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721513. Ctx: { TraceId: 01jd706cmx6bfd12yt09rqah9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGZjODAxMWQtM2NjMWQ1ZWEtZDY1ZWRjZTktNjJlYjQ3N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.705549Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721514. Ctx: { TraceId: 01jd706cn0737x9befv9wthq00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1NTM1MWUtNzUwMzM5YzYtZTIwMzljNDgtZjZlNWJiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.706190Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721517. Ctx: { TraceId: 01jd706cn0737x9befv9wthq00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1NTM1MWUtNzUwMzM5YzYtZTIwMzljNDgtZjZlNWJiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.706495Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721515. Ctx: { TraceId: 01jd706cn17za3gy73ztg90v34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.706498Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721516. Ctx: { TraceId: 01jd706cn1ajf89r5hxsdzf9cr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGJmYjY1NGEtMTJkODk3MTEtNDY5MGIxNzktNzI4MmQ1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.707165Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721518. Ctx: { TraceId: 01jd706cn17za3gy73ztg90v34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.707367Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721520. Ctx: { TraceId: 01jd706cn2ax2emypn5rkt2dqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGZjODAxMWQtM2NjMWQ1ZWEtZDY1ZWRjZTktNjJlYjQ3N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.707483Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721519. Ctx: { TraceId: 01jd706cn256nhcezrknecw5dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU0NWFiNTctZDUwZGY1ZWYtZTQ5Mjk5MDItOWVkYTZjNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708031Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721521. Ctx: { TraceId: 01jd706cn17za3gy73ztg90v34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708234Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721522. 
Ctx: { TraceId: 01jd706cn1ajf89r5hxsdzf9cr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGJmYjY1NGEtMTJkODk3MTEtNDY5MGIxNzktNzI4MmQ1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708523Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721523. Ctx: { TraceId: 01jd706cn2ax2emypn5rkt2dqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGZjODAxMWQtM2NjMWQ1ZWEtZDY1ZWRjZTktNjJlYjQ3N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708566Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721524. Ctx: { TraceId: 01jd706cn1ajf89r5hxsdzf9cr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGJmYjY1NGEtMTJkODk3MTEtNDY5MGIxNzktNzI4MmQ1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708589Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721525. Ctx: { TraceId: 01jd706cn256nhcezrknecw5dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU0NWFiNTctZDUwZGY1ZWYtZTQ5Mjk5MDItOWVkYTZjNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.708934Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721526. Ctx: { TraceId: 01jd706cn256nhcezrknecw5dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU0NWFiNTctZDUwZGY1ZWYtZTQ5Mjk5MDItOWVkYTZjNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.710550Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721527. Ctx: { TraceId: 01jd706cn50f2g5gakt3kvpjhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.711041Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721528. Ctx: { TraceId: 01jd706cn63h64b7p6jxnvprc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1NTM1MWUtNzUwMzM5YzYtZTIwMzljNDgtZjZlNWJiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.711237Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721529. Ctx: { TraceId: 01jd706cn50f2g5gakt3kvpjhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.711738Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721532. Ctx: { TraceId: 01jd706cn50f2g5gakt3kvpjhg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Njg5MDE3YjUtNDAyN2Q1ZDUtY2FhYTY3Y2ItYTFiMzdkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.711739Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721531. Ctx: { TraceId: 01jd706cn7eett43my1n86wc3p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGJmYjY1NGEtMTJkODk3MTEtNDY5MGIxNzktNzI4MmQ1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.712172Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721533. 
Ctx: { TraceId: 01jd706cn63h64b7p6jxnvprc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1NTM1MWUtNzUwMzM5YzYtZTIwMzljNDgtZjZlNWJiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:32.712515Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721530. Ctx: { TraceId: 01jd706cn6e1svbrnm665edb7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNjOTczOTEtMjYwOTcxMTktODRkMTMxZDEtY2FkYjA1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.712641Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721535. Ctx: { TraceId: 01jd706cn63h64b7p6jxnvprc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1NTM1MWUtNzUwMzM5YzYtZTIwMzljNDgtZjZlNWJiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.712647Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721536. Ctx: { TraceId: 01jd706cn7eett43my1n86wc3p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGJmYjY1NGEtMTJkODk3MTEtNDY5MGIxNzktNzI4MmQ1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.712756Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721534. Ctx: { TraceId: 01jd706cn7bm9z3pfrkzjk7882, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:32.713704Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721537. Ctx: { TraceId: 01jd706cn7bm9z3pfrkzjk7882, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.713749Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721538. Ctx: { TraceId: 01jd706cn6e1svbrnm665edb7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNjOTczOTEtMjYwOTcxMTktODRkMTMxZDEtY2FkYjA1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.713940Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721539. Ctx: { TraceId: 01jd706cn7bm9z3pfrkzjk7882, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:32.714056Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721540. Ctx: { TraceId: 01jd706cn6e1svbrnm665edb7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNjOTczOTEtMjYwOTcxMTktODRkMTMxZDEtY2FkYjA1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:32.714750Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721541. Ctx: { TraceId: 01jd706cn7bm9z3pfrkzjk7882, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWU1MTAzNzQtYTMxOTk1ZTctYjI3Y2RlNjAtYTY4NGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:29.930748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:29.930770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.930773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:29.930776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:29.930790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:29.930794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:29.930802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.930891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:29.938617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:29.938636Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:29.940957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:29.941666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:29.941710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:29.943184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T09:17:29.943353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:29.943448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.943564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:29.944773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.945162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.945179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.945225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:29.945234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:29.945240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:29.945258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.946712Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:29.961422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:29.961503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.961575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:29.961641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:29.961649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.962314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.962346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:29.962395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.962404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:29.962408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:29.962412Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:29.963301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.963316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:29.963321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:29.963813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.963823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.963829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.963837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.964421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:29.964804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:29.964879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:29.965098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.965125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:29.965135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.965191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:29.965197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.965228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:29.965241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:29.965703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.965710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:17:29.965757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.965762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:29.965837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.965843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:29.965855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:29.965859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.965865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:29.965870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.965875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:29.965879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:29.965889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:29.965895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:29.965899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:29.966193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.966206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.966210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:29.966215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:29.966219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:29.966233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
T WARN: Table profiles were not loaded 2024-11-21T09:17:35.265040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:35.265109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:17:35.265131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:17:35.265279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265314Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:17:35.265327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 
72057594046678944 2024-11-21T09:17:35.265518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:35.265591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:17:35.267136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:35.267587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1012:2959], Recipient [1:1012:2959]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:35.267598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:35.267801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:35.267808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:35.268026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1012:2959], Recipient [1:1012:2959]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:35.268032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:35.268085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:35.268092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:35.268097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:35.268100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:35.268468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1047:2959], Recipient [1:1012:2959]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:35.268477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:35.268479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1012:2959] sender: [1:1067:2058] recipient: [1:15:2062] 2024-11-21T09:17:35.289912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1066:3003], Recipient [1:1012:2959]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T09:17:35.289930Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:35.289954Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:17:35.290051Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 86us result status StatusSuccess 2024-11-21T09:17:35.290245Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82136 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestReadSessions >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TOlapReboots::CreateDropStore [GOOD] >> Initializer::Simple |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateDropStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.481966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.481988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.481993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.481998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.482011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.482015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.482025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.482097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.492834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.492852Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.495046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.495153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.495191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.498011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.498098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.498205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.498417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.499102Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.499392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.499402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.499415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.499421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.499426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.499479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.500805Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.515712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.515760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.515802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.515850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.515856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.516345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.516365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.516394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.516400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.516403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.516406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:16:41.516730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:16:41.516748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.516753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.517076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.517083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.517086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.517090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.517474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.517921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.517977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.518152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.518190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.518196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.518255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.518261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.518285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.518296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.518758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.518768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.518792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:16:41.518797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.518850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.518857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.518866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.518869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.518874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.518879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.518883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.518886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.518896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.518901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.518905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... e 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T09:17:36.845380Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:36.845399Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 807453853802 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:36.845406Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId#1003:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000004 2024-11-21T09:17:36.845426Z node 188 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2024-11-21T09:17:36.845444Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:36.845452Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:17:36.845811Z node 188 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:36.845818Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:36.845847Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, 
LocalPathId: 3] 2024-11-21T09:17:36.845867Z node 188 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:36.845871Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [188:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:17:36.845875Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [188:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:17:36.845945Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:36.845951Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#1003:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T09:17:36.845958Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:17:36.845999Z node 188 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.846010Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.846013Z node 188 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:17:36.846018Z node 188 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:17:36.846022Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:17:36.846070Z node 188 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.846077Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.846080Z node 188 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:17:36.846084Z node 188 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:17:36.846087Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:17:36.846093Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:17:36.846609Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:36.846630Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 
72075186233409546 2024-11-21T09:17:36.846656Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.846728Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T09:17:36.846734Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:36.846745Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T09:17:36.846751Z node 188 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 130 2024-11-21T09:17:36.846807Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:17:36.847070Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:36.847089Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:17:36.847095Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:36.847106Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:17:36.847132Z node 188 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:17:36.847135Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:36.847141Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:17:36.847144Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:17:36.847150Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:17:36.847154Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:17:36.847170Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:17:36.847505Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:17:36.847583Z node 188 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T09:17:36.848542Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:36.848693Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:17:36.849491Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:17:36.849503Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], 
at schemeshard: 72057594046678944 2024-11-21T09:17:36.849515Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:36.850274Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:17:36.850289Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:17:36.850340Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:17:36.850371Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:17:36.850376Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:17:36.850431Z node 188 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:17:36.850445Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:17:36.850448Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [188:444:2423] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:17:36.850493Z node 188 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:36.850530Z node 188 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 44us result status StatusPathDoesNotExist 2024-11-21T09:17:36.850553Z node 188 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:34.599817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T09:17:34.599838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:34.599844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:34.599848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:34.599860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:34.599864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:34.599871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:34.599939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:34.610836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:34.610856Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:34.613418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:34.613974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:34.614005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:34.615073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:34.615211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:34.615274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:34.615331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:34.616025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:34.616254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:34.616263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:34.616291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:34.616297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:34.616301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:34.616310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.617293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:34.628106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T09:17:34.628152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.628196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:34.628262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:34.628268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.628921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:34.628940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:34.628973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.628993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:34.628996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:34.629000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:34.629334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.629341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:34.629344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:34.629620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.629626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.629629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:34.629633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:34.630015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:34.630297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:34.630340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:34.630636Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:34.630684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:34.630696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:34.630764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:34.630773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:34.630806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:34.630824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:34.631419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:34.631430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:34.631480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:34.631487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:34.631565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:34.631571Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:34.631585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:34.631590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:34.631595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:34.631600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:34.631604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:34.631608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:34.631619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:34.631624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:34.631628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:34.631935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:34.631952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:34.631957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:34.631961Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:34.631966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:34.631979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... etID 72057594046678944 is [1:121:2147] sender: [1:751:2058] recipient: [1:750:2707] Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:753:2058] recipient: [1:750:2707] 2024-11-21T09:17:37.313252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:37.313287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:37.313294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:37.313301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:37.313308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:37.313313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:37.313324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:37.313404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:37.314347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:37.314716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:37.314760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:37.314810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:37.314816Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:37.314839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:37.314932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:37.314950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:17:37.314956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.314977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T09:17:37.315025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:17:37.315098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:17:37.315137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.315486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.317335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:37.317354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:37.317548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2024-11-21T09:17:37.317559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:37.317565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:37.317795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:805:2058] recipient: [1:15:2062] 2024-11-21T09:17:37.348909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:37.349022Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 144us result status StatusSuccess 2024-11-21T09:17:37.349141Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82136 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:37.349270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:37.349284Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 16us result status StatusSuccess 2024-11-21T09:17:37.349327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:30.895568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T09:17:30.895597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:30.895602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:30.895608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:30.895623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:30.895628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:30.895639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:30.895719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:30.908293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:30.908314Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:30.911475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:30.912389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:30.912435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:30.913896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:30.914102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:30.914202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:30.914280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:30.915211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:30.915512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:30.915521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:30.915573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:30.915580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:30.915587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:30.915602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.916894Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:30.931264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T09:17:30.931317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.931361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:30.931443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:30.931447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:30.932048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:30.932057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:30.932060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:30.932362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:30.932613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.932622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:30.932626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:30.933012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:30.933279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:17:30.933314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:30.933458Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:30.933478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:30.933485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:30.933518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:30.933522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:30.933543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:30.933551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:30.933850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:30.933854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:30.933884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:30.933887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:30.933943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:30.933947Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:30.933955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:30.933957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:30.933961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:30.933964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:30.933966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:30.933968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:30.933974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:30.933978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:17:30.933980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:30.934180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:30.934189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:30.934192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:30.934195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:30.934197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:30.934207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 944 2024-11-21T09:17:37.981114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:17:37.981231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T09:17:37.981264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:17:37.981317Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:17:37.981338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:37.981736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:17:37.983097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:37.983699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1133:3069], Recipient [1:1133:3069]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:37.983711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:37.983993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:37.984003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:37.984092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1133:3069], Recipient [1:1133:3069]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:37.984098Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:37.984179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:37.984188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:37.984194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:37.984198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:37.984503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1168:3069], Recipient [1:1133:3069]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:37.984513Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:37.984518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1133:3069] sender: [1:1188:2058] recipient: [1:15:2062] 2024-11-21T09:17:38.005789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1187:3113], Recipient [1:1133:3069]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T09:17:38.005809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:38.005836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:17:38.005941Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 87us result status StatusSuccess 2024-11-21T09:17:38.006137Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 3817 Memory: 132824 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TPartitionTests::GetUsedStorage |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> 
TPartitionTests::GetUsedStorage [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 24500, MsgBus: 1863 2024-11-21T09:17:18.492017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659194178727761:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:18.492052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e58/r3tmp/tmpaivwXG/pdisk_1.dat 2024-11-21T09:17:18.574724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24500, node 1 2024-11-21T09:17:18.591462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.591484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.592530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:18.628973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:18.628984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:18.628995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:18.629021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1863 TClient is connected to server localhost:1863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:18.712662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.720410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:18.787028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.798089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.805552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.834528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659194178729082:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.834547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.960554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.966264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.972445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.980013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.987239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.993681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.005400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198473696893:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198473696898:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.006804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:19.014424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659198473696900:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:19.227592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.349180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd705zkk692x5ped2cxrjck2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc4Yzk0MTMtMTBlZjVkMzUtMjA3MzViNjEtZjYxZjhlMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd705zkk8zh2jdt7mm73fv0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiNTBiZTQtYTc4NjU2MDYtNjgyNGQyMzctYTIzYTFhY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd705zkk014w1ek4y02wvskf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2ZGIxZDItMzIxNDJmNGYtZTdjZjIwMTEtOTY3YzMxZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd705zkk6p0bxyrnd68w7zpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRiMjI5ZDItY2FiN2Y2ZTQtYmJhNzQxYzQtMzFiNGQyN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd705zkk82swfd8vhxvsg1ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVhOWE0ZmEtZjZmNDA0NWQtOGViMzgwOWUtMzY1NWRmZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd705zkk8hf65qeyx3dgvhn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZiNWExMTctYmY1YTJiNzEtYTYwMTBlZWUtOTQ1Y2ZlNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd705zkm3y5r6a7tmm4m2ba0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ2OTYwZDktMTdiNTUxZGItY2JlM2U5OGUtNmY4YzM4Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.351215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd705zkkfw9vpteptgk8t3kv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY5M2ZmZWUtOGU0MTNlMTUtNWI2NzM0YjktM2VhYjY2NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.351339Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd705zkkbbfcmay82ha3w9s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY0MWQyNTEtNTNlZGI2MzItOGFhMDdmNWItZWU1MjcyNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.352646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. 
Ctx: { TraceId: 01jd705zkk692x5ped2cxrjck2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc4Yzk0MTMtMTBlZjVkMzUtMjA3MzViNjEtZjYxZjhlMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.354499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd705zkk82swfd8vhxvsg1ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVhOWE0ZmEtZjZmNDA0NWQtOGViMzgwOWUtMzY1NWRmZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.354553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd705zkk8hf65qeyx3dgvhn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZiNWExMTctYmY1YTJiNzEtYTYwMTBlZWUtOTQ1Y2ZlNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.354622Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd705zkk0j0hg9sj8ntr6pc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y5MDQ1ZTMtNmQ0NTlhYWQtOWIwY2QxMjMtMTZmODI1MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.354661Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd705zkk014w1ek4y02wvskf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2ZGIxZD ... jY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.463654Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jd706j8x349pp3tza77eswh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.463976Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jd706j8vev5c2qapjvvnjc92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.464380Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jd706j8vekmdsf6ptxfk7twp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTUwOGJmYjctZjg2OTAxNDMtOTZhZjI0OTQtYjkzODZkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.465416Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jd706j8x349pp3tza77eswh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.466273Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jd706j8x349pp3tza77eswh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.466923Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jd706j8z8rbe6d3ezggy5z45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:17:38.467835Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jd706j9286rmnhs7a2pnc1jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.468076Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jd706j8z8rbe6d3ezggy5z45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.469245Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jd706j9286rmnhs7a2pnc1jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.469424Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jd706j8z8rbe6d3ezggy5z45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.469716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jd706j93232wfgkqbznz2jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA5YzU0MDctYTg3YTNkZWQtYjU1MGVhODEtYjU5MGUwMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.469885Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jd706j9286rmnhs7a2pnc1jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.470746Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jd706j93232wfgkqbznz2jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA5YzU0MDctYTg3YTNkZWQtYjU1MGVhODEtYjU5MGUwMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.470776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jd706j9286rmnhs7a2pnc1jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.470832Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jd706j95766xywm5x1ehx6rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTUwOGJmYjctZjg2OTAxNDMtOTZhZjI0OTQtYjkzODZkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.471663Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jd706j93232wfgkqbznz2jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA5YzU0MDctYTg3YTNkZWQtYjU1MGVhODEtYjU5MGUwMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.472253Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. 
Ctx: { TraceId: 01jd706j95766xywm5x1ehx6rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTUwOGJmYjctZjg2OTAxNDMtOTZhZjI0OTQtYjkzODZkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.472263Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jd706j97811pdaxwqccht5tk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.472900Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jd706j93232wfgkqbznz2jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA5YzU0MDctYTg3YTNkZWQtYjU1MGVhODEtYjU5MGUwMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.473721Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jd706j95766xywm5x1ehx6rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTUwOGJmYjctZjg2OTAxNDMtOTZhZjI0OTQtYjkzODZkMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.473962Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jd706j97811pdaxwqccht5tk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.476640Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. Ctx: { TraceId: 01jd706j9a989dgrt9t8camd7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.477680Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jd706j9a989dgrt9t8camd7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.477931Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jd706j9c34yjee6t9xp2et6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.478019Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jd706j9be8zq3v0dbvkwkbnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.478722Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. Ctx: { TraceId: 01jd706j9c34yjee6t9xp2et6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWIzZTcyNDEtZTY1MTAyYi1jYWU2NWZiYS03YTljOTg2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.478750Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. 
Ctx: { TraceId: 01jd706j9a989dgrt9t8camd7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzUzNWFkZTYtNDFmMTgxNDAtM2M2ZDAyOTItYThkNDkxMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.478763Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721667. Ctx: { TraceId: 01jd706j9be8zq3v0dbvkwkbnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.479760Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721669. Ctx: { TraceId: 01jd706j9be8zq3v0dbvkwkbnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.480233Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721668. Ctx: { TraceId: 01jd706j9e3jfx011ftg67rjqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.480296Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721670. Ctx: { TraceId: 01jd706j9be8zq3v0dbvkwkbnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjcxZGQwNmItN2ViZTBjNjEtMzcxYjBhOTUtYmMzNmFjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.481047Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721671. Ctx: { TraceId: 01jd706j9e3jfx011ftg67rjqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.481339Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721672. Ctx: { TraceId: 01jd706j9e3jfx011ftg67rjqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.482051Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721673. Ctx: { TraceId: 01jd706j9e3jfx011ftg67rjqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTdiZWU0YjEtNzljYzZiZjQtMjM4MzUyMWEtOGYyZWQxZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2024-11-21T09:17:38.509697Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659256210669031:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:38.509730Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2024-11-21T09:17:21.003140Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.003169Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:17:21.005999Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Step TInitConfigStep 2024-11-21T09:17:21.006043Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Step TInitInternalFieldsStep 2024-11-21T09:17:21.006089Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [1:176:2191] 2024-11-21T09:17:21.006199Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Completed. 2024-11-21T09:17:21.006204Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:176:2191] 2024-11-21T09:17:21.006209Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.006268Z node 1 :PERSQUEUE INFO: new Cookie owner1|d4dd473-19b690d5-2aa1eb03-43c746da_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2024-11-21T09:17:21.006286Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:21.006345Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2024-11-21T09:17:21.006365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T09:17:21.006401Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:21.036882Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T09:17:21.036921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2024-11-21T09:17:21.036937Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 2024-11-21T09:17:21.301787Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2024-11-21T09:17:21.301837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2024-11-21T09:17:21.301878Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 Captured TEvents::TSystem::Wakeup to 
SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.342563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T09:17:21.342602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2024-11-21T09:17:21.342621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2024-11-21T09:17:21.535673Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2024-11-21T09:17:21.535713Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2024-11-21T09:17:21.535780Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.576394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T09:17:21.576429Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T09:17:21.576445Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 6, partNo: 0, Offset: 102 is stored on disk Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2024-11-21T09:17:21.800144Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 7 partNo 0 2024-11-21T09:17:21.800199Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob sourceId 'SourceId' seqNo 7 partNo 0 result is X0000100001_00000000000000000100_00000_0000000003_00000 size 312 2024-11-21T09:17:21.800230Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} old key X0000100001_00000000000000000100_00000_0000000003_00000 new key D0000100001_00000000000000000100_00000_0000000003_00000 size 312 WTime 3231 2024-11-21T09:17:21.800264Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 7 partNo 0 FormedBlobsCount 1 NewHead: Offset 110 PartNo 0 PackedSize 118 count 1 nextOffset 111 batches 1 2024-11-21T09:17:21.800333Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 110,1 HeadOffset 100 endOffset 103 curOffset 111 D0000100001_00000000000000000110_00000_0000000001_00000| size 104 WTime 3231 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.830849Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T09:17:21.830893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T09:17:21.830912Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 7, partNo: 0, Offset: 110 is stored on disk Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured T ... c71fa31-8184a0a7-6a470795-5a2ad084_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T09:17:35.389175Z node 4 :PERSQUEUE INFO: new Cookie src2|60e82b2e-1767d1c-1d34f629-7865e20c_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src2 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 3 and act no: 4 Create immediate tx with id = 5 and act no: 6 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 6 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Send disk status response with cookie: 0 Got batch complete: 2 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 5 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 5 Got batch complete: 10 Send disk status response with cookie: 0 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 3 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2024-11-21T09:17:39.439689Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:39.439713Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:39.443056Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 10518, MsgBus: 8508 2024-11-21T09:17:18.492184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659194584590459:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:18.492295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e50/r3tmp/tmp4m0aAe/pdisk_1.dat 2024-11-21T09:17:18.574677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10518, node 1 2024-11-21T09:17:18.591111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.591137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.592266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2024-11-21T09:17:18.628959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:18.628973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:18.628975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:18.629018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8508 TClient is connected to server localhost:8508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:18.711326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.720452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.787009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.797415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.805346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.834821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659194584591783:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.834838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.960545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.965565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.019621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.028507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.035955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.042909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.050908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198879559595:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.050929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.050964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659198879559600:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.051414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:19.056146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659198879559602:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:17:19.227528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.348661Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd705zkd7e9bmkg2mtb8h05j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE3ZDRjZmUtZGRjZjJkZC01OWJlZDE2NC1lZGMwNjBmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.348663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jd705zkd75pm3eb7werpw78p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJkNTQxOTEtNjcxMWNkODgtZmZlYzA4NGMtM2ZmNTFiNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.348807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd705zkc23qahhben8xd6sbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNlOTAxYTItYTljMzA2Y2MtMjdmYjU5ZTMtMTZkMWFlYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.348880Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jd705zkd5xpeecnxqx1pev1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNmNDE5MGQtNzkwZDlhZGUtMjI0NWY0YjctMjRhODVlMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.348955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd705zkce0rzaatq3jm82fzf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM1OGQ0NzQtYjc0ODE1MjYtNThiZmYyYTUtMTJkMDYyNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.349585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jd705zkdedb8bddd6885044c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk1ZGUzMmEtMTlkYWUxZGItMTQ4MTAzOTEtZGI2ZTQzYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.349698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jd705zkdaym4hnzn579cfh5d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUzYjk1MS0zNDNkNTBlYy01ZjcxYmMzZS00NjZlMGVmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.349745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jd705zkd6afrd7eawp67v0vk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFiNjEyOWQtMWE4N2JlMWYtNzhlNjY3MjktZWQ4ZTgyYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.349981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jd705zkdcdveqw5fdx8bbbb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMyODRlYmQtZjJjYWRhMDUtMzdiYjE0MWItNTMwYmRhY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. 
Ctx: { TraceId: 01jd705zkd2r1m8bs104334sk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhMTQ5NzUtNGYzZTI1ZDYtYzFiNmMyYjAtNzZkYzNiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.350656Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jd705zkd7e9bmkg2mtb8h05j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE3ZDRjZmUtZGRjZjJkZC01OWJlZDE2NC1lZGMwNjBmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.351676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jd705zkc23qahhben8xd6sbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNlOTAxYTItYTljMzA2Y2MtMjdmYjU5ZTMtMTZkMWFlYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.352514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jd705zkd5xpeecnxqx1pev1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNmNDE5MGQtNzkwZDlhZGUtMjI0NWY0YjctMjRhODVlMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.352975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710686. Ctx: { TraceId: 01jd705zkdedb8bddd6885044c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk1ZGUzMm ... /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.783972Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731499. Ctx: { TraceId: 01jd706jjwa1rzkbnjm6e0j11n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.784501Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731500. Ctx: { TraceId: 01jd706jjwfa9zd08z63fqgy7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.784737Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731501. Ctx: { TraceId: 01jd706jjw8pqdh4yrj8wtm99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGVmYjJhN2EtYTBlYjYwNGItYjIzNTgwZTctMzBjMjRmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.785013Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731496. Ctx: { TraceId: 01jd706jjwfwqp7jtht64a7vsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDk3NzZjM2QtODQwZDYyYzQtMmVmNjM2OS1mNzBlMjE2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.785520Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731502. Ctx: { TraceId: 01jd706jjwa1rzkbnjm6e0j11n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.785892Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731503. 
Ctx: { TraceId: 01jd706jjwfwqp7jtht64a7vsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDk3NzZjM2QtODQwZDYyYzQtMmVmNjM2OS1mNzBlMjE2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.785952Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731504. Ctx: { TraceId: 01jd706jjw8pqdh4yrj8wtm99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGVmYjJhN2EtYTBlYjYwNGItYjIzNTgwZTctMzBjMjRmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.786390Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731505. Ctx: { TraceId: 01jd706jjwfwqp7jtht64a7vsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDk3NzZjM2QtODQwZDYyYzQtMmVmNjM2OS1mNzBlMjE2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.786659Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731506. Ctx: { TraceId: 01jd706jjwa1rzkbnjm6e0j11n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.786926Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731508. Ctx: { TraceId: 01jd706jjwfwqp7jtht64a7vsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDk3NzZjM2QtODQwZDYyYzQtMmVmNjM2OS1mNzBlMjE2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.787012Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731507. Ctx: { TraceId: 01jd706jjw8pqdh4yrj8wtm99m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGVmYjJhN2EtYTBlYjYwNGItYjIzNTgwZTctMzBjMjRmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.789197Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731509. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.789676Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731510. Ctx: { TraceId: 01jd706jk42cftchv7s1jyn2mj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTg5MDBjNzAtMTI4OGEyZmQtNjhlYTdiYjctNDRlNTdhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.790166Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731512. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.790168Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731511. Ctx: { TraceId: 01jd706jk4bfm0pspqmtacx6yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.790481Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731513. 
Ctx: { TraceId: 01jd706jk42cftchv7s1jyn2mj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTg5MDBjNzAtMTI4OGEyZmQtNjhlYTdiYjctNDRlNTdhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.790886Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731514. Ctx: { TraceId: 01jd706jk42cftchv7s1jyn2mj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTg5MDBjNzAtMTI4OGEyZmQtNjhlYTdiYjctNDRlNTdhYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.791440Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731516. Ctx: { TraceId: 01jd706jk4bfm0pspqmtacx6yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.791477Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731518. Ctx: { TraceId: 01jd706jk79wv90vq5vqq5px3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGVmYjJhN2EtYTBlYjYwNGItYjIzNTgwZTctMzBjMjRmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.791836Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731515. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.791839Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731517. Ctx: { TraceId: 01jd706jk6c0t65w9trzvb7gyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU5NTliMC0yNDJlYTM5ZC1lM2NjM2I4MC05Mzc4OTM1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.792386Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731519. Ctx: { TraceId: 01jd706jk79wv90vq5vqq5px3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGVmYjJhN2EtYTBlYjYwNGItYjIzNTgwZTctMzBjMjRmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.792566Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731520. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.792661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731521. Ctx: { TraceId: 01jd706jk4bfm0pspqmtacx6yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.793087Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731522. Ctx: { TraceId: 01jd706jk6c0t65w9trzvb7gyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU5NTliMC0yNDJlYTM5ZC1lM2NjM2I4MC05Mzc4OTM1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.793519Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731523. 
Ctx: { TraceId: 01jd706jk4bfm0pspqmtacx6yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.793730Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731524. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.793786Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731525. Ctx: { TraceId: 01jd706jk6c0t65w9trzvb7gyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU5NTliMC0yNDJlYTM5ZC1lM2NjM2I4MC05Mzc4OTM1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.794012Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731526. Ctx: { TraceId: 01jd706jk4bfm0pspqmtacx6yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODk1ZDlmOS1kMWY4OThiZS05OTUwZWFjNS04Y2M3N2E0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.794247Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731527. Ctx: { TraceId: 01jd706jk6c0t65w9trzvb7gyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU5NTliMC0yNDJlYTM5ZC1lM2NjM2I4MC05Mzc4OTM1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.794398Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731528. Ctx: { TraceId: 01jd706jk4cd2djgw3qe0xdyz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGU0NTQ0N2QtZTE2OWZjYTEtNDMwMDVhOTAtMzFlNzdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.795697Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731529. Ctx: { TraceId: 01jd706jkabxkvngtmnamf35am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.796800Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731530. Ctx: { TraceId: 01jd706jkabxkvngtmnamf35am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T09:17:38.797519Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731531. Ctx: { TraceId: 01jd706jkabxkvngtmnamf35am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:38.797878Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731532. Ctx: { TraceId: 01jd706jkabxkvngtmnamf35am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzVlZTg1ZTMtNTg1MGZjNi1mMDFjYzZlMS04NDdmZTYx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> KqpScripting::StreamOperationTimeout [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 24439, MsgBus: 10020 2024-11-21T09:16:45.502031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659052381958443:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:45.502048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00495e/r3tmp/tmpRNu1DM/pdisk_1.dat 2024-11-21T09:16:45.554382Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24439, node 1 2024-11-21T09:16:45.588283Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:45.588294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:45.588296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:45.588336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:45.602106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:45.602137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:45.603125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10020 TClient is connected to server localhost:10020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:45.687418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.695982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:16:45.709217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:16:45.773318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.792537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.802606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:45.833248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052381959976:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.833273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.921429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.930425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.940799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.952549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.961318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.968433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:45.978100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052381960479:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.978129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.978169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659052381960484:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:45.978835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:45.981270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659052381960486:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:46.200504Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606226, txId: 281474976715672] shutting down 2024-11-21T09:16:46.239519Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606282, txId: 281474976715675] shutting down 2024-11-21T09:16:46.277477Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606317, txId: 281474976715678] shutting down 2024-11-21T09:16:46.317174Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606359, txId: 281474976715681] shutting down 2024-11-21T09:16:46.356544Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606401, txId: 281474976715684] shutting down 2024-11-21T09:16:46.397375Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606443, txId: 281474976715687] shutting down 2024-11-21T09:16:46.434693Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606478, txId: 281474976715690] shutting down 2024-11-21T09:16:46.471291Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606513, txId: 281474976715693] shutting down 2024-11-21T09:16:46.511808Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606555, txId: 281474976715696] shutting down 2024-11-21T09:16:46.551249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606597, txId: 281474976715699] shutting down 2024-11-21T09:16:46.589477Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606632, txId: 281474976715702] shutting down 2024-11-21T09:16:46.629454Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606674, txId: 281474976715705] shutting down 2024-11-21T09:16:46.674255Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606716, txId: 281474976715708] shutting down 2024-11-21T09:16:46.719831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606758, txId: 281474976715711] shutting down 2024-11-21T09:16:46.762378Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606807, txId: 281474976715714] shutting down 2024-11-21T09:16:46.800678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606842, txId: 281474976715717] shutting down 2024-11-21T09:16:46.843034Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606884, txId: 281474976715720] shutting down 2024-11-21T09:16:46.882583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606926, txId: 281474976715723] shutting down 2024-11-21T09:16:46.920465Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180606961, txId: 281474976715726] shutting down 2024-11-21T09:16:46.960225Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732180607003, txId: 281474976715729] shutting down 2024-11-21T09:16:46.995428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607038, txId: 281474976715732] shutting down 2024-11-21T09:16:47.047148Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607087, txId: 281474976715735] shutting down 2024-11-21T09:16:47.087138Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607129, txId: 281474976715738] shutting down 2024-11-21T09:16:47.126473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607171, txId: 281474976715741] shutting down 2024-11-21T09:16:47.163159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607206, txId: 281474976715744] shutting down 2024-11-21T09:16:47.201198Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180607241, txId: 281474976715747] shutting down 2024-11 ... T09:17:37.179382Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657179, txId: 281474976718591] shutting down 2024-11-21T09:17:37.262889Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657270, txId: 281474976718594] shutting down 2024-11-21T09:17:37.337104Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657354, txId: 281474976718597] shutting down 2024-11-21T09:17:37.414157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657424, txId: 281474976718600] shutting down 2024-11-21T09:17:37.481981Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657501, txId: 281474976718603] shutting down 2024-11-21T09:17:37.555520Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657571, txId: 281474976718606] shutting down 2024-11-21T09:17:37.645095Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657648, txId: 281474976718609] shutting down 2024-11-21T09:17:37.721789Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657732, txId: 281474976718612] shutting down 2024-11-21T09:17:37.797596Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657809, txId: 281474976718615] shutting down 2024-11-21T09:17:37.875315Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657886, txId: 281474976718618] shutting down 2024-11-21T09:17:37.956073Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180657970, txId: 281474976718621] shutting down 2024-11-21T09:17:38.043906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658040, txId: 281474976718624] shutting down 2024-11-21T09:17:38.115087Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658131, txId: 281474976718627] shutting down 2024-11-21T09:17:38.189570Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732180658201, txId: 281474976718630] shutting down 2024-11-21T09:17:38.270659Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658278, txId: 281474976718633] shutting down 2024-11-21T09:17:38.351979Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658355, txId: 281474976718636] shutting down 2024-11-21T09:17:38.427878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658439, txId: 281474976718639] shutting down 2024-11-21T09:17:38.506744Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658516, txId: 281474976718642] shutting down 2024-11-21T09:17:38.581640Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658593, txId: 281474976718645] shutting down 2024-11-21T09:17:38.659837Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658670, txId: 281474976718648] shutting down 2024-11-21T09:17:38.739899Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658747, txId: 281474976718651] shutting down 2024-11-21T09:17:38.812805Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658831, txId: 281474976718654] shutting down 2024-11-21T09:17:38.877882Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658894, txId: 281474976718657] shutting down 2024-11-21T09:17:38.948023Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180658964, txId: 281474976718660] shutting down 2024-11-21T09:17:39.034911Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180659041, txId: 281474976718663] shutting down 2024-11-21T09:17:39.126905Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180659125, txId: 281474976718666] shutting down 2024-11-21T09:17:39.197823Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180659216, txId: 281474976718669] shutting down Trying to start YDB, gRPC: 21006, MsgBus: 64724 2024-11-21T09:17:39.430884Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659283041506734:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:39.431010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00495e/r3tmp/tmpYYHBkk/pdisk_1.dat 2024-11-21T09:17:39.441716Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21006, node 2 2024-11-21T09:17:39.448943Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:39.448954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:39.448955Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:39.449002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64724 TClient is 
connected to server localhost:64724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:39.531058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:39.531094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:39.532086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:39.533401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:39.536174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:39.546876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:39.563682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:39.574893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:39.658061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659283041508280:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:39.658080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:39.663460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.718405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.728156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.736107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.749789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.756246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:39.771869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659283041508786:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:39.771900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:39.771905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659283041508791:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:39.772502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:39.776453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659283041508793:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TColumnShardTestSchema::InternalTTL_Types |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer >> TColumnShardTestSchema::Drop >> TColumnShardTestSchema::InternalTTL >> TColumnShardTestSchema::RebootOneTier >> TColumnShardTestSchema::CreateTable >> TColumnShardTestSchema::OneTier |94.9%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootHotTiersWithStat >> TColumnShardTestSchema::ExportWithLostAnswer >> TColumnShardTestSchema::RebootForgetAfterFail >> TColumnShardTestSchema::HotTiersRevCompression >> TColumnShardTestSchema::RebootHotTiersRevCompression >> TColumnShardTestSchema::RebootOneTierExternalTtl >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TColumnShardTestSchema::ForgetWithLostAnswer |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL >> TColumnShardTestSchema::CreateTable [GOOD] >> TColumnShardTestSchema::Drop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2024-11-21T09:17:40.886196Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.918285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.920124Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:40.920563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.920642Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.923668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.923707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.923738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.923751Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.923764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.923774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.923782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.923795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.923808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.923818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.923827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.923858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.927790Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.928662Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.928732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.928738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.928756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.929402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.929405Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.929411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.929421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.929424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.929443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.929457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.929460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.929465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.929474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.929476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.929482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.929486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.929489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.929494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.929499Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.929501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.929523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:40.929528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:40.929534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:40.929541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:40.929564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.929569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.929571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.929585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.929589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.929603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:40.929605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T09:17:40.929615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:40.929619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:40.929622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 
2024-11-21T09:17:40.929629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=23133759538800;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T09:17:41.789381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=23133713766496;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=23133759538800;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=tx_controller.cpp:371;event=start;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;; 2024-11-21T09:17:41.789388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=23133713766496;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=23133759538800;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T09:17:41.789401Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=23133713766496;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=23133759538800;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2024-11-21T09:17:41.789459Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2024-11-21T09:17:41.789468Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2024-11-21T09:17:41.789492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:17:41.789543Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2024-11-21T09:17:41.789556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:259;method=RegisterTable;path_id=19; 2024-11-21T09:17:41.789560Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=19; 2024-11-21T09:17:41.789633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T09:17:41.800325Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2024-11-21T09:17:41.800370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" 
Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T09:17:41.800585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T09:17:41.800610Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T09:17:41.811300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=23133713766816;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T09:17:41.811322Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=23133713766816;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: 
"resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T09:17:41.811518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T09:17:41.811544Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T09:17:41.822276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=23133713766816;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T09:17:41.822297Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=23133713766816;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T09:17:41.822502Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T09:17:41.822529Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=23133713766816;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T09:17:41.833203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=23133713766816;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T09:17:41.833221Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=23133713766816;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> TColumnShardTestSchema::RebootOneTier [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2024-11-21T09:17:40.898801Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.918266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.919985Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:40.920547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.920595Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.923665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.923710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.923740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.923753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.923763Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.923773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.923782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.923795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.923807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.923817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.923827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.923847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.927568Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.928499Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.928571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.928577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.928594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.929403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.929406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.929413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 
chunks found; 2024-11-21T09:17:40.929420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.929424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.929427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.929444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.929454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.929456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.929462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.929472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.929475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.929482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.929486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.929488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.929493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.929498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.929500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.929521Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=5; 2024-11-21T09:17:40.929527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:40.929532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 2024-11-21T09:17:40.929539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:40.929552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.929556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.929559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.929572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.929576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.929590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:40.929593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T09:17:40.929604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:40.929608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:40.929611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T09:17:40.929618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
n_bytes=1822956;portion_raw_bytes=2640160; 2024-11-21T09:17:41.941200Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ERASE;path_id=1;portion=4;before_size=3649636;after_size=1826680;before_rows=53432;after_rows=26764; 2024-11-21T09:17:41.941203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2024-11-21T09:17:41.941216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2024-11-21T09:17:41.941220Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ERASE;path_id=1;portion=5;before_size=1826680;after_size=10308;before_rows=26764;after_rows=100; 2024-11-21T09:17:41.941222Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2024-11-21T09:17:41.941232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=10280;portion_bytes=10308;portion_raw_bytes=8378; 2024-11-21T09:17:41.941236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ERASE;path_id=1;portion=6;before_size=10308;after_size=0;before_rows=100;after_rows=0; 2024-11-21T09:17:41.941238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=10280;portion_bytes=10308;portion_raw_bytes=8378; 2024-11-21T09:17:41.941248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;fline=manager.cpp:14;event=unlock;process_id=CS::CLEANUP::PORTIONS::75ea432a-a7e911ef-b13127d2-2591846d; 2024-11-21T09:17:41.941258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:17:41.941266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:17:41.941273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T09:17:41.941280Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2024-11-21T09:17:41.941283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:17:41.941289Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:17:41.941295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:17:41.941299Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:17:41.941311Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=75ea432a-a7e911ef-b13127d2-2591846d;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:17:41.941328Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:1:1347832:0] 2024-11-21T09:17:41.941333Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:8760:0] 2024-11-21T09:17:41.941336Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:1351840:0] 2024-11-21T09:17:41.941339Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:1286032:0] 2024-11-21T09:17:41.941345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:17:41.941355Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:17:41.941719Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "saved_at" } } } ; 2024-11-21T09:17:41.941731Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[saved_at;];};]; 2024-11-21T09:17:41.942839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:309:2323];trace_detailed=; 2024-11-21T09:17:41.943899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=9;column_names=saved_at;);; 2024-11-21T09:17:41.943923Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2024-11-21T09:17:41.943948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 
2024-11-21T09:17:41.943955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:41.944355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:41.944373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:41.944381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:41.944385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:309:2323] finished for tablet 9437184 2024-11-21T09:17:41.944393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:309:2323] send ScanData to [1:299:2313] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:41.944443Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:309:2323] and sent to [1:299:2313] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732180661942826,"name":"_full_task","f":1732180661942826,"d_finished":0,"c":0,"l":1732180661944398,"d":1572},"events":[{"name":"bootstrap","f":1732180661943784,"d_finished":174,"c":1,"l":1732180661943958,"d":174},{"a":1732180661944349,"name":"ack","f":1732180661944349,"d_finished":0,"c":0,"l":1732180661944398,"d":49},{"a":1732180661944338,"name":"processing","f":1732180661944338,"d_finished":0,"c":0,"l":1732180661944398,"d":60},{"name":"ProduceResults","f":1732180661943941,"d_finished":36,"c":2,"l":1732180661944383,"d":36},{"a":1732180661944384,"name":"Finish","f":1732180661944384,"d_finished":0,"c":0,"l":1732180661944398,"d":14}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:17:41.944454Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:41.941747Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:17:41.944458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:41.944462Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:17:41.944467Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2323];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::ColdCompactionSmoke ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD] Test command err: 2024-11-21T09:17:40.928260Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.940064Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:40.941972Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:40.942046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.943767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:40.943783Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.943820Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.944296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.944324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.944345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.944360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.944375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.944390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.944400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.944414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.944429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.944440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.944459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.944469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.947506Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:40.947551Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:40.947563Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.947571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:40.948317Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.948364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.948369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.948385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.948417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.948425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.948428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.948433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:40.948440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.948444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.948447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.948460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.948465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.948469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.948471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.948477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.948480Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.948485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.948489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.948496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.948500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.948502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.948507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.948510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.948513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.948534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:40.948539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:40.948545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:40.948552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:40.948564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.948569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.948571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.948584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.948587Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.948590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.948597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.948602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... it exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:42.246192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:42.246201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:42.246213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=40002; 2024-11-21T09:17:42.246220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=320016;num_rows=40002;batch_columns=timestamp; 2024-11-21T09:17:42.246244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 320016 rows: 40002 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T09:17:42.246251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:42.246262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:42.246355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:42.246370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:42.246382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=13330; 2024-11-21T09:17:42.246390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=106640;num_rows=13330;batch_columns=timestamp; 2024-11-21T09:17:42.246415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 106640 rows: 13330 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T09:17:42.246424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:42.246460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:42.246467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] finished for tablet 9437184 2024-11-21T09:17:42.246475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:546:2537] send ScanData to [1:545:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:42.246511Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:546:2537] and sent to [1:545:2536] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1732180662242618,"name":"_full_task","f":1732180662242618,"d_finished":0,"c":0,"l":1732180662246481,"d":3863},"events":[{"name":"bootstrap","f":1732180662242645,"d_finished":613,"c":1,"l":1732180662243258,"d":613},{"a":1732180662246451,"name":"ack","f":1732180662246053,"d_finished":251,"c":3,"l":1732180662246442,"d":281},{"a":1732180662246449,"name":"processing","f":1732180662243370,"d_finished":970,"c":24,"l":1732180662246442,"d":1002},{"name":"ProduceResults","f":1732180662242932,"d_finished":601,"c":29,"l":1732180662246465,"d":601},{"a":1732180662246466,"name":"Finish","f":1732180662246466,"d_finished":0,"c":0,"l":1732180662246481,"d":15},{"name":"task_result","f":1732180662243374,"d_finished":674,"c":21,"l":1732180662246034,"d":674}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T09:17:42.246519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:42.242422Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4710140;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4710140;selected_rows=0; 2024-11-21T09:17:42.246522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:42.246528Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:17:42.246532Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:546:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TColumnShardTestSchema::OneTier [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail >> TColumnShardTestSchema::InternalTTL [GOOD] >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] >> TColumnShardTestSchema::RebootInternalTTL [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2024-11-21T09:17:41.763576Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.776253Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:41.778041Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 
2024-11-21T09:17:41.778119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.779822Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.779839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.779874Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.780388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.780417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.780440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.780454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.780465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.780477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.780489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.780501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.780513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.780524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.780535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.780556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.783555Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:41.783610Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:41.783621Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.783628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.784501Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.784556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.784562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.784583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.784618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.784627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.784631Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.784638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.784645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.784649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.784652Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.784664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.784669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.784673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.784676Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.784682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.784687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.784692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.784696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.784704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.784709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.784711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.784717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.784721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.784723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.784747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:41.784754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:41.784759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:41.784766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:41.784781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:41.784786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:41.784789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:41.784805Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:41.784810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.784813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.784821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:41.784826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... mit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:43.075345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.075355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.075364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=40002; 2024-11-21T09:17:43.075369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=320016;num_rows=40002;batch_columns=timestamp; 2024-11-21T09:17:43.075389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 320016 rows: 40002 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T09:17:43.075396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.075410Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:43.075479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.075486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.075493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=13330; 2024-11-21T09:17:43.075497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=106640;num_rows=13330;batch_columns=timestamp; 2024-11-21T09:17:43.075510Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 106640 rows: 13330 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T09:17:43.075516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.075547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.075558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] finished for tablet 9437184 2024-11-21T09:17:43.075569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:43.075620Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:555:2546] and sent to [1:554:2545] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1732180663072157,"name":"_full_task","f":1732180663072157,"d_finished":0,"c":0,"l":1732180663075576,"d":3419},"events":[{"name":"bootstrap","f":1732180663072187,"d_finished":535,"c":1,"l":1732180663072722,"d":535},{"a":1732180663075538,"name":"ack","f":1732180663075200,"d_finished":188,"c":3,"l":1732180663075532,"d":226},{"a":1732180663075537,"name":"processing","f":1732180663072809,"d_finished":822,"c":24,"l":1732180663075533,"d":861},{"name":"ProduceResults","f":1732180663072471,"d_finished":486,"c":29,"l":1732180663075556,"d":486},{"a":1732180663075556,"name":"Finish","f":1732180663075556,"d_finished":0,"c":0,"l":1732180663075576,"d":20},{"name":"task_result","f":1732180663072812,"d_finished":597,"c":21,"l":1732180663075185,"d":597}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T09:17:43.075631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:43.071936Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4710140;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4710140;selected_rows=0; 2024-11-21T09:17:43.075635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:43.075645Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:17:43.075678Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTier [GOOD] Test command err: 2024-11-21T09:17:40.888920Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.913380Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:40.917092Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:40.918252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.919872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 
2024-11-21T09:17:40.920541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.920622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.923660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.923698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.923744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.923762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.923772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.923781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.923790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.923802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.923813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.923822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.923844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.923854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.927761Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:40.927806Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:40.927816Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.927821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:40.928642Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.928688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.928693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.928707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.929476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.929484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.929499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.929515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.929520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.929539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.929553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.929558Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.929565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929572Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.929578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.929584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.929594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.929600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.929604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.929612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.929618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.929622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.929650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:40.929659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:40.929667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:40.929677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2024-11-21T09:17:40.929692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.929699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.929703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.929723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.929729Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.929753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2024-11-21T09:17:43.072322Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2024-11-21T09:17:43.072405Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2024-11-21T09:17:43.072873Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2024-11-21T09:17:43.072893Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9575952;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:17:43.072897Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=768162be-a7e911ef-89f369b6-7f6aae7c;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:17:43.072963Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:368:2379], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2024-11-21T09:17:43.072998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T09:17:43.073004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T09:17:43.073008Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=4; 2024-11-21T09:17:43.073013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2024-11-21T09:17:43.073018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T09:17:43.073027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T09:17:43.073035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:43.073117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.073138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.073150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2024-11-21T09:17:43.073157Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2024-11-21T09:17:43.073176Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] send ScanData to [1:364:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T09:17:43.073184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073193Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.073215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:17:43.073224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] finished for tablet 9437184 2024-11-21T09:17:43.073232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:369:2380] send ScanData to [1:364:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:43.073285Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:369:2380] and sent to [1:364:2375] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.024},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.025}],"full":{"a":1732180663048129,"name":"_full_task","f":1732180663048129,"d_finished":0,"c":0,"l":1732180663073238,"d":25109},"events":[{"name":"bootstrap","f":1732180663048336,"d_finished":516,"c":1,"l":1732180663048852,"d":516},{"a":1732180663073209,"name":"ack","f":1732180663073113,"d_finished":86,"c":1,"l":1732180663073199,"d":115},{"a":1732180663073208,"name":"processing","f":1732180663049026,"d_finished":423,"c":5,"l":1732180663073199,"d":453},{"name":"ProduceResults","f":1732180663048647,"d_finished":211,"c":8,"l":1732180663073222,"d":211},{"a":1732180663073222,"name":"Finish","f":1732180663073222,"d_finished":0,"c":0,"l":1732180663073238,"d":16},{"name":"task_result","f":1732180663049030,"d_finished":320,"c":4,"l":1732180663073045,"d":320}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T09:17:43.073299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:43.047861Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T09:17:43.073304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:43.073319Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.023241s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.019210s;size=0.0063152;details={columns=1;};};]};; 2024-11-21T09:17:43.073324Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:369:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL [GOOD] Test command err: 2024-11-21T09:17:40.897343Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.913378Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:40.917419Z node 1 :TX_COLUMNSHARD TRACE: 
StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:40.918278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.921289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:40.921310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.921354Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.923675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.923719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.923754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.923777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.923793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.923809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.923824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.923844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.923863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.923879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.923909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.923926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.929115Z node 1 :TX_COLUMNSHARD TRACE: 
StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:40.929171Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:40.929184Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.929191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:40.930218Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.930283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.930290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.930308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.930344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.930353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.930358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.930367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:40.930376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.930383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.930387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.930408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.930415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.930421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.930426Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.930434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.930440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.930448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.930454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.930465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.930471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.930475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.930483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.930489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.930493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.930516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:40.930523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:40.930530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:40.930538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:40.930554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.930561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.930565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.930585Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.930592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.930596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.930609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.930615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... ;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=4; 2024-11-21T09:17:42.923827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T09:17:42.923840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.923845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T09:17:42.923848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:42.923917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T09:17:42.923920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T09:17:42.923924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=4; 2024-11-21T09:17:42.923928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2024-11-21T09:17:42.923932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T09:17:42.923940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.923943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T09:17:42.923946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:42.923974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:42.923987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.923990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:42.923996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T09:17:42.924001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T09:17:42.924016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:17:42.924023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.924029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.924035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.924046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:42.924050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.924054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:42.924056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] finished for tablet 9437184 2024-11-21T09:17:42.924062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:42.924101Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:368:2380] and sent to [1:363:2375] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.019}],"full":{"a":1732180662904623,"name":"_full_task","f":1732180662904623,"d_finished":0,"c":0,"l":1732180662924067,"d":19444},"events":[{"name":"bootstrap","f":1732180662904715,"d_finished":241,"c":1,"l":1732180662904956,"d":241},{"a":1732180662924045,"name":"ack","f":1732180662923972,"d_finished":64,"c":1,"l":1732180662924036,"d":86},{"a":1732180662924044,"name":"processing","f":1732180662905064,"d_finished":250,"c":5,"l":1732180662924036,"d":273},{"name":"ProduceResults","f":1732180662904849,"d_finished":136,"c":8,"l":1732180662924055,"d":136},{"a":1732180662924055,"name":"Finish","f":1732180662924055,"d_finished":0,"c":0,"l":1732180662924067,"d":12},{"name":"task_result","f":1732180662905067,"d_finished":174,"c":4,"l":1732180662923952,"d":174}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:17:42.924111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:42.904471Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T09:17:42.924115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:42.924125Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.018838s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.017708s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T09:17:42.924129Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2024-11-21T09:17:41.878428Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.891139Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:41.893168Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received 
event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:41.893264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.895139Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.895163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.895216Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.895952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.895992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.896028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.896048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.896061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.896076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.896091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.896111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.896130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.896146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.896162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.896192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.899577Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received 
event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:41.899646Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:41.899660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.899667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.900887Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.900967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.900985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.901015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.901059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.901070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.901074Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.901080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.901088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.901094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.901096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.901107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.901112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.901116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.901119Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.901125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.901129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.901136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.901140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.901148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.901153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.901155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.901160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.901165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.901168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.901195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:41.901203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:41.901209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:41.901217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T09:17:41.901230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:41.901235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:41.901238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:41.901254Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:41.901259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.901261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.901274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:41.901294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... it exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:43.186541Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.186550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.186558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26668; 2024-11-21T09:17:43.186563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=saved_at; 2024-11-21T09:17:43.186589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:542:2537] send ScanData to [1:541:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:17:43.186596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.186607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:43.186668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.186673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:43.186680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26664; 2024-11-21T09:17:43.186685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=saved_at; 2024-11-21T09:17:43.186699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:542:2537] send ScanData to [1:541:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:17:43.186704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186773Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:43.186792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:43.186799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:542:2537] finished for tablet 9437184 2024-11-21T09:17:43.186811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:542:2537] send ScanData to [1:541:2536] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:43.186865Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:542:2537] and sent to [1:541:2536] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.048},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.049}],"full":{"a":1732180663137378,"name":"_full_task","f":1732180663137378,"d_finished":0,"c":0,"l":1732180663186817,"d":49439},"events":[{"name":"bootstrap","f":1732180663137413,"d_finished":678,"c":1,"l":1732180663138091,"d":678},{"a":1732180663186772,"name":"ack","f":1732180663186391,"d_finished":191,"c":3,"l":1732180663186716,"d":236},{"a":1732180663186771,"name":"processing","f":1732180663138213,"d_finished":46675,"c":24,"l":1732180663186716,"d":46721},{"name":"ProduceResults","f":1732180663137736,"d_finished":46270,"c":29,"l":1732180663186798,"d":46270},{"a":1732180663186798,"name":"Finish","f":1732180663186798,"d_finished":0,"c":0,"l":1732180663186817,"d":19},{"name":"task_result","f":1732180663138216,"d_finished":46451,"c":21,"l":1732180663186364,"d":46451}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:17:43.186875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:43.137173Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4997532;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4997532;selected_rows=0; 2024-11-21T09:17:43.186879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:43.186889Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:17:43.186894Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:542:2537];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::RebootColdTiersWithStat >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TColumnShardTestSchema::HotTiers |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootHotTiers >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 
is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:17:29.727833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:17:29.727855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.727859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:17:29.727862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:17:29.727873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:17:29.727875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:17:29.727881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:17:29.727950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:17:29.735721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:17:29.735738Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:29.738207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:17:29.738758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:17:29.738793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:17:29.739951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:17:29.740103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:29.740167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.740245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:17:29.741039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.741266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.741275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.741308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:29.741314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:29.741318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:29.741327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.742557Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:17:29.754410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:17:29.754492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.754564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:17:29.754626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:17:29.754631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.755399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.755422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:17:29.755463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.755470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:17:29.755473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:17:29.755477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:17:29.755993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.756002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:17:29.756005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:17:29.756318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.756323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.756328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.756333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.756757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:17:29.757034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg 
type: 269090816 2024-11-21T09:17:29.757077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:17:29.757239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:29.757263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:29.757271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.757314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:17:29.757318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:17:29.757341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:29.757352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:17:29.757659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:29.757664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:17:29.757706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:29.757711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:17:29.757785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:17:29.757789Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:17:29.757797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:17:29.757800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.757804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:17:29.757807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:17:29.757810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:17:29.757812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:17:29.757820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:17:29.757824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T09:17:29.757826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:17:29.758029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.758038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:17:29.758041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:17:29.758044Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:17:29.758047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:17:29.758057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... N: Table profiles were not loaded 2024-11-21T09:17:44.598846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:17:44.598897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:44.598913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:17:44.598919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.598925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.598967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:44.598994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T09:17:44.599015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:17:44.599058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599102Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:17:44.599294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:17:44.600620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:44.600995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1747:3674], Recipient [1:1747:3674]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:44.601005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T09:17:44.601313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:44.601321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:44.601381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1747:3674], Recipient [1:1747:3674]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:44.601388Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:17:44.601494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:17:44.601504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:17:44.601511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:17:44.601515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:17:44.602049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1784:3674], Recipient [1:1747:3674]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:44.602058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T09:17:44.602061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1747:3674] sender: [1:1804:2058] recipient: [1:15:2062] 2024-11-21T09:17:44.625185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1803:3720], Recipient [1:1747:3674]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T09:17:44.625208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:17:44.625242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:17:44.625370Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 112us result status StatusSuccess 2024-11-21T09:17:44.625618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 25856 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 4138 Memory: 156376 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 25856 DataSize: 25856 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlapReboots::AlterTtlSettings [GOOD] |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 18890, MsgBus: 8212 2024-11-21T09:17:18.492020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659191349149136:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:18.492094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e61/r3tmp/tmp7ByhiY/pdisk_1.dat 2024-11-21T09:17:18.576617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18890, node 1 2024-11-21T09:17:18.591308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:18.591331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:18.592421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:18.628980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:18.628999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:18.629000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:18.629025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8212 TClient is connected to server localhost:8212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:18.711335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.720445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.787298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:17:18.798400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.805189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:18.834688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659191349150460:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.834714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:18.960555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.965649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.972649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.979996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.987376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:18.994140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.005400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659195644118270:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.005462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659195644118275:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:19.006774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:19.014638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659195644118277:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:19.236190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:19.304516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd705zj323ft279yt609bt54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.311715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd705zj323ft279yt609bt54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.313199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd705zj323ft279yt609bt54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.318770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd705zjn6m6vhc5zrdq8xapr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.319507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd705zjn6m6vhc5zrdq8xapr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.319780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd705zjn6m6vhc5zrdq8xapr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.323448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd705zjt4q5h8wpqzzh8fxyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.324305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd705zjt4q5h8wpqzzh8fxyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.324636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd705zjt4q5h8wpqzzh8fxyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.328537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. 
Ctx: { TraceId: 01jd705zjz87nv8axjad99g5m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.329399Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd705zjz87nv8axjad99g5m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.329770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd705zjz87nv8axjad99g5m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlMmEyNTMtMzQ4MWRlMzktOWEyMDQ4MTktOTM0MDFkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.333597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd705zk4ad6e465fcqveec91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiODctNzRlNTM3NGMtY2JhYzNiMTEtYTZmNDUyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:19.334214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd705zk4ad6e465fcqveec91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5OWZiOD ... : TxId: 281474976723132. Ctx: { TraceId: 01jd706rat9k8mf3qatxabbtwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.667441Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723133. Ctx: { TraceId: 01jd706rat9k8mf3qatxabbtwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.667749Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723134. Ctx: { TraceId: 01jd706rat9k8mf3qatxabbtwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.671464Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723135. Ctx: { TraceId: 01jd706ray475hqgp3kx6h0r4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.672097Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723136. Ctx: { TraceId: 01jd706ray475hqgp3kx6h0r4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.672508Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723137. Ctx: { TraceId: 01jd706ray475hqgp3kx6h0r4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.676064Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723138. 
Ctx: { TraceId: 01jd706rb35vegh58s6hwc8396, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.677081Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723139. Ctx: { TraceId: 01jd706rb35vegh58s6hwc8396, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.677510Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723140. Ctx: { TraceId: 01jd706rb35vegh58s6hwc8396, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.680664Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723141. Ctx: { TraceId: 01jd706rb8bw2c527exdx9y68c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.681297Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723142. Ctx: { TraceId: 01jd706rb8bw2c527exdx9y68c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.681575Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723143. Ctx: { TraceId: 01jd706rb8bw2c527exdx9y68c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.684798Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723144. Ctx: { TraceId: 01jd706rbc8q7sev5qdjz63a22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.685454Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723145. Ctx: { TraceId: 01jd706rbc8q7sev5qdjz63a22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.685816Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723146. Ctx: { TraceId: 01jd706rbc8q7sev5qdjz63a22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.689362Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723147. Ctx: { TraceId: 01jd706rbg009mnjwz3pyr3cd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.690002Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723148. 
Ctx: { TraceId: 01jd706rbg009mnjwz3pyr3cd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.690422Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723149. Ctx: { TraceId: 01jd706rbg009mnjwz3pyr3cd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.693994Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723150. Ctx: { TraceId: 01jd706rbn6y18e72xk1jw00qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.694607Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723151. Ctx: { TraceId: 01jd706rbn6y18e72xk1jw00qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.694857Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723152. Ctx: { TraceId: 01jd706rbn6y18e72xk1jw00qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.698074Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723153. Ctx: { TraceId: 01jd706rbs5mt7cxr0dd8g4xcr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.698694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723154. Ctx: { TraceId: 01jd706rbs5mt7cxr0dd8g4xcr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.698992Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723155. Ctx: { TraceId: 01jd706rbs5mt7cxr0dd8g4xcr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.702199Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723156. Ctx: { TraceId: 01jd706rbxc04nhpngv6c1q1zw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.702881Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723157. Ctx: { TraceId: 01jd706rbxc04nhpngv6c1q1zw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.703184Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723158. 
Ctx: { TraceId: 01jd706rbxc04nhpngv6c1q1zw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.707289Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723159. Ctx: { TraceId: 01jd706rc2bp8v7a95y6jk08n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.707851Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723160. Ctx: { TraceId: 01jd706rc2bp8v7a95y6jk08n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.708243Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723161. Ctx: { TraceId: 01jd706rc2bp8v7a95y6jk08n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.711431Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723162. Ctx: { TraceId: 01jd706rc6009zahpn0yggbzg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.712166Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723163. Ctx: { TraceId: 01jd706rc6009zahpn0yggbzg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.712547Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723164. Ctx: { TraceId: 01jd706rc6009zahpn0yggbzg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VmMWU5MWUtMWY3ZjNmZC02MmM4YmE2Zi1hNjFlMjcwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.716000Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723165. Ctx: { TraceId: 01jd706rcb5rk43509eetx5fxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.716588Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723166. Ctx: { TraceId: 01jd706rcb5rk43509eetx5fxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:17:44.716938Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723167. Ctx: { TraceId: 01jd706rcb5rk43509eetx5fxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjUzOTMwOC05OTIzYWY4MS0zZmM2NGUzNS1mMWM0NzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::AlterTtlSettings [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:16:41.461299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:16:41.461320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:16:41.461326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:16:41.461334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:16:41.461337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:16:41.461343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:16:41.461418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:16:41.475200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:16:41.475221Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.477557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:16:41.477663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:16:41.477694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:16:41.480352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:16:41.480432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:16:41.481691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.482666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.484388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.486564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:16:41.486571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:16:41.486576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:16:41.486631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:16:41.487829Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:16:41.507735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:16:41.507796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.507845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:16:41.507908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:16:41.507916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:16:41.508652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.508659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:16:41.508663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:16:41.508668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2024-11-21T09:16:41.509049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:16:41.509379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.509391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.509396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.510010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:16:41.510409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:16:41.510453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:16:41.510647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:16:41.510682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.510760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:16:41.510769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:16:41.510801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:16:41.510814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:16:41.511281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:16:41.511294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:16:41.511323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:16:41.511328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:16:41.511389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:16:41.511396Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:16:41.511406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:16:41.511413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.511418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:16:41.511423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:16:41.511428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:16:41.511431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:16:41.511442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:16:41.511447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:16:41.511454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1005 at step: 5000006 2024-11-21T09:17:45.205713Z node 190 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:17:45.205736Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 816043788393 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:45.205742Z node 190 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#1005:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2024-11-21T09:17:45.205846Z node 190 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 129 2024-11-21T09:17:45.205868Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:17:45.205877Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T09:17:45.206383Z node 190 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:17:45.206389Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:17:45.206427Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:17:45.206443Z node 190 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:17:45.206447Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [190:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T09:17:45.206450Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [190:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T09:17:45.206522Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:45.206527Z node 190 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:17:45.206532Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:17:45.206602Z node 190 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:45.206610Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:45.206613Z node 190 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:45.206617Z node 190 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T09:17:45.206622Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:17:45.206697Z node 190 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:45.206704Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:17:45.206706Z node 190 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:17:45.206708Z node 190 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T09:17:45.206710Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:17:45.206716Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:17:45.206932Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:17:45.207120Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:17:45.207249Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1005 2024-11-21T09:17:45.217919Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T09:17:45.217938Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:45.217955Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T09:17:45.217964Z node 190 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T09:17:45.218039Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T09:17:45.218043Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T09:17:45.218054Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T09:17:45.218442Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:45.218504Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:45.218518Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:17:45.218526Z node 190 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T09:17:45.218540Z node 190 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:17:45.218543Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:45.218548Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T09:17:45.218558Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [190:356:2336] message: TxId: 1005 2024-11-21T09:17:45.218564Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:17:45.218567Z node 190 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:17:45.218570Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:17:45.218597Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:17:45.218906Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:17:45.218914Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [190:499:2477] TestWaitNotification: OK eventTxId 1005 2024-11-21T09:17:45.219014Z node 190 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:17:45.219069Z node 190 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 61us result status StatusSuccess 2024-11-21T09:17:45.219164Z node 190 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Disabled { } Version: 3 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootForgetWithLostAnswer |94.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::RebootColdTiers >> TColumnShardTestSchema::RebootHotTiersTtl >> TColumnShardTestSchema::RebootOneColdTier >> TPQTest::TestAlreadyWritten [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestAlreadyWritten [GOOD] Test command err: 2024-11-21T09:17:20.975041Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T09:17:20.977566Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T09:17:20.977643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T09:17:20.977653Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T09:17:20.977655Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T09:17:20.977658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T09:17:20.977663Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T09:17:20.977684Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:17:20.981132Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.981153Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T09:17:20.981166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:17:20.983132Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.984092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:20.984111Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.984441Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.984477Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.984487Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T09:17:20.984560Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:20.984617Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T09:17:20.984729Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T09:17:20.984733Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T09:17:20.984738Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:20.984809Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T09:17:20.984812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T09:17:20.984840Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.984856Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:20.984937Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T09:17:20.984959Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T09:17:20.985056Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 
2024-11-21T09:17:20.985059Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T09:17:20.985062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:20.985146Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T09:17:20.985151Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T09:17:20.985166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:20.985186Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:20.985237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:17:20.985267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.985808Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:20.985828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:17:20.985867Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.985872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T09:17:20.986156Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:20.986164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T09:17:20.986301Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 1 Important: false } Consumers { Name: "client-2" Generation: 1 Important: false } } BootstrapConfig { } } 2024-11-21T09:17:20.986326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2024-11-21T09:17:20.986329Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T09:17:20.986333Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 
67891, NewState PREPARING 2024-11-21T09:17:20.986361Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-2" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T09:17:20.986376Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T09:17:20.986404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67892 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "client-2" Path: "/topic" } Operations { PartitionId: 2 Begin: 0 End: 0 Consumer: "client-1" Path: "/topic" } Immediate: false } 2024-11-21T09:17:20.986410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY ... eup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [23:232:2234] sender: [23:332:2057] recipient: [23:14:2061] 2024-11-21T09:17:46.468836Z node 23 :PERSQUEUE INFO: new Cookie default|8b85d1a8-a9c67cd3-a68d0504-cfb1fe16_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] 2024-11-21T09:17:46.611478Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:46.611494Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] Leader for TabletID 72057594037927938 is [24:151:2172] sender: [24:152:2057] recipient: [24:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:177:2057] recipient: [24:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.614659Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:46.614862Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 24 actor [24:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 24 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 24 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 24 Important: false } 2024-11-21T09:17:46.614988Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [24:184:2197] 2024-11-21T09:17:46.615385Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [24:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:46.615652Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [24:185:2198] 2024-11-21T09:17:46.615945Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [24:185:2198] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.617140Z node 24 :PERSQUEUE INFO: new Cookie default|d912be2d-c7c9dce5-da257487-b937330e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.617791Z node 24 :PERSQUEUE INFO: new Cookie default|919d7cdc-9c614214-c255957e-81320cda_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.618384Z node 24 :PERSQUEUE INFO: new Cookie default|fd0d22a5-89b309da-2026f7ad-317a1a37_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] 2024-11-21T09:17:46.851858Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:46.851874Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] Leader for TabletID 72057594037927938 is [25:151:2172] sender: [25:152:2057] recipient: 
[25:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:177:2057] recipient: [25:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.855057Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:46.855224Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 25 actor [25:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 25 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 25 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 Important: false } 2024-11-21T09:17:46.855307Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [25:184:2197] 2024-11-21T09:17:46.855762Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [25:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:46.856024Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [25:185:2198] 2024-11-21T09:17:46.856354Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [25:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.857578Z node 25 :PERSQUEUE INFO: new Cookie default|7190baa4-e1b01faf-9353f46-2a50cdd3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.858437Z node 25 :PERSQUEUE INFO: new Cookie default|66fb02db-97f94506-a27b0803-21ecd340_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:46.859056Z node 25 :PERSQUEUE INFO: new Cookie default|eb3f5e0e-6a7d4ab-61f7659d-35a0d76e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> TColumnShardTestSchema::InternalTTL_Types [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2024-11-21T09:17:40.886246Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:40.913394Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:40.917193Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:40.918291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:40.920305Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:40.920571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:40.920619Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:40.923712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:40.923764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:40.923809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:40.923840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:40.923863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:40.923886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:40.923907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:40.923928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:40.923945Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:40.923960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.923975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:40.924005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:40.927514Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:40.927569Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:40.927580Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:40.927586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:40.928554Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:40.928609Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:40.928614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:40.928632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:40.929478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:40.929483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:40.929494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:40.929509Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:40.929513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:40.929537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:40.929550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:40.929554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:40.929563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:40.929569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:40.929577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:40.929582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:40.929593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:40.929599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:40.929603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:40.929611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:40.929617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:40.929621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:40.929648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:40.929656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 
2024-11-21T09:17:40.929664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T09:17:40.929672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:40.929689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:40.929696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:40.929700Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:40.929720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:40.929727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:40.929744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:40.929750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... 
d=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=16; 2024-11-21T09:17:50.636640Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T09:17:50.636655Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636661Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T09:17:50.636668Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:50.636692Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T09:17:50.636696Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T09:17:50.636700Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=16; 2024-11-21T09:17:50.636704Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2024-11-21T09:17:50.636709Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T09:17:50.636714Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636716Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T09:17:50.636720Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:50.636748Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:50.636763Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636766Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:50.636772Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T09:17:50.636780Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T09:17:50.636798Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2024-11-21T09:17:50.636806Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636813Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636817Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636830Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:50.636834Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636838Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:50.636841Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2024-11-21T09:17:50.636849Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:50.636896Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.017}],"full":{"a":1732180670619493,"name":"_full_task","f":1732180670619493,"d_finished":0,"c":0,"l":1732180670636855,"d":17362},"events":[{"name":"bootstrap","f":1732180670619597,"d_finished":319,"c":1,"l":1732180670619916,"d":319},{"a":1732180670636829,"name":"ack","f":1732180670636745,"d_finished":74,"c":1,"l":1732180670636819,"d":100},{"a":1732180670636828,"name":"processing","f":1732180670619978,"d_finished":291,"c":5,"l":1732180670636819,"d":318},{"name":"ProduceResults","f":1732180670619777,"d_finished":159,"c":8,"l":1732180670636840,"d":159},{"a":1732180670636840,"name":"Finish","f":1732180670636840,"d_finished":0,"c":0,"l":1732180670636855,"d":15},{"name":"task_result","f":1732180670619980,"d_finished":205,"c":4,"l":1732180670636726,"d":205}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:17:50.636906Z node 4 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:50.619292Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T09:17:50.636909Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:50.636921Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.016696s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.015565s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T09:17:50.636926Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> TColumnShardTestSchema::DropWriteRace >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> TColumnShardTestSchema::DropWriteRace [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2024-11-21T09:17:51.853920Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:51.866197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:51.868289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:51.868305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:51.868332Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:51.868763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:51.868786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:51.868805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:51.868822Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:51.868837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:51.868851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:51.868863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:51.868877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:51.868891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:51.868901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:51.868922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:51.868932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:51.871841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:51.872507Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:51.872551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:51.872556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:51.872572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:51.872600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:51.872608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:51.872611Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:51.872616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:51.872622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:51.872626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:51.872628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:51.872637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:51.872642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:51.872645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:51.872648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:51.872653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:51.872657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:51.872661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:51.872664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:51.872670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:51.872674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:51.872676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:51.872681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:51.872686Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:51.872688Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:51.872706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:51.872711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=2; 2024-11-21T09:17:51.872716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 2024-11-21T09:17:51.872722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=3; 2024-11-21T09:17:51.872734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:51.872738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:51.872741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:51.872753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:51.872757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:51.872759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:51.872767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:51.872770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:51.872773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T09:17:51.872783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:51.872787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:51.872789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 
2024-11-21T09:17:51.872796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... _SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=5755202335616;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=5755248108240;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T09:17:52.333358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=5755202335616;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=5755248108240;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=tx_controller.cpp:371;event=start;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;; 2024-11-21T09:17:52.333362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=5755202335616;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=5755248108240;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2024-11-21T09:17:52.333368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=5755202335616;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=5755248108240;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2024-11-21T09:17:52.333427Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2024-11-21T09:17:52.333435Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2024-11-21T09:17:52.333455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:17:52.333515Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2024-11-21T09:17:52.334143Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T09:17:52.334159Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:259;method=RegisterTable;path_id=1; 2024-11-21T09:17:52.334164Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=1; 2024-11-21T09:17:52.334539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T09:17:52.355530Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2024-11-21T09:17:52.355576Z 
node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6120;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6120;columns=10; 2024-11-21T09:17:52.356186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvWrite;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:17:52.356192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvWrite;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:17:52.356198Z node 1 :TX_COLUMNSHARD DEBUG: Write (blob) 6120 bytes into pathId 1 {object=write_monitor;count=1;size=6120} at tablet 9437184 2024-11-21T09:17:52.357455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2024-11-21T09:17:52.357823Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 at tablet 9437184 2024-11-21T09:17:52.357898Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2024-11-21T09:17:52.368439Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2024-11-21T09:17:52.368471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:17:52.368543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=102;this=5755202339296;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:311;event=start; 2024-11-21T09:17:52.368619Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=102;this=5755202339296;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:340;event=registered; 2024-11-21T09:17:52.379257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=5755202339296;op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_this=5755250722656;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T09:17:52.379284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=5755202339296;op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_this=5755250722656;method=TTxController::FinishProposeOnComplete;tx_id=102;fline=tx_controller.cpp:371;event=start;tx_info=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0; 2024-11-21T09:17:52.379293Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=5755202339296;op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732180672873;max=1732180702873;plan=0;src=[1:97:2132];cookie=0;int_this=5755250722656;method=TTxController::FinishProposeOnComplete;tx_id=102;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=102; 2024-11-21T09:17:52.379395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=5755202339296;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:311;event=start; 2024-11-21T09:17:52.379469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=5755202339296;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:340;event=registered; 2024-11-21T09:17:52.390065Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=5755202339296;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=5755248134080;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T09:17:52.390086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=5755202339296;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=5755248134080;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=tx_controller.cpp:371;event=start;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;; 2024-11-21T09:17:52.390096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=5755202339296;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=5755248134080;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2024-11-21T09:17:52.390104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=5755202339296;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=5755248134080;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2024-11-21T09:17:52.390156Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2024-11-21T09:17:52.390167Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 
2024-11-21T09:17:52.390193Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:17:52.390249Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2024-11-21T09:17:52.400825Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2024-11-21T09:17:52.400868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:17:52.400921Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2024-11-21T09:17:52.400933Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] execute at tablet 9437184 2024-11-21T09:17:52.400957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:17:52.401023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=execute;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2024-11-21T09:17:52.411662Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] complete at tablet 9437184 2024-11-21T09:17:52.411704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 9249, MsgBus: 13680 2024-11-21T09:17:01.337815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659119998466099:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:01.338053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017df/r3tmp/tmpwZYIex/pdisk_1.dat 2024-11-21T09:17:01.397396Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9249, node 1 2024-11-21T09:17:01.408469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:01.408495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:01.408496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:01.408528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13680 2024-11-21T09:17:01.439651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:01.439683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:01.441282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:01.471403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.484703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:17:01.498463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:01.569625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.593586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.609940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:01.676556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119998467661:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.676587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.711766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.718180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.725388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.732225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.739520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.746021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.755365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119998468153:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.755390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.755401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659119998468158:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:01.756064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:17:01.759868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659119998468160:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:17:01.951715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:17:01.987385Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705en24ncpgrpgqsjpnddr, Create QueryResponse for error on request, msg: 2024-11-21T09:17:01.992920Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705en4fa3qjm6t17wmbaw9, Create QueryResponse for error on request, msg: 2024-11-21T09:17:01.997647Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705ena1hj2sz3rmrmnk0mz, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.002258Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705ened7d5qajzae9aqs2j, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.007885Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705enk32fq364315mh98v7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.015028Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705enr9ycsm7kcvqckw04t, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.023256Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705enzf2psz3tcdfy14x3x, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.032460Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, ActorId: [1:7439659119998468455:2454], ActorState: ExecuteState, TraceId: 01jd705ep875m1z32q041920xz, Create QueryResponse for error on request, msg: 2024-11-21T09:17:02.042542Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659124293435883:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd705ephf6bapyefq9vep491, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 9ms } {
: Error: Cancelling after 9ms during execution } ] 2024-11-21T09:17:02.043738Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659124293435892:2485], TxId: 281474976715673, task: 3. Ctx: { TraceId : 01jd705ephf6bapyefq9vep491. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659124293435883:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.043854Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659124293435889:2483], TxId: 281474976715673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd705ephf6bapyefq9vep491. SessionId : ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659124293435883:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.043944Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659124293435891:2484], TxId: 281474976715673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=Yjc4NDc4MjAtMWQyZWQ4YWItYTgwMTZkODAtODk0YjNmYzA=. CustomerSuppliedId : . TraceId : 01jd705ephf6bapyefq9vep491. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439659124293435883:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:02.044021Z node 1 :KQP_COMPUTE E ... ssion/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e3g9gcvqx5h74abkfhb, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.198851Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e3kdxqv1nngbx2k582x, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.203456Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e3qbfpvsps3myysehch, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.209076Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e3w6bxwz57hzt2hnwka, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.216612Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e41b8y88d1eyhgmmcj2, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.224481Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e490kpfhm2a5s8wcp6n, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.233678Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e4hdem2anr5dxwf1c30, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.243420Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e4t8tez6qxtysew1jmm, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.254680Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e543k7zvws2xrs0mr40, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.273106Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e5n8cv9h9dqj18m6et7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.287436Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e61b8mkvbktqgxgnhz7, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.312557Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e6s7nrk5s5pqx2qmpv6, Create QueryResponse for error on request, msg: 
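The CANCELLED blocks above report each aborted query with a pair of issues of the form "Request canceled after Nms" and "Cancelling after Nms during execution". Below is a minimal sketch for collecting those durations from a log like this one, assuming the log text is fed on stdin; the regex and script are an ad-hoc helper, not part of the test tooling.

import re
import sys

# Matches the first issue emitted by the KQP executer on cancellation, e.g.
# ": Error: Request canceled after 9ms".
CANCEL_RE = re.compile(r"Request canceled after (\d+)ms")

def cancel_durations(lines):
    """Yield the reported cancellation deadlines, in milliseconds."""
    for line in lines:
        for match in CANCEL_RE.finditer(line):
            yield int(match.group(1))

if __name__ == "__main__":
    durations = list(cancel_durations(sys.stdin))
    if durations:
        print(f"count={len(durations)} min={min(durations)}ms max={max(durations)}ms")

Run against the excerpt above it would pick up the 9ms, 36ms and 48ms cancellations; a full unittest log would yield one entry per cancelled attempt.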
2024-11-21T09:17:34.329954Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e79atzepnet4cpe8r6e, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.348388Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e7t3nfmzf4ye6svw5d3, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.367817Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e8d97zb37kyfkhbemd6, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.395991Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706e972d4m44530vvdaqgd, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.437991Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706eaf6530nwp7rbm3j8gn, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.474057Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706ebh6qsjbnn4ea2v1q99, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.510114Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706ecm9jajyjvy8269bqzx, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.546495Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706edp8g7xezjw5sfnpnhg, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.582341Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706eer7755jw65mvphq2h1, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.620778Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706efw7029zpncxpeqhtyn, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.731881Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659260941881056:2454] TxId: 281474976715685. Ctx: { TraceId: 01jd706ek7evwdsh1ywca3sq0y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 36ms } {
: Error: Cancelling after 35ms during execution } ] 2024-11-21T09:17:34.731952Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659260941881062:2613], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=. TraceId : 01jd706ek7evwdsh1ywca3sq0y. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659260941881056:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:34.732016Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659260941881064:2614], TxId: 281474976715685, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd706ek7evwdsh1ywca3sq0y. SessionId : ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659260941881056:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:34.732054Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659260941881065:2615], TxId: 281474976715685, task: 3. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=. CustomerSuppliedId : . TraceId : 01jd706ek7evwdsh1ywca3sq0y. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439659260941881056:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:34.732066Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439659260941881066:2616], TxId: 281474976715685, task: 4. Ctx: { TraceId : 01jd706ek7evwdsh1ywca3sq0y. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439659260941881056:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T09:17:34.732307Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706ek7evwdsh1ywca3sq0y, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.809728Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706enkfskyy8vbzy7b64qd, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.863507Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706eq6dn9zrt98d7p7p8pj, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.917221Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706erta4dsdk7t9422q5ey, Create QueryResponse for error on request, msg: 2024-11-21T09:17:34.971023Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706ete9e85k4qcqjkjxfmt, Create QueryResponse for error on request, msg: 2024-11-21T09:17:35.023228Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706ew18j78sbycez944qts, Create QueryResponse for error on request, msg: 2024-11-21T09:17:35.076954Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439659265236848582:2454] TxId: 281474976715693. Ctx: { TraceId: 01jd706exn2mx2m73rmsdydsd0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 48ms } {
: Error: Cancelling after 47ms during execution } ] 2024-11-21T09:17:35.077063Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjVmYWE4YzAtYzA5MDM2MTktNWVlMDNlYTktMmMzZmI1OGU=, ActorId: [3:7439659260941880472:2454], ActorState: ExecuteState, TraceId: 01jd706exn2mx2m73rmsdydsd0, Create QueryResponse for error on request, msg: 2024-11-21T09:17:38.628666Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439659256646910840:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:38.628695Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:48.633627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:17:48.633662Z node 3 :IMPORT WARN: Table profiles were not loaded |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2024-11-21T09:17:42.794667Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:42.810167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:42.812741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:42.812768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:42.812811Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:42.813562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:42.813600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:42.813633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:42.813652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:42.813672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:42.813688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:42.813701Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:42.813721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:42.813736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:42.813750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42.813765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:42.813796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:42.817956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:42.818984Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:42.819047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:42.819054Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:42.819080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.819113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:42.819123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:42.819131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:42.819138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:42.819147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:42.819152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:42.819157Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:42.819171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.819178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:42.819184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:42.819188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:42.819195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:42.819202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:42.819209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:42.819213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:42.819223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:42.819229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:42.819233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:42.819241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:42.819247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:42.819251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:42.819273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:42.819280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:42.819285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:42.819291Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:42.819303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:42.819308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:42.819311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:42.819325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:42.819329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42.819332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42.819340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:42.819343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:17:42.819346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T09:17:42.819357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:17:42.819361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:17:42.819364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T09:17:42.819371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
son=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=7d15ea6e-a7e911ef-b70d0745-4973ca72,;}; 2024-11-21T09:17:53.958331Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=61;external_task_id=7cfd279a-a7e911ef-bf4c3ded-75bcb2e8;mem=19099692;cpu=0; 2024-11-21T09:17:53.958372Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[204] complete at tablet 9437184 2024-11-21T09:17:53.958389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:723;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=7d15ea6e-a7e911ef-b70d0745-4973ca72,;}; 2024-11-21T09:17:53.958411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=6364460;external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;type=CS::INDEXATION;priority=0;; 2024-11-21T09:17:53.958466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=storage.cpp:86;event=granule_compaction_weight;priority=(10,19996862644); 2024-11-21T09:17:53.958479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=optimizer.h:893;stop_instant=NO_VALUE_OPTIONAL;size=6274712;next=;count=2;info={bytes=3137356;count=1;records=53332};event=start_optimization;stop_point=;main_portion=88; 2024-11-21T09:17:53.958501Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:9;event=lock;process_id=CS::GENERAL::7d15f72a-a7e911ef-96ece836-f15256b; 2024-11-21T09:17:53.958504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=ro_controller.cpp:45;event=CS::GENERAL;tablet_id=9437184; 2024-11-21T09:17:53.958517Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=manager.h:99;event=ask_data;request=request_id=209;1={portions_count=1};; 2024-11-21T09:17:53.958581Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=62;task=cpu=0;mem=6364460;external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;type=CS::INDEXATION;priority=0;; 2024-11-21T09:17:53.958587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;mem=6364460;cpu=0; 2024-11-21T09:17:53.958590Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;task_id=62;mem=6364460;cpu=0; 2024-11-21T09:17:53.958600Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=manager.h:99;event=ask_data;request=request_id=210;1={portions_count=2};; 2024-11-21T09:17:53.958651Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:811;event=compaction;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b; 
2024-11-21T09:17:53.958656Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:609;event=start_changes;type=CS::GENERAL;task_id=7d15f72a-a7e911ef-96ece836-f15256b; 2024-11-21T09:17:53.958684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=19099692;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;type=CS::GENERAL;priority=0;; 2024-11-21T09:17:53.958694Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72; 2024-11-21T09:17:54.081755Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7d15ea6e-a7e911ef-b70d0745-4973ca72;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2024-11-21T09:17:54.081931Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2024-11-21T09:17:54.082128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=63;task=cpu=0;mem=19099692;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;type=CS::GENERAL;priority=0;; 2024-11-21T09:17:54.082133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;mem=19099692;cpu=0; 2024-11-21T09:17:54.082137Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;task_id=63;mem=19099692;cpu=0; 2024-11-21T09:17:54.082210Z node 1 :S3_WRAPPER DEBUG: external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:61:255:1:823712:0];range=bytes=0-823711;object_exists=1; 2024-11-21T09:17:54.083785Z node 1 :S3_WRAPPER DEBUG: external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:60:255:2:823712:0];range=bytes=0-823711;object_exists=1; 2024-11-21T09:17:54.085664Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7d15f72a-a7e911ef-96ece836-f15256b;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2024-11-21T09:17:54.096080Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=7d15f72a-a7e911ef-96ece836-f15256b; 2024-11-21T09:17:54.195923Z node 1 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:203;event=blobs_created_diff;appended=0;;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:0:103496];;column_id:4;chunk_idx:1;blob_range:[NO_BLOB:103496:103456];;column_id:4;chunk_idx:2;blob_range:[NO_BLOB:206952:103392];;column_id:4;chunk_idx:3;blob_range:[NO_BLOB:310344:101512];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:411856:103496];;column_id:6;chunk_idx:1;blob_range:[NO_BLOB:515352:103456];;column_id:6;chunk_idx:2;blob_range:[NO_BLOB:618808:103392];;column_id:6;chunk_idx:3;blob_range:[NO_BLOB:722200:101512];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:0:156904];;column_id:3;chunk_idx:1;blob_range:[NO_BLOB:156904:156864];;column_id:3;chunk_idx:2;blob_range:[NO_BLOB:313768:156800];;column_id:3;chunk_idx:3;blob_range:[NO_BLOB:470568:154904];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:625472:103496];;column_id:2;chunk_idx:1;blob_range:[NO_BLOB:728968:103456];;column_id:2;chunk_idx:2;blob_range:[NO_BLOB:832424:103392];;column_id:2;chunk_idx:3;blob_range:[NO_BLOB:935816:101512];;column_id:10;chunk_idx:0;blob_range:[NO_BLOB:1037328:103496];;column_id:10;chunk_idx:1;blob_range:[NO_BLOB:1140824:103456];;column_id:10;chunk_idx:2;blob_range:[NO_BLOB:1244280:103392];;column_id:10;chunk_idx:3;blob_range:[NO_BLOB:1347672:101512];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:1449184:53688];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:1502872:53688];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:1556560:53688];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:1610248:53672];;column_id:8;chunk_idx:0;blob_range:[NO_BLOB:1663920:53592];;column_id:8;chunk_idx:1;blob_range:[NO_BLOB:1717512:53608];;column_id:8;chunk_idx:2;blob_range:[NO_BLOB:1771120:53600];;column_id:8;chunk_idx:3;blob_range:[NO_BLOB:1824720:53592];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:1878312:53592];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:1931904:53608];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:1985512:53600];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:2039112:53592];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:2092704:53520];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:2146224:53216];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:2199440:53288];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:2252728:53240];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:2305968:640];;column_id:4294967040;chunk_idx:1;blob_range:[NO_BLOB:2306608:640];;column_id:4294967040;chunk_idx:2;blob_range:[NO_BLOB:2307248:640];;column_id:4294967040;chunk_idx:3;blob_range:[NO_BLOB:2307888:640];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:2308528:640];;column_id:4294967041;chunk_idx:1;blob_range:[NO_BLOB:2309168:640];;column_id:4294967041;chunk_idx:2;blob_range:[NO_BLOB:2309808:640];;column_id:4294967041;chunk_idx:3;blob_range:[NO_BLOB:2310448:640];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:2311088:632];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:2311720:632];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:2312352:632];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:2312984:632];;;;switched=(portion_id:87;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=INSERTED;)););(portion_id:88;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=SPLIT_COMPACTED;)););; 2024-11-21T09:17:54.195946Z node 1 :TX_COLUMNSHARD INFO: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:205;event=blobs_created;appended=1;switched=2; 2024-11-21T09:17:54.196010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'hot' stopped at tablet 9437184 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] Test command err: 2024-11-21T09:17:23.653924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659216288472054:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:23.654172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:23.658911Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659213002285658:2245];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001225/r3tmp/tmpXGG36k/pdisk_1.dat 2024-11-21T09:17:23.678115Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:17:23.679249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:23.679960Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:17:23.695759Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28976, node 1 2024-11-21T09:17:23.711960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: /home/runner/.ya/build/build_root/jptk/001225/r3tmp/yandexU7qTc3.tmp 2024-11-21T09:17:23.711978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/001225/r3tmp/yandexU7qTc3.tmp 2024-11-21T09:17:23.712067Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/001225/r3tmp/yandexU7qTc3.tmp 2024-11-21T09:17:23.712119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:23.715469Z INFO: TTestServer started on Port 11915 GrpcPort 28976 TClient is connected to server localhost:11915 PQClient connected to localhost:28976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:23.734675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:17:23.748753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:17:23.754103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:23.754124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:23.755586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:23.776020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:23.776071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:23.777708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:23.777944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:17:23.901243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659213002285754:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:23.901275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659213002285743:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:23.901323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:23.902407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T09:17:23.906826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659213002285758:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T09:17:23.933598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:23.933632Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659216288473153:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:23.933707Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjc4ZmJjMTYtOWQ2YmU5ZTUtYTU5MmMyYTctYzIxZDdjMzY=, ActorId: [1:7439659216288473113:2303], ActorState: ExecuteState, TraceId: 01jd70642413dhddg6thpab10c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:23.934121Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:23.975324Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659213002285838:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:23.975401Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWQ2NmVkYS1lODY5ZTlmYi1kN2FhZTlkMC00ZjIzMGIzZg==, ActorId: [2:7439659213002285727:2277], ActorState: ExecuteState, TraceId: 01jd70641w8wk86eftmyx1azn3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:23.975614Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:23.993323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:17:24.013441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:17:24.041539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd70645r3f9rjcn7gznmaevx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMzMGM1ZGEtN2NiZWFiM2EtZDc0MWQ1MmUtZmEyZGMyMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659220583440808:3022] 2024-11-21T09:17:28.654426Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659216288472054:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:28.654455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:28.656970Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659213002285658:2245];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:28.657018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T09:17:29.074963Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T09:17:29.074979Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:17:29.074981Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; ... hemeshard: 72057594046644480 2024-11-21T09:17:49.223983Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439659325587042835:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:49.224047Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NmVmN2FlYTctOGM5ZTU1M2MtZTk3ODE2ODQtOWE5ZjJiMGQ=, ActorId: [10:7439659325587042742:2280], ActorState: ExecuteState, TraceId: 01jd706wprd0y9cq1kv5z6a0ff, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:49.224269Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:49.235039Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:17:49.262197Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd706wszfg5p13xwn6t179br, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MmNkYjZiMmYtZTc2MDFiMzUtZTBkMzJkZTItMWIwMzM3Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7439659328295380219:3039] 2024-11-21T09:17:53.918530Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439659324000411435:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:53.918563Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:53.919564Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439659321292075116:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:53.919599Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T09:17:54.279576Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T09:17:54.279589Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:17:54.279590Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T09:17:54.279594Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) InitTable: SourceId=A_Source TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T09:17:54.280467Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T09:17:54.329915Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:17:54.372292Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:17:54.423594Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:17:54.471317Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:17:54.514677Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:17:54.552721Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439659324000411425:2049], Recipient [9:7439659349770217218:3330]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T09:17:54.552737Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) StartKqpSession 2024-11-21T09:17:54.553112Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439659324000411546:2155], Recipient [9:7439659349770217218:3330]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=OGY3ODcxZTYtMmU3YzUwNmYtNzk1YTZlYWQtYjAyMmI0MjA=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T09:17:54.553120Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) Select from the table 2024-11-21T09:17:54.565124Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439659324000411546:2155], Recipient [9:7439659349770217218:3330]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=OGY3ODcxZTYtMmU3YzUwNmYtNzk1YTZlYWQtYjAyMmI0MjA=" PreparedQuery: "6b6f0e0a-eaa45148-5ddb3a75-4c2de91d" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd707203dgqng9q2ek99hb0h" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 6 2024-11-21T09:17:54.565154Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) Selected from table PartitionId=(NULL) SeqNo=(NULL) 2024-11-21T09:17:54.565159Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) OnPartitionChosen 2024-11-21T09:17:54.565162Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) Update the table 2024-11-21T09:17:54.575603Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7439659324000411546:2155], Recipient [9:7439659349770217218:3330]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=OGY3ODcxZTYtMmU3YzUwNmYtNzk1YTZlYWQtYjAyMmI0MjA=" PreparedQuery: "d8ecf612-8eb0e69a-2f90be36-723a1586" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 5 2024-11-21T09:17:54.575616Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:17:54.575626Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:17:54.575633Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439659349770217218:3330] (SourceId=A_Source, PreferedPartition=0) Start idle Received TEvChooseResult: 0 2024-11-21T09:17:54.929762Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7439659349770217914:2581] TxId: 
281474976715697. Ctx: { TraceId: 01jd7072b9ddpavf78jdp20nhk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=Yzc1MDIyMy01MjZlMWVlNC0yZjYzZjhkNS0xOTUyNzdlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2024-11-21T09:17:54.929775Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7439659349770217915:2575] TxId: 281474976715698. Ctx: { TraceId: 01jd7072b79xs9qc309t4ydp37, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NzM2MzdkZDktODNlYzMyMzQtMmZlNjY3MzktN2RhMDVkZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2024-11-21T09:17:54.929841Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7439659349770217926:2581], TxId: 281474976715697, task: 2. Ctx: { TraceId : 01jd7072b9ddpavf78jdp20nhk. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=Yzc1MDIyMy01MjZlMWVlNC0yZjYzZjhkNS0xOTUyNzdlNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7439659349770217914:2581], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T09:17:54.929841Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7439659349770217925:2575], TxId: 281474976715698, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7072b79xs9qc309t4ydp37. SessionId : ydb://session/3?node_id=9&id=NzM2MzdkZDktODNlYzMyMzQtMmZlNjY3MzktN2RhMDVkZDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7439659349770217915:2575], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> TColumnShardTestSchema::RebootExternalTTL [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL [GOOD] Test command err: 2024-11-21T09:17:54.182530Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:54.192810Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:17:54.194696Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:17:54.194784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:54.196826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:54.196844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:54.196873Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:54.197333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:54.197358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:54.197380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:54.197393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:54.197403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:54.197413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:54.197422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:54.197436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:54.197449Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:54.197459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:54.197469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:54.197488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:54.201148Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:17:54.201217Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:17:54.201239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:54.201248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:54.202229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:54.202280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:54.202285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:54.202302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:54.202329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:54.202338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:54.202341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:54.202346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:54.202352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:54.202356Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:54.202360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:54.202374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:54.202380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:54.202386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:54.202389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:54.202398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:54.202404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:54.202411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:54.202417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:54.202428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:54.202434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:54.202438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:54.202445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:54.202451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:54.202454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:54.202471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:54.202477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 
2024-11-21T09:17:54.202482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:54.202488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:54.202500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:54.202504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:54.202507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:54.202520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:54.202524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:54.202527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:17:54.202535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:17:54.202538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... 
result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:55.375383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:55.375390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:55.375398Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26668; 2024-11-21T09:17:55.375401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=saved_at; 2024-11-21T09:17:55.375419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:554:2546] send ScanData to [1:553:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:17:55.375425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:55.375437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:17:55.375481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:55.375487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:17:55.375494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26664; 2024-11-21T09:17:55.375497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=saved_at; 2024-11-21T09:17:55.375508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:554:2546] send ScanData to [1:553:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:17:55.375514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:17:55.375589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:17:55.375595Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:554:2546] finished for tablet 9437184 2024-11-21T09:17:55.375603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:554:2546] send ScanData to [1:553:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:17:55.375642Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:554:2546] and sent to [1:553:2545] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1732180675372811,"name":"_full_task","f":1732180675372811,"d_finished":0,"c":0,"l":1732180675375609,"d":2798},"events":[{"name":"bootstrap","f":1732180675372833,"d_finished":466,"c":1,"l":1732180675373299,"d":466},{"a":1732180675375582,"name":"ack","f":1732180675375256,"d_finished":167,"c":3,"l":1732180675375526,"d":194},{"a":1732180675375581,"name":"processing","f":1732180675373386,"d_finished":671,"c":24,"l":1732180675375526,"d":699},{"name":"ProduceResults","f":1732180675373073,"d_finished":410,"c":29,"l":1732180675375594,"d":410},{"a":1732180675375594,"name":"Finish","f":1732180675375594,"d_finished":0,"c":0,"l":1732180675375609,"d":15},{"name":"task_result","f":1732180675373388,"d_finished":479,"c":21,"l":1732180675375243,"d":479}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:17:55.375651Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:17:55.372661Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4997532;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4997532;selected_rows=0; 2024-11-21T09:17:55.375655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:17:55.375662Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:17:55.375666Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:554:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] 
recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:14:08.068333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:14:08.068370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:08.068375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:14:08.068380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:14:08.068387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:14:08.068391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:14:08.068399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:14:08.068517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:14:08.092633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:14:08.092667Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:14:08.095295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:14:08.095408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:14:08.095450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:14:08.102552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:14:08.102667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:14:08.102768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:08.103013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:14:08.104007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:08.104803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:08.104822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:08.104837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:14:08.104847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:08.104854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T09:14:08.104906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:14:08.112049Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:14:08.134397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:14:08.134496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.134564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:14:08.134615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:14:08.134625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.135640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:08.135671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:14:08.135735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.135746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:14:08.135749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:14:08.135755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:14:08.136151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.136161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:14:08.136165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:14:08.140260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.140286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.140294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:08.140303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:14:08.141015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:14:08.141724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:14:08.141785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:14:08.142002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:14:08.142035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:14:08.142042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:08.142106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:14:08.142114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:14:08.142148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:14:08.142160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:14:08.142560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:14:08.142570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:14:08.142617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:14:08.142621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:14:08.142705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:14:08.142712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:14:08.142723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:14:08.142727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:08.142733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready 
parts: 1/1, is published: false 2024-11-21T09:14:08.142740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:14:08.142744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:14:08.142748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:14:08.142759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:14:08.142764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:14:08.142768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... kerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 
83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:17:55.093588Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:17:55.093630Z node 163 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 51us result status StatusSuccess 2024-11-21T09:17:55.093716Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 
PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T09:17:55.114050Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1173:2915] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:17:55.114074Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1110:2915] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T09:17:55.114096Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1173:2915] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180675089128 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732180675089128 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732180675089128 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:17:55.114750Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1173:2915] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T09:17:55.114770Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1110:2915] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TColumnShardTestSchema::ColdTiers >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::HotTiersTtlWithStat >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::QSReplySize >> TPQTest::TestReadSubscription [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> TPQTest::TestReadAndDeleteConsumer [FAIL] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> KqpLimits::QSReplySize [GOOD] >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2024-11-21T09:17:26.064414Z :HappyWay INFO: Random seed for debugging is 1732180646064410 2024-11-21T09:17:26.140150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659229132188442:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:26.140169Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:17:26.166927Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001191/r3tmp/tmpkRYG0x/pdisk_1.dat 2024-11-21T09:17:26.175011Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:17:26.183305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:26.191408Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19528, node 1 2024-11-21T09:17:26.204384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/001191/r3tmp/yandexU3er8z.tmp 2024-11-21T09:17:26.204395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/001191/r3tmp/yandexU3er8z.tmp 2024-11-21T09:17:26.204468Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/001191/r3tmp/yandexU3er8z.tmp 2024-11-21T09:17:26.204510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:26.207991Z INFO: TTestServer started on Port 10235 GrpcPort 19528 TClient is connected to server localhost:10235 PQClient connected to localhost:19528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:17:26.226996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:17:26.240316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:26.240337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:26.241735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:17:26.269004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:26.269025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:26.270364Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:26.270683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:26.399952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659227645712871:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:26.399977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659227645712895:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:26.399985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:26.401053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T09:17:26.405093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659227645712899:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T09:17:26.426918Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659229132189416:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:26.427156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWFmZGNlNWYtMTRlMGE0YzgtMTcxMmE5OGYtZGU2ZDI5YjQ=, ActorId: [1:7439659229132189338:2299], ActorState: ExecuteState, TraceId: 01jd7066g201ymf91rh71ytvb5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:26.427589Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:26.428021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:17:26.467202Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659227645712979:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:26.467281Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjVlMGE3MjYtNzlmZjI2MWEtYjE5MGMyNmQtMTBkMWUyM2I=, ActorId: [2:7439659227645712868:2277], ActorState: ExecuteState, TraceId: 01jd7066fzd9ffqmqy6hkrfbk5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:17:26.467521Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:17:26.490554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:26.554182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19528", true, true, 1000); 2024-11-21T09:17:26.583670Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd7066n7cdzq1t9cbr8xzt5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQwNTY4MWMtMjRhYTE1ZmQtMzVlMzc2MjEtZWFlNWE3OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659229132189796:2926] 2024-11-21T09:17:31.140619Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659229132188442:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:17:31.140648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:17:32.620499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:19528 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T09:17:32.634918Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19528 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710680 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2024-11-21T09:17:32.643333Z node 1 :PERSQUEUE_REA ... keup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.012608Z node 7 :PERSQUEUE INFO: new Cookie default|e7b390be-55fdad51-c38774d4-b8822fe2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.247205Z node 7 :PERSQUEUE INFO: new Cookie default|f906446e-71d677ac-bb12295-80c4db70_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR **** Total histogram: ****
Interval=0ms: 1
Interval=10000ms: 0
Interval=1000ms: 3
Interval=100ms: 0
Interval=10ms: 0
Interval=1ms: 0
Interval=20ms: 0
Interval=2500ms: 2
Interval=5000ms: 0
Interval=500ms: 0
Interval=50ms: 0
Interval=5ms: 0
Interval=999999ms: 0
**** **** **** **** 2024-11-21T09:18:04.651242Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:04.651259Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.653932Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:04.654063Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:04.654142Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:185:2198] 2024-11-21T09:18:04.654621Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [8:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.655587Z node 8 :PERSQUEUE INFO: new Cookie default|4b2484f9-966939da-849aec9d-7095d192_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.656472Z node 8 :PERSQUEUE INFO: new Cookie default|a761383f-d046188f-a5c9e6bd-80fabb40_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:04.972651Z node 8 :PERSQUEUE INFO: new Cookie default|8cbdc43e-c3ebb293-47f13f98-1b5700ca_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:203:2213] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.217205Z node 8 :PERSQUEUE INFO: new Cookie default|2defabb7-4d86dfb8-13765a55-9252d65f_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.462396Z node 8 :PERSQUEUE INFO: new Cookie default|10714932-caa1d268-e8776e7f-f1a5bb9_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured 
kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.686528Z node 8 :PERSQUEUE INFO: new Cookie default|353bcdc9-55b11f31-ad3d38-1d3385c1_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:203:2213] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 19485, MsgBus: 16564 2024-11-21T09:16:53.705453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659086782985732:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.705472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001b77/r3tmp/tmpToFEHC/pdisk_1.dat 2024-11-21T09:16:53.769867Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19485, node 1 2024-11-21T09:16:53.785553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:53.785567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:53.785569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:53.785610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16564 TClient is connected to server localhost:16564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:53.838207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.838242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.839103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.839454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:16:53.856494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.899572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.930668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:53.954586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:54.029311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659091077955299:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.029344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.062579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.086938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.099187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.111458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.124525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.141333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:54.163460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659091077956184:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.163484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.163631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659091077956189:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:54.164345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:54.173396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659091077956191:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:16:54.360074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:58.705744Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659086782985732:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:58.705818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:17:08.769992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:17:08.770010Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:09.426425Z node 1 :TX_DATASHARD ERROR: CPU usage 114.147% is higher than threshold of 60% in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2024-11-21T09:17:14.658506Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283961024}: tablet 72075186224037919 could not find a group for channel 0 pool /Root:test 2024-11-21T09:17:14.658524Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283961024}: tablet 72075186224037919 could not find a group for channel 1 pool /Root:test 2024-11-21T09:17:14.658527Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283961024}: tablet 72075186224037919 wasn't changed 2024-11-21T09:17:14.658529Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283961024}: tablet 72075186224037919 skipped channel 0 2024-11-21T09:17:14.658532Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283961024}: tablet 72075186224037919 skipped channel 1 2024-11-21T09:17:17.837439Z node 1 :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01 2024-11-21T09:17:18.017498Z node 1 :BS_PROXY_PUT ERROR: [ff84c162ea2e77e3] Result# TEvPutResult {Id# [72075186224037919:1:443:1:69684:8388608:0] Status# ERROR StatusFlags# { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.0641025} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:17:18.025670Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.443, eph 100} put [72075186224037919:1:443:1:69684:8388608:0] result ERROR flags { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } left 8388608b 2024-11-21T09:17:18.025700Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.443, eph 100} end=0, 54blobs 0r (max 3000), put Spent{1.612s wa 0.124s cnt 18} 2024-11-21T09:17:18.025730Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:471} Compact 293 on TGenCompactionParams{1001: gen 3 epoch 0, 5 parts} step 443, product {0 parts epoch 0} failed 2024-11-21T09:17:18.025741Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:471} Broken on compaction error 2024-11-21T09:17:18.026723Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528653280}: tablet 72075186224037919 could not find a group for channel 0 pool /Root:test 2024-11-21T09:17:18.026742Z node 1 :HIVE ERROR: HIVE#72057594037968897 
THive::TTxUpdateTabletGroups::Execute{96096528653280}: tablet 72075186224037919 could not find a group for channel 1 pool /Root:test 2024-11-21T09:17:18.026745Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528653280}: tablet 72075186224037919 wasn't changed 2024-11-21T09:17:18.026747Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528653280}: tablet 72075186224037919 skipped channel 0 2024-11-21T09:17:18.026751Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528653280}: tablet 72075186224037919 skipped channel 1 2024-11-21T09:17:18.030122Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283837024}: tablet 72075186224037919 could not find a group for channel 0 pool /Root:test 2024-11-21T09:17:18.030138Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283837024}: tablet 72075186224037919 wasn't changed 2024-11-21T09:17:18.030140Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096283837024}: tablet 72075186224037919 skipped channel 0 2024-11-21T09:17:18.031156Z node 1 :BS_PROXY_PUT ERROR: [9ff9e1defb70ff21] Result# TEvPutResult {Id# [72057594037968897:2:37:0:0:313:0] Status# ERROR Status ... 9387480867200:2458] TxId: 281474976715782. Ctx: { TraceId: 01jd707b1r9ytva04w77ta34cs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWYzOTk1ZjgtM2Q3NTU5YzMtMjIyY2M2MGItYWVkYWU4ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976715782; 2024-11-21T09:18:04.042248Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWYzOTk1ZjgtM2Q3NTU5YzMtMjIyY2M2MGItYWVkYWU4ZTY=, ActorId: [2:7439659198502294044:2458], ActorState: ExecuteState, TraceId: 01jd707b1r9ytva04w77ta34cs, Create QueryResponse for error on request, msg: 2024-11-21T09:18:04.058818Z node 2 :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005
: Error: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037921 txId 281474976715782 2024-11-21T09:18:04.096082Z node 2 :BS_PROXY_PUT ERROR: [8a51c47222d32fe4] Result# TEvPutResult {Id# [72075186224037921:1:32:1:69707:8388608:0] Status# ERROR StatusFlags# { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.0555555} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:18:04.105553Z node 2 :OPS_COMPACT ERROR: Compact{72075186224037921.1.45, eph 108} put [72075186224037921:1:45:1:69640:8388608:0] result ERROR flags { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } left 8388608b 2024-11-21T09:18:04.105578Z node 2 :OPS_COMPACT ERROR: Compact{72075186224037921.1.45, eph 108} end=0, 10blobs 0r (max 150), put Spent{0.278s wa 0.006s cnt 3} 2024-11-21T09:18:04.105620Z node 2 :TABLET_EXECUTOR ERROR: Leader{72075186224037921:1:46} Compact 25 on TGenCompactionParams{1001: gen 1 epoch 0, 5 parts} step 45, product {0 parts epoch 0} failed 2024-11-21T09:18:04.105630Z node 2 :TABLET_EXECUTOR ERROR: Leader{72075186224037921:1:46} Broken on compaction error 2024-11-21T09:18:04.105917Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528656320}: tablet 72075186224037921 could not find a group for channel 0 pool /Root:test 2024-11-21T09:18:04.105927Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528656320}: tablet 72075186224037921 could not find a group for channel 1 pool /Root:test 2024-11-21T09:18:04.105929Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528656320}: tablet 72075186224037921 wasn't changed 2024-11-21T09:18:04.105931Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528656320}: tablet 72075186224037921 skipped channel 0 2024-11-21T09:18:04.105933Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{96096528656320}: tablet 72075186224037921 skipped channel 1 2024-11-21T09:18:04.116226Z node 2 :OPS_COMPACT ERROR: Compact{72075186224037921.1.32, eph 98} put [72075186224037921:1:32:1:69707:8388608:0] result ERROR flags { Valid Cyan LightYellow Yellow LightOrange PreOrange Orange } left 16777216b 2024-11-21T09:18:04.116250Z node 2 :OPS_COMPACT ERROR: Compact{72075186224037921.1.32, eph 98} end=0, 78blobs 0r (max 2940), put Spent{1.672s wa 0.077s cnt 24} 2024-11-21T09:18:04.116299Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [72075186224037921:1:32:1:69709:8388608:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T09:18:04.116341Z node 2 :BS_PROXY_PUT ERROR: [301a4a742c517157] Result# TEvPutResult {Id# [72075186224037921:1:32:1:69709:8388608:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 32251, MsgBus: 4717 2024-11-21T09:18:04.751453Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659391701886823:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:18:04.751470Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/001b77/r3tmp/tmptVWeJN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32251, node 3 2024-11-21T09:18:04.764632Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:04.766020Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:04.766029Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:04.766030Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:04.766055Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4717 TClient is connected to server localhost:4717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:18:04.853060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:04.853081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:04.853356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:04.854177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:04.856943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:04.864809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:04.880124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:04.892913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:04.979995Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659391701888369:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:04.980018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:04.984049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:18:04.988771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:18:04.997133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:18:05.004338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:18:05.010939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:18:05.018228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:18:05.026692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659395996856156:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:05.026705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:05.026719Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659395996856161:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:05.027125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:18:05.031649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659395996856163:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:18:05.158919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:05.998243Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjhmMGY3NGItOTk5MDQyODYtMTI3Mjc1MDctM2Q3ZGI5ZjU=, ActorId: [3:7439659395996857318:2559], ActorState: ExecuteState, TraceId: 01jd707cxf3vv2vcgzyy2xry5n, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240924 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TColumnShardTestSchema::RebootDrop >> TColumnShardTestSchema::RebootDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] Test command err: 2024-11-21T09:18:08.045841Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:08.058591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:08.060272Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:08.060292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:08.060324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:08.060797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:08.060823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:08.060841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:08.060855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:08.060865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:08.060876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:08.060885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:08.060898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:08.060909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:08.060920Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:08.060939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:08.060950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:08.063783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:08.064429Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:08.064479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:08.064484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:08.064501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:08.064538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:08.064545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:08.064548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:08.064554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:08.064560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:08.064564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:08.064566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:08.064576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:08.064580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:08.064584Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:08.064586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:08.064593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:08.064597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:08.064601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:08.064603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:08.064610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:08.064614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:08.064616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:18:08.064621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:08.064625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:08.064628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:08.064648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:18:08.064654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:18:08.064659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:18:08.064665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:18:08.064679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:08.064683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:08.064685Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:18:08.064699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:18:08.064702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:18:08.064705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:18:08.064713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:18:08.064716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:18:08.064719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T09:18:08.064743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:18:08.064749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:18:08.064753Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T09:18:08.064766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
2628;after_size=3639328;before_rows=80000;after_rows=53332; 2024-11-21T09:18:09.330837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1713272;portion_bytes=1713300;portion_raw_bytes=2584610; 2024-11-21T09:18:09.330882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2024-11-21T09:18:09.330886Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ERASE;path_id=1;portion=5;before_size=3639328;after_size=1816372;before_rows=53332;after_rows=26664; 2024-11-21T09:18:09.330888Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1822928;portion_bytes=1822956;portion_raw_bytes=2640160; 2024-11-21T09:18:09.330899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2024-11-21T09:18:09.330902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ERASE;path_id=1;portion=6;before_size=1816372;after_size=0;before_rows=26664;after_rows=0; 2024-11-21T09:18:09.330905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1816344;portion_bytes=1816372;portion_raw_bytes=2639764; 2024-11-21T09:18:09.330921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;fline=manager.cpp:14;event=unlock;process_id=CS::CLEANUP::PORTIONS::8639264c-a7e911ef-a3994eb8-1b76cc07; 2024-11-21T09:18:09.330928Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:09.330938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:09.330946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T09:18:09.330956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2024-11-21T09:18:09.330960Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:09.330969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:09.330976Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:09.330981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:09.330993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=8639264c-a7e911ef-a3994eb8-1b76cc07;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:09.331014Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:1351840:0] 2024-11-21T09:18:09.331020Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:1:1347832:0] 2024-11-21T09:18:09.331023Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:1286024:0] 2024-11-21T09:18:09.331030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:09.362364Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2024-11-21T09:18:09.362406Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:18:09.362758Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:382:2380];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "saved_at" } } } ; 2024-11-21T09:18:09.362773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:382:2380];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[saved_at;];};]; 2024-11-21T09:18:09.362923Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:382:2380];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:438:2428];trace_detailed=; 2024-11-21T09:18:09.363064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=9;column_names=saved_at;);; 2024-11-21T09:18:09.363086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2024-11-21T09:18:09.363115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:09.363122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:09.363160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:09.363165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:09.363170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:09.363175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:438:2428] finished for tablet 9437184 2024-11-21T09:18:09.363185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:438:2428] send ScanData to [1:431:2422] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:09.363225Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:438:2428] and sent to [1:431:2422] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180689362916,"name":"_full_task","f":1732180689362916,"d_finished":0,"c":0,"l":1732180689363189,"d":273},"events":[{"name":"bootstrap","f":1732180689362959,"d_finished":176,"c":1,"l":1732180689363135,"d":176},{"a":1732180689363156,"name":"ack","f":1732180689363156,"d_finished":0,"c":0,"l":1732180689363189,"d":33},{"a":1732180689363154,"name":"processing","f":1732180689363154,"d_finished":0,"c":0,"l":1732180689363189,"d":35},{"name":"ProduceResults","f":1732180689363108,"d_finished":37,"c":2,"l":1732180689363173,"d":37},{"a":1732180689363174,"name":"Finish","f":1732180689363174,"d_finished":0,"c":0,"l":1732180689363189,"d":15}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:18:09.363236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:09.362795Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:18:09.363239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:09.363244Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:18:09.363248Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:438:2428];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [FAIL] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T09:17:23.685806Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.685821Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] 
recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.689453Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:23.691298Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2024-11-21T09:17:23.691487Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T09:17:23.691872Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:23.692470Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T09:17:23.692746Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:23.693199Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2024-11-21T09:17:23.693489Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.695191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.695201Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:208:2214], now have 1 active actors on pipe 2024-11-21T09:17:23.695210Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T09:17:23.695214Z node 1 :PERSQUEUE 
DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T09:17:23.695267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T09:17:23.695270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T09:17:23.695281Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2024-11-21T09:17:23.695290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2024-11-21T09:17:23.695293Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2024-11-21T09:17:23.695316Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.695319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:210:2216], now have 1 active actors on pipe 2024-11-21T09:17:23.695322Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T09:17:23.695324Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T09:17:23.695339Z node 1 :PERSQUEUE INFO: new Cookie default|730f17f8-7502526d-f49ea8c7-3f928ab1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T09:17:23.695354Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T09:17:23.695370Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:17:23.695394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:23.695397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:212:2218], now have 1 active actors on pipe 2024-11-21T09:17:23.695403Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T09:17:23.695405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T09:17:23.695408Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T09:17:23.695410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T09:17:23.695425Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2024-11-21T09:17:23.695448Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2024-11-21T09:17:23.695454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2024-11-21T09:17:23.695457Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2024-11-21T09:17:23.695485Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 332 2024-11-21T09:17:23.695497Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:23.695855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 2024-11-21T09:17:23.695867Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:17:23.695876Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T09:17:23.695882Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T09:17:23.695887Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T09:17:23.695903Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2024-11-21T09:17:23.695907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T09:17:23.695912Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T09:17:23.695915Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T09:17:23.695936Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:17:23.695945Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2024-11-21T09:17:23.695949Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T09:17:23.695987Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T09:17:23.695990Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:17:23.696002Z node 1 :PERSQUEUE DEBUG: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 332 queuesize 2 startOffset 0 2024-11-21T09:17:23.696005Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 0 rrg 1 2024-1 ... 
stem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:05.370610Z node 62 :PERSQUEUE INFO: new Cookie default|221c14b9-f04bef1f-16f66c90-da9412ad_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.371725Z node 62 :PERSQUEUE INFO: new Cookie default|a1f00da2-aad1de3b-ca327d6a-f15cddbb_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:101:2057] recipient: [63:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:101:2057] recipient: [63:99:2133] Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:106:2057] recipient: [63:99:2133] 2024-11-21T09:18:05.434580Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:05.434599Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:147:2057] recipient: [63:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:147:2057] recipient: [63:145:2168] Leader for TabletID 72057594037927938 is [63:151:2172] sender: [63:152:2057] recipient: [63:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:177:2057] recipient: [63:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.437606Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:05.437720Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 66 actor [63:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 
ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 66 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 66 ReadRuleGenerations: 66 ReadRuleGenerations: 66 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 66 Important: false } Consumers { Name: "user1" Generation: 66 Important: true } Consumers { Name: "user2" Generation: 66 Important: true } 2024-11-21T09:18:05.437792Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [63:184:2197] 2024-11-21T09:18:05.438176Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [63:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.439898Z node 63 :PERSQUEUE INFO: new Cookie default|8c549869-d622428b-a3353d33-2a17afe0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.527136Z node 63 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T09:18:05.533129Z node 63 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:241:2057] recipient: [63:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:244:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:245:2057] recipient: [63:243:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:246:2245] sender: [63:247:2057] recipient: [63:243:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:05.537771Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:05.537783Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:18:05.537838Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [63:297:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:05.541243Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [63:297:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:05.545634Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2024-11-21T09:18:05.549819Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2024-11-21T09:18:05.553693Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:246:2245] sender: [63:328:2057] recipient: [63:14:2061] 2024-11-21T09:18:05.553920Z node 63 :PERSQUEUE ERROR: [PQ: 72057594037927937] Config has too small version 42 actual 66 actor [63:325:2307] txId 42 config: PartitionIds: 0 Version: 42 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } Consumers { Name: "user2" Important: true } 2024-11-21T09:18:05.557787Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8296398 assertion failed at ydb/core/persqueue/ut/pq_ut.cpp:2405, auto NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TString &, std::function, bool &) const: ((int)consumerDeleteResult->Record.GetStatus() == (int)NKikimrPQ::EStatus::OK) failed: (2 != 0) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x12866799) ??+0 (0x125058DD) ??+0 (0x125043B0) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+193 (0x1E83DC21) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+114 (0x124A4672) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x124A80A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1286874E) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+422 (0x124A7A66) NUnitTest::TTestFactory::Execute()+803 (0x12868EC3) NUnitTest::RunMain(int, char**)+3005 (0x1287C0ED) ??+0 (0x7F1731C0AD90) __libc_start_main+128 (0x7F1731C0AE40) _start+41 (0x11883029) forced failure at ydb/core/testlib/tablet_helpers.cpp:806, void NKikimr::RunTestWithReboots(const TVector &, std::function, std::function, bool &)>, ui32, ui64, ui32, ui32, bool): Failed at dispatch Trace with exception NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x12866799) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+3359 (0x1E83E87F) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+114 (0x124A4672) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x124A80A7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1286874E) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+422 (0x124A7A66) NUnitTest::TTestFactory::Execute()+803 (0x12868EC3) NUnitTest::RunMain(int, char**)+3005 (0x1287C0ED) ??+0 (0x7F1731C0AD90) __libc_start_main+128 (0x7F1731C0AE40) _start+41 (0x11883029) >> TColumnShardTestSchema::OneColdTier >> TColumnShardTestSchema::ExternalTTL [GOOD] >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2024-11-21T09:18:12.465268Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:12.475206Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:18:12.476973Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:18:12.477050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:12.478623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:12.478637Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:12.478668Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:12.479123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:12.479150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:12.479169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:12.479183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:12.479194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:12.479204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:12.479213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:12.479227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:12.479238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:12.479248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:12.479270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:12.479281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:12.482074Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:18:12.482118Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T09:18:12.482125Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:12.482131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:18:12.482807Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:12.482856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:12.482861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:12.482878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:12.482909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:12.482916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:12.482919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:12.482925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:12.482932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:12.482936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:12.482938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:12.482948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:12.482952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:12.482956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:12.482958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:12.482964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:12.482968Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:12.482974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:12.482977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:12.482984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:12.482987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:12.482990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:18:12.482995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:12.482998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:12.483001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:12.483020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:18:12.483026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:18:12.483031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:18:12.483038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:18:12.483050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:12.483055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:12.483057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:18:12.483071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:18:12.483075Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:18:12.483078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T09:18:12.483086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:18:12.483090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... ;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=4; 2024-11-21T09:18:14.425742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T09:18:14.425754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T09:18:14.425763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:18:14.425785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T09:18:14.425789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T09:18:14.425793Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=4; 2024-11-21T09:18:14.425797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2024-11-21T09:18:14.425801Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T09:18:14.425806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T09:18:14.425812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:18:14.425839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:14.425853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:18:14.425863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T09:18:14.425869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T09:18:14.425887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T09:18:14.425894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425901Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:14.425923Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T09:18:14.425931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] finished for tablet 9437184 2024-11-21T09:18:14.425938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:14.425982Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:368:2380] and sent to [1:363:2375] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.017}],"full":{"a":1732180694408147,"name":"_full_task","f":1732180694408147,"d_finished":0,"c":0,"l":1732180694425943,"d":17796},"events":[{"name":"bootstrap","f":1732180694408270,"d_finished":282,"c":1,"l":1732180694408552,"d":282},{"a":1732180694425917,"name":"ack","f":1732180694425836,"d_finished":71,"c":1,"l":1732180694425907,"d":97},{"a":1732180694425916,"name":"processing","f":1732180694408597,"d_finished":273,"c":5,"l":1732180694425908,"d":300},{"name":"ProduceResults","f":1732180694408428,"d_finished":148,"c":8,"l":1732180694425930,"d":148},{"a":1732180694425930,"name":"Finish","f":1732180694425930,"d_finished":0,"c":0,"l":1732180694425943,"d":13},{"name":"task_result","f":1732180694408599,"d_finished":190,"c":4,"l":1732180694425818,"d":190}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T09:18:14.425993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:14.407965Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T09:18:14.425996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:14.426006Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.017104s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.015973s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T09:18:14.426010Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( 
contrib/python/botocore/py3/botocore/auth.py:419: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail |94.9%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpQueryService::DdlExecuteScript [GOOD] |94.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 8514, MsgBus: 29329 2024-11-21T09:16:33.636938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439658998956851764:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:33.637218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002854/r3tmp/tmpQu9ZwZ/pdisk_1.dat 2024-11-21T09:16:33.701443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8514, node 1 2024-11-21T09:16:33.724617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:33.724636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:33.724638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:33.724679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:33.736877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:33.736926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:33.737969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29329 TClient is connected to server localhost:29329 WaitRootIsUp 'Root'... 
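Aside on the two DeprecationWarnings captured above: they already name their replacements — logging's warning() instead of the deprecated warn() alias, and a timezone-aware datetime.datetime.now(datetime.UTC) instead of datetime.datetime.utcnow(). The sketch below is illustrative only; the function and logger names are not taken from the test code, and the tracemalloc line simply follows the ResourceWarning hint ("Enable tracemalloc to get the object allocation traceback").

    # Minimal sketch of the fixes the warnings above point at; names here are illustrative.
    import datetime
    import logging
    import tracemalloc

    logger = logging.getLogger("sqs.requests_client")  # hypothetical logger name

    def log_failed_request(code, reason, text):
        # Logger.warn() is a deprecated alias; Logger.warning() is the supported call.
        logger.warning(
            "Last request failed with code {}, reason '{}' and text '{}'".format(
                code, reason, text
            )
        )

    def utc_timestamp():
        # datetime.utcnow() is deprecated; return a timezone-aware datetime instead.
        # datetime.UTC exists from Python 3.11; older versions can use datetime.timezone.utc.
        return datetime.datetime.now(datetime.UTC)

    # Start allocation tracing early (or run with PYTHONTRACEMALLOC=1) so that
    # unclosed-socket ResourceWarnings can report where the leaked object was created.
    tracemalloc.start()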
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:16:33.773620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:33.785183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.847430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.863939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.872600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:16:33.958961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439658998956853303:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:33.959004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.001495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.007480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.019347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.026543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.032781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.040068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.048712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659003251821104:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.048736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.048753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659003251821109:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.049322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.053426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659003251821111:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:16:34.240422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.257749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:34.257780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:34.257802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:34.257830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:34.257834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:34.257864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:34.257870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:34.257887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:34.257888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:34.257905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:34.257911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:34.257929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:34.257931Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:34.257948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:34.257951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:34.257964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439659003251821553:2467];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:34.257971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7439659003251821544:2462];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11- ... KLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659436964710160:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.124261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.128036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.133000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.187296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.241632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.252728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.259270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.267822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659436964710678:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.267849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.267868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659436964710683:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.268318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:18:15.273107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659436964710685:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:18:15.397771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.414387Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2024-11-21T09:18:15.416013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65332, MsgBus: 9607 2024-11-21T09:18:15.574776Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659438304768908:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:18:15.574808Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002854/r3tmp/tmpkhzVEt/pdisk_1.dat 2024-11-21T09:18:15.581967Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65332, node 3 2024-11-21T09:18:15.589968Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:15.589977Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:15.589978Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:15.589996Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9607 TClient is connected to server localhost:9607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:18:15.675022Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:15.675054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:15.676085Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:15.676773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:18:15.681455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:15.687810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:15.702507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:15.714887Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:15.790343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659438304770438:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.790362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.794632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.849226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.854492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.861141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.868285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.875569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:18:15.883388Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659438304770944:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.883403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.883428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659438304770949:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:15.883807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:18:15.889031Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659438304770951:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:18:16.054037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:18:16.054236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:18:16.054461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:18:16.174103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::ColdTiersWithStat >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] |94.9%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T09:17:22.253295Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:22.253321Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:22.257759Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:22.260371Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:22.260606Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 
2024-11-21T09:17:22.261099Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T09:17:22.261451Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T09:17:22.261763Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:22.263204Z node 1 :PERSQUEUE INFO: new Cookie owner|f3c5f2a4-e30862aa-3a74b8eb-bfde39a5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2024-11-21T09:17:22.263271Z node 1 :PERSQUEUE INFO: new Cookie owner|b3affca6-9cc49e33-a927e111-ecb0c18c_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2024-11-21T09:17:22.263328Z node 1 :PERSQUEUE INFO: new Cookie owner|1fa43c57-d54963f8-d174dab9-36634f7_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR 2024-11-21T09:17:23.523907Z node 1 :PERSQUEUE INFO: new Cookie default|730ab8b4-7a113f82-d6e9ae5a-dba7beac_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T09:17:23.524022Z node 1 :PERSQUEUE INFO: new Cookie owner2|cefc64b-12753029-607779c8-63595dda_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2024-11-21T09:17:23.524159Z node 1 :PERSQUEUE INFO: new Cookie owner|7bc2b840-fc552603-b85cd7a1-bebe972d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents: ... :TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.738290Z node 86 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T09:18:16.738974Z node 86 :PERSQUEUE INFO: new Cookie default|7462dfb-81e83933-c67957ba-64991745_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:101:2057] recipient: [87:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:101:2057] recipient: [87:99:2133] Leader for TabletID 72057594037927937 is [87:105:2137] sender: [87:106:2057] recipient: [87:99:2133] 2024-11-21T09:18:16.913966Z node 87 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:16.913983Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:147:2057] recipient: [87:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:147:2057] recipient: [87:145:2168] Leader for TabletID 72057594037927938 is [87:151:2172] sender: [87:152:2057] recipient: [87:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [87:105:2137] sender: [87:177:2057] recipient: [87:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.916807Z node 87 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:16.917005Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 87 actor [87:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 87 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 87 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 87 Important: true } 2024-11-21T09:18:16.917117Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [87:184:2197] 2024-11-21T09:18:16.917527Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [87:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:16.917789Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [87:185:2198] 2024-11-21T09:18:16.918053Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [87:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:16.918251Z node 87 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [87:186:2199] 2024-11-21T09:18:16.918500Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [87:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.921799Z node 87 :PERSQUEUE INFO: new Cookie default|22fa6c5b-d1cd1ef5-28f7895-49427ebb_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.930384Z node 87 :PERSQUEUE INFO: new Cookie default|fad82ec0-2994b110-90d99292-20476389_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.939294Z node 87 :PERSQUEUE INFO: new Cookie default|44c4520-d7557969-220eab96-dd42eaa_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:16.946595Z node 87 :PERSQUEUE INFO: new Cookie default|fa825bc2-f58e75ea-7fb706e7-7cf15fcb_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:16.951096Z node 87 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T09:18:16.951710Z node 87 :PERSQUEUE INFO: new Cookie default|c3ed9042-f97b567a-16818342-d552dd35_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:101:2057] recipient: [88:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:101:2057] recipient: [88:99:2133] Leader for TabletID 72057594037927937 is [88:105:2137] sender: [88:106:2057] recipient: [88:99:2133] 2024-11-21T09:18:17.149139Z node 88 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:17.149155Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:147:2057] recipient: [88:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:147:2057] recipient: [88:145:2168] Leader for TabletID 72057594037927938 is [88:151:2172] sender: [88:152:2057] recipient: [88:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [88:105:2137] sender: [88:175:2057] recipient: [88:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:17.151697Z node 88 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:17.151866Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 88 actor [88:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 88 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 88 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 88 Important: true } 2024-11-21T09:18:17.151966Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [88:182:2195] 2024-11-21T09:18:17.152351Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [88:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:17.152596Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [88:183:2196] 2024-11-21T09:18:17.152831Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [88:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:17.153015Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: 
StateInit] bootstrapping 2 [88:184:2197] 2024-11-21T09:18:17.153244Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [88:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:17.156687Z node 88 :PERSQUEUE INFO: new Cookie default|69aa9649-2778c09c-31227f3b-94e1f6b4_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:17.165974Z node 88 :PERSQUEUE INFO: new Cookie default|d03e1d78-9cfefd6d-b12dc052-938ee943_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:17.175623Z node 88 :PERSQUEUE INFO: new Cookie default|6a65738c-da198189-189e86f5-710f80c3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:17.182977Z node 88 :PERSQUEUE INFO: new Cookie default|d5fe4ba5-71ac60e7-d73ca3b9-2e8e1996_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:17.187262Z node 88 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T09:18:17.187867Z node 88 :PERSQUEUE INFO: new Cookie default|5e4cf507-8e2c3e61-45ed9497-53485dba_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR |94.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |94.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |94.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> KqpDataIntegrityTrails::Upsert+LogEnabled >> test_public_api.py::TestJsonExample::test_json_success [GOOD] |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled [GOOD] Test command err: Trying to start YDB, gRPC: 22261, MsgBus: 2232 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f07/r3tmp/tmpqd08uP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22261, node 1 TClient is connected to server localhost:2232 TClient is connected to server localhost:2232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 14399, MsgBus: 9088 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f04/r3tmp/tmp9IDiEj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14399, node 1 TClient is connected to server localhost:9088 TClient is connected to server localhost:9088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] Test command err: Trying to start YDB, gRPC: 62923, MsgBus: 63536 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f01/r3tmp/tmptfSe3E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62923, node 1 TClient is connected to server localhost:63536 TClient is connected to server localhost:63536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming [GOOD] >> KqpDataIntegrityTrails::UpsertEvWrite ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 13700, MsgBus: 21542 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003efe/r3tmp/tmptE5GXI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13700, node 1 TClient is connected to server localhost:21542 TClient is connected to server localhost:21542 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWrite [FAIL] |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TPQTest::TestPQReadAhead [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQReadAhead [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T09:17:21.223897Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.223918Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.228436Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.230901Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2024-11-21T09:17:21.231143Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T09:17:21.231700Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.237249Z node 1 :PERSQUEUE INFO: new Cookie default|f421a39c-8b7f3ef-b12d89a3-d9df4367_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX C ... 
TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [34:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [34:243:2244] sender: [34:354:2057] recipient: [34:14:2061] 2024-11-21T09:18:31.902406Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8365317 2024-11-21T09:18:31.902426Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 size 7877895 Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] 2024-11-21T09:18:32.001846Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:32.001860Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] Leader for TabletID 72057594037927938 is [35:151:2172] sender: [35:152:2057] recipient: [35:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:175:2057] recipient: [35:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:32.004137Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:32.004274Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 35 actor [35:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 35 ReadRuleGenerations: 35 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 35 Important: false } Consumers { Name: "aaa" Generation: 35 Important: true } 2024-11-21T09:18:32.004350Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:182:2195] 2024-11-21T09:18:32.004726Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [35:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:32.005117Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:183:2196] 2024-11-21T09:18:32.005384Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [35:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:32.009551Z node 35 :PERSQUEUE INFO: new Cookie default|fb6a7903-ffbb8faf-866ad6b2-47c02e0c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] 2024-11-21T09:18:32.235483Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:32.235500Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] Leader for TabletID 72057594037927938 is [36:151:2172] sender: [36:152:2057] recipient: [36:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:177:2057] recipient: [36:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:32.238136Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:32.238263Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [36:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2024-11-21T09:18:32.238342Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [36:184:2197] 2024-11-21T09:18:32.238759Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init 
complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:32.239161Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [36:185:2198] 2024-11-21T09:18:32.239438Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [36:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:32.243500Z node 36 :PERSQUEUE INFO: new Cookie default|e192dce3-2d50a133-41996774-5cab9ebe_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWrite [FAIL] Test command err: Trying to start YDB, gRPC: 25855, MsgBus: 12210 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ef9/r3tmp/tmpOdDmB6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25855, node 1 TClient is connected to server localhost:12210 TClient is connected to server localhost:12210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
assertion failed at ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpDataIntegrityTrails::TTestCaseUpsertEvWrite::Execute_(NUnitTest::TTestContext &): (CountSubstr(ss.Str(), "DATA_INTEGRITY INFO: Component: Executer") == 2) failed: (0 != 2) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1260C048 1. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:76: Execute_ @ 0x123857B7 2. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19: operator() @ 0x12390AF6 3. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1260DFFD 4. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19: Execute @ 0x12390290 5. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1260E772 6. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1262198C 7. ??:0: ?? @ 0x7FDDDE449D8F 8. ??:0: ?? @ 0x7FDDDE449E3F 9. ??:0: ?? @ 0x11748028 |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test_public_api.py::TestRecursiveCreation::test_mkdir |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181263.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181263.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180063.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2024-11-21T09:17:43.397552Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:43.410542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:43.412539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:43.412562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:43.412600Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:43.413144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:43.413182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:43.413213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:43.413230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:43.413245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:43.413261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:43.413276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:43.413291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:43.413305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:43.413328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2024-11-21T09:17:43.413343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:43.413358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:43.416305Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:43.416319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:43.417130Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:43.417184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:43.417191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:43.417214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.417277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:43.417286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:43.417290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:43.417299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:43.417307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:43.417314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:43.417318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:43.417333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.417339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:43.417346Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:43.417349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:43.417356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:43.417360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:43.417365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:43.417367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:43.417374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:43.417378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:43.417380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:43.417387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:43.417391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:43.417394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:43.417417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:43.417424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:43.417429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:43.417436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:43.417450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:43.417455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:43.417457Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:43.417472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:43.417476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:43.417478Z node 1 :TX_COLUM ... t=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:18:37.227513Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:18:37.227520Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:18:37.227526Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:18:37.227542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12; 2024-11-21T09:18:37.227573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=28; 2024-11-21T09:18:37.227580Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:18:37.227584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:18:37.227588Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:18:37.227591Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:18:37.227595Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:18:37.227602Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:18:37.227605Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:18:37.227612Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=3; 2024-11-21T09:18:37.227616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T09:18:37.227622Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:18:37.227625Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1142; 2024-11-21T09:18:37.227653Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=6;blobs=6;rows=160000;bytes=9495312;raw_bytes=16084618; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:18:37.227664Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:18:37.227669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:18:37.227672Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:18:37.227683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:18:37.227694Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=0; 2024-11-21T09:18:37.227706Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:18:37.227709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:18:37.227718Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:18:37.227721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:37.227728Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.227736Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:37.227742Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:37.227746Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:37.227752Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.227755Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.227764Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:37.227774Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:37.227820Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.227938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1162:3059];tablet_id=9437184;parent=[1:1125:3029];fline=manager.h:99;event=ask_data;request=request_id=99;1={portions_count=12};; 2024-11-21T09:18:37.227961Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:18:37.228215Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1162:3059];tablet_id=9437184;parent=[1:1125:3029];fline=manager.h:99;event=ask_data;request=request_id=101;1={portions_count=6};; 2024-11-21T09:18:37.228280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:18:37.228285Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T09:18:37.228288Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:18:37.228291Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:37.228299Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.228308Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:37.228314Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:37.228317Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:37.228322Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.228325Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.228328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:37.228338Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:37.228410Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2024-11-21T09:18:37.228460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2024-11-21T09:18:37.228516Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:18:37.228519Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1125:3029];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 160000/9495312 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 61520, MsgBus: 22010 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ec1/r3tmp/tmpUFz55D/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61520, node 1 TClient is connected to server localhost:22010 TClient is connected to server localhost:22010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181266.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181266.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181266.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181266.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180066.000000s;Name=;Codec=}; 2024-11-21T09:17:46.833590Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:46.845386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:46.846944Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:46.846961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:46.846991Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 
9437184 2024-11-21T09:17:46.847459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:46.847483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:46.847501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:46.847513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:46.847523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:46.847535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:46.847544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:46.847555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:46.847566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:46.847584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:46.847594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:46.847606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:46.850261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:46.850271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:46.851071Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:46.851128Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:46.851134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:46.851152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.851213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:46.851221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:46.851225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:46.851231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:46.851237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:46.851241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:46.851243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:46.851253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.851258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:46.851262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:46.851266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:46.851272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:46.851276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:46.851280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:46.851283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:46.851289Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:46.851293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:46.851295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:46.851300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:46.851304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:46.851306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:46.851327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:46.851333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:46.851339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:46.851345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:46.851358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUp ... 
CHARGE:tx_controllerLoadingTime=8; 2024-11-21T09:18:37.603397Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:18:37.603402Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2024-11-21T09:18:37.603410Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=5; 2024-11-21T09:18:37.603418Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2; 2024-11-21T09:18:37.603442Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=21; 2024-11-21T09:18:37.603485Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=39; 2024-11-21T09:18:37.603494Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=5; 2024-11-21T09:18:37.603498Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:18:37.603502Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:18:37.603505Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:18:37.603509Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:18:37.603517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=6; 2024-11-21T09:18:37.603521Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:18:37.603529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:18:37.603532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T09:18:37.603538Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:18:37.603541Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=9408; 2024-11-21T09:18:37.603560Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:18:37.603579Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:18:37.603583Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:18:37.603586Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:18:37.603607Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:18:37.603627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:18:37.603642Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:18:37.603646Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:18:37.603658Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:18:37.603662Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:37.603669Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.603680Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:18:37.603688Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:18:37.603691Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:37.603699Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.603702Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.603706Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:37.603718Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:37.603845Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.603855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1486:3384];tablet_id=9437184;parent=[1:1437:3342];fline=manager.h:99;event=ask_data;request=request_id=114;1={portions_count=15};; 2024-11-21T09:18:37.604024Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:18:37.604425Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:18:37.604437Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:18:37.604440Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:18:37.604444Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:37.604454Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:37.604468Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:18:37.604476Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:18:37.604481Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:37.604488Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.604492Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:37.604498Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:37.604510Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:37.604616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2024-11-21T09:18:37.604678Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2024-11-21T09:18:37.604777Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:18:37.604780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |95.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> BasicStatistics::TwoTables >> BasicStatistics::TwoServerlessDbs |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |95.0%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> Initializer::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T09:17:22.374386Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:22.374419Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] 2024-11-21T09:17:22.378364Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:17:22.378387Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T09:17:22.378401Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:17:22.380421Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:22.381371Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2024-11-21T09:17:22.381401Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:22.381929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2024-11-21T09:17:22.381962Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitConfigStep 2024-11-21T09:17:22.381971Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Step TInitConfigStep 2024-11-21T09:17:22.381976Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Step TInitConfigStep 2024-11-21T09:17:22.381981Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 3. Step TInitConfigStep 2024-11-21T09:17:22.382075Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:17:22.382136Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T09:17:22.382708Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Completed. 
2024-11-21T09:17:22.382717Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T09:17:22.382723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:22.383259Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2024-11-21T09:17:22.383267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2024-11-21T09:17:22.383270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2024-11-21T09:17:22.383273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2024-11-21T09:17:22.383303Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:22.383306Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:22.383360Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T09:17:22.383380Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T09:17:22.383631Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Completed. 
2024-11-21T09:17:22.383634Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] 2024-11-21T09:17:22.383639Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:22.383906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2024-11-21T09:17:22.383912Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2024-11-21T09:17:22.383916Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2024-11-21T09:17:22.383919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2024-11-21T09:17:22.383934Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:22.383938Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:22.383964Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:17:22.384024Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Step TInitInternalFieldsStep 2024-11-21T09:17:22.384080Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2024-11-21T09:17:22.384490Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Completed. 
2024-11-21T09:17:22.384499Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] 2024-11-21T09:17:22.384504Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:17:22.384877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit request with generation 1 2024-11-21T09:17:22.384882Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit with generation 1 done 2024-11-21T09:17:22.384885Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit request with generation 1 2024-11-21T09:17:22.384887Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit with generation 1 done 2024-11-21T09:17:22.384899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:17:22.384902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NP ... 024-11-21T09:18:41.331585Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:650:2644] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.331833Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:655:2649] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.332075Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:660:2654] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.332337Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:665:2659] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.332586Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:670:2664] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.332830Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:675:2669] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.333089Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:680:2674] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.333332Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:685:2679] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.333574Z node 116 :PERSQUEUE_READ_BALANCER INFO: 
[72057594037927938][rt3.dc1--topic] pipe [116:690:2684] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.333845Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:695:2689] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.334088Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:700:2694] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.334341Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:705:2699] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.334591Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:710:2704] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.334846Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:715:2709] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.335104Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:720:2714] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.335347Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:725:2719] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.335591Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:730:2724] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.335847Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:735:2729] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.336126Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:740:2734] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.336417Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:745:2739] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.336669Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:750:2744] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.336932Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:41.336954Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:755:2749], now have 1 active actors on pipe 2024-11-21T09:18:41.336969Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:41.337087Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:41.337096Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:758:2752], now have 1 active actors on pipe 2024-11-21T09:18:41.337106Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured 
TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:41.337171Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:41.337174Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:761:2755], now have 1 active actors on pipe 2024-11-21T09:18:41.337181Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:41.337244Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:764:2758] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NSchemeShard::TFindSubDomainPathIdActor Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.535102Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:41.535122Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.540536Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:41.540548Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.541126Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:41.541284Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 227 actor [117:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 227 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 227 ReadRuleGenerations: 227 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 
1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 227 Important: false } Consumers { Name: "aaa" Generation: 227 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:41.541381Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [117:246:2246] 2024-11-21T09:18:41.541505Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [117:246:2246] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:41.541658Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [117:244:2244] 2024-11-21T09:18:41.541733Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [117:244:2244] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:18:41.545712Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:18:41.545722Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:41.545790Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [117:325:2308] 2024-11-21T09:18:41.545897Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [117:327:2310] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:18:41.546332Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [117:325:2308] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T09:18:41.546368Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [117:327:2310] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2024-11-21T09:17:37.474536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:37.474969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:37.474987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004073/r3tmp/tmp9ZArda/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12675, node 1 TClient is connected to server localhost:28155 2024-11-21T09:17:37.701223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:37.719470Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:37.720131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:37.720141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:37.720144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:37.720190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:37.761929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:37.761960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:37.772430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:47.894015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:648:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:47.894038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:658:2545], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:47.894045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:47.895862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T09:17:48.020014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:662:2548], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T09:17:48.162546Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:744:2598], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:17:48.164111Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTNkMzQ1NjgtMjZlNjdkN2QtNzQxODNmYjctMWFhMzcxMDg=, ActorId: [1:644:2537], ActorState: ExecuteState, TraceId: 01jd706vfm3njsbx4c3wt11190, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2024-11-21T09:17:48.292407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2024-11-21T09:17:48.688368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:17:48.758579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:17:49.079910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2024-11-21T09:17:59.557125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7076vxdp3ef6sg13mm9d92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdhZTQyNDYtN2ViZTdkMjUtOTY3MjBhZWEtMjA0OWE4NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2024-11-21T09:18:10.072852Z node 1 :TX_PROXY ERROR: Access denied for root@builtin with access RemoveSchema to path Root/.metadata/test REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin to path Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2024-11-21T09:18:20.494124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd707va38zz5y9zgpvw8363g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI0NDFkYjgtM2VkZTg4NGItNTJkMTNlNjMtZjZlNTgyZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:941 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2024-11-21T09:18:41.273610Z node 1 :TX_PROXY ERROR: Access denied for root@builtin with access RemoveSchema to path Root/.metadata/initialization/migrations REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin to path Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> HttpRequest::Probe |95.0%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::TwoDatabases |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181260.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181260.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181260.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181260.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180060.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180060.000000s;Name=;Codec=}; 2024-11-21T09:17:41.168236Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.183471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.185281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.185298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.185334Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.185789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.185813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.185837Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.185848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.185858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.185868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.185877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.185891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.185902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.185921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.185932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.185943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.188860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.188871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.189535Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.189579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.189586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.189608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.189667Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.189675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.189679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.189684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.189690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.189695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.189697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.189711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.189715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.189719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.189722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.189728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.189731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.189737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.189740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.189746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.189749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.189752Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T09:17:41.189756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.189760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.189763Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.189782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:41.189788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:41.189793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:41.189799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=3; 2024-11-21T09:17:41.189811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:41.189815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:41.189818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:41.189830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:41.189835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41. ... 
ng=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.945909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.945912Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] finished for tablet 9437184 2024-11-21T09:18:42.945918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] send ScanData to [1:685:2688] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:42.945949Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:686:2689] and sent to [1:685:2688] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1732180722943187,"name":"_full_task","f":1732180722943187,"d_finished":0,"c":0,"l":1732180722945922,"d":2735},"events":[{"name":"bootstrap","f":1732180722943217,"d_finished":430,"c":1,"l":1732180722943647,"d":430},{"a":1732180722945899,"name":"ack","f":1732180722945084,"d_finished":119,"c":3,"l":1732180722945866,"d":142},{"a":1732180722945898,"name":"processing","f":1732180722943658,"d_finished":647,"c":24,"l":1732180722945866,"d":671},{"name":"ProduceResults","f":1732180722943427,"d_finished":392,"c":29,"l":1732180722945911,"d":392},{"a":1732180722945911,"name":"Finish","f":1732180722945911,"d_finished":0,"c":0,"l":1732180722945922,"d":11},{"name":"task_result","f":1732180722943660,"d_finished":504,"c":21,"l":1732180722945689,"d":504}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:686:2689]->[1:685:2688] 2024-11-21T09:18:42.945957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:42.943083Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T09:18:42.945960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:42.945965Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:18:42.945970Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:42.946132Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T09:18:42.946155Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2024-11-21T09:18:42.946169Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:18:42.946188Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:18:42.946194Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:18:42.946243Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:702:2705];trace_detailed=; 2024-11-21T09:18:42.946272Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:18:42.946285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:42.946296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.946300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.946316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:42.946320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.946326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:42.946329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] finished for tablet 9437184 2024-11-21T09:18:42.946333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] send ScanData to [1:701:2704] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:42.946356Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:702:2705] and sent to [1:701:2704] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180722946238,"name":"_full_task","f":1732180722946238,"d_finished":0,"c":0,"l":1732180722946337,"d":99},"events":[{"name":"bootstrap","f":1732180722946252,"d_finished":50,"c":1,"l":1732180722946302,"d":50},{"a":1732180722946314,"name":"ack","f":1732180722946314,"d_finished":0,"c":0,"l":1732180722946337,"d":23},{"a":1732180722946313,"name":"processing","f":1732180722946313,"d_finished":0,"c":0,"l":1732180722946337,"d":24},{"name":"ProduceResults","f":1732180722946292,"d_finished":21,"c":2,"l":1732180722946328,"d":21},{"a":1732180722946328,"name":"Finish","f":1732180722946328,"d_finished":0,"c":0,"l":1732180722946337,"d":9}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:702:2705]->[1:701:2704] 2024-11-21T09:18:42.946368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:42.946198Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:18:42.946371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:42.946374Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:18:42.946378Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181261.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181261.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180061.000000s;Name=;Codec=}; 2024-11-21T09:17:42.068894Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:42.083899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:42.085743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:42.085766Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:42.085804Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:42.086346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:42.086372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:42.086395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:42.086408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:42.086419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:42.086430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:42.086440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:42.086452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:42.086464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:42.086486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42.086499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:42.086510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:42.089577Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:42.089590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:42.090277Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:42.090321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:42.090326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:42.090347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.090417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:42.090426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:42.090429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:42.090436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:42.090443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:42.090447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:42.090450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:42.090460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.090465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:42.090469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:42.090472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:42.090479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:42.090485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:42.090489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:42.090492Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:42.090500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:42.090504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:42.090506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:42.090512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:42.090516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:42.090519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:42.090542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:42.090548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:42.090554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:42.090561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:42.090574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:42.090578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:42.090581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:42.090596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:42.090601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42. ... 
=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.246555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.246557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] finished for tablet 9437184 2024-11-21T09:18:43.246564Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] send ScanData to [1:685:2688] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:43.246602Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:686:2689] and sent to [1:685:2688] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1732180723243400,"name":"_full_task","f":1732180723243400,"d_finished":0,"c":0,"l":1732180723246570,"d":3170},"events":[{"name":"bootstrap","f":1732180723243433,"d_finished":520,"c":1,"l":1732180723243953,"d":520},{"a":1732180723246544,"name":"ack","f":1732180723245711,"d_finished":125,"c":3,"l":1732180723246510,"d":151},{"a":1732180723246543,"name":"processing","f":1732180723244064,"d_finished":744,"c":24,"l":1732180723246510,"d":771},{"name":"ProduceResults","f":1732180723243723,"d_finished":444,"c":29,"l":1732180723246556,"d":444},{"a":1732180723246556,"name":"Finish","f":1732180723246556,"d_finished":0,"c":0,"l":1732180723246570,"d":14},{"name":"task_result","f":1732180723244066,"d_finished":592,"c":21,"l":1732180723246320,"d":592}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:686:2689]->[1:685:2688] 2024-11-21T09:18:43.246612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:43.243323Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T09:18:43.246615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:43.246622Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:18:43.246626Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:43.246805Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T09:18:43.246832Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2024-11-21T09:18:43.246846Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:18:43.246870Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:18:43.246877Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:18:43.246932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:702:2705];trace_detailed=; 2024-11-21T09:18:43.246967Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:18:43.246980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:43.246991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.246996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.247027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:43.247032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.247038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:43.247041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] finished for tablet 9437184 2024-11-21T09:18:43.247046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] send ScanData to [1:701:2704] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:43.247069Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:702:2705] and sent to [1:701:2704] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180723246926,"name":"_full_task","f":1732180723246926,"d_finished":0,"c":0,"l":1732180723247050,"d":124},"events":[{"name":"bootstrap","f":1732180723246943,"d_finished":55,"c":1,"l":1732180723246998,"d":55},{"a":1732180723247025,"name":"ack","f":1732180723247025,"d_finished":0,"c":0,"l":1732180723247050,"d":25},{"a":1732180723247008,"name":"processing","f":1732180723247008,"d_finished":0,"c":0,"l":1732180723247050,"d":42},{"name":"ProduceResults","f":1732180723246987,"d_finished":22,"c":2,"l":1732180723247040,"d":22},{"a":1732180723247040,"name":"Finish","f":1732180723247040,"d_finished":0,"c":0,"l":1732180723247050,"d":10}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:702:2705]->[1:701:2704] 2024-11-21T09:18:43.247076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:43.246882Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:18:43.247079Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:43.247082Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:18:43.247086Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181260.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181260.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181260.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181260.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180060.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180060.000000s;Name=;Codec=}; 2024-11-21T09:17:41.087295Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.101064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.102940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.102961Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.103015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.103502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.103532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.103557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.103569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.103580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.103590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.103602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.103612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.103630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.103649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.103659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.103669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.106689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.106705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.107717Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.107797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.107807Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.107832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.107891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.107900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.107903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.107909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.107915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.107920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.107922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.107937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.107942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.107946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.107948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.107954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.107958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.107963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.107965Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.107972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.107976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.107979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.107985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.107989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.107992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.108011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=5; 2024-11-21T09:17:41.108016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=2; 2024-11-21T09:17:41.108021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:41.108028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:41.108041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:41.108046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:41.108048Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:41.108062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:41.108066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41. ... 
EST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=3; 2024-11-21T09:18:43.440625Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:18:43.440628Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2024-11-21T09:18:43.440634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=3; 2024-11-21T09:18:43.440640Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:18:43.440652Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9; 2024-11-21T09:18:43.440680Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=25; 2024-11-21T09:18:43.440686Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:18:43.440690Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=1; 2024-11-21T09:18:43.440693Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T09:18:43.440696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:18:43.440701Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:18:43.440706Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=3; 2024-11-21T09:18:43.440709Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:18:43.440716Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=4; 2024-11-21T09:18:43.440719Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:18:43.440724Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T09:18:43.440726Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=959; 2024-11-21T09:18:43.440739Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:18:43.440749Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:18:43.440753Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:18:43.440756Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:18:43.440765Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:18:43.440775Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:18:43.440784Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:18:43.440787Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:18:43.440795Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:18:43.440799Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:43.440804Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:43.440812Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:43.440819Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:43.440822Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:43.440829Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:43.440831Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 
2024-11-21T09:18:43.440835Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:43.440844Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:43.440913Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1188:3084];tablet_id=9437184;parent=[1:1151:3054];fline=manager.h:99;event=ask_data;request=request_id=84;1={portions_count=6};; 2024-11-21T09:18:43.440935Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:43.441007Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:18:43.441194Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:18:43.441198Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:18:43.441200Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:18:43.441203Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:43.441208Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:43.441214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:43.441218Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:43.441221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:43.441225Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:43.441228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:43.441233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:43.441239Z node 
1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:43.441292Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2024-11-21T09:18:43.441320Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2024-11-21T09:18:43.441364Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:18:43.441367Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> ColumnShardTiers::TTLUsage |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TPQTest::TestManyConsumers [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T09:17:20.977628Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:20.977653Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.982432Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2024-11-21T09:17:20.984863Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T09:17:20.985115Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T09:17:20.985694Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T09:17:20.986116Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T09:17:20.986537Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:20.988015Z node 1 :PERSQUEUE INFO: new Cookie default|ca365b94-fe70e1ce-223927fb-b0627408_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T09:17:21.213297Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.213317Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.217918Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.218118Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor 
[2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T09:17:21.218237Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T09:17:21.218860Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T09:17:21.219173Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T09:17:21.219594Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.221318Z node 2 :PERSQUEUE INFO: new Cookie default|84c119e2-a9dcc77c-634f3ae1-3bbb96e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2024-11-21T09:17:21.455149Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.455171Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:177:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.459634Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.459795Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2024-11-21T09:17:21.459914Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2024-11-21T09:17:21.460521Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T09:17:21.460867Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2024-11-21T09:17:21.461363Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.462789Z node 3 :PERSQUEUE INFO: new Cookie default|b98a6d75-12e65e13-41fb05c5-8a9e5c1e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2024-11-21T09:17:21.690722Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.690742Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] Leader for TabletID 72057594037927938 is [4:151:2172] sender: [4:152:2057] recipient: [4:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:177:2057] recipient: [4:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T09:17:21.694805Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:17:21.694996Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 ... 
s 2024-11-21T09:18:45.434057Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.434865Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.436725Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.436734Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1010:3006], now have 1 active actors on pipe 2024-11-21T09:18:45.436740Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.436881Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.437700Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.439509Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.439516Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1013:3009], now have 1 active actors on pipe 2024-11-21T09:18:45.439529Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.439660Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.440479Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.442266Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.442272Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1016:3012], now have 1 active actors on pipe 2024-11-21T09:18:45.442279Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.442407Z node 87 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.443205Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.444978Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.444985Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1019:3015], now have 1 active actors on pipe 2024-11-21T09:18:45.444996Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.445120Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.445906Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.447723Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.447737Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1022:3018], now have 1 active actors on pipe 2024-11-21T09:18:45.447746Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.447870Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.448751Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.450528Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.450535Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1025:3021], now have 1 active actors on pipe 2024-11-21T09:18:45.450542Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.450662Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.451449Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.453233Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.453240Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1028:3024], now have 1 active actors on pipe 2024-11-21T09:18:45.453246Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.453366Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.454172Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.455899Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.455905Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1031:3027], now have 1 active actors on pipe 2024-11-21T09:18:45.455911Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T09:18:45.456027Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.456872Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.458571Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:18:45.458578Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1034:3030], now have 1 active actors on pipe 2024-11-21T09:18:45.458589Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T09:18:45.458712Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.459458Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T09:18:45.461159Z node 87 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [87:1037:3033] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NSchemeShard::TFindSubDomainPathIdActor Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TVectorIndexTests::CreateTable >> TVectorIndexTests::CreateTable [GOOD] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:18:46.093541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:46.093558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:46.093561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:46.093564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:46.093568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:46.093571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:46.093579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:46.093644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:46.100113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:18:46.100125Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:46.101799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:46.102276Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:46.102308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:46.103375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:46.103500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:46.103560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:46.103621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:46.104279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:46.104459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:46.104464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:46.104489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:18:46.104493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:46.104497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:46.104506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.105225Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:18:46.114576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:46.114621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.114654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:46.114679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:46.114684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:18:46.115148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115155Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:46.115158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:46.115161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:46.115423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:46.115612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.115621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:46.115626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:46.116015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:46.116281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:46.116312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:46.116405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:46.116420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:46.116426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:46.116456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:46.116459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:46.116481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:46.116488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:46.116716Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:46.116720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:46.116739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:46.116742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:18:46.116789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.116793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:18:46.116800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:46.116802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:46.116806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:46.116809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:46.116812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:46.116814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:46.116820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:46.116823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:46.116825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:18:46.117001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:46.117009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:46.117011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:18:46.117014Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:18:46.117016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:46.117025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
calPathId: 2] was 5 2024-11-21T09:18:46.202792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.202800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:18:46.202804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:18:46.202819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.202870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.202878Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:18:46.202880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:46.202899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.202970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 
2024-11-21T09:18:46.202975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.202976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.203204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.203218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.203221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.203224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:18:46.203229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:18:46.203276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.203280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:18:46.203282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:18:46.203284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:18:46.203286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:18:46.203291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2024-11-21T09:18:46.203391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2024-11-21T09:18:46.203396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:46.203429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:18:46.203442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2024-11-21T09:18:46.203445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2024-11-21T09:18:46.203447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2024-11-21T09:18:46.203715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 
72057594046678944 2024-11-21T09:18:46.203724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:46.203757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:18:46.203766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2024-11-21T09:18:46.203768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2024-11-21T09:18:46.203771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2024-11-21T09:18:46.203815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:18:46.203817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:46.203830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:18:46.203835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2024-11-21T09:18:46.203837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T09:18:46.203840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2024-11-21T09:18:46.203847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:420:2375] message: TxId: 102 2024-11-21T09:18:46.203852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T09:18:46.203854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:18:46.203857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:18:46.203866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:18:46.203870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T09:18:46.203872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T09:18:46.203874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:18:46.203876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T09:18:46.203877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T09:18:46.203881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:18:46.203883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2024-11-21T09:18:46.203885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2024-11-21T09:18:46.203888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:18:46.203989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2024-11-21T09:18:46.203999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:18:46.204520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:18:46.204526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:523:2477] TestWaitNotification: OK eventTxId 102 >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError |95.1%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:18:47.405937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:47.405953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:47.405956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:47.405959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:47.405963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:47.405965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:47.405971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:47.406014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:47.412572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2024-11-21T09:18:47.412583Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:47.414135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:47.414613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:47.414639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:47.415349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:47.415464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:47.415527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:47.415578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:47.416132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:47.416312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:47.416319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:47.416348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:18:47.416352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:47.416356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:47.416364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.416996Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:18:47.426106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:47.426153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:47.426211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:47.426215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2024-11-21T09:18:47.426598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:47.426607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:47.426610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:47.426841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:47.426849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:47.427027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.427032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.427036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:47.427040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:47.427388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:47.427642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:47.427675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:47.427765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:47.427779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:47.427795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:47.427823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:47.427827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:47.427845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:47.427852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:47.428080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:47.428083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:47.428103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:47.428106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:18:47.428152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.428156Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:18:47.428163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:47.428166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:47.428169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:47.428172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:47.428175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:47.428178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:47.428184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:47.428187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:47.428190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:18:47.428368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:47.428377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:47.428380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:18:47.428383Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:18:47.428385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:47.428393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
o populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 106, path id: 2 2024-11-21T09:18:47.440484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.440488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 106:0 ProgressState, operation type: TxAlterFileStore, at tablet72057594046678944 2024-11-21T09:18:47.440506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 106:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:18:47.440588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T09:18:47.440593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T09:18:47.440595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2024-11-21T09:18:47.440598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T09:18:47.440601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:18:47.440609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2024-11-21T09:18:47.440879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:1 msg type: 268697601 2024-11-21T09:18:47.440893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72057594037968897 2024-11-21T09:18:47.440896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 106, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:18:47.440929Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:18:47.440987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:18:47.440992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 106, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:18:47.441002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:18:47.441006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 106:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:18:47.441009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 106:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:18:47.441018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 2 -> 3 2024-11-21T09:18:47.441056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T09:18:47.441231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#106:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T09:18:47.441420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2024-11-21T09:18:47.441437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2024-11-21T09:18:47.441447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: TxId: 106 Origin: 72075186233409546 Status: OK 2024-11-21T09:18:47.441450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#106:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2024-11-21T09:18:47.441610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#106:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2024-11-21T09:18:47.441644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:47.441810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2024-11-21T09:18:47.441822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000005 2024-11-21T09:18:47.441853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#106:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T09:18:47.441879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T09:18:47.441881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T09:18:47.441886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:18:47.441891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2024-11-21T09:18:47.441894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T09:18:47.441896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T09:18:47.441898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T09:18:47.441906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:18:47.441908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2024-11-21T09:18:47.441911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T09:18:47.442134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:47.442140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:18:47.442154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:47.442157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 106, path id: 2 FAKE_COORDINATOR: Erasing txId 106 2024-11-21T09:18:47.442203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T09:18:47.442208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T09:18:47.442211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2024-11-21T09:18:47.442213Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:18:47.442215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] 
was 3 2024-11-21T09:18:47.442223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T09:18:47.442414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T09:18:47.442440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T09:18:47.442443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T09:18:47.442474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T09:18:47.442483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T09:18:47.442485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:425:2406] TestWaitNotification: OK eventTxId 106 >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181277.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181277.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181277.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181277.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132181277.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180077.000000s;Name=;Codec=}; 2024-11-21T09:17:57.230005Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:57.242262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:57.244288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:57.244305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:57.244341Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:57.244758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:57.244782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:57.244799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:57.244811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:57.244822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:57.244832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:57.244841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:57.244853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:57.244863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:57.244880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:57.244890Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:57.244902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:57.247633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:57.247645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:57.248450Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:57.248508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:57.248513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:57.248532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.248596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:57.248606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:57.248609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:57.248615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:57.248621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:57.248625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:57.248627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:57.248637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.248642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:57.248646Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:57.248648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:57.248654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:57.248658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:57.248663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:57.248665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:57.248671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:57.248675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:57.248677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:57.248682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:57.248686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:57.248689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:57.248709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:57.248716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:57.248721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:57.248728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:57.248741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUp ... 
ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.622809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:915:2920];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.622812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2920] finished for tablet 9437184 2024-11-21T09:18:48.622819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:915:2920] send ScanData to [1:914:2919] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:48.622854Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:915:2920] and sent to [1:914:2919] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1732180728619695,"name":"_full_task","f":1732180728619695,"d_finished":0,"c":0,"l":1732180728622824,"d":3129},"events":[{"name":"bootstrap","f":1732180728619726,"d_finished":550,"c":1,"l":1732180728620276,"d":550},{"a":1732180728622799,"name":"ack","f":1732180728622416,"d_finished":177,"c":3,"l":1732180728622766,"d":202},{"a":1732180728622798,"name":"processing","f":1732180728620409,"d_finished":702,"c":24,"l":1732180728622766,"d":728},{"name":"ProduceResults","f":1732180728619972,"d_finished":435,"c":29,"l":1732180728622810,"d":435},{"a":1732180728622810,"name":"Finish","f":1732180728622810,"d_finished":0,"c":0,"l":1732180728622824,"d":14},{"name":"task_result","f":1732180728620411,"d_finished":495,"c":21,"l":1732180728622397,"d":495}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:915:2920]->[1:914:2919] 2024-11-21T09:18:48.622863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:915:2920];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:48.619604Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:18:48.622866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:915:2920];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:48.622872Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:915:2920];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:18:48.622876Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:915:2920];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:48.623069Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T09:18:48.623099Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T09:18:48.623112Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:18:48.623133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:18:48.623142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:18:48.623201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:931:2936];trace_detailed=; 2024-11-21T09:18:48.623245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:18:48.623263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:48.623276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.623281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.623300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:48.623305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.623309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.623313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:931:2936] finished for tablet 9437184 2024-11-21T09:18:48.623319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:931:2936] send ScanData to [1:930:2935] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:48.623342Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:931:2936] and sent to [1:930:2935] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180728623194,"name":"_full_task","f":1732180728623194,"d_finished":0,"c":0,"l":1732180728623322,"d":128},"events":[{"name":"bootstrap","f":1732180728623214,"d_finished":69,"c":1,"l":1732180728623283,"d":69},{"a":1732180728623298,"name":"ack","f":1732180728623298,"d_finished":0,"c":0,"l":1732180728623322,"d":24},{"a":1732180728623297,"name":"processing","f":1732180728623297,"d_finished":0,"c":0,"l":1732180728623322,"d":25},{"name":"ProduceResults","f":1732180728623272,"d_finished":21,"c":2,"l":1732180728623312,"d":21},{"a":1732180728623312,"name":"Finish","f":1732180728623312,"d_finished":0,"c":0,"l":1732180728623322,"d":10}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:931:2936]->[1:930:2935] 2024-11-21T09:18:48.623362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:48.623146Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:18:48.623365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:48.623368Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:18:48.623372Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:931:2936];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181276.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180076.000000s;Name=;Codec=}; 2024-11-21T09:17:57.027347Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:57.039886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:57.041676Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:57.041695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:57.041729Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:57.042144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:57.042168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:57.042188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:57.042199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:57.042209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:57.042219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:57.042228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:57.042240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:57.042261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:57.042276Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:57.042290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:57.042306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:57.045013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:57.045023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:57.045667Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:57.045712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:57.045718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:57.045734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.045788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:57.045796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:57.045800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:57.045805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:57.045811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:57.045815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:57.045818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:57.045828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.045833Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:57.045836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:57.045838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:57.045844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:57.045848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:57.045852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:57.045855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:57.045862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:57.045865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:57.045867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:57.045874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:57.045877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:57.045880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:57.045900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:57.045907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:57.045912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:57.045918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:57.045931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUp ... 
ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.587792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.587795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:913:2918] finished for tablet 9437184 2024-11-21T09:18:48.587803Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:913:2918] send ScanData to [1:912:2917] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:48.587842Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:913:2918] and sent to [1:912:2917] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1732180728584726,"name":"_full_task","f":1732180728584726,"d_finished":0,"c":0,"l":1732180728587808,"d":3082},"events":[{"name":"bootstrap","f":1732180728584755,"d_finished":502,"c":1,"l":1732180728585257,"d":502},{"a":1732180728587782,"name":"ack","f":1732180728587366,"d_finished":201,"c":3,"l":1732180728587741,"d":227},{"a":1732180728587781,"name":"processing","f":1732180728585355,"d_finished":763,"c":24,"l":1732180728587742,"d":790},{"name":"ProduceResults","f":1732180728585035,"d_finished":455,"c":29,"l":1732180728587793,"d":455},{"a":1732180728587793,"name":"Finish","f":1732180728587793,"d_finished":0,"c":0,"l":1732180728587808,"d":15},{"name":"task_result","f":1732180728585357,"d_finished":516,"c":21,"l":1732180728587345,"d":516}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:913:2918]->[1:912:2917] 2024-11-21T09:18:48.587852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:48.584645Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:18:48.587856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:48.587863Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:18:48.587868Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:913:2918];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:48.588071Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T09:18:48.588102Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T09:18:48.588118Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:18:48.588142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:18:48.588153Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:18:48.588238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:929:2934];trace_detailed=; 2024-11-21T09:18:48.588283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:18:48.588297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:18:48.588310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.588315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.588335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:18:48.588340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.588344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:18:48.588348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:929:2934] finished for tablet 9437184 2024-11-21T09:18:48.588353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:929:2934] send ScanData to [1:928:2933] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:18:48.588376Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:929:2934] and sent to [1:928:2933] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180728588229,"name":"_full_task","f":1732180728588229,"d_finished":0,"c":0,"l":1732180728588357,"d":128},"events":[{"name":"bootstrap","f":1732180728588256,"d_finished":61,"c":1,"l":1732180728588317,"d":61},{"a":1732180728588333,"name":"ack","f":1732180728588333,"d_finished":0,"c":0,"l":1732180728588357,"d":24},{"a":1732180728588331,"name":"processing","f":1732180728588331,"d_finished":0,"c":0,"l":1732180728588357,"d":26},{"name":"ProduceResults","f":1732180728588305,"d_finished":23,"c":2,"l":1732180728588347,"d":23},{"a":1732180728588347,"name":"Finish","f":1732180728588347,"d_finished":0,"c":0,"l":1732180728588357,"d":10}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:929:2934]->[1:928:2933] 2024-11-21T09:18:48.588403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:18:48.588158Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:18:48.588406Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:18:48.588409Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:18:48.588413Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:929:2934];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181266.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180066.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180066.000000s;Name=;Codec=}; 2024-11-21T09:17:46.385521Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:46.397559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:46.399101Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:46.399116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:46.399147Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:46.399582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:46.399603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:46.399622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:46.399632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:46.399642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:46.399651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:46.399661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:46.399671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:46.399682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:46.399696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:46.399706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:46.399715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:46.402367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:46.402379Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:46.403150Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:46.403250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:46.403259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:46.403284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.403364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:46.403374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:46.403377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:46.403383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:46.403390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:46.403395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:46.403397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:46.403409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.403421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:46.403426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:46.403429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:46.403435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:46.403439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:46.403447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:46.403449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:46.403458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:46.403462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:46.403465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:46.403471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:46.403476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:46.403479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:46.403515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:46.403523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:46.403529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:46.403537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:46.403553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:46.403558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:46.403560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:46.403575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:46.403580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:46. ... 
T_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=3; 2024-11-21T09:18:49.060378Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:18:49.060381Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:18:49.060389Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:18:49.060395Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:18:49.060410Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11; 2024-11-21T09:18:49.060442Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=29; 2024-11-21T09:18:49.060449Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:18:49.060454Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:18:49.060458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T09:18:49.060461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:18:49.060465Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:18:49.060472Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:18:49.060476Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:18:49.060483Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:18:49.060488Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:18:49.060494Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:18:49.060496Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1080; 2024-11-21T09:18:49.060510Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:18:49.060521Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:18:49.060525Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:18:49.060529Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:18:49.060539Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:18:49.060553Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:18:49.060564Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:18:49.060568Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:18:49.060578Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:18:49.060582Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:49.060588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:49.060599Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:49.060608Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:49.060612Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:49.060619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:49.060623Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 
2024-11-21T09:18:49.060626Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:49.060637Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:49.060709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:49.060718Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1188:3084];tablet_id=9437184;parent=[1:1151:3054];fline=manager.h:99;event=ask_data;request=request_id=84;1={portions_count=6};; 2024-11-21T09:18:49.060825Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:18:49.060863Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:18:49.060866Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:18:49.060868Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:18:49.060871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:49.060876Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:49.060880Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:49.060884Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:49.060887Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:49.060890Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:49.060893Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:49.060898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:49.060904Z node 
1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:49.061123Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2024-11-21T09:18:49.061154Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2024-11-21T09:18:49.061187Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:18:49.061190Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test_public_api.py::TestAttributes::test_create_table >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] >> test.py::test[produce-reduce_multi_in-empty-Debug] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181266.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180066.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180066.000000s;Name=;Codec=}; 
2024-11-21T09:17:47.159795Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:47.172228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:47.173880Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:47.173899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:47.173934Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:47.174418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:47.174447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:47.174470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:47.174481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:47.174492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:47.174508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:47.174522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:47.174541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:47.174552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:47.174572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:47.174583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:47.174593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:47.177493Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:47.177505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:47.178250Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:47.178302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:47.178308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:47.178326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:47.178394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:47.178415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:47.178419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:47.178424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:47.178431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:47.178435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:47.178437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:47.178451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:47.178458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:47.178463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:47.178467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:47.178476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:47.178482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:47.178489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:47.178493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:47.178503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:47.178507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:47.178509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:47.178525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:47.178529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:47.178531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:47.178552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:47.178558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:47.178563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:47.178569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:47.178584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:47.178588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:47.178591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:47.178605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:47.178609Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:47. ... STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=7; 2024-11-21T09:18:50.102604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:18:50.102609Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2024-11-21T09:18:50.102619Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=6; 2024-11-21T09:18:50.102639Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2024-11-21T09:18:50.102662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19; 2024-11-21T09:18:50.102708Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=42; 2024-11-21T09:18:50.102717Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=5; 2024-11-21T09:18:50.102722Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:18:50.102727Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T09:18:50.102730Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:18:50.102735Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:18:50.102745Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=7; 2024-11-21T09:18:50.102750Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:18:50.102760Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2024-11-21T09:18:50.102764Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T09:18:50.102771Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=4; 2024-11-21T09:18:50.102774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=94722; 2024-11-21T09:18:50.102796Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:18:50.102824Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:18:50.102830Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:18:50.102834Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:18:50.102855Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:18:50.102871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:18:50.102888Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:18:50.102892Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:18:50.102912Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:18:50.102916Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:50.102924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:50.102938Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:50.102948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:50.102952Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:50.102960Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:50.102963Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:50.102968Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:50.102982Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:50.103071Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:50.103248Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1188:3084];tablet_id=9437184;parent=[1:1151:3054];fline=manager.h:99;event=ask_data;request=request_id=84;1={portions_count=6};; 2024-11-21T09:18:50.103288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:18:50.103594Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:18:50.103598Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:18:50.103600Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:18:50.103603Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:50.103608Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:50.103614Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:18:50.103619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:18:50.103623Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:50.103627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:50.103630Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 
2024-11-21T09:18:50.103633Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:18:50.103640Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:50.103727Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2024-11-21T09:18:50.103760Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2024-11-21T09:18:50.103793Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:18:50.103796Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[produce-reduce_multi_in-empty-Debug] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-Analyze] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-select_win_column_partition_by-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-Debug] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Plan] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Analyze] >> test.py::test[pg-select_win_column_partition_by-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-ForceBlocks] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |95.1%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Analyze] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Debug] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[pg-select_win_column_partition_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-Results] >> TAsyncIndexTests::OnlineBuild >> TAsyncIndexTests::OnlineBuild [GOOD] >> test.py::test[pg-select_win_column_partition_by-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_row_number-default.txt-Analyze] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:18:53.985069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:53.985086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:53.985089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:53.985092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:53.985096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:53.985099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:53.985105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:53.985173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:53.991575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:18:53.991587Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:53.993262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:53.993716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:53.993745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:53.994552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:53.994701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:53.994762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:53.994824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:53.995428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:53.995611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:53.995618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:53.995644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:18:53.995649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:53.995653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:53.995662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:18:53.996378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:18:54.005584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:54.005627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.005662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:54.005689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:54.005693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:18:54.006063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:54.006071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:54.006074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:54.006278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:18:54.006283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:54.006455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.006463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:54.006467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:54.006819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:54.007075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:54.007105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:54.007199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:54.007213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:54.007218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:54.007248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:54.007252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:54.007276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:54.007284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:54.007506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:54.007510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:54.007532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:54.007535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:18:54.007587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.007591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:18:54.007598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:54.007600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:54.007604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:54.007607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:54.007610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:54.007611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:54.007617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:54.007620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:54.007623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:18:54.007785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:54.007793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:54.007796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:18:54.007798Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:18:54.007801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:54.007809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710760, response: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2024-11-21T09:18:54.158270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T09:18:54.158273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2024-11-21T09:18:54.158279Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2024-11-21T09:18:54.158289Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2024-11-21T09:18:54.158314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T09:18:54.158321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2024-11-21T09:18:54.158332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:54.158539Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:18:54.158548Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:18:54.158648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T09:18:54.158662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:18:54.158676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T09:18:54.158682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T09:18:54.158715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T09:18:54.158731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T09:18:54.158923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T09:18:54.158928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T09:18:54.158933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T09:18:54.158935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T09:18:54.158938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T09:18:54.158943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T09:18:54.158946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T09:18:54.158948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 281474976710760:0 2024-11-21T09:18:54.158950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T09:18:54.158955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T09:18:54.159130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T09:18:54.159136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T09:18:54.159142Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T09:18:54.159148Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:18:54.159308Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:18:54.159315Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:18:54.159318Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T09:18:54.159522Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:18:54.159530Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:18:54.159532Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T09:18:54.159544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:18:54.159547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:465:2429] TestWaitNotification: OK eventTxId 102 >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] >> test.py::test[pg-select_win_row_number-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_row_number-default.txt-Debug] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-select_win_row_number-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_row_number-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> test.py::test[pg-select_win_row_number-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_row_number-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_row_number-default.txt-Results] >> test.py::test[pg-select_win_row_number-default.txt-Results] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Analyze] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> 
test.py::test[sampling-bind_multiple_sample-default.txt-Analyze] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:18:57.238510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:57.238525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:57.238528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:57.238531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:57.238534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:57.238536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:57.238541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:57.238585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:57.244951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:18:57.244963Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:57.246484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:57.246934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:57.246957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:57.247857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:57.248008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:57.248082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:57.248148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:57.248967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:57.249154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:57.249161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:57.249184Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:18:57.249189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:57.249192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:57.249200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.249946Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:18:57.260005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:57.260059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:57.260129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:57.260134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:18:57.260635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:57.260644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:57.260647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:57.260886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:57.260895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:57.261105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.261110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.261113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:57.261118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:57.261493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:57.261774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:57.261816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:57.261929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:57.261945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:57.261951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:57.261985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:57.261989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:57.262011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:57.262024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:57.262279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:57.262284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:57.262307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:57.262310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:18:57.262363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:57.262367Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:18:57.262375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:57.262378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:57.262381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:57.262385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:57.262387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:57.262390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:57.262397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:57.262400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:57.262402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:18:57.262586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:57.262594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:18:57.262597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:18:57.262600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:18:57.262603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:57.262613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
thTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:57.333481Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:18:57.333498Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 18us result status StatusSuccess 2024-11-21T09:18:57.333545Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" 
PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:57.333589Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:18:57.333598Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 10us result status StatusSuccess 2024-11-21T09:18:57.333625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } 
Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[pg-strings_to_pg-default.txt-Analyze] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Debug] >> test.py::test[sampling-bind_multiple_sample-default.txt-Analyze] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Debug] >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> test.py::test[pg-strings_to_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-ForceBlocks] >> test.py::test[sampling-bind_multiple_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] >> KqpExplain::AggGroupLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::AggGroupLimit [GOOD] Test command err: 2024-11-21T09:16:52.912389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659083098494160:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:52.912450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001db2/r3tmp/tmpSNulvd/pdisk_1.dat 2024-11-21T09:16:52.971039Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64613, node 1 2024-11-21T09:16:52.994053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:16:52.994068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:16:52.994070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:16:52.994105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:53.007240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.012423Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, 
at schemeshard: 72057594046644480 TClient is connected to server localhost:18560 2024-11-21T09:16:53.045217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.045245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.046040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:16:53.046843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.057044Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659085819983966:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:53.061131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:53.061351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.061372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.062833Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:16:53.063071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.077394Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:16:53.077413Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:16:53.082524Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:16:53.083820Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:16:53.083835Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:16:53.083871Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:16:53.083874Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:16:53.083878Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:16:53.083881Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2024-11-21T09:16:53.083884Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:16:53.083889Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:16:53.083946Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:16:53.106831Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7439659085819984284:2285] 2024-11-21T09:16:53.107561Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:16:53.108697Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:16:53.108699Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:16:53.108704Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:16:53.110279Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2024-11-21T09:16:53.111619Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:16:53.111631Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7439659085819984338:2287], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:16:53.114299Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:16:53.115518Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7439659085819984400:2332] 2024-11-21T09:16:53.115610Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:53.115619Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:53.117484Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7439659085819984400:2332], schemeshard id = 72075186224037889 2024-11-21T09:16:53.122809Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:53.168768Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:16:53.172198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037889 2024-11-21T09:16:53.173994Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:16:53.174008Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720658 2024-11-21T09:16:53.174770Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:16:53.238007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659087393462411:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.238034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:53.267017Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2024-11-21T09:16:53.288665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:16:53.326726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:53.326786Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:53.326830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:16:53.326856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:16:53.326880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:16:53.326904Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:16:53.326932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:16:53.326957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:16:53.326983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:16:53.327009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:16:53.327029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:16:53.327039Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[2:7439659085819984702:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:16:53.331330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7439659085819984699:2322];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:16:53.331362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7439659085819984699:2322];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:16:53.331403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7439659085819984699:2322];tablet_id=7207518 ... 11-21T09:18:57.826880Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7439659618117023168:2306]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:57.826904Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:18:57.826912Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7439659618117023170:2803] 2024-11-21T09:18:57.826917Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7439659618117023170:2803] 2024-11-21T09:18:57.826986Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7439659618117023170:2803], server id = [4:7439659620040386851:2537], tablet id = 72075186224037897, status = OK 2024-11-21T09:18:57.827051Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T09:18:57.827062Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7439659618117023168:2306], StatRequests.size() = 1 2024-11-21T09:18:57.826986Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:7439659620040386851:2537] 2024-11-21T09:18:57.827019Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [4:7439659620040386851:2537], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:18:57.827024Z node 4 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2024-11-21T09:18:57.829085Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2024-11-21T09:18:57.829227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:18:57.829291Z node 3 :STATISTICS DEBUG: EvClientDestroyed, node id = 3, client id = [3:7439659618117023170:2803], server id = [4:7439659620040386851:2537], tablet id = 72075186224037897 2024-11-21T09:18:57.829299Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7439659618117023173:2804] 2024-11-21T09:18:57.829301Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7439659618117023173:2804] 2024-11-21T09:18:57.836811Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:18:57.926856Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7439659618117023170:2803], schemeshard count = 1 Trying to start YDB, gRPC: 4173, MsgBus: 30765 2024-11-21T09:18:58.202678Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439659620983785881:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:18:58.202753Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001db2/r3tmp/tmprHaiVp/pdisk_1.dat 2024-11-21T09:18:58.208741Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4173, node 5 2024-11-21T09:18:58.217926Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:58.217934Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:58.217935Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:58.217951Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30765 TClient is connected to server localhost:30765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:18:58.304326Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:58.304349Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:58.304637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:58.305410Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:58.308498Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:58.317298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:18:58.332007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:58.344736Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:18:58.413455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439659620983787421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:58.413480Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:58.416710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.421038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.428530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.435161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.442394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.449688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:18:58.457544Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439659620983787911:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:58.457562Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:58.457566Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439659620983787916:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:58.457984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:18:58.462915Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439659620983787918:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 And item.Ts \u003C= 4 Or item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 And item.Ts \u003C= 4 Or item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> test.py::test[pg-strings_to_pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-strings_to_pg-default.txt-Results] |95.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2024-11-21T09:18:45.644591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:45.644904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:45.644918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004758/r3tmp/tmpQuYTmz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20809, node 1 TClient is connected to server localhost:17315 2024-11-21T09:18:45.745483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:45.760329Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:45.760764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:45.760770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:45.760773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:45.760812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:45.801711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:45.801733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:45.811988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:45.914671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T09:18:45.930560Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:18:45.930678Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:18:45.930719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:706:2591];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:45.932483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:706:2591];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:45.933093Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T09:18:45.933491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:45.933517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:45.933546Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:45.933558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:45.933569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:45.933582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:45.933593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:45.933605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:45.933617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:45.933628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:45.933639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:45.933650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:45.935985Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:18:45.936192Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T09:18:45.936242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:45.936247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:45.936261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:45.936272Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:45.936278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:45.936281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:45.936287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:45.936291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:45.936295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:45.936298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:45.936307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:45.936312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:45.936316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:45.936318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:45.936324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:45.936328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:45.936332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:45.936336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:45.936343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:45.936346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:45.936349Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:18:45.936354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:45.936357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:45.936360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:45.936376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:18:45.936382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:18:45.936387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72 ... es=0; s-compacted portions=11;blobs=22;rows=476311;bytes=16824896;raw_bytes=567412422; inactive portions=18;blobs=24;rows=765306;bytes=26937552;raw_bytes=904134929; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 72075186224037888 2024-11-21T09:18:57.521914Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037888;external_task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037888; TEvBlobStorage::TEvPut tId=72075186224037888;c=1;:73/0:size=2168;count=15;;1:size=60053;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445744;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445448;count=1;;10:size=1445376;count=1;;11:size=1445528;count=1;;12:size=4720176;count=8;;13:size=849680;count=1;;14:size=1445408;count=1;;15:size=1445608;count=1;;16:size=1445400;count=1;;17:size=1445920;count=1;;18:size=808584;count=1;;19:size=1761752;count=2;;20:size=1806632;count=3;;21:size=1761880;count=3;;22:size=978968;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72075186224037888;c=0;:73/0:size=2237;count=16;;1:size=60053;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445744;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445448;count=1;;10:size=1445376;count=1;;11:size=1445528;count=1;;12:size=4720176;count=8;;13:size=849680;count=1;;14:size=1445408;count=1;;15:size=1445608;count=1;;16:size=1445400;count=1;;17:size=1445920;count=1;;18:size=808584;count=1;;19:size=1761752;count=2;;20:size=1806632;count=3;;21:size=1761880;count=3;;22:size=978968;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2024-11-21T09:18:57.532367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2024-11-21T09:18:57.532380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=with_appended.cpp:80;portions=32,;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb; 2024-11-21T09:18:57.532437Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1945216;portion_bytes=1945216;portion_raw_bytes=65757812; 2024-11-21T09:18:57.532446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=30;before_size=19736752;after_size=17791536;before_rows=558802;after_rows=503614; 2024-11-21T09:18:57.532448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1945216;portion_bytes=1945216;portion_raw_bytes=65757812; 2024-11-21T09:18:57.532462Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=485688;portion_bytes=485688;portion_raw_bytes=16432836; 2024-11-21T09:18:57.532466Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=31;before_size=17791536;after_size=17305848;before_rows=503614;after_rows=489823; 2024-11-21T09:18:57.532469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=485688;portion_bytes=485688;portion_raw_bytes=16432836; 2024-11-21T09:18:57.532472Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=2430936;portion_bytes=2430936;portion_raw_bytes=82190648; 2024-11-21T09:18:57.532476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=3;portion=32;before_size=17305848;after_size=19736784;before_rows=489823;after_rows=558802; 2024-11-21T09:18:57.532478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=2430936;portion_bytes=2430936;portion_raw_bytes=82190648; 2024-11-21T09:18:57.532487Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:32;path_id:3;records_count:68979;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:2430936;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2024-11-21T09:18:57.532527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::a2e437e6-a7e911ef-b10e1130-6987f8cb; 2024-11-21T09:18:57.532539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:3;path_id:3;size:19736784;portions_count:32;); 2024-11-21T09:18:57.532543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:18:57.532551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:18:57.532565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T09:18:57.532571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2024-11-21T09:18:57.532575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:18:57.532581Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:57.532584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:18:57.532594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 
2024-11-21T09:18:57.532607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;tablet_id=72075186224037888;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:18:57.532641Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:20 Blob count: 1 VERIFY failed (2024-11-21T09:18:57.532660Z): tablet_id=72075186224037888;task_id=a2e437e6-a7e911ef-b10e1130-6987f8cb;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+459 (0x12339C4B) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x123313B7) ??+0 (0x12955504) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+1472 (0x23E32C90) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+1993 (0x1C3BB259) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+302 (0x141E7D5E) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+1340 (0x141E215C) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+1314 (0x14146DF2) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+786 (0x14126292) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1981 (0x1DD3148D) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+5273 (0x1DD2E3E9) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x1DD2CE51) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+3899 (0x1E0A419B) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+1650 (0x122129E2) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x122193C7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1248F82E) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+419 (0x12218CE3) NUnitTest::TTestFactory::Execute()+803 (0x1248FFA3) NUnitTest::RunMain(int, char**)+3005 (0x124A31CD) ??+0 (0x7F7386E81D90) __libc_start_main+128 (0x7F7386E81E40) _start+41 (0x11679029) >> test.py::test[pg-strings_to_pg-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Analyze] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[select-trivial_group_by-default.txt-Plan] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Analyze] >> test.py::test[pg-tpcds-q19-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Debug] >> test.py::test[expr-double_join_with_list_from_range--Debug] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] >> 
test.py::test[sampling-join_right_sample-default.txt-Analyze] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Debug] >> test.py::test[select-trivial_group_by-default.txt-Plan] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr--Analyze] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Debug] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Plan] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_code_nested-default.txt-Analyze] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[pg-tpcds-q19-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181261.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181261.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112180061.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180061.000000s;Name=;Codec=}; 
2024-11-21T09:17:41.887021Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.901911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.904294Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.904327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.904377Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.905099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.905144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.905181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.905198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.905213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.905229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.905244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.905261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.905280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.905307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.905323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.905340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.908579Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.908591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.909437Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.909514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.909523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.909551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.909628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.909641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.909647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.909655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.909664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.909671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.909675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.909691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.909698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.909707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.909710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.909719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.909725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.909733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.909737Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.909748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.909754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.909757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.909764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.909771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.909774Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.909804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:41.909813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T09:17:41.909820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:41.909830 ... 
,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.784433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.784438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2024-11-21T09:19:00.784448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:00.784504Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.01}],"full":{"a":1732180740773596,"name":"_full_task","f":1732180740773596,"d_finished":0,"c":0,"l":1732180740784455,"d":10859},"events":[{"name":"bootstrap","f":1732180740773628,"d_finished":530,"c":1,"l":1732180740774158,"d":530},{"a":1732180740784418,"name":"ack","f":1732180740783965,"d_finished":206,"c":3,"l":1732180740784382,"d":243},{"a":1732180740784417,"name":"processing","f":1732180740774295,"d_finished":730,"c":24,"l":1732180740784382,"d":768},{"name":"ProduceResults","f":1732180740773913,"d_finished":458,"c":29,"l":1732180740784436,"d":458},{"a":1732180740784436,"name":"Finish","f":1732180740784436,"d_finished":0,"c":0,"l":1732180740784455,"d":19},{"name":"task_result","f":1732180740774298,"d_finished":493,"c":21,"l":1732180740783942,"d":493}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2024-11-21T09:19:00.784521Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:00.773507Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:19:00.784526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:00.784537Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:19:00.784545Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:00.784792Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:00.784828Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T09:19:00.784845Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:00.784882Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:00.784893Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:00.784978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:953:2953];trace_detailed=; 2024-11-21T09:19:00.785023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:00.785038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:00.785050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.785055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.785073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:00.785078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.785082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:00.785085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] finished for tablet 9437184 2024-11-21T09:19:00.785089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] send ScanData to [1:952:2952] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:00.785112Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:953:2953] and sent to [1:952:2952] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180740784972,"name":"_full_task","f":1732180740784972,"d_finished":0,"c":0,"l":1732180740785093,"d":121},"events":[{"name":"bootstrap","f":1732180740784990,"d_finished":67,"c":1,"l":1732180740785057,"d":67},{"a":1732180740785071,"name":"ack","f":1732180740785071,"d_finished":0,"c":0,"l":1732180740785093,"d":22},{"a":1732180740785070,"name":"processing","f":1732180740785070,"d_finished":0,"c":0,"l":1732180740785093,"d":23},{"name":"ProduceResults","f":1732180740785046,"d_finished":18,"c":2,"l":1732180740785083,"d":18},{"a":1732180740785084,"name":"Finish","f":1732180740785084,"d_finished":0,"c":0,"l":1732180740785093,"d":9}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:953:2953]->[1:952:2952] 2024-11-21T09:19:00.785122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:00.784902Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:00.785124Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:00.785128Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:00.785131Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181263.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181263.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180063.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180063.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180063.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180063.000000s;Name=;Codec=}; 2024-11-21T09:17:43.849872Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:43.862297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:43.863899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:43.863915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:43.863943Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:43.864408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:43.864431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:43.864450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:43.864462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:43.864472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:43.864481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:43.864490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:43.864501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:43.864511Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:43.864528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:43.864538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:43.864549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:43.867192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:43.867203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:43.867828Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:43.867874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:43.867878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:43.867895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.867954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:43.867963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:43.867968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:43.867975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:43.867984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:43.867990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:43.867994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:43.868010Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.868015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:43.868020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:43.868024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:43.868030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:43.868034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:43.868038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:43.868040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:43.868047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:43.868051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:43.868053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:43.868058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:43.868062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:43.868064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:43.868082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:43.868088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:43.868093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:43.868098Z node 1 ... 
=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.025728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.025731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] finished for tablet 9437184 2024-11-21T09:19:01.025737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] send ScanData to [1:916:2916] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:01.025771Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:917:2917] and sent to [1:916:2916] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1732180741014608,"name":"_full_task","f":1732180741014608,"d_finished":0,"c":0,"l":1732180741025741,"d":11133},"events":[{"name":"bootstrap","f":1732180741014651,"d_finished":551,"c":1,"l":1732180741015202,"d":551},{"a":1732180741025718,"name":"ack","f":1732180741025323,"d_finished":173,"c":3,"l":1732180741025681,"d":196},{"a":1732180741025718,"name":"processing","f":1732180741015292,"d_finished":710,"c":24,"l":1732180741025681,"d":733},{"name":"ProduceResults","f":1732180741014898,"d_finished":447,"c":29,"l":1732180741025730,"d":447},{"a":1732180741025730,"name":"Finish","f":1732180741025730,"d_finished":0,"c":0,"l":1732180741025741,"d":11},{"name":"task_result","f":1732180741015295,"d_finished":513,"c":21,"l":1732180741025299,"d":513}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:917:2917]->[1:916:2916] 2024-11-21T09:19:01.025781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:01.014511Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:19:01.025784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:01.025790Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:19:01.025795Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:01.025988Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:01.026015Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T09:19:01.026029Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:01.026055Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:01.026061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:01.026127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:924:2924];trace_detailed=; 2024-11-21T09:19:01.026162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:01.026175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:01.026185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.026190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.026206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:01.026210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.026216Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:01.026218Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] finished for tablet 9437184 2024-11-21T09:19:01.026223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] send ScanData to [1:923:2923] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:01.026244Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:924:2924] and sent to [1:923:2923] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180741026120,"name":"_full_task","f":1732180741026120,"d_finished":0,"c":0,"l":1732180741026225,"d":105},"events":[{"name":"bootstrap","f":1732180741026137,"d_finished":55,"c":1,"l":1732180741026192,"d":55},{"a":1732180741026204,"name":"ack","f":1732180741026204,"d_finished":0,"c":0,"l":1732180741026225,"d":21},{"a":1732180741026203,"name":"processing","f":1732180741026203,"d_finished":0,"c":0,"l":1732180741026225,"d":22},{"name":"ProduceResults","f":1732180741026182,"d_finished":20,"c":2,"l":1732180741026217,"d":20},{"a":1732180741026217,"name":"Finish","f":1732180741026217,"d_finished":0,"c":0,"l":1732180741026225,"d":8}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:924:2924]->[1:923:2923] 2024-11-21T09:19:01.026251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:01.026065Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:01.026253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:01.026256Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:01.026260Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Debug] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181261.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181261.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181261.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180061.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180061.000000s;Name=;Codec=}; 2024-11-21T09:17:41.313987Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.325923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.327399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.327417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.327460Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.327896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.327921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.327938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.327950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.327960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.327970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.327978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.327989Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.328000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.328015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.328025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.328034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.330739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.330754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.331825Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.331926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.331935Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.331968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.332059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.332074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.332080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.332089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.332099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.332106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.332110Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.332135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.332142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.332149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.332154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.332164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.332170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.332177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.332181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.332194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.332201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.332226Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.332235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.332245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.332248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.332280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2024-11-21T09:17:41.332288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:41.332296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:41.33230 ... 
rollerLoadingTime=3; 2024-11-21T09:19:01.124518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:01.124521Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2024-11-21T09:19:01.124528Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=3; 2024-11-21T09:19:01.124534Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:01.124550Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13; 2024-11-21T09:19:01.124585Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=33; 2024-11-21T09:19:01.124594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:01.124599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:01.124602Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:01.124605Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:19:01.124608Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:19:01.124615Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2024-11-21T09:19:01.124619Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:19:01.124626Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=4; 2024-11-21T09:19:01.124629Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:01.124635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T09:19:01.124637Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1224; 2024-11-21T09:19:01.124650Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:01.124662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:01.124665Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:01.124668Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:01.124680Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:01.124697Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:01.124707Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:01.124712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:01.124722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:01.124725Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.124732Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.124740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:01.124748Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:01.124751Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.124758Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.124761Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.124765Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.124774Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.124892Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.124927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1578:3447];tablet_id=9437184;parent=[1:1537:3413];fline=manager.h:99;event=ask_data;request=request_id=148;1={portions_count=21};; 2024-11-21T09:19:01.125039Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:01.125257Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:01.125260Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:01.125262Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:01.125265Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.125272Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.125279Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:01.125284Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:01.125288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.125293Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.125296Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.125299Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.125307Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.125364Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:01.125435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:01.125532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:01.125534Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181261.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181261.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112181261.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181261.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112180061.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180061.000000s;Name=;Codec=}; 2024-11-21T09:17:41.987547Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:42.002810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:42.005191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:42.005215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:42.005256Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:42.005794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:42.005827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:42.005856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:42.005874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:42.005890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:42.005906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:42.005924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:42.005943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:42.005962Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:42.005992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:42.006010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:42.006028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:42.009135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:42.009153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:42.010006Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:42.010060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:42.010066Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:42.010087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.010151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:42.010161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:42.010165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:42.010173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:42.010183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:42.010190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:42.010194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:42.010209Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:42.010214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:42.010219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:42.010221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:42.010228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:42.010232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:42.010237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:42.010239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:42.010246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:42.010250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:42.010252Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:42.010259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:42.010264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:42.010266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:42.010289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:42.010296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:42.010302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:42.010310 ... 
rollerLoadingTime=4; 2024-11-21T09:19:01.282088Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:01.282092Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:19:01.282099Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:19:01.282105Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:01.282122Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14; 2024-11-21T09:19:01.282160Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=35; 2024-11-21T09:19:01.282168Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=4; 2024-11-21T09:19:01.282173Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=1; 2024-11-21T09:19:01.282177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T09:19:01.282179Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:19:01.282183Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:19:01.282190Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:19:01.282194Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:19:01.282201Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:19:01.282205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:01.282210Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:19:01.282213Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1317; 2024-11-21T09:19:01.282227Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:01.282238Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:01.282242Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:01.282246Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:01.282257Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:01.282276Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:01.282289Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:01.282294Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:01.282305Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:01.282309Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.282316Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.282327Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:01.282335Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:01.282339Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.282347Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.282350Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.282354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.282364Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.282419Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.282458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1578:3447];tablet_id=9437184;parent=[1:1537:3413];fline=manager.h:99;event=ask_data;request=request_id=148;1={portions_count=21};; 2024-11-21T09:19:01.282596Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:01.282607Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:01.282610Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:01.282612Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:01.282615Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.282619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.282623Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:01.282627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:01.282632Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.282635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.282638Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.282640Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.282646Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.282899Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:01.282979Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:01.283076Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:01.283079Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181261.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181261.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180061.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2024-11-21T09:17:41.285339Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:41.299344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:41.301491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:41.301513Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:41.301554Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:41.302079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:41.302110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:41.302134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:41.302146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:41.302158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:41.302168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:41.302178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:41.302193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:41.302205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:41.302225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.302236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:41.302246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:41.305281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:41.305295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:41.306305Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:41.306386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:41.306394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:41.306420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.306484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:41.306495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:41.306498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:41.306504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:41.306511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:41.306517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:41.306519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:41.306536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:41.306541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:41.306546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:41.306548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:41.306555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:41.306559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:41.306564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:41.306566Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:41.306574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:41.306578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:41.306581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:41.306587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:41.306592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:41.306594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:41.306620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T09:17:41.306627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:41.306633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:41.306642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T09:17:41.306657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:41.306662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:41.306664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:41.306682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:17:41.306686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:17:41.306689Z node 1 :TX_COLUM ... 
9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:01.254857Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:19:01.254864Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=5; 2024-11-21T09:19:01.254870Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2; 2024-11-21T09:19:01.254884Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11; 2024-11-21T09:19:01.254916Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=29; 2024-11-21T09:19:01.254922Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:01.254927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:01.254930Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:01.254933Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:19:01.254936Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:19:01.254943Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:19:01.254946Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:19:01.254954Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:19:01.254957Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:01.254964Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=4; 2024-11-21T09:19:01.254966Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1082; 
2024-11-21T09:19:01.254979Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=6;blobs=6;rows=160000;bytes=9495312;raw_bytes=16084618; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:01.254991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:01.254995Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:01.254998Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:01.255010Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:01.255022Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=0; 2024-11-21T09:19:01.255034Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:01.255038Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:01.255046Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:01.255050Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.255057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.255068Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:19:01.255074Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:01.255078Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.255085Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.255089Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.255097Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.255108Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 
2024-11-21T09:19:01.255167Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1248:3140];tablet_id=9437184;parent=[1:1211:3110];fline=manager.h:99;event=ask_data;request=request_id=96;1={portions_count=12};; 2024-11-21T09:19:01.255233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.255241Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1248:3140];tablet_id=9437184;parent=[1:1211:3110];fline=manager.h:99;event=ask_data;request=request_id=98;1={portions_count=6};; 2024-11-21T09:19:01.255483Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:01.255497Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:01.255500Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:01.255502Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:01.255505Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.255510Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.255516Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T09:19:01.255520Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:01.255524Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.255528Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.255530Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.255533Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.255541Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.255589Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2024-11-21T09:19:01.255634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2024-11-21T09:19:01.255691Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:01.255696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1211:3110];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 0/0 160000/9495312 >> test.py::test[action-eval_code_nested-default.txt-Analyze] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-one-Analyze] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] >> test.py::test[sampling-join_right_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] >> test.py::test[action-eval_code_nested-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181263.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181263.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181263.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180063.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181263.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181263.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180063.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180063.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180063.000000s;Name=;Codec=}; 2024-11-21T09:17:43.915662Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:43.930090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:43.931859Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:43.931883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:43.931929Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:43.932513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:43.932543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:43.932566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:43.932578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:43.932588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:43.932599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:43.932613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:43.932634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:43.932649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:43.932677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:43.932689Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:43.932702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:43.935718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:43.935730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:43.936594Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:43.936663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:43.936670Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:43.936694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.936765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:43.936774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:43.936778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:43.936783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:43.936790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:43.936794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:43.936797Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:43.936807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:43.936811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:43.936815Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:43.936817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:43.936824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:43.936828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:43.936832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:43.936835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:43.936842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:43.936846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:43.936848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:43.936854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:43.936859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:43.936863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:43.936891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T09:17:43.936900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:43.936906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:43.936916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T09:17:43.936935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:43.936943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:43.936947Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:43.936964Z n ... trollerLoadingTime=3; 2024-11-21T09:19:01.606102Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:01.606105Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2024-11-21T09:19:01.606116Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=7; 2024-11-21T09:19:01.606121Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2; 2024-11-21T09:19:01.606133Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10; 2024-11-21T09:19:01.606165Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=29; 2024-11-21T09:19:01.606171Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=2; 2024-11-21T09:19:01.606176Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:01.606179Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:01.606182Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:19:01.606185Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:19:01.606192Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:19:01.606195Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:19:01.606201Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=4; 2024-11-21T09:19:01.606205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:01.606211Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T09:19:01.606213Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1035; 2024-11-21T09:19:01.606225Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=12;blobs=18;rows=320000;bytes=18990624;raw_bytes=32169236; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:01.606236Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:01.606240Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:01.606243Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:01.606252Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:01.606265Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:01.606275Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:01.606278Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:01.606287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:01.606290Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.606296Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.606305Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T09:19:01.606311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:01.606314Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.606321Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.606324Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 
2024-11-21T09:19:01.606328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.606337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.606385Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.606392Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1413:3282];tablet_id=9437184;parent=[1:1374:3250];fline=manager.h:99;event=ask_data;request=request_id=117;1={portions_count=12};; 2024-11-21T09:19:01.606489Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:01.606521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:01.606523Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:01.606525Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:01.606528Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:01.606532Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:01.606536Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T09:19:01.606540Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:01.606542Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:01.606546Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.606548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:01.606551Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:01.606556Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:01.606832Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2024-11-21T09:19:01.606881Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2024-11-21T09:19:01.606938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:01.606940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] >> ColumnShardTiers::DSConfigsStub |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[action-eval_code_nested-default.txt-Debug] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-ForceBlocks] >> test.py::test[select-trivial_where-one-Analyze] [GOOD] >> test.py::test[select-trivial_where-one-Debug] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> test.py::test[pg-tpcds-q28-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Debug] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Plan] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Results] >> ColumnShardTiers::TieringUsage |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[action-eval_code_nested-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-Plan] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-Results] >> test.py::test[select-trivial_where-one-Debug] [GOOD] >> test.py::test[select-trivial_where-one-ForceBlocks] >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> 
RetryPolicy::TWriteSession_RetryOnTargetCluster |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[action-eval_code_nested-default.txt-Results] [GOOD] >> test.py::test[action-eval_on_modif_table_fail--Analyze] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Debug] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Plan] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Analyze] >> test.py::test[action-eval_on_modif_table_fail--Plan] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Results] >> test.py::test[select-trivial_where-one-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-one-Plan] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> test.py::test[pg-tpcds-q28-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[expr-double_join_with_list_from_range--Results] [GOOD] >> test.py::test[expr-empty_iterator2--Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Analyze] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Debug] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[action-eval_python3_ann--Analyze] [SKIPPED] >> test.py::test[action-eval_python3_ann--Debug] [SKIPPED] >> test.py::test[action-eval_python3_ann--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_python3_ann--Plan] [SKIPPED] >> test.py::test[action-eval_python3_ann--Results] [SKIPPED] >> test.py::test[action-eval_variant-default.txt-Analyze] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[select-unlabeled--Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> test.py::test[expr-empty_iterator2--Analyze] [GOOD] >> test.py::test[expr-empty_iterator2--Debug] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] >> test.py::test[action-eval_variant-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_variant-default.txt-Debug] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] >> test.py::test[select-unlabeled--Analyze] [GOOD] >> test.py::test[select-unlabeled--Debug] >> test.py::test[expr-empty_iterator2--Debug] [GOOD] >> test.py::test[expr-empty_iterator2--ForceBlocks] >> test.py::test[action-eval_variant-default.txt-Debug] [GOOD] >> test.py::test[action-eval_variant-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestSchema::HotTiers [GOOD] >> test.py::test[select-unlabeled--Debug] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Plan] [GOOD] >> 
test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180064.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180064.000000s;Name=;Codec=}; 2024-11-21T09:17:44.870278Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:44.883389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:44.885005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:44.885021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:44.885050Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:44.885476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:44.885500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:44.885517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:44.885528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:44.885537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:44.885547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:44.885556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:44.885568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:44.885583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:44.885594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:44.885603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:44.885614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:44.888346Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:44.888357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:44.889237Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:44.889289Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:44.889296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:44.889314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:44.889374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:44.889382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:44.889385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:44.889391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:44.889397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:44.889402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:44.889405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:44.889415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:44.889419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:44.889423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:44.889426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:44.889431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:44.889435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:44.889441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:44.889443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:44.889450Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:44.889454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:44.889456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:44.889463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:44.889467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:44.889469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:44.889489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:44.889494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:44.889500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:44.889506 ... ;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:06.051472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2024-11-21T09:19:06.051480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:06.051527Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":1732180746032380,"name":"_full_task","f":1732180746032380,"d_finished":0,"c":0,"l":1732180746051486,"d":19106},"events":[{"name":"bootstrap","f":1732180746032450,"d_finished":943,"c":1,"l":1732180746033393,"d":943},{"a":1732180746051456,"name":"ack","f":1732180746049854,"d_finished":258,"c":3,"l":1732180746051196,"d":288},{"a":1732180746051455,"name":"processing","f":1732180746033684,"d_finished":815,"c":24,"l":1732180746051196,"d":846},{"name":"ProduceResults","f":1732180746033079,"d_finished":522,"c":29,"l":1732180746051471,"d":522},{"a":1732180746051471,"name":"Finish","f":1732180746051471,"d_finished":0,"c":0,"l":1732180746051486,"d":15},{"name":"task_result","f":1732180746033686,"d_finished":528,"c":21,"l":1732180746049824,"d":528}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2024-11-21T09:19:06.051542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:06.031977Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:19:06.051545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:06.051559Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.015852s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.015615s;size=3.792e-06;details={columns=1;};};]};; 2024-11-21T09:19:06.051563Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:06.052493Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:06.052536Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T09:19:06.052557Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:06.052599Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:06.052609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:06.052686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:953:2953];trace_detailed=; 2024-11-21T09:19:06.052732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:06.052746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:06.052761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:06.052765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:06.052783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:06.052790Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:06.052794Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:06.052796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] finished for tablet 9437184 2024-11-21T09:19:06.052801Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] send ScanData to [1:952:2952] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:06.052825Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:953:2953] and sent to [1:952:2952] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180746052680,"name":"_full_task","f":1732180746052680,"d_finished":0,"c":0,"l":1732180746052804,"d":124},"events":[{"name":"bootstrap","f":1732180746052699,"d_finished":68,"c":1,"l":1732180746052767,"d":68},{"a":1732180746052781,"name":"ack","f":1732180746052781,"d_finished":0,"c":0,"l":1732180746052804,"d":23},{"a":1732180746052780,"name":"processing","f":1732180746052780,"d_finished":0,"c":0,"l":1732180746052804,"d":24},{"name":"ProduceResults","f":1732180746052756,"d_finished":19,"c":2,"l":1732180746052795,"d":19},{"a":1732180746052795,"name":"Finish","f":1732180746052795,"d_finished":0,"c":0,"l":1732180746052804,"d":9}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:953:2953]->[1:952:2952] 2024-11-21T09:19:06.052840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:06.052617Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:06.052843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:06.052846Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:06.052850Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> test.py::test[expr-empty_iterator2--ForceBlocks] [GOOD] >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> test.py::test[expr-empty_iterator2--Plan] [GOOD] >> test.py::test[expr-empty_iterator2--Results] >> test.py::test[action-eval_variant-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_variant-default.txt-Plan] [GOOD] >> test.py::test[action-eval_variant-default.txt-Results] >> test.py::test[pg-tpcds-q68-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181264.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180064.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180064.000000s;Name=;Codec=}; 2024-11-21T09:17:45.032398Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:45.047912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:45.050051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:45.050076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:45.050135Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:45.050611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:45.050637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:45.050661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:45.050673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:45.050683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:45.050694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:45.050704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:45.050714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:45.050729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:45.050746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:45.050758Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:45.050769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:45.053745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:45.053759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:45.054600Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:45.054658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:45.054665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:45.054691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:45.054769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:45.054778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:45.054782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:45.054788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:45.054794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:45.054799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:45.054801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:45.054811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:45.054816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:45.054820Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:45.054823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:45.054829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:45.054834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:45.054839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:45.054842Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:45.054849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:45.054853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:45.054855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:45.054860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:45.054864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:45.054867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:45.054887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:17:45.054893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:45.054898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:45.054905Z node 1 ... 
trollerLoadingTime=4; 2024-11-21T09:19:06.295853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:06.295857Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:19:06.295864Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=3; 2024-11-21T09:19:06.295870Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:06.295887Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14; 2024-11-21T09:19:06.295925Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=36; 2024-11-21T09:19:06.295933Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:06.295938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:06.295941Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T09:19:06.295944Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:19:06.295947Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:19:06.295955Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2024-11-21T09:19:06.295958Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:19:06.295966Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:19:06.295969Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:06.295974Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T09:19:06.295977Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1275; 2024-11-21T09:19:06.295991Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:06.296001Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:06.296005Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:06.296009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:06.296020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:06.296036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:06.296048Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:06.296052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:06.296063Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:06.296066Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.296073Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.296084Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:19:06.296092Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:06.296097Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.296105Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.296109Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.296112Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.296124Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.296246Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.296283Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1531:3400];tablet_id=9437184;parent=[1:1492:3368];fline=manager.h:99;event=ask_data;request=request_id=140;1={portions_count=21};; 2024-11-21T09:19:06.296354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:06.296579Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:06.296583Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:06.296585Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:06.296588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.296593Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.296599Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:19:06.296604Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:06.296609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.296613Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.296615Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.296618Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.296626Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.296674Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:06.296742Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:06.296843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:06.296846Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> TColumnShardTestSchema::RebootColdTiers [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[select-unlabeled--Plan] [GOOD] >> test.py::test[select-unlabeled--Results] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181264.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181264.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181264.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180064.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180064.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180064.000000s;Name=;Codec=}; 2024-11-21T09:17:45.033788Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:45.048963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:45.051635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:45.051658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:45.051697Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:45.052344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:45.052378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:45.052406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:45.052424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:45.052440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:45.052456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:45.052471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:45.052488Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:45.052513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:45.052529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:45.052545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:45.052561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:45.056606Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:45.056622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:45.057471Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:45.057532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:45.057539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:45.057564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:45.057636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:45.057648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:45.057653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:45.057662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:45.057670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:45.057677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:45.057681Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:45.057697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:45.057704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:45.057712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:45.057716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:45.057725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:45.057731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:45.057738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:45.057742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:45.057752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:45.057758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:45.057762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:45.057769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:45.057775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:45.057779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:45.057807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T09:17:45.057815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T09:17:45.057823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T09:17:45.057832 ... 
rollerLoadingTime=4; 2024-11-21T09:19:06.538689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:06.538692Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2024-11-21T09:19:06.538698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:19:06.538704Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:06.538719Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12; 2024-11-21T09:19:06.538755Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=33; 2024-11-21T09:19:06.538761Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:06.538765Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:06.538769Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:06.538771Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:19:06.538775Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:19:06.538780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:19:06.538783Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:19:06.538791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=3; 2024-11-21T09:19:06.538794Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T09:19:06.538800Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:19:06.538802Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1193; 2024-11-21T09:19:06.538815Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:06.538825Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:06.538829Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:06.538832Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:06.538841Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:06.538858Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:06.538868Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:06.538872Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:06.538881Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:06.538885Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.538891Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.538900Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:06.538906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:06.538910Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.538917Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.538920Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.538924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.538934Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.538978Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.539061Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1578:3447];tablet_id=9437184;parent=[1:1537:3413];fline=manager.h:99;event=ask_data;request=request_id=148;1={portions_count=21};; 2024-11-21T09:19:06.539133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:06.539180Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:06.539183Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:06.539185Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:06.539188Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.539193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.539198Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:06.539202Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:06.539205Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.539209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.539212Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.539215Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.539221Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.539447Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:06.539516Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:06.539612Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:06.539616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181266.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181266.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181266.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181266.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180066.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181266.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181266.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180066.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180066.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180066.000000s;Name=;Codec=}; 2024-11-21T09:17:46.675522Z node 1 :BLOB_CACHE NOTICE: 
MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:46.687489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:46.689117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:46.689136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:46.689168Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:46.689619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:46.689646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:46.689664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:46.689676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:46.689686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:46.689697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:46.689706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:46.689721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:46.689735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:46.689751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:46.689761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:46.689772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:46.692498Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:46.692510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:46.693279Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:46.693329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:46.693336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:46.693357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.693414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:46.693423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:46.693426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:46.693432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:46.693438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:46.693443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:46.693445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:46.693455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:46.693460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:46.693464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:46.693466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:46.693473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:46.693477Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:46.693481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:46.693484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:46.693491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:46.693495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:46.693497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:46.693504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:46.693507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:46.693510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:46.693531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:46.693537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:46.693542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 2024-11-21T09:17:46.693549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:17:46.693563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:46.693567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:46.693570Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:46.693587Z n ... 
trollerLoadingTime=4; 2024-11-21T09:19:06.714486Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:06.714490Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:19:06.714496Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:19:06.714502Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2; 2024-11-21T09:19:06.714517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12; 2024-11-21T09:19:06.714550Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=31; 2024-11-21T09:19:06.714557Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:06.714562Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:06.714566Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2024-11-21T09:19:06.714569Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:19:06.714573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2024-11-21T09:19:06.714579Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=4; 2024-11-21T09:19:06.714582Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:19:06.714589Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=4; 2024-11-21T09:19:06.714592Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:06.714599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T09:19:06.714602Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1106; 2024-11-21T09:19:06.714615Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=12;blobs=18;rows=320000;bytes=18990624;raw_bytes=32169236; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:06.714626Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:06.714630Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:06.714633Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:06.714644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:06.714659Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:06.714669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:06.714673Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:06.714683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:06.714686Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.714692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.714701Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T09:19:06.714708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:06.714711Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.714717Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.714721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.714725Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.714734Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.714802Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.714856Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1413:3282];tablet_id=9437184;parent=[1:1374:3250];fline=manager.h:99;event=ask_data;request=request_id=117;1={portions_count=12};; 2024-11-21T09:19:06.714930Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:06.715174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:06.715178Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:06.715180Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:06.715183Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:06.715188Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:06.715193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T09:19:06.715198Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T09:19:06.715202Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:06.715206Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.715208Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:06.715212Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:06.715218Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:06.715293Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2024-11-21T09:19:06.715367Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2024-11-21T09:19:06.715463Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:06.715468Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 >> test.py::test[action-eval_variant-default.txt-Results] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[expr-empty_iterator2--Results] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Analyze] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Analyze] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Analyze] >> test.py::test[pg-tpcds-q68-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181268.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181268.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181268.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181268.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181268.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181268.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180068.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181268.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181268.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180068.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180068.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180068.000000s;Name=;Codec=}; 2024-11-21T09:17:48.651131Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:48.664389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:48.666036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:48.666055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:48.666090Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:48.666534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:48.666557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:48.666577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:48.666588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:48.666598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:48.666608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:48.666618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:48.666628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:48.666639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:48.666655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:48.666665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:48.666676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:48.669467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:48.669480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:48.670215Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:48.670266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:48.670270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:48.670292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:48.670360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:48.670369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:48.670372Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:48.670378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:48.670385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:48.670388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:48.670391Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:48.670401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:48.670406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:48.670410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:48.670412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:48.670419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:48.670423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:48.670427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:48.670429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:48.670436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:48.670440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:48.670443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:48.670448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:48.670453Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:48.670455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:48.670478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T09:17:48.670483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:48.670489Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:48.670495Z node 1 :TX_COL ... rollerLoadingTime=4; 2024-11-21T09:19:07.474991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:07.474994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2024-11-21T09:19:07.475002Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=4; 2024-11-21T09:19:07.475007Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:07.475025Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14; 2024-11-21T09:19:07.475059Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=31; 2024-11-21T09:19:07.475067Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=4; 2024-11-21T09:19:07.475072Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=2; 2024-11-21T09:19:07.475076Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:07.475079Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T09:19:07.475083Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:19:07.475089Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=3; 2024-11-21T09:19:07.475092Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T09:19:07.475100Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:19:07.475106Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T09:19:07.475115Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2024-11-21T09:19:07.475118Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1266; 2024-11-21T09:19:07.475136Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:07.475149Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:07.475155Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:07.475160Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:07.475175Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:07.475191Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:07.475203Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:07.475208Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:07.475217Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:07.475221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:07.475228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:07.475236Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:07.475243Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:07.475247Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:07.475253Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:07.475256Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:07.475260Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:07.475270Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:07.475387Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:07.475414Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1578:3447];tablet_id=9437184;parent=[1:1537:3413];fline=manager.h:99;event=ask_data;request=request_id=148;1={portions_count=21};; 2024-11-21T09:19:07.475473Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:07.475522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:07.475525Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T09:19:07.475527Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:07.475530Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:07.475534Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:07.475538Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T09:19:07.475543Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:07.475548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:07.475551Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:07.475554Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:07.475557Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:07.475563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:07.475780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:07.475854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:07.475962Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:07.475965Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1537:3413];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> test.py::test[action-subquery_accessnode-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Debug] >> TBackupCollectionWithRebootsTests::DropWithReboots >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Analyze] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_part-Analyze] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Debug] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Analyze] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Debug] >> test_public_api.py::TestDocApiTables::test_create_table >> test.py::test[pg-tpcds-q68-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Results] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> test.py::test[pg-tpcds-q68-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Analyze] >> test.py::test[action-subquery_accessnode-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-ForceBlocks] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec_part-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Debug] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[pg-tpcds-q82-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Debug] |95.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Results] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Results] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Plan] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:03.011435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:03.011451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:03.011454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:03.011457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:03.011460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:03.011463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:03.011468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:03.011531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:03.018193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:03.018204Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:03.019471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:03.019527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:03.019556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:03.020985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:03.021036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:03.021094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.021218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:03.021623Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:03.021799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:03.021804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:03.021812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:03.021816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:03.021820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:03.021848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:03.022544Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:03.032571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:03.032623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.032659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:03.032689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:03.032694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:03.033089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:03.033097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:03.033100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:03.033326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:19:03.033331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:03.033524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.033533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.033537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.033894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:03.034146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:03.034173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:03.034279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.034295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:03.034300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.034332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:03.034336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.034354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:03.034361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:03.034634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:03.034640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:03.034663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:19:03.034666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:03.034718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.034722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:03.034729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:03.034731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.034735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:03.034738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.034741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:03.034743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:03.034750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:03.034753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:03.034755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.089216Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:19:09.089390Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.089401Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.089405Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:19:09.089638Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:19:09.089646Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:09.089700Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:19:09.089719Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2024-11-21T09:19:09.089723Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2024-11-21T09:19:09.089728Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2024-11-21T09:19:09.089770Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.089778Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.089782Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:19:09.089786Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:19:09.089792Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:19:09.089800Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:19:09.090023Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.090031Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:09.090066Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:19:09.090081Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2024-11-21T09:19:09.090085Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:19:09.090090Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:19:09.090094Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:19:09.090098Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:19:09.090102Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:19:09.090118Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:19:09.090122Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:19:09.090125Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:19:09.090130Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:19:09.090133Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:19:09.090137Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:19:09.090143Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:19:09.090250Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.090481Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.090496Z node 26 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.090500Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.090505Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.091102Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:19:09.091794Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } TabletId: 72075186233409546 State: 4 2024-11-21T09:19:09.091808Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:19:09.092115Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:19:09.092191Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2024-11-21T09:19:09.092262Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:19:09.092305Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2024-11-21T09:19:09.092698Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:19:09.092705Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:19:09.092716Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:19:09.092720Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:19:09.092725Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:19:09.093162Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:19:09.093175Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T09:19:09.093372Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:19:09.093415Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:19:09.093421Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:19:09.093568Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, 
txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:19:09.093583Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:19:09.093588Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:616:2545] 2024-11-21T09:19:09.094340Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 111669152023 } TabletId: 72075186233409547 State: 4 2024-11-21T09:19:09.094354Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:19:09.094640Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:19:09.094706Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2024-11-21T09:19:09.094739Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:09.094776Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:19:09.095108Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:19:09.095120Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:19:09.095130Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:09.095539Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:19:09.095549Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2024-11-21T09:19:09.095578Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T09:19:09.095629Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:19:09.095640Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> test.py::test[expr-expr_named_yql_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Results] >> test.py::test[action-subquery_accessnode-default.txt-Results] [GOOD] >> test.py::test[agg_apply-max-default.txt-Analyze] >> test.py::test[pg-tpcds-q82-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] >> test.py::test[expr-expr_named_yql_lambda-default.txt-Results] [GOOD] >> test.py::test[expr-list_comp-default.txt-Analyze] >> test.py::test[schema-select_all-row_spec_part-Results] [GOOD] >> 
test.py::test[schema-user_schema_bind-default.txt-Analyze] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[agg_apply-max-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-max-default.txt-Debug] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Results] >> test.py::test[expr-list_comp-default.txt-Analyze] [GOOD] >> test.py::test[expr-list_comp-default.txt-Debug] >> test.py::test[schema-user_schema_bind-default.txt-Analyze] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Debug] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Debug] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test.py::test[pg-tpcds-q82-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Analyze] >> test.py::test[join-alias_where_group--Plan] >> test.py::test[agg_apply-max-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-max-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_bind-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q86-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Debug] >> TBackupCollectionWithRebootsTests::DropWithReboots [GOOD] >> TStorageTenantTest::Empty [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> test.py::test[join-alias_where_group--Plan] [GOOD] >> test.py::test[join-alias_where_group--Results] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |95.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> test.py::test[agg_apply-max-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-max-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-max-default.txt-Results] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Plan] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Results] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |95.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::DropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:08.192264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:08.192285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:08.192289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:08.192293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:08.192304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:08.192306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:08.192313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:08.192368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:08.204583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:08.204599Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:08.206964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:08.207544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:08.207565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:08.208590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:08.208638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:08.211091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:08.211883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:08.212870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:08.217098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:08.217110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:08.217141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:08.217146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:08.217151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:08.217171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2024-11-21T09:19:08.218139Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:08.230988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:08.232875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.233401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:08.233442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:08.233447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.233950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:08.233989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:08.234034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.234043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:08.234046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:08.234050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:08.234498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.235042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:08.235050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:08.235338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.235345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.235350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:08.235354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:08.235761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:08.236036Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:08.237001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:08.237138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:08.237155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:08.238152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:08.239158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:08.239165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:08.239192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:08.239201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:08.239567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:08.239572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:08.239601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:08.239605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:08.239659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:08.239664Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:08.239672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:08.239674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:08.239678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:08.239681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:08.239684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:08.239686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:08.239693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 2 2024-11-21T09:19:08.239697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:08.239700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:08.239925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:08.239936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:08.239940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:08.239945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:08.239949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:08.239958Z node 1 ... 11-21T09:19:11.748162Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:19:11.748167Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 10 2024-11-21T09:19:11.748170Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:19:11.748376Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.748385Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.748388Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:19:11.748390Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T09:19:11.748393Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:19:11.748399Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T09:19:11.748607Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T09:19:11.748620Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 
2024-11-21T09:19:11.748704Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:11.748718Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:11.748724Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T09:19:11.748736Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:19:11.748746Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T09:19:11.748761Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:19:11.748766Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:19:11.748981Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749010Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T09:19:11.749223Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:11.749228Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:19:11.749248Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T09:19:11.749263Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:11.749266Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T09:19:11.749269Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T09:19:11.749300Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T09:19:11.749304Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T09:19:11.749310Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T09:19:11.749312Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:19:11.749316Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T09:19:11.749319Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:19:11.749321Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T09:19:11.749323Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1006:0 2024-11-21T09:19:11.749330Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:19:11.749333Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T09:19:11.749335Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T09:19:11.749337Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T09:19:11.749371Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749376Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749379Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:19:11.749381Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T09:19:11.749384Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:19:11.749407Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:19:11.749410Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:19:11.749414Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:19:11.749430Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749437Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749439Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:19:11.749441Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T09:19:11.749443Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:19:11.749448Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T09:19:11.749910Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:19:11.749926Z 
node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:19:11.749932Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T09:19:11.749959Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T09:19:11.749962Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T09:19:11.749997Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T09:19:11.750007Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T09:19:11.750010Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:436:2428] TestWaitNotification: OK eventTxId 1006 2024-11-21T09:19:11.750051Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:19:11.750067Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 22us result status StatusPathDoesNotExist 2024-11-21T09:19:11.750086Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test.py::test[expr-list_comp-default.txt-Debug] [GOOD] >> test.py::test[expr-list_comp-default.txt-ForceBlocks] >> test.py::test[agg_apply-max-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-Analyze] >> test.py::test[schema-user_schema_bind-default.txt-Results] [GOOD] >> test.py::test[select-braces-default.txt-Analyze] >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> 
test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Analyze] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Results] [SKIPPED] >> test.py::test[tpch-q5-default.txt-Analyze] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 [GOOD] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |95.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Analyze] >> TFileStoreWithReboots::CreateAlterChannels >> test.py::test[pg-tpcds-q86-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:09.213554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:09.213571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:09.213574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:09.213576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:09.213585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:09.213588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:09.213593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:09.213643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:09.220614Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:09.220626Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:09.221914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:09.221952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:09.221969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:09.222877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:09.222915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:09.222983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:09.223026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:09.223283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:09.223475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:09.223481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:09.223502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:09.223506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:09.223510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:09.223521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.224409Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:09.233740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:09.233795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.233826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:09.233852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:09.233857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.234508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:09.234529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:09.234562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.234569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:09.234572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:09.234575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:09.234859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.234866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:09.234869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:09.235081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.235087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.235090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:09.235094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:09.235453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:09.235730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:09.235764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:09.235856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:09.235870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:09.235887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:09.235919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:09.235923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:09.235940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:09.235946Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:09.236230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:09.236234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:09.236259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:09.236262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:09.236303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:09.236308Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:09.236314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:09.236316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:09.236320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:09.236323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:09.236326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:09.236328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:09.236335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:09.236339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:09.236341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:09.236519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:09.236527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:09.236530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:09.236533Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:09.236536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:09.236542Z node 1 ... 
2024-11-21T09:19:12.768921Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:19:12.768923Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T09:19:12.768925Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:19:12.769098Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.769106Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.769108Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:19:12.769110Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T09:19:12.769112Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:19:12.769120Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T09:19:12.769398Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T09:19:12.769421Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T09:19:12.769484Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:12.769497Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:12.769504Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1004:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T09:19:12.769517Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:19:12.769526Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 1 -> 240 2024-11-21T09:19:12.769553Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:19:12.769559Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:19:12.769704Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.769884Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:19:12.770154Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:12.770163Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:19:12.770179Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:19:12.770193Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:12.770196Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:19:12.770199Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T09:19:12.770231Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:19:12.770235Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:19:12.770255Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:19:12.770257Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:19:12.770260Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:19:12.770264Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:19:12.770267Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:19:12.770269Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:19:12.770277Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:19:12.770280Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T09:19:12.770282Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T09:19:12.770284Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:19:12.770332Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.770341Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.770344Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
1004 2024-11-21T09:19:12.770346Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:19:12.770349Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:19:12.770385Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:19:12.770389Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:19:12.770393Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:19:12.770413Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.770418Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.770420Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:19:12.770422Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T09:19:12.770424Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:19:12.770429Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:19:12.770924Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:19:12.770940Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:19:12.770947Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:19:12.770972Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:19:12.770976Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:19:12.771012Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:19:12.771024Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:19:12.771027Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:382:2374] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:19:12.771067Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:19:12.771088Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 27us result status StatusPathDoesNotExist 2024-11-21T09:19:12.771109Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[agg_apply-sum_unsigned-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-Debug] >> test.py::test[select-braces-default.txt-Analyze] [GOOD] >> test.py::test[select-braces-default.txt-Debug] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> TColumnShardTestSchema::ColdTiers [GOOD] >> test.py::test[tpch-q5-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q5-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180076.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181276.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181276.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180076.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180076.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180076.000000s;Name=;Codec=}; 2024-11-21T09:17:56.795899Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:56.809556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:56.811265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:56.811283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:56.811314Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:56.811774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:56.811801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:56.811823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:56.811834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:56.811845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:56.811855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:56.811865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:56.811880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:56.811900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:56.811910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:56.811919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:56.811931Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:56.814898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:56.814909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:56.815473Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:56.815513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:56.815519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:56.815534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:56.815586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:56.815593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:56.815596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:56.815601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:56.815608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:56.815612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:56.815614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:56.815623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:56.815628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:56.815631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:56.815634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:56.815639Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:56.815644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:56.815648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:56.815650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:56.815657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:56.815660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:56.815662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:56.815668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:56.815672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:56.815674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:56.815693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:56.815699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:17:56.815704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:56.815711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T09:17:56.815723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:17:56.815727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:17:56.815730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:17:56.815742Z n ... 
,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] finished for tablet 9437184 2024-11-21T09:19:13.289262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] send ScanData to [1:797:2797] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:13.289301Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:798:2798] and sent to [1:797:2797] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.01}],"full":{"a":1732180753278533,"name":"_full_task","f":1732180753278533,"d_finished":0,"c":0,"l":1732180753289267,"d":10734},"events":[{"name":"bootstrap","f":1732180753278566,"d_finished":481,"c":1,"l":1732180753279047,"d":481},{"a":1732180753289241,"name":"ack","f":1732180753288808,"d_finished":179,"c":3,"l":1732180753289207,"d":205},{"a":1732180753289240,"name":"processing","f":1732180753279056,"d_finished":686,"c":24,"l":1732180753289207,"d":713},{"name":"ProduceResults","f":1732180753278807,"d_finished":432,"c":29,"l":1732180753289253,"d":432},{"a":1732180753289253,"name":"Finish","f":1732180753289253,"d_finished":0,"c":0,"l":1732180753289267,"d":14},{"name":"task_result","f":1732180753279058,"d_finished":480,"c":21,"l":1732180753288781,"d":480}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:798:2798]->[1:797:2797] 2024-11-21T09:19:13.289310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:13.278446Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T09:19:13.289314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:13.289322Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:19:13.289327Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:13.289546Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:13.289582Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T09:19:13.289599Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:13.289635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:13.289644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:13.289721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:805:2805];trace_detailed=; 2024-11-21T09:19:13.289764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:13.289779Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:13.289791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:13.289818Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289824Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.289827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] finished for tablet 9437184 2024-11-21T09:19:13.289832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] send ScanData to [1:804:2804] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:13.289855Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:805:2805] and sent to [1:804:2804] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180753289715,"name":"_full_task","f":1732180753289715,"d_finished":0,"c":0,"l":1732180753289835,"d":120},"events":[{"name":"bootstrap","f":1732180753289733,"d_finished":66,"c":1,"l":1732180753289799,"d":66},{"a":1732180753289811,"name":"ack","f":1732180753289811,"d_finished":0,"c":0,"l":1732180753289835,"d":24},{"a":1732180753289810,"name":"processing","f":1732180753289810,"d_finished":0,"c":0,"l":1732180753289835,"d":25},{"name":"ProduceResults","f":1732180753289787,"d_finished":21,"c":2,"l":1732180753289825,"d":21},{"a":1732180753289825,"name":"Finish","f":1732180753289825,"d_finished":0,"c":0,"l":1732180753289835,"d":10}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:805:2805]->[1:804:2804] 2024-11-21T09:19:13.289863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:13.289651Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:13.289865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:13.289869Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:13.289872Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181276.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180076.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181276.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181276.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180076.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180076.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180076.000000s;Name=;Codec=}; 2024-11-21T09:17:56.570823Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:56.582602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:56.584776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:56.584801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:56.584844Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:56.585423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:56.585450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:56.585474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:56.585485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:56.585495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:56.585505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:56.585514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:56.585525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:56.585538Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:56.585554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:56.585564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:56.585574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:56.588182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:56.588192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:56.588829Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:56.588887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:56.588892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:56.588907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:56.588971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:56.588978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:56.588981Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:56.588987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:56.588993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:56.588997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:56.589000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:56.589009Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:56.589013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:56.589019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:56.589021Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:56.589027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:56.589031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:56.589035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:56.589037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:56.589057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:56.589061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:56.589064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:56.589069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:56.589073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:56.589075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:56.589095Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:56.589101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:56.589105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:17:5 ... 
;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.213369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] finished for tablet 9437184 2024-11-21T09:19:13.213376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] send ScanData to [1:916:2916] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:13.213424Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:917:2917] and sent to [1:916:2916] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1732180753201101,"name":"_full_task","f":1732180753201101,"d_finished":0,"c":0,"l":1732180753213382,"d":12281},"events":[{"name":"bootstrap","f":1732180753201178,"d_finished":836,"c":1,"l":1732180753202014,"d":836},{"a":1732180753213354,"name":"ack","f":1732180753212887,"d_finished":201,"c":3,"l":1732180753213320,"d":229},{"a":1732180753213354,"name":"processing","f":1732180753205262,"d_finished":876,"c":24,"l":1732180753213321,"d":904},{"name":"ProduceResults","f":1732180753201603,"d_finished":533,"c":29,"l":1732180753213367,"d":533},{"a":1732180753213367,"name":"Finish","f":1732180753213367,"d_finished":0,"c":0,"l":1732180753213382,"d":15},{"name":"task_result","f":1732180753205266,"d_finished":636,"c":21,"l":1732180753212859,"d":636}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:917:2917]->[1:916:2916] 2024-11-21T09:19:13.213435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:13.200980Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:19:13.213440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:13.213457Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.010201s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.010208s;size=3.792e-06;details={columns=1;};};]};; 2024-11-21T09:19:13.213465Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:13.213727Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:13.213768Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T09:19:13.213792Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:13.213839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:13.213849Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:13.213933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:924:2924];trace_detailed=; 2024-11-21T09:19:13.213982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:13.213997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:13.214010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.214015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.214035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:13.214042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.214046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:13.214049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] finished for tablet 9437184 2024-11-21T09:19:13.214055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] send ScanData to [1:923:2923] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:13.214080Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:924:2924] and sent to [1:923:2923] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180753213926,"name":"_full_task","f":1732180753213926,"d_finished":0,"c":0,"l":1732180753214058,"d":132},"events":[{"name":"bootstrap","f":1732180753213946,"d_finished":71,"c":1,"l":1732180753214017,"d":71},{"a":1732180753214033,"name":"ack","f":1732180753214033,"d_finished":0,"c":0,"l":1732180753214058,"d":25},{"a":1732180753214032,"name":"processing","f":1732180753214032,"d_finished":0,"c":0,"l":1732180753214058,"d":26},{"name":"ProduceResults","f":1732180753214006,"d_finished":20,"c":2,"l":1732180753214048,"d":20},{"a":1732180753214048,"name":"Finish","f":1732180753214048,"d_finished":0,"c":0,"l":1732180753214058,"d":10}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:924:2924]->[1:923:2923] 2024-11-21T09:19:13.214088Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:13.213857Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:13.214091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:13.214094Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:13.214099Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 |95.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-anyjoin_merge_nodup--Analyze] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Debug] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-ForceBlocks] >> test.py::test[select-braces-default.txt-Debug] [GOOD] >> test.py::test[select-braces-default.txt-ForceBlocks] >> test.py::test[expr-list_comp-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-list_comp-default.txt-Results] |95.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots |95.3%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[tpch-q5-default.txt-Debug] [GOOD] >> test.py::test[tpch-q5-default.txt-ForceBlocks] >> test.py::test[agg_apply-sum_unsigned-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_unsigned-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[select-braces-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-braces-default.txt-Plan] [GOOD] >> test.py::test[select-braces-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[join-anyjoin_merge_nodup--Debug] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] |95.3%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[agg_apply-sum_unsigned-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Analyze] >> test.py::test[agg_phases-avg_null-default.txt-Analyze] >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Analyze] >> test.py::test[tpch-q5-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q5-default.txt-Plan] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |95.3%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[expr-list_comp-default.txt-Results] [GOOD] >> test.py::test[file-file_list_bools--Analyze] >> test.py::test[agg_phases-avg_null-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-avg_null-default.txt-Debug] >> test.py::test[pg-tpcds-q95-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Debug] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Plan] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[select-cast_double_to_uint32-default.txt-Analyze] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Debug] >> TPersQueueTest::DirectReadPreCached >> test.py::test[file-file_list_bools--Analyze] [GOOD] >> test.py::test[file-file_list_bools--Debug] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Analyze] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[pg-tpcds-q95-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] >> test.py::test[select-cast_double_to_uint32-default.txt-Debug] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] >> test.py::test[file-file_list_bools--Debug] [GOOD] >> test.py::test[file-file_list_bools--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint--Analyze] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Debug] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Plan] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Analyze] >> test.py::test[file-file_list_bools--ForceBlocks] [GOOD] >> test.py::test[file-file_list_bools--Plan] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_in_in_in--Analyze] >> test.py::test[type_v3-ignore_v3_hint--Debug] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] >> test.py::test[file-file_list_bools--Results] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Analyze] >> test.py::test[agg_phases-avg_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-avg_null-default.txt-ForceBlocks] >> TColumnShardTestSchema::OneColdTier [GOOD] >> test.py::test[file-file_list_bools--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Analyze] >> test.py::test[join-premap_merge_extrasort1-off-Debug] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Plan] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2--Debug] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[pg-sublink_having_in-default.txt-Debug] >> test.py::test[flatten_by-flatten_columns_with_opt_struct-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181293.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132181293.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181293.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112181293.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180093.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112180093.000000s;Name=;Codec=}; 2024-11-21T09:18:13.847195Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:13.858558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:13.860689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:13.860704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:13.860730Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:13.861171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:13.861192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:13.861214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:13.861225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:13.861235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:13.861244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:13.861252Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:13.861263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:13.861282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:13.861292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:13.861303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:13.861314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:13.864035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:13.864044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:18:13.864616Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:13.864656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:13.864660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:13.864674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:13.864725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:13.864733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:13.864736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:13.864741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:13.864747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2024-11-21T09:18:13.864751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:13.864753Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:13.864762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:13.864766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:13.864769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:13.864772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:13.864777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:13.864782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:13.864786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:13.864789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:13.864795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:13.864798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:13.864800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:18:13.864805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:13.864808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:13.864811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:13.864827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=5; 2024-11-21T09:18:13.864832Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:18:13.864837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 2024-11-21T09:18:13.864843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=3; 2024-11-21T09:18:13.864854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:13.864857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:13.864860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:18:13.864885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:18:13.864889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:18:13. ... g=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] finished for tablet 9437184 2024-11-21T09:19:18.030074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] send ScanData to [1:685:2688] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:18.030108Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:686:2689] and sent to [1:685:2688] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1732180758026867,"name":"_full_task","f":1732180758026867,"d_finished":0,"c":0,"l":1732180758030079,"d":3212},"events":[{"name":"bootstrap","f":1732180758026898,"d_finished":470,"c":1,"l":1732180758027368,"d":470},{"a":1732180758030055,"name":"ack","f":1732180758029186,"d_finished":122,"c":3,"l":1732180758030025,"d":146},{"a":1732180758030054,"name":"processing","f":1732180758027455,"d_finished":744,"c":24,"l":1732180758030025,"d":769},{"name":"ProduceResults","f":1732180758027123,"d_finished":436,"c":29,"l":1732180758030067,"d":436},{"a":1732180758030067,"name":"Finish","f":1732180758030067,"d_finished":0,"c":0,"l":1732180758030079,"d":12},{"name":"task_result","f":1732180758027457,"d_finished":591,"c":21,"l":1732180758029837,"d":591}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:686:2689]->[1:685:2688] 2024-11-21T09:19:18.030116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:18.026793Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T09:19:18.030119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:18.030126Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:19:18.030130Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:18.030290Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T09:19:18.030316Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2024-11-21T09:19:18.030330Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:18.030350Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 
2024-11-21T09:19:18.030357Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:18.030408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:702:2705];trace_detailed=; 2024-11-21T09:19:18.030439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:18.030453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:18.030481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:18.030506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:18.030515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] finished for tablet 9437184 2024-11-21T09:19:18.030520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] send ScanData to [1:701:2704] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:18.030541Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:702:2705] and sent to [1:701:2704] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180758030403,"name":"_full_task","f":1732180758030403,"d_finished":0,"c":0,"l":1732180758030523,"d":120},"events":[{"name":"bootstrap","f":1732180758030418,"d_finished":70,"c":1,"l":1732180758030488,"d":70},{"a":1732180758030499,"name":"ack","f":1732180758030499,"d_finished":0,"c":0,"l":1732180758030523,"d":24},{"a":1732180758030498,"name":"processing","f":1732180758030498,"d_finished":0,"c":0,"l":1732180758030523,"d":25},{"name":"ProduceResults","f":1732180758030477,"d_finished":22,"c":2,"l":1732180758030514,"d":22},{"a":1732180758030514,"name":"Finish","f":1732180758030514,"d_finished":0,"c":0,"l":1732180758030523,"d":9}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:702:2705]->[1:701:2704] 2024-11-21T09:19:18.030550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:18.030362Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:18.030553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:18.030556Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:18.030559Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Debug] >> test.py::test[join-bush_in_in_in--Analyze] [GOOD] >> test.py::test[join-bush_in_in_in--Debug] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Plan] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] >> test.py::test[select-qualified_all_and_group_by-default.txt-Analyze] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Debug] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Debug] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Analyze] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2024-11-21T09:17:04.530475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:04.530524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:04.530536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002615/r3tmp/tmpsue7u5/pdisk_1.dat 2024-11-21T09:17:04.618341Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12320, node 1 2024-11-21T09:17:04.714926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.714945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.714948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.715023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:04.719491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.795766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.795798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.807543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8705 2024-11-21T09:17:05.214671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.025425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.025455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.058512Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:06.059422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.113910Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:06.122582Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:06.122608Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:06.129187Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:06.129292Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:06.129305Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:06.129308Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:06.129312Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:06.129315Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:06.129319Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:06.129323Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:06.129401Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:06.305266Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.305306Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.306589Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:06.308971Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:06.309094Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:06.309988Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:06.315230Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:06.315247Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:06.315255Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:06.316694Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.316714Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.317660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:06.318883Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:06.318905Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:06.321761Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:06.333426Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.355540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:06.467349Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:06.625318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:07.359985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.360022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.363190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:07.401105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:07.401147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:07.401188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:07.401204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:07.401217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:07.401236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:07.401254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:07.401271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:07.401284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:07.401296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:07.401311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:07.401324Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:07.407283Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:07.407319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:07.407368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:07.407387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:07.407406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:07.407423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... 126814Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:18.126823Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:18.127039Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:18.138241Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:18.138290Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:18.138402Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8423:6327], server id = [2:8428:6332], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:18.138485Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8423:6327], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.138700Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:18.138755Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8424:6328], server id = [2:8429:6333], tablet id = 72075186224037900, status = OK 2024-11-21T09:19:18.138762Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8424:6328], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.138790Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8425:6329], server id = [2:8430:6334], tablet id = 72075186224037901, status = OK 2024-11-21T09:19:18.138794Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8425:6329], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.138865Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8423:6327], server id = [2:8428:6332], tablet id = 72075186224037899 2024-11-21T09:19:18.138868Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.138939Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8426:6330], server id = [2:8431:6335], tablet id = 72075186224037902, status = OK 2024-11-21T09:19:18.138944Z node 2 
:STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8426:6330], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.138951Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8427:6331], server id = [2:8433:6337], tablet id = 72075186224037903, status = OK 2024-11-21T09:19:18.138954Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8427:6331], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139022Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:19:18.139087Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T09:19:18.139115Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8432:6336], server id = [2:8434:6338], tablet id = 72075186224037904, status = OK 2024-11-21T09:19:18.139120Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8432:6336], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139149Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T09:19:18.139219Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T09:19:18.139245Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8424:6328], server id = [2:8429:6333], tablet id = 72075186224037900 2024-11-21T09:19:18.139247Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139278Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8425:6329], server id = [2:8430:6334], tablet id = 72075186224037901 2024-11-21T09:19:18.139280Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139299Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T09:19:18.139316Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8426:6330], server id = [2:8431:6335], tablet id = 72075186224037902 2024-11-21T09:19:18.139318Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139336Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8435:6339], server id = [2:8437:6341], tablet id = 72075186224037905, status = OK 2024-11-21T09:19:18.139340Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8435:6339], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139359Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8427:6331], server id = [2:8433:6337], tablet id = 72075186224037903 2024-11-21T09:19:18.139360Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139409Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8436:6340], server id = [2:8439:6343], tablet id = 72075186224037906, status = OK 2024-11-21T09:19:18.139413Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8436:6340], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139425Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8438:6342], server id = [2:8441:6345], tablet id = 72075186224037907, status = OK 2024-11-21T09:19:18.139428Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8438:6342], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139447Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8432:6336], server id = [2:8434:6338], tablet id = 72075186224037904 
2024-11-21T09:19:18.139448Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139548Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8440:6344], server id = [2:8442:6346], tablet id = 72075186224037908, status = OK 2024-11-21T09:19:18.139554Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8440:6344], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.139565Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:19:18.139617Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T09:19:18.139633Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T09:19:18.139658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T09:19:18.139661Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:18.139675Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:18.139685Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:18.139712Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8435:6339], server id = [2:8437:6341], tablet id = 72075186224037905 2024-11-21T09:19:18.139714Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139742Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8438:6342], server id = [2:8441:6345], tablet id = 72075186224037907 2024-11-21T09:19:18.139744Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139759Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8436:6340], server id = [2:8439:6343], tablet id = 72075186224037906 2024-11-21T09:19:18.139762Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139777Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8440:6344], server id = [2:8442:6346], tablet id = 72075186224037908 2024-11-21T09:19:18.139779Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.139790Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:19:18.161320Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:18.161368Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:18.161486Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8447:6349], server id = [2:8448:6350], tablet id = 72075186224037900, status = OK 2024-11-21T09:19:18.161503Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8447:6349], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:18.161626Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:19:18.161632Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:18.161655Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8447:6349], server id = [2:8448:6350], tablet id = 72075186224037900 2024-11-21T09:19:18.161657Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:18.161667Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:18.161682Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:18.161733Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:18.162115Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:18.165804Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8465:6367]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:18.165847Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:18.165852Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8465:6367], StatRequests.size() = 1 2024-11-21T09:19:18.193388Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWNiODVhYzYtOWE4ODA0YzgtYjNhMDdhOS1jYTUwNTlmYw==, TxId: 2024-11-21T09:19:18.193403Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWNiODVhYzYtOWE4ODA0YzgtYjNhMDdhOS1jYTUwNTlmYw==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T09:19:18.193512Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8474:6373]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:18.193562Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:18.193633Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:19:18.193637Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:19:18.194157Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:19:18.194166Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:19:18.194171Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:19:18.195220Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132181277.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181277.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181277.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181277.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132181277.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180077.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181277.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112181277.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180077.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180077.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112180077.000000s;Name=;Codec=}; 2024-11-21T09:17:57.696095Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:17:57.707909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:17:57.709723Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:17:57.709739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:17:57.709769Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:17:57.710227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:57.710249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:57.710269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:57.710280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:57.710295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:57.710311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:57.710320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:57.710331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:57.710342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:57.710357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:57.710367Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:57.710377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:57.713177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:17:57.713188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:17:57.713957Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:17:57.714024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:17:57.714033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:17:57.714053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.714127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:57.714137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:57.714142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:17:57.714150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:17:57.714158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:57.714164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:57.714168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:17:57.714182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:17:57.714188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:57.714194Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:57.714198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:17:57.714206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:17:57.714211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:17:57.714217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:17:57.714221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:17:57.714230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:17:57.714236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:17:57.714240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:17:57.714248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:17:57.714254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:17:57.714258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:17:57.714281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:17:57.714289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:17:57.714296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T09:17:5 ... 
rollerLoadingTime=5; 2024-11-21T09:19:18.457419Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T09:19:18.457424Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=10; 2024-11-21T09:19:18.457433Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=5; 2024-11-21T09:19:18.457439Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T09:19:18.457464Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=20; 2024-11-21T09:19:18.457505Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=37; 2024-11-21T09:19:18.457513Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T09:19:18.457518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=1; 2024-11-21T09:19:18.457522Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T09:19:18.457525Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T09:19:18.457529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T09:19:18.457537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2024-11-21T09:19:18.457540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T09:19:18.457548Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T09:19:18.457552Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T09:19:18.457558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=4; 2024-11-21T09:19:18.457561Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1408; 2024-11-21T09:19:18.457578Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T09:19:18.457590Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T09:19:18.457594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1502;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T09:19:18.457597Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=1; 2024-11-21T09:19:18.457611Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T09:19:18.457631Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T09:19:18.457644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T09:19:18.457649Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T09:19:18.457661Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797645824;kff=0.3; 2024-11-21T09:19:18.457665Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:18.457672Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:18.457683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:19:18.457692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:18.457696Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:18.457704Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:18.457707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:18.457711Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:18.457724Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:18.457773Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1531:3400];tablet_id=9437184;parent=[1:1492:3368];fline=manager.h:99;event=ask_data;request=request_id=140;1={portions_count=21};; 2024-11-21T09:19:18.457830Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:18.457929Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T09:19:18.458009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T09:19:18.458012Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T09:19:18.458014Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T09:19:18.458017Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:19:18.458022Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:19:18.458026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T09:19:18.458030Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T09:19:18.458035Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T09:19:18.458039Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:978;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:18.458041Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:19:18.458044Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:19:18.458050Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:19:18.458321Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1342;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T09:19:18.458393Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1363;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T09:19:18.458521Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1397;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T09:19:18.458524Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1401;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> test.py::test[pg-sublink_having_in-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_having_in-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_having_in-default.txt-Results] >> test.py::test[blocks-date_greater_scalar--Results] >> test.py::test[flatten_by-flatten_columns_with_opt_struct-default.txt-Debug] [GOOD] >> test.py::test[flatten_by-flatten_columns_with_opt_struct-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_columns_with_opt_struct-default.txt-Results] >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[view-file_outer--Analyze] [SKIPPED] >> test.py::test[view-file_outer--Debug] [SKIPPED] >> test.py::test[view-file_outer--ForceBlocks] [SKIPPED] >> test.py::test[view-file_outer--Plan] [SKIPPED] >> test.py::test[view-file_outer--Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Debug] [GOOD] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-ForceBlocks] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-premap_merge_extrasort2--Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort2--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[join-bush_in_in_in--Debug] [GOOD] >> test.py::test[join-bush_in_in_in--ForceBlocks] >> test.py::test[flatten_by-flatten_columns_with_opt_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Debug] >> test.py::test[select-qualified_all_and_group_by-default.txt-Debug] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Debug] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--Analyze] [SKIPPED] >> 
test.py::test[view-file_outer_library--Debug] [SKIPPED] >> test.py::test[view-file_outer_library--ForceBlocks] [SKIPPED] >> test.py::test[view-file_outer_library--Plan] [SKIPPED] >> test.py::test[pg-sublink_having_in-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_any_corr-default.txt-Debug] >> TPersQueueTest::BadTopic >> test.py::test[view-file_outer_library--Results] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Plan] [GOOD] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Results] >> test.py::test[agg_phases-avg_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-avg_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-avg_null-default.txt-Results] >> test.py::test[pg-tpch-q17-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-Debug] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap-off-Plan] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap-off-Results] [SKIPPED] >> test.py::test[join-pullup_random--Debug] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Plan] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[expr-list_flat_map_deprecated_opt-default.txt-Results] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Analyze] >> test.py::test[join-bush_in_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in--Plan] [GOOD] >> test.py::test[join-bush_in_in_in--Results] >> test.py::test[pg-sublink_projection_any_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_any_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_any_corr-default.txt-Results] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--Analyze] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Analyze] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Analyze] >> DemoTx::Scenario_1 >> test.py::test[expr-list_uniq-default.txt-Analyze] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Debug] >> test.py::test[flatten_by-flatten_dict_by_opt--Debug] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Plan] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] >> test.py::test[pg-sublink_projection_any_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Debug] >> test.py::test[window-full/session_incompat_sort--Analyze] [GOOD] >> test.py::test[window-full/session_incompat_sort--Debug] >> 
test.py::test[pg-tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[flatten_by-flatten_with_group_by--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Debug] >> test.py::test[join-pullup_random--Debug] [GOOD] >> test.py::test[join-pullup_random--Plan] [GOOD] >> test.py::test[join-pullup_random--Results] >> test.py::test[select-scalar_subquery-default.txt-Analyze] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Debug] >> test.py::test[agg_phases-avg_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Analyze] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Results] >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-Analyze] >> test.py::test[expr-list_uniq-default.txt-Debug] [GOOD] >> test.py::test[expr-list_uniq-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Debug] >> TPersQueueTest::UpdatePartitionLocation >> test.py::test[select-scalar_subquery-default.txt-Debug] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Debug] >> test.py::test[flatten_by-flatten_with_group_by--Debug] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Debug] >> test.py::test[join-bush_in_in_in-off-Analyze] [GOOD] >> test.py::test[join-bush_in_in_in-off-Debug] >> test.py::test[window-full/session_incompat_sort--Debug] [GOOD] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] >> test.py::test[expr-list_uniq-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Plan] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Results] >> test.py::test[pg-sublink_projection_array_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_all_corr-default.txt-Debug] >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Analyze] >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-pg_sort--Analyze] >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Plan] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test.py::test[flatten_by-flatten_one_field_another--Debug] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Plan] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Results] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Plan] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> test.py::test[expr-list_uniq-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Analyze] >> test.py::test[join-bush_in_in_in-off-Debug] [GOOD] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] 
[SKIPPED] >> test.py::test[join-bush_in_in_in-off-Plan] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [GOOD] >> test.py::test[join-commonjoin_unused_keys--Analyze] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Debug] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Plan] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-full_trivial--Analyze] >> test.py::test[pg-sublink_where_all_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_all_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_all_corr-default.txt-Results] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Analyze] >> test.py::test[blocks-pg_sort--Analyze] [GOOD] >> test.py::test[blocks-pg_sort--Debug] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-full/session_incompat_sort--Plan] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[join-pushdown_filter_over_left--Debug] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Plan] >> test.py::test[join-pushdown_filter_over_left--Plan] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[flatten_by-flatten_one_field_another--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Debug] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Debug] >> test.py::test[join-full_trivial--Analyze] [GOOD] >> test.py::test[join-full_trivial--Debug] >> test.py::test[pg-tpch-q22-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Debug] >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[hor_join-out_sampling--Analyze] >> test.py::test[blocks-pg_sort--Debug] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] >> test.py::test[select-select_all-default.txt-Analyze] [GOOD] >> test.py::test[select-select_all-default.txt-Debug] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Debug] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-right_trivial-off-Debug] [SKIPPED] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-ForceBlocks] >> test.py::test[join-right_trivial-off-Plan] [SKIPPED] >> test.py::test[join-right_trivial-off-Results] [SKIPPED] >> test.py::test[join-star_join--Debug] >> test.py::test[join-full_trivial--Debug] [GOOD] >> test.py::test[join-full_trivial--ForceBlocks] >> test.py::test[pg-sublink_where_all_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_exists_corr-default.txt-Debug] >> test.py::test[hor_join-out_sampling--Analyze] [GOOD] >> test.py::test[hor_join-out_sampling--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181287.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181287.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181287.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181287.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181287.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181287.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180087.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181287.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181287.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180087.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180087.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180087.000000s;Name=;Codec=}; 2024-11-21T09:18:07.663584Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:07.674947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:07.676530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:07.676547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:07.676579Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:07.677043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:07.677067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:07.677086Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:07.677097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:07.677108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:07.677118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:07.677127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:07.677138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:07.677149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:07.677164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:07.677176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:07.677187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:07.679784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:07.679794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:18:07.680412Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:07.680462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:07.680466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:07.680481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:07.680541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:07.680550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:07.680553Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:07.680559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:07.680565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:07.680569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:07.680571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:07.680581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:07.680585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:07.680588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:07.680591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:07.680597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:07.680602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:07.680606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:07.680608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:07.680615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:07.680618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:07.680620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T09:18:07.680625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:07.680629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:07.680631Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:07.680650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T09:18:07.680656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:18:07.680661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 2024-11-21T09:18:07.680667Z node 1 :TX_COL ... 1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.219439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.219442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] finished for tablet 9437184 2024-11-21T09:19:25.219449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:946:2946] send ScanData to [1:945:2945] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:25.219512Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:946:2946] and sent to [1:945:2945] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":1732180765208257,"name":"_full_task","f":1732180765208257,"d_finished":0,"c":0,"l":1732180765219454,"d":11197},"events":[{"name":"bootstrap","f":1732180765208300,"d_finished":650,"c":1,"l":1732180765208950,"d":650},{"a":1732180765219428,"name":"ack","f":1732180765218976,"d_finished":191,"c":3,"l":1732180765219395,"d":217},{"a":1732180765219427,"name":"processing","f":1732180765209071,"d_finished":732,"c":24,"l":1732180765219396,"d":759},{"name":"ProduceResults","f":1732180765208689,"d_finished":435,"c":29,"l":1732180765219440,"d":435},{"a":1732180765219440,"name":"Finish","f":1732180765219440,"d_finished":0,"c":0,"l":1732180765219454,"d":14},{"name":"task_result","f":1732180765209073,"d_finished":516,"c":21,"l":1732180765218952,"d":516}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:946:2946]->[1:945:2945] 2024-11-21T09:19:25.219523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:25.208078Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T09:19:25.219527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:25.219536Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T09:19:25.219541Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:946:2946];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:25.219767Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:25.219806Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T09:19:25.219822Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:25.219861Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 
2024-11-21T09:19:25.219871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:25.219947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:953:2953];trace_detailed=; 2024-11-21T09:19:25.219991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:25.220004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:25.220016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.220021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.220040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:25.220044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.220050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:25.220053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] finished for tablet 9437184 2024-11-21T09:19:25.220058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:953:2953] send ScanData to [1:952:2952] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:25.220080Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:953:2953] and sent to [1:952:2952] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180765219941,"name":"_full_task","f":1732180765219941,"d_finished":0,"c":0,"l":1732180765220061,"d":120},"events":[{"name":"bootstrap","f":1732180765219957,"d_finished":66,"c":1,"l":1732180765220023,"d":66},{"a":1732180765220038,"name":"ack","f":1732180765220038,"d_finished":0,"c":0,"l":1732180765220061,"d":23},{"a":1732180765220036,"name":"processing","f":1732180765220036,"d_finished":0,"c":0,"l":1732180765220061,"d":25},{"name":"ProduceResults","f":1732180765220012,"d_finished":22,"c":2,"l":1732180765220052,"d":22},{"a":1732180765220052,"name":"Finish","f":1732180765220052,"d_finished":0,"c":0,"l":1732180765220061,"d":9}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:953:2953]->[1:952:2952] 2024-11-21T09:19:25.220091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:25.219879Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:25.220093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:25.220096Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:25.220100Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:953:2953];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> test.py::test[select-select_all-default.txt-Debug] [GOOD] >> test.py::test[select-select_all-default.txt-ForceBlocks] >> test.py::test[blocks-pg_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_sort--Plan] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-avg-default.txt-ForceBlocks] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_expr_bounds--Analyze] >> test.py::test[pg-tpch-q22-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] >> test.py::test[pg-sublink_where_exists_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_exists_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_exists_corr-default.txt-Results] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Results] >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[blocks-pg_top_sort--Analyze] >> test.py::test[flatten_by-flatten_with_group_by--Debug] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Plan] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> test.py::test[join-full_trivial--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial--Plan] [GOOD] >> test.py::test[join-full_trivial--Results] >> test.py::test[select-select_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all-default.txt-Plan] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> TPersQueueTest::WriteExisting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:46: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> DemoTx::Scenario_1 [GOOD] >> test.py::test[expr-to_hashed_set_dict_key-default.txt-Results] [GOOD] >> test.py::test[hor_join-filters--Analyze] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Analyze] >> test.py::test[window-win_expr_bounds--Analyze] [GOOD] >> test.py::test[window-win_expr_bounds--Debug] >> test.py::test[hor_join-out_sampling--Debug] [GOOD] >> test.py::test[hor_join-out_sampling--ForceBlocks] >> test.py::test[blocks-pg_top_sort--Analyze] [GOOD] >> test.py::test[blocks-pg_top_sort--Debug] >> DemoTx::Scenario_2 >> test.py::test[join-full_trivial--Results] [GOOD] >> test.py::test[join-full_trivial-off-Analyze] >> test.py::test[pg-sublink_where_exists_corr-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Debug] >> test.py::test[hor_join-filters--Analyze] [GOOD] >> test.py::test[hor_join-filters--Debug] >> test.py::test[join-star_join--Debug] [GOOD] >> test.py::test[join-star_join--Plan] [GOOD] >> test.py::test[join-star_join--Results] >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[flexible_types-group_by2-default.txt-Debug] [SKIPPED] >> test.py::test[flexible_types-group_by2-default.txt-Plan] [SKIPPED] >> test.py::test[flexible_types-group_by2-default.txt-Results] [SKIPPED] >> test.py::test[flexible_types-with_typeof-default.txt-Debug] [SKIPPED] >> test.py::test[flexible_types-with_typeof-default.txt-Plan] [SKIPPED] >> test.py::test[flexible_types-with_typeof-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Debug] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Plan] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-max_outtables--Debug] [SKIPPED] >> test.py::test[hor_join-max_outtables--Plan] [SKIPPED] >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--Debug] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--Plan] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--Results] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt-Debug] >> test.py::test[select-table_funcs_spec-default.txt-Analyze] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Debug] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> test.py::test[blocks-pg_top_sort--Debug] [GOOD] >> test.py::test[blocks-pg_top_sort--ForceBlocks] >> test.py::test[window-win_expr_bounds--Debug] [GOOD] >> test.py::test[window-win_expr_bounds--ForceBlocks] >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> test.py::test[join-full_trivial-off-Analyze] [GOOD] >> test.py::test[join-full_trivial-off-Debug] >> test.py::test[pg-tpcds-q16-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[hor_join-out_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-out_sampling--Plan] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[select-table_funcs_spec-default.txt-Debug] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks] >> test.py::test[hor_join-filters--Debug] [GOOD] >> test.py::test[hor_join-filters--ForceBlocks] >> test.py::test[agg_phases_agg_apply-avg-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[agg_phases_agg_apply-avg-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Results] >> test.py::test[blocks-pg_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_top_sort--Plan] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Debug] >> test.py::test[window-win_expr_bounds--ForceBlocks] [GOOD] >> test.py::test[window-win_expr_bounds--Plan] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> test.py::test[join-full_trivial-off-Debug] [GOOD] >> test.py::test[join-full_trivial-off-ForceBlocks] [SKIPPED] >> test.py::test[join-full_trivial-off-Plan] [GOOD] >> test.py::test[join-full_trivial-off-Results] [GOOD] >> test.py::test[join-group_compact_by--Analyze] >> test.py::test[join-star_join--Results] [GOOD] >> test.py::test[join-star_join_semionly-off-Debug] [SKIPPED] >> test.py::test[join-star_join_semionly-off-Plan] [SKIPPED] >> test.py::test[join-star_join_semionly-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-Debug] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-Plan] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left--Debug] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--Analyze] >> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Plan] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] >> test.py::test[pg-tpcds-q17-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[hor_join-filters--ForceBlocks] [GOOD] >> test.py::test[hor_join-filters--Plan] [GOOD] >> test.py::test[hor_join-filters--Results] >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-Analyze] >> test.py::test[join-group_compact_by--Analyze] [GOOD] >> test.py::test[join-group_compact_by--Debug] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-type_assert-default.txt-Analyze] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Debug] >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[hor_join-sorted_out--Analyze] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Analyze] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Analyze] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Debug] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Plan] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] >> test.py::test[blocks-string_as_agg_key--Analyze] [GOOD] >> test.py::test[blocks-string_as_agg_key--Debug] >> test.py::test[hor_join-out_hor_join-default.txt-Debug] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Plan] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Analyze] [SKIPPED] >> 
test.py::test[hor_join-yql-12610_old_table_props--Debug] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Plan] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[insert-append-proto_fail-Analyze] [SKIPPED] >> test.py::test[insert-append-proto_fail-Debug] [SKIPPED] >> test.py::test[insert-append-proto_fail-ForceBlocks] [SKIPPED] >> test.py::test[insert-append-proto_fail-Plan] [SKIPPED] >> test.py::test[insert-append-proto_fail-Results] >> TFileStoreWithReboots::CreateAlterChannels [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-Debug] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Results] >> test.py::test[pg-tpcds-q38-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] >> test.py::test[hor_join-sorted_out--Analyze] [GOOD] >> test.py::test[hor_join-sorted_out--Debug] >> test.py::test[join-group_compact_by--Debug] [GOOD] >> test.py::test[join-group_compact_by--ForceBlocks] >> test.py::test[select-type_assert-default.txt-Analyze] [GOOD] >> test.py::test[select-type_assert-default.txt-Debug] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterChannels [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:13.112518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:13.112536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:13.112539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:13.112542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:13.112546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:13.112548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:13.112555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:13.112605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:13.119534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:13.119546Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:13.121425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:13.121496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:13.121525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:13.123289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:13.123334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:13.123416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:13.123566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:13.124006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:13.124198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:13.124219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:13.124233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:13.124238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:13.124243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:13.124269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:13.125113Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:13.135758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:13.135813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.135854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:13.135885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:13.135889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:13.136446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:13.136455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:13.136458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:13.136699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:13.136988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.136997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.137003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:13.137010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:13.137415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:13.137723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:13.137758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:13.137894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:13.137910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:13.137921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:13.137955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:13.137959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:13.137981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:13.137989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:13.138243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:13.138249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:13.138275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:13.138278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:13.138347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:13.138352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:13.138363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:13.138365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:13.138368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:13.138372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:13.138375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:13.138377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:13.138384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:13.138388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:13.138390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:19:30.087586Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:19:30.087590Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:19:30.087599Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:19:30.087603Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:19:30.087606Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:19:30.087617Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T09:19:30.087660Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:19:30.087871Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:19:30.087885Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:19:30.087889Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088112Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T09:19:30.088125Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T09:19:30.088147Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:19:30.088160Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T09:19:30.088164Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088167Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T09:19:30.088523Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088539Z node 
72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088542Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088549Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T09:19:30.088565Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:30.088821Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T09:19:30.088834Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T09:19:30.088869Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088880Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088884Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:19:30.088903Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:19:30.088905Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:19:30.088911Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:19:30.088916Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T09:19:30.088932Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:19:30.088935Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:19:30.088937Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:19:30.088949Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:19:30.088952Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T09:19:30.088955Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T09:19:30.089211Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:30.089216Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:19:30.089231Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:30.089234Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:19:30.089282Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:19:30.089289Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:19:30.089292Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:19:30.089294Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:19:30.089296Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:19:30.089304Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T09:19:30.089525Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T09:19:30.089555Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:19:30.089559Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T09:19:30.089596Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:19:30.089605Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:19:30.089607Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:19:30.089645Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:19:30.089662Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 22us result status StatusSuccess 2024-11-21T09:19:30.089701Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[insert-append-proto_fail-Results] [GOOD] >> test.py::test[insert-keepmeta-with_view-Analyze] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-Debug] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-Plan] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-Results] [SKIPPED] >> test.py::test[insert-part_sortness-desc-Analyze] >> test.py::test[blocks-string_as_agg_key--Debug] [GOOD] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] >> test.py::test[agg_phases_agg_apply-avg-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Analyze] >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Debug] >> test.py::test[window-win_func_cume_dist-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-ForceBlocks] >> test.py::test[select-type_assert-default.txt-Debug] [GOOD] >> test.py::test[select-type_assert-default.txt-ForceBlocks] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-ForceBlocks] >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-skip_yamr--Debug] >> test.py::test[hor_join-sorted_out--Debug] [GOOD] >> test.py::test[hor_join-sorted_out--ForceBlocks] >> test.py::test[insert-part_sortness-desc-Analyze] [GOOD] >> test.py::test[insert-part_sortness-desc-Debug] >> test.py::test[join-yql-14829_left--Debug] [GOOD] >> test.py::test[join-yql-14829_left--Plan] [GOOD] >> test.py::test[join-yql-14829_left--Results] >> test.py::test[join-group_compact_by--ForceBlocks] [GOOD] >> test.py::test[join-group_compact_by--Plan] [GOOD] >> test.py::test[join-group_compact_by--Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[pg-tpcds-q39-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Results] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] [GOOD] >> test.py::test[blocks-string_as_agg_key--Plan] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[agg_phases_agg_apply-min-default.txt-Analyze] [GOOD] >> 
test.py::test[agg_phases_agg_apply-min-default.txt-Debug] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] >> test.py::test[hor_join-skip_yamr--Debug] [GOOD] >> test.py::test[hor_join-skip_yamr--Plan] [GOOD] >> test.py::test[select-type_assert-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-type_assert-default.txt-Plan] [GOOD] >> test.py::test[select-type_assert-default.txt-Results] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Analyze] >> test.py::test[hor_join-skip_yamr--Results] >> test.py::test[window-win_func_cume_dist-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_cume_dist-default.txt-Results] >> test.py::test[insert-part_sortness-desc-Debug] [GOOD] >> test.py::test[insert-part_sortness-desc-ForceBlocks] >> test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Debug] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--Analyze] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--Debug] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--ForceBlocks] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--Plan] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--Results] >> test.py::test[hor_join-sorted_out--ForceBlocks] [GOOD] >> test.py::test[hor_join-sorted_out--Plan] [GOOD] >> test.py::test[hor_join-sorted_out--Results] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_all--Analyze] >> test.py::test[hor_join-skip_yamr--Results] [GOOD] >> test.py::test[hor_join-table_record--Debug] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[select-type_assert-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Analyze] >> test.py::test[window-win_func_cume_dist-default.txt-Results] [GOOD] >> test.py::test[window-win_func_into_udf--Analyze] >> test.py::test[pg-select_join_right_one-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Debug] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[blocks-string_filter--Analyze] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::SchemeshardRestart >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> test.py::test[pg_duplicated-ambigous_order_by_from_table--Results] [GOOD] >> test.py::test[produce-process_rows_and_filter--Analyze] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Debug] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql-4275--Debug] >> test.py::test[join-inner_all--Analyze] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] >> test.py::test[produce-process_rows_and_filter--Plan] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-reduce_lambda-default.txt-Analyze] >> 
test.py::test[hor_join-sorted_out--Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--Analyze] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--Debug] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--ForceBlocks] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--Plan] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[in-in_noansi-default.txt-Analyze] >> test.py::test[insert-part_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness-desc-Plan] [GOOD] >> test.py::test[insert-part_sortness-desc-Results] >> test.py::test[join-inner_all--Debug] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Analyze] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Debug] >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2024-11-21T09:17:06.138809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:06.138850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:06.138859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00252c/r3tmp/tmpiZqKZs/pdisk_1.dat 2024-11-21T09:17:06.218405Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3003, node 1 2024-11-21T09:17:06.313049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:06.313068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:06.313071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:06.313151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:06.318081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.394161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.394198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.405710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25528 2024-11-21T09:17:06.803498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.559807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.559832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.592219Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.593034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.640247Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.648454Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.648478Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.654686Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.654845Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.654861Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.654865Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.654869Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.654873Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.654877Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.654882Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.654958Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.827054Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.827072Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.827730Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T09:17:07.829128Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T09:17:07.829412Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T09:17:07.829981Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:07.834721Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.834737Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.834745Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:07.835891Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.835921Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.838011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.839432Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.839453Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.842357Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.854021Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.875941Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.990982Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:08.177281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:08.721754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2141:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.721783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.724346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.757110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:08.757145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:08.757175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:08.757194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:08.757211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:08.757224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:08.757236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:08.757249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:08.757262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:08.757276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:08.757290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:08.757305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2290:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:08.762481Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:08.762505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:08.762532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:08.762545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:08.762560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:08.762581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2296:2846];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... 037897] EvServerConnected, pipe server id = [2:8528:6441] 2024-11-21T09:19:30.789745Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8528:6441], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:30.800806Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:30.800826Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:30.863815Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8532:6444] 2024-11-21T09:19:30.864062Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3413:3256] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:30.864072Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:3413:3256] 2024-11-21T09:19:30.864088Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:31.246946Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:31.246972Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:31.257252Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:31.257280Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:31.817265Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:31.817292Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:31.817296Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
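The TTxAnalyze record above requests column statistics of type TYPE_COUNT_MIN_SKETCH for the analyzed table. As background only: a count-min sketch approximates per-value frequencies in fixed memory and can only overestimate. The Python snippet below illustrates the general technique; it is not YDB's implementation (which lives in C++ inside the statistics aggregator), and the width/depth values are arbitrary.

```python
import hashlib

class CountMinSketch:
    """Toy count-min sketch: approximate frequency counts in fixed memory."""

    def __init__(self, width: int = 1024, depth: int = 4):
        self.width = width
        self.depth = depth
        self.table = [[0] * width for _ in range(depth)]

    def _buckets(self, item: bytes):
        # One bucket per row, derived from a per-row salted hash of the item.
        for row in range(self.depth):
            digest = hashlib.blake2b(item, salt=row.to_bytes(8, "little")).digest()
            yield row, int.from_bytes(digest[:8], "little") % self.width

    def add(self, item: bytes, count: int = 1) -> None:
        for row, col in self._buckets(item):
            self.table[row][col] += count

    def estimate(self, item: bytes) -> int:
        # Collisions only inflate counters, so the minimum across rows
        # is the tightest (still upper-bound) estimate.
        return min(self.table[row][col] for row, col in self._buckets(item))

sketch = CountMinSketch()
for value in (b"a", b"b", b"a", b"c", b"a"):
    sketch.add(value)
assert sketch.estimate(b"a") >= 3  # exact here; never underestimates
```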
2024-11-21T09:19:32.770265Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:32.770323Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:32.770329Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:32.770555Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:32.781750Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:32.781870Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:32.781880Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:32.782003Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:32.793101Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:32.793145Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2024-11-21T09:19:32.793335Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8613:6489], server id = [2:8618:6494], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:32.793367Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8613:6489], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.793413Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8614:6490], server id = [2:8619:6495], tablet id = 72075186224037900, status = OK 2024-11-21T09:19:32.793423Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8614:6490], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.793551Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:32.793621Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8615:6491], server id = [2:8621:6497], tablet id = 72075186224037901, status = OK 2024-11-21T09:19:32.793631Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8615:6491], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.793658Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8616:6492], server id = [2:8620:6496], tablet id = 72075186224037902, status = OK 2024-11-21T09:19:32.793662Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8616:6492], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.793722Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:19:32.793770Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8613:6489], server id = [2:8618:6494], tablet id = 72075186224037899 2024-11-21T09:19:32.793773Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.793787Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8617:6493], server id = [2:8622:6498], tablet id = 72075186224037903, status = OK 2024-11-21T09:19:32.793792Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8617:6493], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.793861Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T09:19:32.793900Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 
72075186224037902 2024-11-21T09:19:32.793948Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8614:6490], server id = [2:8619:6495], tablet id = 72075186224037900 2024-11-21T09:19:32.793951Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.793965Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8623:6499], server id = [2:8624:6500], tablet id = 72075186224037904, status = OK 2024-11-21T09:19:32.793971Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8623:6499], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.794007Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T09:19:32.794025Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8615:6491], server id = [2:8621:6497], tablet id = 72075186224037901 2024-11-21T09:19:32.794027Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794078Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8616:6492], server id = [2:8620:6496], tablet id = 72075186224037902 2024-11-21T09:19:32.794080Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794088Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8625:6501], server id = [2:8627:6503], tablet id = 72075186224037905, status = OK 2024-11-21T09:19:32.794092Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8625:6501], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.794114Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8617:6493], server id = [2:8622:6498], tablet id = 72075186224037903 2024-11-21T09:19:32.794116Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794158Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8626:6502], server id = [2:8629:6505], tablet id = 72075186224037906, status = OK 2024-11-21T09:19:32.794162Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8626:6502], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.794169Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8628:6504], server id = [2:8630:6506], tablet id = 72075186224037907, status = OK 2024-11-21T09:19:32.794172Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8628:6504], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.794214Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T09:19:32.794249Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:19:32.794277Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8631:6507], server id = [2:8632:6508], tablet id = 72075186224037908, status = OK 2024-11-21T09:19:32.794281Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8631:6507], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:32.794286Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T09:19:32.794310Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T09:19:32.794340Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8623:6499], server id = [2:8624:6500], tablet id = 72075186224037904 2024-11-21T09:19:32.794342Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794362Z node 2 
:STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8625:6501], server id = [2:8627:6503], tablet id = 72075186224037905 2024-11-21T09:19:32.794363Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794381Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8626:6502], server id = [2:8629:6505], tablet id = 72075186224037906 2024-11-21T09:19:32.794383Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.794389Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T09:19:32.794393Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:32.794415Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:32.794436Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:32.794480Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:32.794522Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8628:6504], server id = [2:8630:6506], tablet id = 72075186224037907 2024-11-21T09:19:32.794524Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.795017Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8631:6507], server id = [2:8632:6508], tablet id = 72075186224037908 2024-11-21T09:19:32.795025Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:32.795064Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:32.807915Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDdhOTQ1OTMtMzdmODlhMjUtZDBmNTQ5MC00Y2RkYmQwMQ==, TxId: 2024-11-21T09:19:32.807957Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDdhOTQ1OTMtMzdmODlhMjUtZDBmNTQ5MC00Y2RkYmQwMQ==, TxId: 2024-11-21T09:19:32.808110Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:32.819215Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:32.819239Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3413:3256] >> DemoTx::Scenario_2 [GOOD] >> test.py::test[window-win_func_into_udf--Analyze] [GOOD] >> test.py::test[window-win_func_into_udf--Debug] >> test.py::test[blocks-string_filter--Analyze] [GOOD] >> test.py::test[blocks-string_filter--Debug] >> test.py::test[hor_join-table_record--Debug] [GOOD] >> test.py::test[hor_join-table_record--Plan] [GOOD] >> test.py::test[hor_join-table_record--Results] >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--Analyze] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Debug] >> test.py::test[produce-reduce_lambda-default.txt-Analyze] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Debug] >> DemoTx::Scenario_3 >> test.py::test[in-in_noansi-default.txt-Analyze] [GOOD] >> test.py::test[in-in_noansi-default.txt-Debug] >> test.py::test[join-inner_all--Debug] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] >> test.py::test[pg-select_join_right_one-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-min-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-ForceBlocks] >> TPartitionWriterCacheActorTests::WriteReplyOrder >> test.py::test[blocks-string_filter--Debug] [GOOD] >> test.py::test[join-yql-4275--Debug] [GOOD] >> test.py::test[join-yql-4275--Plan] [GOOD] >> test.py::test[join-yql-4275--Results] >> test.py::test[hor_join-table_record--Results] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Debug] [SKIPPED] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Plan] [SKIPPED] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] [SKIPPED] >> test.py::test[in-in_ansi_variant-default.txt-Debug] >> TPersQueueTest::WriteExisting [GOOD] >> TPersQueueTest::WriteExistingBigValue >> test.py::test[blocks-string_filter--ForceBlocks] >> test.py::test[insert-select_operate_with_columns--Analyze] [GOOD] >> test.py::test[insert-select_operate_with_columns--Debug] >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> test.py::test[pg-tpcds-q65-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[window-win_func_into_udf--Debug] [GOOD] >> test.py::test[window-win_func_into_udf--ForceBlocks] >> test.py::test[in-in_noansi-default.txt-Debug] [GOOD] >> test.py::test[in-in_noansi-default.txt-ForceBlocks] >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> test.py::test[produce-reduce_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-ForceBlocks] >> test.py::test[join-inner_all--ForceBlocks] [GOOD] >> test.py::test[join-inner_all--Plan] [GOOD] >> test.py::test[join-inner_all--Results] >> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Debug] >> 
test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test.py::test[in-in_ansi_variant-default.txt-Debug] [GOOD] >> test.py::test[in-in_ansi_variant-default.txt-Plan] [GOOD] >> test.py::test[in-in_ansi_variant-default.txt-Results] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[json-json_query/passing_exception--Debug] [SKIPPED] >> test.py::test[json-json_query/passing_exception--Plan] [SKIPPED] >> test.py::test[json-json_query/passing_exception--Results] >> test.py::test[blocks-string_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-string_filter--Plan] [GOOD] >> test.py::test[blocks-string_filter--Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test.py::test[in-in_noansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_noansi-default.txt-Plan] [GOOD] >> test.py::test[in-in_noansi-default.txt-Results] >> test.py::test[insert-select_operate_with_columns--Debug] [GOOD] >> test.py::test[insert-select_operate_with_columns--ForceBlocks] >> test.py::test[in-in_ansi_variant-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Debug] >> test.py::test[pg-tpcds-q94-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Results] >> test.py::test[produce-reduce_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Results] >> test.py::test[pg-select_join_right_one-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Results] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Analyze] >> KqpScanArrowInChanels::AllTypesColumns >> test.py::test[window-win_func_into_udf--ForceBlocks] [GOOD] >> test.py::test[window-win_func_into_udf--Plan] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> test.py::test[in-in_noansi-default.txt-Results] [GOOD] >> test.py::test[insert-append-with_view-Analyze] [SKIPPED] >> test.py::test[insert-append-with_view-Debug] [SKIPPED] >> test.py::test[insert-append-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-append-with_view-Plan] [SKIPPED] >> test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-override-view_fail-Analyze] [SKIPPED] >> test.py::test[insert-override-view_fail-Debug] [SKIPPED] >> test.py::test[insert-override-view_fail-ForceBlocks] [SKIPPED] >> test.py::test[insert-override-view_fail-Plan] [SKIPPED] >> test.py::test[insert-override-view_fail-Results] >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Analyze] |95.3%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[json-json_query/passing_exception--Results] [GOOD] >> test.py::test[json-json_value/passing-default.txt-Debug] |95.3%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Analyze] >> test.py::test[pg-tpcds-q94-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Debug] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[insert-select_operate_with_columns--ForceBlocks] [GOOD] >> test.py::test[insert-select_operate_with_columns--Plan] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] >> test.py::test[agg_phases_agg_apply-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Results] >> test.py::test[produce-reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Analyze] >> test.py::test[in-in_enum_single0-default.txt-Debug] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Plan] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-Analyze] >> test.py::test[insert-override-view_fail-Results] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Analyze] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::BadSids >> test.py::test[coalesce-coalesce_few_real-default.txt-Analyze] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Debug] >> test.py::test[json-json_value/passing-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/passing-default.txt-Plan] [GOOD] >> test.py::test[json-json_value/passing-default.txt-Results] >> test.py::test[pg-select_join_right_one-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Analyze] >> test.py::test[insert-select_operate_with_columns--Results] [GOOD] >> test.py::test[join-bush_in_in--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181297.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181297.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181297.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181297.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132181297.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132181297.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180097.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112181297.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112181297.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180097.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112180097.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112180097.000000s;Name=;Codec=}; 2024-11-21T09:18:17.538071Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:17.549601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:17.551114Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:17.551126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:17.551153Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:17.551571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:17.551592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:17.551611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:17.551622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:17.551632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:17.551642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:17.551650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:17.551661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:17.551671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:17.551690Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:17.551700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:17.551712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:17.554506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:17.554517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:18:17.555121Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:17.555162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:17.555169Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:17.555183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:17.555233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:17.555240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:17.555244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:17.555249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:17.555255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:17.555259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:17.555261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:17.555271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:17.555275Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:17.555279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:17.555281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:17.555287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:17.555291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:17.555295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:17.555297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:17.555303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:17.555307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:17.555309Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T09:18:17.555314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:17.555318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:17.555320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:17.555337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=4; 2024-11-21T09:18:17.555342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T09:18:17.555346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:18:17.555352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=3; 2024-11-21T09:18:17.555364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:17.555368Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:17.555370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:18:17.555383Z n ... ;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:36.450003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] finished for tablet 9437184 2024-11-21T09:19:36.450012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] send ScanData to [1:797:2797] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:36.450065Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:798:2798] and sent to [1:797:2797] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1732180776431155,"name":"_full_task","f":1732180776431155,"d_finished":0,"c":0,"l":1732180776450018,"d":18863},"events":[{"name":"bootstrap","f":1732180776431229,"d_finished":794,"c":1,"l":1732180776432023,"d":794},{"a":1732180776449985,"name":"ack","f":1732180776448653,"d_finished":238,"c":3,"l":1732180776449697,"d":271},{"a":1732180776449984,"name":"processing","f":1732180776432248,"d_finished":917,"c":24,"l":1732180776449697,"d":951},{"name":"ProduceResults","f":1732180776431694,"d_finished":529,"c":29,"l":1732180776450001,"d":529},{"a":1732180776450001,"name":"Finish","f":1732180776450001,"d_finished":0,"c":0,"l":1732180776450018,"d":17},{"name":"task_result","f":1732180776432251,"d_finished":647,"c":21,"l":1732180776448624,"d":647}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:798:2798]->[1:797:2797] 2024-11-21T09:19:36.450083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:36.430872Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T09:19:36.450087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:36.450104Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.015894s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.015411s;size=3.792e-06;details={columns=1;};};]};; 2024-11-21T09:19:36.450112Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:36.451090Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T09:19:36.451144Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T09:19:36.451181Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T09:19:36.451231Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T09:19:36.451242Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T09:19:36.451328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:805:2805];trace_detailed=; 2024-11-21T09:19:36.451384Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T09:19:36.451400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T09:19:36.451416Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:36.451421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:36.451441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:36.451460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:36.451465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:36.451468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] finished for tablet 9437184 2024-11-21T09:19:36.451473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] send ScanData to [1:804:2804] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:36.451501Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:805:2805] and sent to [1:804:2804] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732180776451319,"name":"_full_task","f":1732180776451319,"d_finished":0,"c":0,"l":1732180776451477,"d":158},"events":[{"name":"bootstrap","f":1732180776451344,"d_finished":79,"c":1,"l":1732180776451423,"d":79},{"a":1732180776451438,"name":"ack","f":1732180776451438,"d_finished":0,"c":0,"l":1732180776451477,"d":39},{"a":1732180776451437,"name":"processing","f":1732180776451437,"d_finished":0,"c":0,"l":1732180776451477,"d":40},{"name":"ProduceResults","f":1732180776451411,"d_finished":20,"c":2,"l":1732180776451466,"d":20},{"a":1732180776451466,"name":"Finish","f":1732180776451466,"d_finished":0,"c":0,"l":1732180776451477,"d":11}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:805:2805]->[1:804:2804] 2024-11-21T09:19:36.451519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:36.451250Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T09:19:36.451521Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:36.451525Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T09:19:36.451529Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 >> test.py::test[join-join_comp_common_table--Analyze] [GOOD] >> test.py::test[join-join_comp_common_table--Debug] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Analyze] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Debug] >> test.py::test[pg-tpcds-q99-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Debug] >> KqpScanArrowFormat::AggregateCountStar >> 
TColumnShardTestSchema::ForgetAfterFail [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Analyze] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Debug] >> test.py::test[json-json_value/passing-default.txt-Results] [GOOD] >> test.py::test[json-jsonpath/unicode-default.txt-Debug] >> test.py::test[window-yql-14479-default.txt-Analyze] [GOOD] >> test.py::test[window-yql-14479-default.txt-Debug] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Debug] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Analyze] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Debug] >> test.py::test[coalesce-coalesce_few_real-default.txt-Debug] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132181296.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112181296.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112180096.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2024-11-21T09:18:16.596092Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T09:18:16.608820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:18:16.610656Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T09:18:16.610675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:18:16.610708Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T09:18:16.611176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:16.611201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:16.611221Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:16.611233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:16.611243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:16.611253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:16.611263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:16.611273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:16.611285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:16.611300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:16.611312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:16.611323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:16.613967Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1526;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T09:18:16.613979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T09:18:16.614622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T09:18:16.614669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T09:18:16.614674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T09:18:16.614690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:16.614760Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:16.614769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:16.614773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T09:18:16.614779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T09:18:16.614786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:16.614790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:16.614793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T09:18:16.614803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T09:18:16.614807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:16.614811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:16.614813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T09:18:16.614819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T09:18:16.614825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:16.614830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:16.614832Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T09:18:16.614839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:16.614843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:16.614845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T09:18:16.614850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:16.614853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:16.614856Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T09:18:16.614876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T09:18:16.614881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T09:18:16.614886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T09:18:16.614892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T09:18:16.614905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:16.614910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:16.614912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T09:18:16.614926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:18:16.614930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:18:16.614932Z node 1 :TX_COLUM ... 
tamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:19:37.182338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26668; 2024-11-21T09:19:37.182341Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=timestamp; 2024-11-21T09:19:37.182352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:762:2761] send ScanData to [1:761:2760] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:762:2761]->[1:761:2760] 2024-11-21T09:19:37.182359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:19:37.182369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T09:19:37.182410Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:37.182415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T09:19:37.182422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=26664; 2024-11-21T09:19:37.182425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=timestamp; 2024-11-21T09:19:37.182437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:762:2761] send ScanData to [1:761:2760] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] Got TEvKqpCompute::TEvScanData [1:762:2761]->[1:761:2760] 2024-11-21T09:19:37.182443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2024-11-21T09:19:37.182491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T09:19:37.182498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T09:19:37.182504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:762:2761] finished for tablet 9437184 2024-11-21T09:19:37.182512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:762:2761] send ScanData to [1:761:2760] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T09:19:37.182552Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:762:2761] and sent to [1:761:2760] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.02}],"full":{"a":1732180777161720,"name":"_full_task","f":1732180777161720,"d_finished":0,"c":0,"l":1732180777182517,"d":20797},"events":[{"name":"bootstrap","f":1732180777161745,"d_finished":646,"c":1,"l":1732180777162391,"d":646},{"a":1732180777182489,"name":"ack","f":1732180777181932,"d_finished":323,"c":6,"l":1732180777182454,"d":351},{"a":1732180777182489,"name":"processing","f":1732180777165329,"d_finished":1264,"c":48,"l":1732180777182454,"d":1292},{"name":"ProduceResults","f":1732180777162035,"d_finished":769,"c":56,"l":1732180777182503,"d":769},{"a":1732180777182503,"name":"Finish","f":1732180777182503,"d_finished":0,"c":0,"l":1732180777182517,"d":14},{"name":"task_result","f":1732180777165332,"d_finished":890,"c":42,"l":1732180777181906,"d":890}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:762:2761]->[1:761:2760] 2024-11-21T09:19:37.182597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T09:19:37.161648Z;index_granules=0;index_portions=6;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9495312;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9495312;selected_rows=0; 2024-11-21T09:19:37.182601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T09:19:37.182613Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.018639s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.018902s;size=7.584e-06;details={columns=1;};};]};; 2024-11-21T09:19:37.182619Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:762:2761];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 0/0 160000/9495312 >> test.py::test[pg-select_win_expr_lead-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Debug] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> test.py::test[stream_lookup_join-lookup_join-default.txt-Debug] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Plan] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Analyze] >> test.py::test[join-bush_in_in--Analyze] [GOOD] >> test.py::test[join-bush_in_in--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2024-11-21T09:17:03.710003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:03.710055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:03.710066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002717/r3tmp/tmpTgct2v/pdisk_1.dat 2024-11-21T09:17:03.830473Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16590, node 1 2024-11-21T09:17:04.003059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:04.003081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:04.003084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:04.003173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:04.011405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:04.088346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:04.088403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:04.100384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2293 2024-11-21T09:17:04.510115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.307679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.307703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.341522Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:05.342436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.393240Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:05.401401Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:05.401427Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:05.408018Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:05.408174Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:05.408194Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:05.408199Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:05.408225Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:05.408231Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:05.408236Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:05.408243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:05.408352Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:05.589796Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.589850Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:05.592482Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:05.596235Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:05.597448Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:05.598517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:05.603934Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:05.603951Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:05.603961Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:05.606826Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.606857Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.608067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:05.609833Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:05.609862Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:05.612645Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:05.624869Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:05.647171Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:05.770502Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:05.937517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:06.734779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.734804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:06.761479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:06.790456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:06.790504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:06.790548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:06.790574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:06.790592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:06.790612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:06.790630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:06.790648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:06.790666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:06.790685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:06.790705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:06.790723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:06.796553Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:06.796575Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:06.796598Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:06.796605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:06.796624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:06.796631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:06.796643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... A(), pipe client id = [2:7554:5664] 2024-11-21T09:19:33.903629Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7554:5664] 2024-11-21T09:19:33.903677Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7555:5665], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:19:33.919158Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:19:33.919186Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:19:33.919232Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:19:33.919354Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:19:33.919383Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded database: /Root/Database 2024-11-21T09:19:33.919390Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start key 2024-11-21T09:19:33.919396Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table owner id: 72075186224037889 2024-11-21T09:19:33.919399Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table local path id: 4 2024-11-21T09:19:33.919403Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start time: 1732180773869341 2024-11-21T09:19:33.919407Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal IsColumnTable: 1 2024-11-21T09:19:33.919411Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded global traversal round: 2 2024-11-21T09:19:33.919420Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 1 2024-11-21T09:19:33.919425Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:19:33.919437Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2024-11-21T09:19:33.919442Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2024-11-21T09:19:33.919447Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2024-11-21T09:19:33.919452Z node 2 
:STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:19:33.919470Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:33.919652Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:19:33.919778Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:33.919788Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:33.919889Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:19:33.919915Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:33.919920Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:33.920047Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:33.962701Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:33.962748Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:33.962884Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7600:5692], server id = [2:7601:5693], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:33.962912Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7600:5692], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:33.963053Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:33.963060Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:33.963099Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:33.963122Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:33.963178Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:33.963215Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7600:5692], server id = [2:7601:5693], tablet id = 72075186224037899 2024-11-21T09:19:33.963218Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:33.963636Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:33.966535Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7618:5710]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:33.966579Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:33.966583Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7618:5710], StatRequests.size() = 1 2024-11-21T09:19:34.008857Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDJiMDhhNjgtNzcxZDE1MDctM2I2ZTcyNDAtZDQ3YmVhYzM=, TxId: 2024-11-21T09:19:34.008883Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDJiMDhhNjgtNzcxZDE1MDctM2I2ZTcyNDAtZDQ3YmVhYzM=, TxId: 2024-11-21T09:19:34.009014Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:34.019959Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7630:5716] 2024-11-21T09:19:34.019994Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7554:5664], server id = [2:7630:5716], tablet id = 72075186224037897, status = OK 2024-11-21T09:19:34.020015Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7630:5716], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:34.020052Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7631:5717] 2024-11-21T09:19:34.020069Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7631:5717], schemeshard id = 72075186224037889 2024-11-21T09:19:34.031068Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:34.031086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:34.199093Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7639:5722] 2024-11-21T09:19:34.199360Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2674:3169] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:34.199370Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:2674:3169] 2024-11-21T09:19:34.199391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:34.745486Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:34.745514Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:35.448115Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:35.448145Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:35.448149Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:36.858821Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:36.858882Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:36.858887Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:36.859048Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:36.870132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:36.870240Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:36.870254Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:36.870375Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:36.881374Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:36.881427Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2024-11-21T09:19:36.881563Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7722:5769], server id = [2:7723:5770], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:36.881593Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7722:5769], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:36.881703Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:36.881709Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:36.881745Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:36.881765Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:36.881809Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:36.881853Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7722:5769], server id = [2:7723:5770], tablet id = 72075186224037899 2024-11-21T09:19:36.881856Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:36.882314Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:36.895147Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OThmNjE0MDItOTVhZmQ0NC03ZjVhMzhmMS1jODcwNTZiMw==, TxId: 2024-11-21T09:19:36.895173Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OThmNjE0MDItOTVhZmQ0NC03ZjVhMzhmMS1jODcwNTZiMw==, TxId: 2024-11-21T09:19:36.895305Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:36.906686Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:36.906707Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2674:3169] >> KqpScanArrowFormat::SingleKey >> test.py::test[json-jsonpath/unicode-default.txt-Debug] [GOOD] >> test.py::test[json-jsonpath/unicode-default.txt-Plan] [GOOD] >> test.py::test[json-jsonpath/unicode-default.txt-Results] >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Plan] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Debug] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] >> test.py::test[table_range-concat_sorted_max_tables--Analyze] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Debug] >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Debug] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-ForceBlocks] >> test.py::test[json-jsonpath/unicode-default.txt-Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Debug] >> test.py::test[window-yql-14479-default.txt-Debug] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 22983, MsgBus: 24265 2024-11-21T09:19:35.939845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659782947885908:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:35.939895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004837/r3tmp/tmpY81eTx/pdisk_1.dat 2024-11-21T09:19:36.005415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22983, node 1 2024-11-21T09:19:36.039090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:36.039110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:36.040175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:36.050864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:36.050874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:36.050876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:36.050912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24265 TClient is connected to server localhost:24265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:36.139086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:36.149702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:36.217828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:36.227527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:36.235987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:36.264451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659787242854522:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:36.264473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:36.384188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.389114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.396347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.403514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.458471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.466763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.478762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659787242855038:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:36.478774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659787242855043:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:36.478782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:36.481449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:36.487077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659787242855045:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:19:36.726920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:19:36.836513Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180776865, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 31351, MsgBus: 11687 2024-11-21T09:19:36.984454Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659784811187763:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:36.984589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004837/r3tmp/tmpfa4j4X/pdisk_1.dat 2024-11-21T09:19:36.991934Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31351, node 2 2024-11-21T09:19:37.000373Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:37.000380Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:37.000381Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:37.000398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11687 TClient is connected to server localhost:11687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:37.086094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:37.086117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:37.086414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.087123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:37.092441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:37.099496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.112870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.126276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.203030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659789106156600:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.203056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.207388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.212661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.222280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.229950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.236948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.243399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.252010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659789106157091:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.252029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.252060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659789106157096:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.252475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:37.257053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659789106157098:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:37.433104Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180777474, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 22993, MsgBus: 7078 2024-11-21T09:19:37.650093Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659790119411133:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:37.650243Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004837/r3tmp/tmpbjtxnJ/pdisk_1.dat 2024-11-21T09:19:37.658047Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22993, node 3 2024-11-21T09:19:37.667445Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:37.667461Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:37.667462Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:37.667496Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7078 TClient is connected to server localhost:7078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:37.751675Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:37.751703Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:37.752418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.752711Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:19:37.753097Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:19:37.760383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:37.767007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.779699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.791451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.883377Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659790119412665:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.883401Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.888151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.943521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.998072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.006873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.013552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.020935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.029509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659794414380478:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.029537Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.029559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659794414380483:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.030283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:38.034162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659794414380485:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:38.216564Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180778258, txId: 281474976715671] shutting down 2024-11-21T09:19:38.233892Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180778279, txId: 281474976715673] shutting down |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-bush_in_in--Debug] [GOOD] >> test.py::test[join-bush_in_in--ForceBlocks] >> test.py::test[join-join_comp_common_table--Debug] [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] |95.3%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Debug] [GOOD] >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-ForceBlocks] >> test.py::test[insert-after_group_by-default.txt-Debug] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Analyze] >> KqpScanArrowFormat::AggregateNoColumn >> test.py::test[insert-after_group_by-default.txt-Plan] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Results] >> test.py::test[agg_phases_agg_apply-min-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-Analyze] >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[table_range-concat_sorted_max_tables--Debug] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Plan] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] >> test.py::test[csee-const_body_same_lambda-default.txt-Analyze] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Debug] >> test.py::test[join-bush_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in--Plan] [GOOD] >> test.py::test[join-bush_in_in--Results] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[window-yql-14479-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14479-default.txt-Plan] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test.py::test[insert-after_group_by-default.txt-Results] [GOOD] >> test.py::test[insert-append--Debug] >> test.py::test[aggr_factory-container_empty-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-Debug] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Results] >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> DemoTx::Scenario_3 [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> test.py::test[pg-tpch-q01-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Plan] [GOOD] >> 
test.py::test[pg-tpch-q01-default.txt-Results] >> test.py::test[pg-select_win_expr_lead-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Results] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--Analyze] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail--Debug] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail--Plan] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] >> DemoTx::Scenario_4 >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Debug] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-ForceBlocks] >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_common_table--Plan] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> test.py::test[insert-append--Debug] [GOOD] >> test.py::test[insert-append--Plan] [GOOD] >> test.py::test[insert-append--Results] >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Analyze] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] [GOOD] >> test.py::test[pg-select_win_expr_lead-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Analyze] >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> TDataShardTrace::TestTraceDistributedSelectViaReadActors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 26838, MsgBus: 27902 2024-11-21T09:19:37.225406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659790454630708:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:37.225511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004834/r3tmp/tmpeEcG5K/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26838, node 1 2024-11-21T09:19:37.271716Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:37.275301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:37.275312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:37.275314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:37.275356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27902 TClient is connected to server localhost:27902 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:37.314355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.317213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.326478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:37.326499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:37.327632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:37.375355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.392972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.405595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.443662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659790454632256:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.443685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.465780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.471244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.482142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.536698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.591061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.600827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:37.609188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659790454632774:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.609211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.609258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659790454632779:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:37.609821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:37.614340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659790454632781:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:19:37.766906Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659790454633104:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70a6rb0mjmz8dr27t3vakg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhMmMwNWItOWNhZDgzMmQtOGY2NjY2Ni0zYjQ0MzNjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:19:37.920890Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659790454633165:2454] TxId: 281474976710673. Ctx: { TraceId: 01jd70a6rb0mjmz8dr27t3vakg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhMmMwNWItOWNhZDgzMmQtOGY2NjY2Ni0zYjQ0MzNjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:19:37.921692Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180777810, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12598, MsgBus: 20109 2024-11-21T09:19:38.094190Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659792581546406:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:38.094205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004834/r3tmp/tmpi1rKjZ/pdisk_1.dat 2024-11-21T09:19:38.101347Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12598, node 2 2024-11-21T09:19:38.110649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:38.110663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:38.110665Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:38.110703Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20109 TClient is connected to server localhost:20109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:38.194382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:38.194418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:38.195441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:38.196106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.205003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.212080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.226249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.239475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... roposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.889684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.898108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.913424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.924961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.056472Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659799692960238:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.056496Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.060474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.066341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.078431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.085403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.091816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.099052Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.108328Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659799692960753:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.108356Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.108357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659799692960758:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.109060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:39.112129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659799692960760:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:39.620300Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180779371, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 14317, MsgBus: 30995 2024-11-21T09:19:39.873951Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659798846657873:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:39.874096Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004834/r3tmp/tmpkohakI/pdisk_1.dat 2024-11-21T09:19:39.880737Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14317, node 4 2024-11-21T09:19:39.888883Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:39.888895Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:39.888897Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:39.888940Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30995 TClient is connected to server localhost:30995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:39.974122Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:39.974163Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:39.975188Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:39.975951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.984286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.992034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:40.008002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.016728Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.144465Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659803141626699:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.144488Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.148322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.153671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.162801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.170083Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.176813Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.183947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.192533Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659803141627203:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.192553Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.192581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659803141627208:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.193178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:40.196946Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439659803141627210:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:40.417401Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180780414, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2024-11-21T09:17:08.670447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:08.670486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:08.670495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022ec/r3tmp/tmpOBgz6Y/pdisk_1.dat 2024-11-21T09:17:08.744376Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29924, node 1 2024-11-21T09:17:08.837014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:08.837030Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:08.837034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:08.837120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:08.843510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.918645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.918680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.929915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7283 2024-11-21T09:17:09.334109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:10.093896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:10.093922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:10.126861Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:10.127582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:10.178996Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:10.188453Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:10.188477Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:10.194857Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:10.194963Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:10.194976Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:10.194980Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:10.194983Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:10.194987Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:10.194990Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:10.194994Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:10.195073Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:10.370549Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:10.370579Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:10.371459Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:10.373136Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:10.373253Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:10.374054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:10.378476Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:10.378489Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:10.378497Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:10.380736Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:10.380771Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:10.381953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:10.383297Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:10.383324Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:10.385844Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:10.397575Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:10.419502Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:10.533976Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:10.689621Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:11.418589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.418629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:11.422003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:11.465872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:11.465931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:11.465977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:11.465998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:11.466017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:11.466040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:11.466061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:11.466081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:11.466100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:11.466118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:11.466139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:11.466158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:11.472951Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:11.472984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:11.473038Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:11.473060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:11.473082Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:11.473101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... [72075186224037897] Subscribed for config changes 2024-11-21T09:19:39.903647Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:19:39.903705Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:39.903712Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:39.903825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:39.903834Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:39.904054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:19:39.947396Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:39.947437Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:39.947585Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8467:6344], server id = [2:8472:6349], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:39.947618Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8467:6344], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.947795Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:39.947844Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8468:6345], server id = [2:8473:6350], tablet id = 72075186224037900, status = OK 2024-11-21T09:19:39.947851Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8468:6345], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.947880Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8469:6346], server id = [2:8474:6351], tablet id = 72075186224037901, status = OK 2024-11-21T09:19:39.947884Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8469:6346], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.947895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8467:6344], server id = [2:8472:6349], tablet id = 72075186224037899 2024-11-21T09:19:39.947898Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.947974Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8470:6347], server id = [2:8475:6352], tablet id = 72075186224037902, status = OK 2024-11-21T09:19:39.947983Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8470:6347], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948043Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8471:6348], server id = [2:8477:6354], tablet id = 72075186224037903, status = OK 2024-11-21T09:19:39.948050Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8471:6348], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948066Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:19:39.948141Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T09:19:39.948190Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T09:19:39.948254Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8476:6353], server id = [2:8478:6355], tablet id = 72075186224037904, status = OK 2024-11-21T09:19:39.948263Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8476:6353], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948302Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T09:19:39.948356Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8468:6345], server id = [2:8473:6350], tablet id = 72075186224037900 2024-11-21T09:19:39.948359Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.948395Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8469:6346], server id = [2:8474:6351], tablet id = 72075186224037901 2024-11-21T09:19:39.948399Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.948412Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8470:6347], server id = [2:8475:6352], tablet id = 72075186224037902 2024-11-21T09:19:39.948415Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.948438Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8479:6356], server id = [2:8482:6359], tablet id = 72075186224037905, status = OK 2024-11-21T09:19:39.948444Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8479:6356], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948467Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T09:19:39.948519Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8471:6348], server id = [2:8477:6354], tablet id = 72075186224037903 2024-11-21T09:19:39.948523Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.948542Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8480:6357], server id = [2:8483:6360], tablet id = 72075186224037906, status = OK 2024-11-21T09:19:39.948550Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8480:6357], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948569Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8481:6358], server id = [2:8485:6362], tablet id = 72075186224037907, status = OK 2024-11-21T09:19:39.948575Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8481:6358], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948600Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8484:6361], server id = [2:8486:6363], tablet id = 72075186224037908, status = OK 2024-11-21T09:19:39.948606Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8484:6361], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.948692Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:19:39.948763Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8476:6353], server id = [2:8478:6355], tablet id = 72075186224037904 2024-11-21T09:19:39.948767Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.948783Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T09:19:39.948797Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T09:19:39.948824Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T09:19:39.948830Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:39.948865Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:39.948885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:39.948933Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:39.948962Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8479:6356], server id = [2:8482:6359], tablet id = 72075186224037905 2024-11-21T09:19:39.948964Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.949403Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8480:6357], server id = [2:8483:6360], tablet id = 72075186224037906 2024-11-21T09:19:39.949411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.949447Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:39.949504Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8481:6358], server id = [2:8485:6362], tablet id = 72075186224037907 2024-11-21T09:19:39.949506Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.949600Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8484:6361], server id = [2:8486:6363], tablet id = 72075186224037908 2024-11-21T09:19:39.949606Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.952773Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8503:6380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:39.952819Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:39.952824Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8503:6380], StatRequests.size() = 1 2024-11-21T09:19:39.974985Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2VkODBiNjEtNmIxYjYwZmItNTJlNDU0ZmQtNGVkMzRjMDc=, TxId: 2024-11-21T09:19:39.975011Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2VkODBiNjEtNmIxYjYwZmItNTJlNDU0ZmQtNGVkMzRjMDc=, TxId: 2024-11-21T09:19:39.975465Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:39.986288Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8513:6386] 2024-11-21T09:19:39.986353Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8513:6386], schemeshard id = 72075186224037889 2024-11-21T09:19:39.986367Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8420:6317], server id = [2:8514:6387], tablet id = 72075186224037897, status = OK 2024-11-21T09:19:39.986380Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8514:6387] 2024-11-21T09:19:39.986398Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8514:6387], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:39.997905Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:39.997930Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T09:19:40.104081Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8519:6390]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:40.104241Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:19:40.104249Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:19:40.104991Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:19:40.105009Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:19:40.105018Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:19:40.106723Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> test.py::test[insert-yql-13083-existig-Analyze] >> test.py::test[aggr_factory-container_empty-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-ForceBlocks] >> TDataShardTrace::TestTraceWriteImmediateOnShard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 2674, MsgBus: 23176 2024-11-21T09:19:37.781224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659791661676241:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:37.781239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004831/r3tmp/tmpua9xd0/pdisk_1.dat 2024-11-21T09:19:37.819874Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2674, node 1 2024-11-21T09:19:37.833249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:37.833268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:37.833271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:37.833309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23176 TClient is connected to server localhost:23176 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:37.881723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:37.881747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:37.882823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:37.905650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.907388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:19:37.909290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.970001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.986910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:37.995784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.015506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659795956645075:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.015533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.035859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.040712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.049105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.055745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.063098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.069925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.078009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659795956645568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.078020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659795956645573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.078030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:38.078568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:38.082963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659795956645575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:38.264248Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659795956645900:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70a78b71amcxafvsbsewkp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNjZmU3YzMtZWI2OGFjOWQtOWQ4ODc4YjUtNDJkZmQ5YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:19:38.265380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180778307, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 24248, MsgBus: 64446 2024-11-21T09:19:38.473615Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659792589616249:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:38.473631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004831/r3tmp/tmpPtZdhT/pdisk_1.dat 2024-11-21T09:19:38.484137Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24248, node 2 2024-11-21T09:19:38.490641Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:38.490651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:38.490653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:38.490679Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64446 TClient is connected to server localhost:64446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:38.573928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:38.573957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:38.575084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:38.575646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:38.578326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.588417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.603268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.616424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:38.682376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor ... ... 2024-11-21T09:19:39.257884Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:19:39.266525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.273398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.287369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.295343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.386615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659800157539864:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.386636Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.392148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.397005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.407109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.461488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.469715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.476774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:39.484969Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659800157540380:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.484987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.484991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659800157540385:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:39.485467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:39.489871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659800157540387:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:39.721880Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180779700, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 8404, MsgBus: 26530 2024-11-21T09:19:39.867023Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659798321926852:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:39.867041Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004831/r3tmp/tmpSd1NzS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8404, node 4 2024-11-21T09:19:39.883807Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:39.883896Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:39.883904Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:39.883906Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:39.883933Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26530 TClient is connected to server localhost:26530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:39.967140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:39.967165Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:39.968255Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:39.969445Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.979214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:39.987610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:40.003027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.011147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.122561Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659802616895672:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.122580Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.128588Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.134535Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.141862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.148298Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.155662Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.162859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.171450Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659802616896175:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.171473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.171532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659802616896180:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.172058Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:40.176222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439659802616896182:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:40.425704Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180780372, txId: 281474976715671] shutting down >> test.py::test[join-bush_in_in--Results] [GOOD] >> test.py::test[join-grace_join1-map-Analyze] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> test.py::test[csee-const_body_same_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Plan] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Results] >> test.py::test[insert-append--Results] [GOOD] >> test.py::test[insert-append-with_read_udf_fail-Debug] [SKIPPED] >> test.py::test[insert-append-with_read_udf_fail-Plan] [SKIPPED] >> test.py::test[insert-append-with_read_udf_fail-Results] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget >> test.py::test[produce-reduce_multi_in-empty-Analyze] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Debug] |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Debug] >> test.py::test[insert-yql-13083-existig-Analyze] [GOOD] >> test.py::test[insert-yql-13083-existig-Debug] >> test.py::test[csee-const_body_same_lambda-default.txt-Results] [GOOD] >> test.py::test[datetime-current_date-default.txt-Analyze] >> TDataShardTrace::TestTraceDistributedSelect >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> test.py::test[join-grace_join1-map-Analyze] [GOOD] >> test.py::test[join-grace_join1-map-Debug] >> test.py::test[insert-append-with_read_udf_fail-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Debug] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [21/Nov/2024 09:19:20] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[aggr_factory-container_empty-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-Results] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Results] >> test.py::test[datetime-current_date-default.txt-Analyze] [GOOD] >> test.py::test[datetime-current_date-default.txt-Debug] >> test.py::test[pg-select_win_rank_order-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-ForceBlocks] >> test.py::test[join-grace_join1-map-Debug] [GOOD] >> test.py::test[join-grace_join1-map-ForceBlocks] [SKIPPED] >> test.py::test[join-grace_join1-map-Plan] [GOOD] >> test.py::test[join-grace_join1-map-Results] [GOOD] >> test.py::test[join-inmem_by_uncomparable_structs-off-Analyze] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> 
KqpScanArrowInChanels::AggregateWithFunction |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[produce-reduce_multi_in-empty-Analyze] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[pg_catalog-pg_auth_members-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Debug] >> AnalyzeColumnshard::Analyze [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Debug] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Plan] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test.py::test[insert-yql-13083-existig-Debug] [GOOD] >> test.py::test[insert-yql-13083-existig-ForceBlocks] >> test.py::test[datetime-current_date-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-container_empty-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2024-11-21T09:17:04.945500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:04.945557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:04.945570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025c6/r3tmp/tmpOfiUxh/pdisk_1.dat 2024-11-21T09:17:05.025734Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63432, node 1 2024-11-21T09:17:05.121327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:05.121352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:05.121357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:05.121434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:05.127113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.204427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.204469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.216170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8865 2024-11-21T09:17:05.616479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.383185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.383211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.416002Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:06.416795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.464388Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:06.472000Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:06.472024Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:06.477147Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:06.477273Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:06.477288Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:06.477292Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:06.477296Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:06.477300Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:06.477304Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:06.477309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:06.477395Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:06.652045Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.652082Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.653075Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:06.654650Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:06.654739Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:06.655254Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:06.659280Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:06.659303Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:06.659314Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:06.661326Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.661353Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.662570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:06.664059Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:06.664087Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:06.666642Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:06.678823Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.701153Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:06.812200Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:06.967848Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:07.697376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.697417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.704782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:07.731372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:07.731441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:07.731498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:07.731521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:07.731539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:07.731563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:07.731581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:07.731600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:07.731620Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:07.731637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:07.731660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:07.731678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:07.737962Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:07.737997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:07.738027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:07.738036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:07.738059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:07.738067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:07.738081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... 75186224037897, status = OK 2024-11-21T09:19:36.945653Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7421:5586], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:19:36.945660Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:19:36.945677Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:36.945687Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7418:5583], StatRequests.size() = 1 2024-11-21T09:19:36.959766Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODZmYmRlMGEtNjk4ZWMxZDgtNTI1YTgzMTEtZTIxNTMzOTA=, TxId: 2024-11-21T09:19:36.959788Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODZmYmRlMGEtNjk4ZWMxZDgtNTI1YTgzMTEtZTIxNTMzOTA=, TxId: 2024-11-21T09:19:36.959901Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:36.970913Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:19:36.970929Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:37.063243Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:19:37.063271Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:19:37.125128Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7420:5585], schemeshard count = 1 2024-11-21T09:19:38.149514Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:38.149549Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:38.150275Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:38.161850Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:38.162025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:38.162034Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:38.173157Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:39.520412Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:39.520443Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:39.520449Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:39.520479Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:39.520485Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:39.520726Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:39.531918Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:39.531962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:39.532123Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:39.532146Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:39.532333Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:39.532365Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:39.532526Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:39.554417Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:39.554449Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:39.554503Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:39.554686Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7559:5667], server id = [2:7560:5668], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:39.554717Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7559:5667], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:39.554906Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:39.554916Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:39.554954Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:39.554986Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:39.555059Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:39.555108Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7559:5667], server id = [2:7560:5668], tablet id = 72075186224037899 2024-11-21T09:19:39.555113Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:39.555740Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:39.559462Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7577:5685]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:39.559525Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:39.559532Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7577:5685], StatRequests.size() = 1 2024-11-21T09:19:39.586254Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjhlZjFhNzEtOWM5ZjFjZGQtNWE2ZTZjMTUtOTBiMzUyZQ==, TxId: 2024-11-21T09:19:39.586284Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjhlZjFhNzEtOWM5ZjFjZGQtNWE2ZTZjMTUtOTBiMzUyZQ==, TxId: 2024-11-21T09:19:39.586449Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:39.597842Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:39.597866Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:40.290337Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:40.290362Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:40.951382Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:40.951410Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:40.951414Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:42.323599Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:42.323653Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:42.344155Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:42.344221Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:42.344225Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.344388Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:42.356092Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:42.356367Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:42.356400Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:42.356695Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:42.367733Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:42.367782Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:42.367904Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7672:5739], server id = [2:7673:5740], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:42.367931Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7672:5739], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:42.368065Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:42.368075Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:42.368100Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:42.368118Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:42.368161Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:42.368193Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7672:5739], server id = [2:7673:5740], tablet id = 72075186224037899 2024-11-21T09:19:42.368196Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:42.368716Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:42.381386Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzI5NWEwNi1hNzk5NWYxZi03NWY0MTlmOC01NzU5MTlhYw==, TxId: 2024-11-21T09:19:42.381410Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzI5NWEwNi1hNzk5NWYxZi03NWY0MTlmOC01NzU5MTlhYw==, TxId: 2024-11-21T09:19:42.381628Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:42.393003Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.393025Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2672:3169] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[datetime-current_date-default.txt-ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-Analyze] >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> test.py::test[join-inmem_by_uncomparable_structs-off-Analyze] [GOOD] >> test.py::test[join-inmem_by_uncomparable_structs-off-Debug] >> test.py::test[pg-select_win_rank_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Results] >> test.py::test[pg_catalog-pg_proc-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_proc-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Debug] >> test.py::test[key_filter-datetime-default.txt-Debug] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Plan] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] >> test.py::test[datetime-current_date-default.txt-ForceBlocks] [GOOD] >> test.py::test[datetime-current_date-default.txt-Plan] [GOOD] >> test.py::test[datetime-current_date-default.txt-Results] >> test.py::test[aggr_factory-count-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Debug] >> TPersQueueTest::BadSids [GOOD] >> TPersQueueTest::Cache >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> test.py::test[pg-select_win_rank_order-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Analyze] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-inmem_by_uncomparable_structs-off-Debug] [GOOD] >> test.py::test[join-inmem_by_uncomparable_structs-off-ForceBlocks] [SKIPPED] >> test.py::test[join-inmem_by_uncomparable_structs-off-Plan] [GOOD] >> test.py::test[join-inmem_by_uncomparable_structs-off-Results] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples-off-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 5476, MsgBus: 63199 2024-11-21T09:19:40.633481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659804839921125:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:40.633500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004828/r3tmp/tmpGdRWGL/pdisk_1.dat 2024-11-21T09:19:40.672782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5476, node 1 2024-11-21T09:19:40.680756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:40.680772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:40.680773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:40.680799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63199 TClient 
is connected to server localhost:63199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:40.733786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:40.733809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:40.734916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:40.757716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.763709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.824027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.837503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.849912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:40.873428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659804839922663:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.873455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.894312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.900183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.911813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.918877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.925433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.932634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:40.941428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659804839923156:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.941438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659804839923161:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.941455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:40.942089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:40.945820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659804839923163:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:41.444817Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180781219, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 1858, MsgBus: 23025 2024-11-21T09:19:41.686692Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659808654392002:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:41.686823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004828/r3tmp/tmp43kB4G/pdisk_1.dat 2024-11-21T09:19:41.692491Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1858, node 2 2024-11-21T09:19:41.701641Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:41.701650Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:41.701652Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:41.701668Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23025 TClient is connected to server localhost:23025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:41.786992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:41.787029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:41.788112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:41.788261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:41.788995Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:19:41.793186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:19:41.801192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:19:41.816414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:41.829284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:41.941637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659808654393528:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:41.941653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool de ... t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:42.677558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:42.684452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:42.698813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:42.712687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:42.790867Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659812497166916:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:42.790890Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:42.795757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.801085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.808600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.815853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.822394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.829680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.838551Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659812497167408:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:42.838575Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:42.838614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439659812497167413:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:42.839098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:42.843196Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439659812497167415:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:43.099317Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180783074, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 27041, MsgBus: 1068 2024-11-21T09:19:43.243652Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659815198646300:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:43.243816Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004828/r3tmp/tmpVEp9fS/pdisk_1.dat 2024-11-21T09:19:43.250910Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27041, node 4 2024-11-21T09:19:43.260120Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:43.260133Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:43.260135Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:43.260164Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1068 TClient is connected to server localhost:1068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:43.345545Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:43.345570Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:43.345942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:43.346630Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:43.354898Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:43.363356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:43.381567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:43.391159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:43.494597Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659815198647835:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:43.494629Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:43.498723Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.504639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.515853Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.522864Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.529623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.536799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:43.545494Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659815198648331:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:43.545520Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439659815198648336:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:43.545523Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:43.545965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:43.549753Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439659815198648338:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:43.757589Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180783753, txId: 281474976715671] shutting down |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> test.py::test[datetime-current_date-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2024-11-21T09:19:41.140313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:19:41.140887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:41.140914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004697/r3tmp/tmpXoled6/pdisk_1.dat 2024-11-21T09:19:41.261625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:19:41.279223Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:41.321064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:41.321087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:41.331477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:41.436295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 |95.4%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[insert-yql-13083-existig-ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083-existig-Plan] [GOOD] >> test.py::test[insert-yql-13083-existig-Results] |95.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[pg_catalog-pg_proc-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_set_config_commit-default.txt-Debug] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] >> test.py::test[join-nested_semi_join-off-Analyze] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Debug] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Plan] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> test.py::test[pg-sublink_having_exists-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Debug] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> test.py::test[join-inmem_by_uncomparable_tuples-off-Analyze] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples-off-Debug] >> test.py::test[aggr_factory-count-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2024-11-21T09:19:41.128628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:19:41.129127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:41.129178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004699/r3tmp/tmpSyZt15/pdisk_1.dat 2024-11-21T09:19:41.261755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:19:41.278907Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:41.321013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:41.321039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:41.331479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:41.436225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:44.082821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.082848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.082862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.083743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:19:44.096086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T09:19:44.291121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:19:44.475495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70acyj7ppzy6wtmvzak1s9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE5MjlkNzMtYjUzZmE3YzUtZWEwMmRhOWUtMjBhZjIxYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:19:44.502595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70adbefzs9mfd2w0jp15nz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdjYjk1MWYtMWMzZmUxNDUtNDQ1YTI2YTktZTg4NDE1NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:19:44.544803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70adc2fz9rqcgdz062pzkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY5OTQ2MmQtZDZjODQ2NWItZDc2ODE5NDMtMWM3Nzc1NWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--Analyze] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> test.py::test[datetime-date_tz_arithmetic-default.txt-Analyze] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Debug] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> test.py::test[join-nested_semi_join-off-Analyze] [GOOD] >> test.py::test[join-nested_semi_join-off-Debug] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] [GOOD] >> test.py::test[insert-override-with_read_udf-Debug] >> test.py::test[pg_catalog-pg_set_config_commit-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_set_config_commit-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_set_config_commit-default.txt-Results] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> test.py::test[pg-sublink_having_exists-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T09:19:44.700497Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.700506Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.700511Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.700643Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:44.703895Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:44.703913Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704180Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704183Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704185Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.704320Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.704407Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:44.704416Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704594Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704596Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704598Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.704667Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:19:44.704679Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704681Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.704858Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T09:19:44.705028Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705030Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705032Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.705093Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:44.705102Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705104Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705110Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T09:19:44.705285Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705288Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:44.705290Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.705340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.705461Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.706955Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:44.707023Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.708009Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T09:19:44.708423Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T09:19:44.708464Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.708470Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:44.708473Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:44.708476Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T09:19:44.708480Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T09:19:44.708483Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T09:19:44.708486Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T09:19:44.708489Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T09:19:44.708500Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T09:19:44.708503Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T09:19:44.708505Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T09:19:44.708508Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T09:19:44.708510Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T09:19:44.708511Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T09:19:44.708513Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T09:19:44.708515Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T09:19:44.708533Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T09:19:44.708535Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T09:19:44.708537Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T09:19:44.708539Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T09:19:44.708540Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T09:19:44.708542Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T09:19:44.708544Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T09:19:44.708546Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T09:19:44.708547Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T09:19:44.708549Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T09:19:44.708551Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T09:19:44.708552Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T09:19:44.708554Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T09:19:44.708556Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T09:19:44.708558Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T09:19:44.708561Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T09:19:44.708569Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T09:19:44.708572Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T09:19:44.708575Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T09:19:44.708577Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T09:19:44.708581Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T09:19:44.708584Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T09:19:44.708586Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T09:19:44.708589Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T09:19:44.708592Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T09:19:44.708594Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T09:19:44.708597Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T09:19:44.708600Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T09:19:44.708603Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T09:19:44.708608Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T09:19:44.708611Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T09:19:44.708613Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T09:19:44.708616Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T09:19:44.708619Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T09:19:44.708627Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:19:44.708707Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T09:19:44.708742Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T09:19:44.708745Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T09:19:44.708749Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T09:19:44.708751Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T09:19:44.708754Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T09:19:44.708757Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T09:19:44.708759Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T09:19:44.708762Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T09:19:44.708766Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T09:19:44.708768Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T09:19:44.708770Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T09:19:44.708773Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T09:19:44.708776Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T09:19:44.708778Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T09:19:44.708780Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T09:19:44.708783Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T09:19:44.708787Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T09:19:44.708789Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T09:19:44.708792Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T09:19:44.708794Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T09:19:44.708797Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T09:19:44.708799Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T09:19:44.708802Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T09:19:44.708805Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T09:19:44.708807Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T09:19:44.708810Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T09:19:44.708813Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T09:19:44.708815Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T09:19:44.708818Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T09:19:44.708820Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T09:19:44.708823Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T09:19:44.708826Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T09:19:44.708832Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T09:19:44.708836Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T09:19:44.708838Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T09:19:44.708841Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T09:19:44.708844Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T09:19:44.708846Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T09:19:44.708849Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T09:19:44.708852Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T09:19:44.708855Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T09:19:44.708858Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T09:19:44.708861Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T09:19:44.708864Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T09:19:44.708866Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T09:19:44.708869Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T09:19:44.708872Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T09:19:44.708875Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T09:19:44.708877Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T09:19:44.708880Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T09:19:44.708885Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:19:44.708907Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:44.709134Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.709137Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.709139Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.709190Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:44.709283Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.709316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.709394Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.809636Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.809713Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:44.809739Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.809745Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:44.809771Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:45.010051Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:45.111287Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:45.111350Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:45.111402Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:45.111702Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.111705Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.111707Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.111757Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.111841Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.111882Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.111944Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:45.212147Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.212541Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:45.212563Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.212569Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:45.212600Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:45.212629Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:45.212670Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:45.212701Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T09:19:45.212732Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_without_column-off-Analyze] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> test.py::test[pg_catalog-pg_set_config_commit-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--Analyze] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2024-11-21T09:19:41.954361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:19:41.954692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:41.954707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004690/r3tmp/tmpdkohY8/pdisk_1.dat 2024-11-21T09:19:42.045990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:19:42.061594Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:42.103771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:42.103804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:42.114251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:42.217847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:44.840417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.840444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.840454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:44.841065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:19:44.852852Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T09:19:45.048704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:19:45.110258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70adp89ecada9j2t034mfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRhZGQ4YmItNDk1NTA2MjAtY2YzYTRlMWYtZWEyZTdkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:19:45.132592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70adz33egespxz7t9p3e9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2UxZmRiNGMtMTgxOTJhODktNzdiODdiM2MtN2FmZDZkYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:19:45.325125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70ae3kbgq1sfefhbb6cx9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNkY2VjOS1mMGFjZDNmZC0xNWMxOTFmZS05NjViYmY2OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T09:19:45.299991Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.299997Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.299999Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.300116Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:19:45.300130Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.300133Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.300644Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007794s 2024-11-21T09:19:45.300781Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.300943Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:45.300965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301183Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301187Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301190Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.301253Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:19:45.301263Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301265Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301280Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008919s 2024-11-21T09:19:45.301358Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:45.301421Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:45.301429Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301652Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301656Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301658Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.301828Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:45.301834Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301836Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.301848Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.184783s 2024-11-21T09:19:45.301911Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.301943Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:45.301950Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302099Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302101Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302103Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.302144Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:45.302148Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302150Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302157Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.267962s 2024-11-21T09:19:45.302199Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.302239Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:45.302251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302448Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302450Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.302452Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.302486Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.302534Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.303525Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.303594Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T09:19:45.303600Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.303602Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.303615Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.168003s 2024-11-21T09:19:45.303689Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:45.303917Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.303919Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.303921Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.303956Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.304025Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.304053Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.304119Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:45.404468Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.404548Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:45.404571Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.404577Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:45.404598Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:45.504761Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:45.504838Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:45.505156Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.505160Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.505162Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.505219Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.505314Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.505361Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.505426Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:45.605659Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.605729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:45.605750Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.605755Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T09:19:45.605777Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:45.605807Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:45.605847Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:45.605865Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:45.605892Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> test.py::test[join-inmem_by_uncomparable_tuples-off-Debug] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples-off-ForceBlocks] [SKIPPED] >> test.py::test[join-inmem_by_uncomparable_tuples-off-Plan] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples-off-Results] [GOOD] >> test.py::test[join-inmem_with_set_key-off-Analyze] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> DemoTx::Scenario_4 [GOOD] >> test.py::test[pg_catalog-pg_shdescription-default.txt-Debug] >> test.py::test[insert_monotonic-from_empty--Debug] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-ForceBlocks] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> test.py::test[insert-override-with_read_udf-Debug] [GOOD] >> test.py::test[insert-override-with_read_udf-Plan] [GOOD] >> test.py::test[insert-override-with_read_udf-Results] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime |95.4%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-sublink_having_exists-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Results] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] >> test.py::test[join-nested_semi_join-off-Debug] [GOOD] >> test.py::test[join-nested_semi_join-off-ForceBlocks] [SKIPPED] >> test.py::test[join-nested_semi_join-off-Plan] [GOOD] >> test.py::test[join-nested_semi_join-off-Results] [GOOD] >> test.py::test[join-order_of_qualified--Analyze] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> DemoTx::Scenario_5 >> test.py::test[join-join_without_column-off-Analyze] [GOOD] >> test.py::test[join-join_without_column-off-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2024-11-21T09:17:05.398251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:05.398309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:05.398322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025a2/r3tmp/tmp77CLcd/pdisk_1.dat 2024-11-21T09:17:05.482595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1936, node 1 2024-11-21T09:17:05.576640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:05.576656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:05.576660Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:05.576714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:05.581581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.656706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.656743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.668457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8796 2024-11-21T09:17:06.069707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.860504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.860535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.893380Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:06.894100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.947397Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:06.956598Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:06.956620Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:06.962432Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:06.962565Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:06.962585Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:06.962591Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:06.962594Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:06.962598Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:06.962602Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:06.962606Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:06.962680Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.137876Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.137917Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.139165Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:07.141420Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:07.141549Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:07.142216Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:07.147361Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.147382Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.147394Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:07.149407Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.149438Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.150940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.152184Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.152227Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.154598Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.166352Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.188301Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.308334Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:07.432736Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:08.184569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.184610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.187391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.215551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:08.215619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:08.215672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:08.215704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:08.215730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:08.215755Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:08.215782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:08.215809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:08.215836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:08.215867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:08.215891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:08.215914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:08.220865Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:08.220888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:08.220905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:08.220911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:08.220923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:08.220928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:08.220937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId ... 75186224037897, status = OK 2024-11-21T09:19:40.143938Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7429:5589], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:19:40.143943Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:19:40.143957Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:40.143966Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7426:5586], StatRequests.size() = 1 2024-11-21T09:19:40.157151Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjI0Y2Y5NTEtOTI1ZDExNWYtMTgzMjE3N2QtNjEwYjNhMWI=, TxId: 2024-11-21T09:19:40.157172Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjI0Y2Y5NTEtOTI1ZDExNWYtMTgzMjE3N2QtNjEwYjNhMWI=, TxId: 2024-11-21T09:19:40.157267Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:40.178773Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:19:40.178792Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:40.240367Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:19:40.240389Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:19:40.312500Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7428:5588], schemeshard count = 1 2024-11-21T09:19:41.308466Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:41.308499Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:41.309347Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:41.321239Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:41.321386Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:41.321397Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:41.332452Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:42.680847Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:42.680868Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:42.680872Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:42.680877Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:42.680880Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.681073Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:42.693716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:42.693751Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:42.693933Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:42.693947Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:42.694116Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:42.694143Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:42.694274Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:42.705404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:42.705423Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:42.705458Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:42.705587Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7567:5670], server id = [2:7568:5671], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:42.705611Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7567:5670], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:42.705808Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:42.705816Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:42.705838Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:42.705858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:42.705928Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:42.705968Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7567:5670], server id = [2:7568:5671], tablet id = 72075186224037899 2024-11-21T09:19:42.705971Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:42.706593Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:42.709563Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7585:5688]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:42.709609Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:42.709616Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7585:5688], StatRequests.size() = 1 2024-11-21T09:19:42.741730Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjQ3YjQxNjktMWQ3NmYxMGEtZGVlOTIxMzMtZjlmYTg1M2U=, TxId: 2024-11-21T09:19:42.741749Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjQ3YjQxNjktMWQ3NmYxMGEtZGVlOTIxMzMtZjlmYTg1M2U=, TxId: 2024-11-21T09:19:42.741861Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:42.752835Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.752846Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:43.430939Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:43.430965Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:44.099954Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:44.099980Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:44.099984Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:45.480317Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:45.480376Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:45.501014Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:45.501065Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:45.501070Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.501225Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:45.512416Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:45.512514Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:45.512526Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:45.512631Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:45.523681Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:45.523739Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:45.523857Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7680:5742], server id = [2:7681:5743], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:45.523879Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7680:5742], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:45.523980Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:45.523989Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:45.524016Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:45.524039Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:45.524095Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:45.524126Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7680:5742], server id = [2:7681:5743], tablet id = 72075186224037899 2024-11-21T09:19:45.524129Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:45.524588Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:45.537042Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDEzMjNjYTAtYzY1OWFlYzktZWE4YzEzYTAtZDRhYjdlY2U=, TxId: 2024-11-21T09:19:45.537062Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDEzMjNjYTAtYzY1OWFlYzktZWE4YzEzYTAtZDRhYjdlY2U=, TxId: 2024-11-21T09:19:45.537236Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:45.548442Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.548466Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2671:3168] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2024-11-21T09:17:06.705068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:06.705106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:06.705115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002510/r3tmp/tmpHAbooC/pdisk_1.dat 2024-11-21T09:17:06.784753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22199, node 1 2024-11-21T09:17:06.877268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:06.877290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:06.877294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:06.877381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:06.883033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.959159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.959189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.970481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30361 2024-11-21T09:17:07.369883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.140300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.140324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.173508Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:08.174293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.227994Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:08.238049Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:08.238079Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:08.243548Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:08.243718Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:08.243732Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:08.243737Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:08.243741Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:08.243745Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:08.243749Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:08.243753Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:08.243850Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:08.418545Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.418573Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.419325Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T09:17:08.420661Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T09:17:08.420938Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T09:17:08.421611Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:08.426979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:08.426999Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:08.427011Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:08.428044Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.428070Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.430587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:08.431990Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:08.432015Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:08.434564Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:08.446295Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.468343Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:08.577996Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:08.786335Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:09.502904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2145:3025], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.502948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.505861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:09.527374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:09.527415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:09.527440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:09.527456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:09.527469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:09.527484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:09.527495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:09.527509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:09.527522Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:09.527535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:09.527547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:09.527559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:09.531695Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:09.531716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:09.531737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:09.531742Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:09.531777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:09.531783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:09.531791Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... DEBUG: ConnectToSA(), pipe client id: [2:7608:5710], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:19:43.056905Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:19:43.056945Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:19:43.056997Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:19:43.057135Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:19:43.057169Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded database: /Root/Database 2024-11-21T09:19:43.057175Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start key 2024-11-21T09:19:43.057179Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table owner id: 72075186224037889 2024-11-21T09:19:43.057182Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal table local path id: 4 2024-11-21T09:19:43.057184Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal start time: 1732180783006850 2024-11-21T09:19:43.057188Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded traversal IsColumnTable: 1 2024-11-21T09:19:43.057191Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded global traversal round: 2 2024-11-21T09:19:43.057197Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 1 2024-11-21T09:19:43.057201Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:19:43.057211Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2024-11-21T09:19:43.057216Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2024-11-21T09:19:43.057221Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2024-11-21T09:19:43.057226Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:19:43.057252Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. 
PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:43.057476Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:19:43.057579Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:43.057588Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:43.057606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:19:43.057791Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:43.057800Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:43.057987Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:43.121835Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:43.121904Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:43.122054Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7655:5737], server id = [2:7656:5738], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:43.122084Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7655:5737], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:43.122197Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:43.122204Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:43.122236Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:43.122255Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:43.122295Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:43.122334Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7655:5737], server id = [2:7656:5738], tablet id = 72075186224037899 2024-11-21T09:19:43.122337Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:43.122810Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:43.125791Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7673:5755]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:43.125847Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:43.125852Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7673:5755], StatRequests.size() = 1 2024-11-21T09:19:43.158086Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDkzY2Y5Yi01MjQ2MzZkZi1hYTk0NWU0NC1kNWUyMTMyNQ==, TxId: 2024-11-21T09:19:43.158109Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDkzY2Y5Yi01MjQ2MzZkZi1hYTk0NWU0NC1kNWUyMTMyNQ==, TxId: 2024-11-21T09:19:43.158276Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:43.180241Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7685:5761] 2024-11-21T09:19:43.180294Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7685:5761], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:43.180323Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7607:5709], server id = [2:7685:5761], tablet id = 72075186224037897, status = OK 2024-11-21T09:19:43.180346Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7686:5762] 2024-11-21T09:19:43.180361Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7686:5762], schemeshard id = 72075186224037889 2024-11-21T09:19:43.191568Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:43.191587Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:43.274999Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7690:5765] 2024-11-21T09:19:43.275214Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2679:3174] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:43.275222Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:2679:3174] 2024-11-21T09:19:43.275239Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:43.769151Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:43.769172Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:43.779362Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:43.779381Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:44.420896Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:44.420934Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:44.420940Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:45.697247Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:45.697304Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:45.697309Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.697506Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:45.708748Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:45.708858Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:45.708870Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:45.708992Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:45.720178Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:45.720254Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2024-11-21T09:19:45.720442Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7773:5812], server id = [2:7774:5813], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:45.720476Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7773:5812], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:45.720623Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:45.720635Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:45.720686Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:45.720717Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:45.720777Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:45.720848Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7773:5812], server id = [2:7774:5813], tablet id = 72075186224037899 2024-11-21T09:19:45.720853Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:45.721436Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:45.734645Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTQ1YzY3NTItODY4NjI1NjktZWQ2OWRjYjQtZmY4ZmU3YTI=, TxId: 2024-11-21T09:19:45.734668Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTQ1YzY3NTItODY4NjI1NjktZWQ2OWRjYjQtZmY4ZmU3YTI=, TxId: 2024-11-21T09:19:45.734827Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:45.746239Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.746264Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2679:3174] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T09:19:45.857497Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.857503Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.857506Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.857596Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.857736Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.858705Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.858768Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:45.859032Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:45.859131Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:45.859179Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T09:19:45.859189Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:45.859211Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.859215Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T09:19:45.859221Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T09:19:45.859223Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:19:45.859480Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.859484Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.859486Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.859532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:45.859602Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.859634Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.859661Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T09:19:45.859754Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:45.859778Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:45.859847Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:45.859871Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:45.859905Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.859909Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:45.859913Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:45.859945Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.859949Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:45.859951Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:45.859953Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:45.859964Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.859973Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T09:19:45.859976Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T09:19:45.859977Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:45.859983Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.859986Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T09:19:45.859989Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T09:19:45.859991Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:45.860000Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T09:19:45.860255Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.860258Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.860260Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:45.860303Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:45.860362Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:45.860392Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:45.860413Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T09:19:45.860487Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:45.860501Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:45.860529Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:45.860539Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:45.860556Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:45.860561Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:45.860576Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.860581Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:45.860584Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:45.860589Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.860592Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:45.860593Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:45.860599Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T09:19:45.860603Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:45.860604Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:46.169388Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2024-11-21T09:19:46.189425Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:46.189430Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:46.189432Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:46.189516Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:46.189645Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:46.189704Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:46.189743Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T09:19:46.211235Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T09:19:46.211296Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:46.211306Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:46.211310Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:46.211312Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T09:19:46.211315Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T09:19:46.211317Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T09:19:46.211319Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T09:19:46.211322Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T09:19:46.211325Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T09:19:46.211327Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T09:19:46.211337Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T09:19:46.211381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:46.212790Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T09:19:46.214015Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.214018Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.214020Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:46.214122Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:46.214243Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:46.214303Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.214342Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:46.214400Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T09:19:46.214561Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.214564Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.214566Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:46.214609Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:46.214700Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:46.215047Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.215145Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:46.215199Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:46.215232Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:46.215238Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:19:46.215273Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> test.py::test[pg_catalog-pg_shdescription-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_shdescription-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_shdescription-default.txt-Results] >> test.py::test[join-inmem_with_set_key-off-Analyze] [GOOD] >> test.py::test[join-inmem_with_set_key-off-Debug] >> test.py::test[insert-override-with_read_udf-Results] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Debug] >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> test.py::test[insert_monotonic-from_empty--Debug] [GOOD] >> test.py::test[insert_monotonic-from_empty--ForceBlocks] >> test.py::test[pg-sublink_having_exists-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Analyze] >> DSProxyCounters::PutGeneratedSubrequestBytes >> TDSProxyGetTest::TestBlock42GetSpecific [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureNone >> TBlobStorageProxySequenceTest::TestGivenStripe42GetThenVGetResponsePartsNodata263451ThenGetOk >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Analyze] >> TBlobStorageProxySequenceTest::TestGivenBlock42GroupGenerationGreaterThanVDiskGenerations [GOOD] >> TDSProxyGetTest::TestBlock42WipedOneDiskAndErrorDurringGet [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc >> test.py::test[datetime-date_tz_arithmetic-default.txt-ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Results] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Plan] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] >> test.py::test[join-order_of_qualified--Analyze] [GOOD] >> test.py::test[join-order_of_qualified--Debug] >> Compression::WriteRAW >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] >> test.py::test[pg_catalog-pg_shdescription-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-tables-default.txt-Debug] >> DSProxyCounters::PutGeneratedSubrequestBytes [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific3 >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureNone [GOOD] >> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc >> TBlobStorageProxySequenceTest::TestGivenStripe42GetThenVGetResponsePartsNodata263451ThenGetOk [GOOD] >> TDSProxyLooksLikeLostTheBlob::TDSProxyNoDataRegressionBlock42 [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureMirror3dc >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TDSProxyGetTest::TestBlock42GetSpecific3 [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 
72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:18:49.910628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:49.910643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:49.910647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:49.910650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:49.910653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:49.910655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:49.910661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:49.910716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:49.917503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:18:49.917515Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:18:49.918847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:49.918903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:49.918932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:49.920517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:49.920572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:49.920634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:49.920795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:49.921227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:49.921404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:49.921410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:49.921418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
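For orientation while reading the rest of this schemeshard trace: the "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240" messages below track one sub-operation through its phases, and the class names logged next to each transition (TCreateParts, NSubDomainState::TConfigureParts, NSubDomainState::TPropose, TDone) suggest the mapping sketched here. The numeric codes are taken verbatim from this trace; the phase names attached to them are an inference from the adjacent messages, not the schemeshard's real enum.

    # Reader's sketch only: codes observed in this trace, names inferred from nearby messages.
    OBSERVED_TX_STATES = {
        2:   "CreateParts",     # "TCreateParts ... ProgressState", followed by "2 -> 3"
        3:   "ConfigureParts",  # "NSubDomainState::TConfigureParts ... ProgressState", followed by "3 -> 128"
        128: "Propose",         # "NSubDomainState::TPropose HandleReply TEvOperationPlan", followed by "128 -> 240"
        240: "Done",            # "[72057594046678944] TDone opId# 1:0 ProgressState"
    }

    def describe_transition(src: int, dst: int) -> str:
        """Render a "Change state for txid" transition using the observed mapping."""
        def name(s: int) -> str:
            return OBSERVED_TX_STATES.get(s, f"state {s}")
        return f"{name(src)} ({src}) -> {name(dst)} ({dst})"

    # describe_transition(128, 240) == "Propose (128) -> Done (240)"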
2024-11-21T09:18:49.921423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:49.921429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:49.921455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:18:49.922225Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:18:49.931380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:49.931425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.931460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:49.931488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:49.931493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.931893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:49.931911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:18:49.931941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.931947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:49.931950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:49.931953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:49.932184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.932190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:49.932192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:49.932399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.932404Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.932408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:49.932413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:49.932763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:49.933176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:49.933230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:49.933398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:49.933425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:49.933432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:49.933479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:49.933485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:49.933510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:49.933521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:49.933961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:49.933972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:49.934005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:49.934009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:49.934077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:49.934084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2024-11-21T09:18:49.934095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:49.934099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:49.934105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:49.934110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:49.934114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:49.934118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:49.934130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:49.934135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:49.934139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:46.268304Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:19:46.268348Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 53us result status StatusSuccess 2024-11-21T09:19:46.268476Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:46.288881Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:19:46.288925Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1104:2910] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T09:19:46.288956Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180786262856 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732180786262856 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732180786262856 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:19:46.289696Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T09:19:46.289717Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1104:2910] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TBlobStorageProxySequenceTest::TestGivenBlock42MultiPut2ItemsStatuses [GOOD] >> TDSProxyGetTest::TestMirror32GetIntervalsAllOk >> TDSProxyPatchTest::SecuredErrorOnGetItem_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_2_0_VdiskErrors >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe >> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors >> TDSProxyPutTest::TestBlock42MaxPartCountOnHandoff [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block >> test.py::test[key_filter-datetime-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-14157--Debug] >> TDSProxyGetTest::TestMirror32GetIntervalsAllOk [GOOD] >> TDSProxyPatchTest::NaiveOk_Erasure4Plus2Block ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 
2024-11-21T09:17:09.299445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:09.299482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:09.299491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022b7/r3tmp/tmpATkOJg/pdisk_1.dat 2024-11-21T09:17:09.380834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1986, node 1 2024-11-21T09:17:09.477572Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:09.477598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:09.477603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:09.477703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:09.485563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.562127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:09.562163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:09.573754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14952 2024-11-21T09:17:09.974869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:10.739040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:10.739067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:10.772069Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:10.772895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:10.825686Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:10.833815Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:10.833842Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:10.838926Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:10.839069Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:10.839090Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:10.839095Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:10.839101Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:10.839106Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:10.839111Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:10.839117Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:10.839217Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:11.014704Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:11.014735Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1754:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:11.016132Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1758:2552] 2024-11-21T09:17:11.018363Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1802:2576] 2024-11-21T09:17:11.018482Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1802:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:11.019241Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:11.022988Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:11.023004Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:11.023012Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:11.024421Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:11.024447Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:11.025726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:11.027156Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:11.027181Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:11.029537Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:11.041452Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:11.073652Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:11.178066Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:11.334489Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:12.069519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.069550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:12.073178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:12.099532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:12.099591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:12.099636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:12.099661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:12.099681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:12.099705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:12.099726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:12.099747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:12.099768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:12.099794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:12.099814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:12.099834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:12.105775Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:12.105808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:12.105826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:12.105834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:12.105855Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:12.105862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:12.105874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... e id = 2, client id = [2:7497:5628], server id = [2:7542:5652], tablet id = 72075186224037897, status = OK 2024-11-21T09:19:39.955965Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7542:5652], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:39.955989Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7543:5653] 2024-11-21T09:19:39.956016Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7543:5653], schemeshard id = 72075186224037889 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to 2024-11-21T09:19:40.069042Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7545:5654] 2024-11-21T09:19:40.069268Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2672:3167] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:40.069275Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2672:3167] 2024-11-21T09:19:40.069293Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:41.278265Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:41.278296Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:41.278301Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:41.278308Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
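The TX_COLUMNSHARD lines above show the columnshard tablet first registering a fixed chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...) and then, inside TTxUpdateSchema, running them one after another, logging normalizer_finished for the one that just completed and normalizer_switched for the next. A minimal sketch of that sequential pattern follows, with the class names copied from the trace; the real implementation is C++ inside the columnshard, and the per-normalizer numeric ids seen in the log (Granules;id=1, Chunks;id=2, TablesCleaner;id=4, ...) are type identifiers that this sketch does not model.

    # Illustration of the sequential normalizer pass suggested by the trace above.
    NORMALIZERS = [
        "Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
        "CleanInsertionDedup", "GCCountersNormalizer", "RestorePortionFromChunks",
        "SyncPortionFromChunks", "SyncMinSnapshotFromChunks",
        "RestoreV1Chunks_V2", "RestoreV2Chunks",
    ]

    def run_normalizers(normalizers: list[str]) -> None:
        for i, name in enumerate(normalizers):
            # ... the actual normalization work for `name` would happen here ...
            print(f"event=normalizer_finished;description=CLASS_NAME={name};")
            if i + 1 < len(normalizers):
                print(f"event=normalizer_switched;description=CLASS_NAME={normalizers[i + 1]};")

    run_normalizers(NORMALIZERS)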
2024-11-21T09:19:41.278312Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:41.278535Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:41.289635Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:41.289743Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:41.289754Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:41.289981Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:41.301026Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:41.301079Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:41.301252Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7597:5687], server id = [2:7598:5688], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:41.301280Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7597:5687], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:41.301464Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:41.301473Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:41.301517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:41.301540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:41.301583Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:41.301628Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7597:5687], server id = [2:7598:5688], tablet id = 72075186224037899 2024-11-21T09:19:41.301633Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:41.302127Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:41.305229Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7615:5705]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:41.305290Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:41.305299Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7615:5705], StatRequests.size() = 1 2024-11-21T09:19:41.329896Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTM2MWE4MjYtY2JmYjcxOWQtZTMwYjFkZC0xN2FjODUyMA==, TxId: 2024-11-21T09:19:41.329923Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTM2MWE4MjYtY2JmYjcxOWQtZTMwYjFkZC0xN2FjODUyMA==, TxId: 2024-11-21T09:19:41.330078Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:41.341427Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:41.341445Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:41.959959Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:41.959985Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:42.600405Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:42.600436Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:42.600603Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:42.611938Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:42.612085Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:42.612096Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:42.633514Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:43.914199Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:43.914222Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:43.914226Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:43.914282Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:43.914494Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:43.914516Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:43.925664Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:45.176070Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:45.176106Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:45.176112Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:46.414604Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:46.414670Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:46.425057Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:46.425106Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
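For readability, the statement behind the two "[TQueryBase] RunDataQuery:" entries in this trace (one just above, one a little further down) is reproduced here, reflowed. The element types of the two List parameters were evidently stripped on the way into this log (angle brackets lost), so the <Uint32> and <String> parts below are a plausible reconstruction based on the column names and sketch payloads, not something the log itself confirms.

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed
    DECLARE $data AS List<String>;         -- element type assumed
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);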
2024-11-21T09:19:46.425111Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:46.425284Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:46.436659Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:46.436785Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:46.436801Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:46.436943Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:46.448171Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:46.448241Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:46.448382Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7794:5809], server id = [2:7795:5810], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:46.448415Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7794:5809], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:46.448548Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:46.448560Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:46.448592Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:46.448618Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:46.448700Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:46.448746Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7794:5809], server id = [2:7795:5810], tablet id = 72075186224037899 2024-11-21T09:19:46.448750Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:46.449423Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:46.472964Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWNjNzc3NC1lNzlkODRkNC0yMTEyNjQ1Ni1mODRlZjNhZg==, TxId: 2024-11-21T09:19:46.472987Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWNjNzc3NC1lNzlkODRkNC0yMTEyNjQ1Ni1mODRlZjNhZg==, TxId: 2024-11-21T09:19:46.473111Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:46.495307Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:46.495328Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2672:3167] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: 2024-11-21T09:17:05.892763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:05.892821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:05.892834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002572/r3tmp/tmpynsF9B/pdisk_1.dat 2024-11-21T09:17:05.971993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25827, node 1 2024-11-21T09:17:06.065477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:06.065501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:06.065504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:06.065594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:06.072056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.147863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.147917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.159400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23488 2024-11-21T09:17:06.559599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.343755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.343792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.377232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.378038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.431032Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.441835Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.441862Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.449905Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.450115Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.450142Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.450148Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.450155Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.450161Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.450166Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.450174Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.450280Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.624573Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.624617Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.625435Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T09:17:07.627076Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T09:17:07.627365Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T09:17:07.627929Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:07.633088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.633110Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.633123Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:07.634100Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.634124Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.636730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.638158Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.638181Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.640649Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.651977Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.673952Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.780944Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:07.937759Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:08.708657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2145:3025], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.708703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.711551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.735515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:08.735591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:08.735634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:08.735658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:08.735681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:08.735701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:08.735721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:08.735741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:08.735759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:08.735784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:08.735803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:08.735823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2228:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:08.740743Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:08.740772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:08.740798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:08.740805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:08.740824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:08.740831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:08.740842Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... 2024-11-21T09:19:40.276454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:40.287716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:41.530015Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:41.530041Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:41.530045Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:41.530051Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:41.530054Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:41.530229Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:41.541335Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:41.541360Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:41.541466Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:41.541475Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2024-11-21T09:19:41.541737Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:19:41.542989Z node 2 :STATISTICS ERROR: [72075186224037897] TEvDeliveryProblem with ColumnShard=72075186224037899 2024-11-21T09:19:41.554018Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:41.554060Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:41.560685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7602:5705];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T09:19:41.562278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T09:19:41.562685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7602:5705];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T09:19:41.563224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7602:5705];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T09:19:41.681241Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7598:5703], server id = [2:7654:5738], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:41.681287Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7598:5703], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:41.681450Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:41.681457Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:41.681489Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:41.681511Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:41.681574Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:41.681608Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7598:5703], server id = [2:7654:5738], tablet id = 72075186224037899 2024-11-21T09:19:41.681611Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:41.682029Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:41.685039Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7671:5755]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:41.685079Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:41.685083Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7671:5755], StatRequests.size() = 1 2024-11-21T09:19:41.706425Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzUwMjRlZTctNjI5MmJjN2EtMWZhYWZlOTAtYzdhODk4NWY=, TxId: 2024-11-21T09:19:41.706448Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzUwMjRlZTctNjI5MmJjN2EtMWZhYWZlOTAtYzdhODk4NWY=, TxId: 2024-11-21T09:19:41.706630Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:41.728869Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:41.728892Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:42.356127Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:42.356153Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:42.984569Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:42.984593Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:42.984597Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:42.984667Z node 2 :STATISTICS DEBUG: [72075186224037897] Reset DeliveryProblem to ColumnShard=72075186224037899 2024-11-21T09:19:44.152484Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:44.152561Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:44.173141Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:44.173164Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:44.173169Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:44.173234Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 
2024-11-21T09:19:44.173435Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:44.173456Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:44.184834Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:45.291994Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:45.292019Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:45.292024Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:46.513183Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:46.513243Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:46.513250Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:46.513450Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:46.524678Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:46.524802Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:46.524818Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:46.524957Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:46.536153Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:46.536242Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:46.536394Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7846:5857], server id = [2:7847:5858], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:46.536424Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7846:5857], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:46.536538Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:46.536544Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:46.536564Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:46.536582Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:46.536624Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:46.536663Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7846:5857], server id = [2:7847:5858], tablet id = 72075186224037899 2024-11-21T09:19:46.536666Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:46.537242Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:46.570884Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjAxYTIzLTNkYzczMmE4LWI2NzI0NjQzLWY5MDhhMA==, TxId: 2024-11-21T09:19:46.570906Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjAxYTIzLTNkYzczMmE4LWI2NzI0NjQzLWY5MDhhMA==, TxId: 2024-11-21T09:19:46.571021Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:46.582496Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:46.582517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2675:3172] >> test.py::test[join-join_without_column-off-Debug] [GOOD] >> test.py::test[join-join_without_column-off-ForceBlocks] [SKIPPED] >> test.py::test[join-join_without_column-off-Plan] [GOOD] >> test.py::test[join-join_without_column-off-Results] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Analyze] >> TBlobStorageProxySequenceTest::TestBlock42PutWithChangingSlowDisk >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD] >> TDSProxyFaultTolerancePatchTest::block42 >> TDSProxyPutTest::TestBlock42PutStatusOkWith_2_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureNone [GOOD] >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Debug] >> TDSProxyPatchTest::NaiveOk_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_3_0_VdiskErrors >> test.py::test[aggr_factory-log_histogram-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Debug] >> TBlobStorageProxySequenceTest::TestBlock42PutWithChangingSlowDisk [GOOD] >> TDSProxyGetTest::TestBlock42VGetCountWithErasure [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureNone >> test.py::test[datetime-date_tz_arithmetic-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Analyze] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Debug] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Plan] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> 
TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD] Test command err: 2024-11-21T09:19:47.270206Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:47.270268Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.270274Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.270279Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.270283Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.270287Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.270291Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.273211Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:47.273243Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.273249Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.273279Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T09:19:47.273285Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.273289Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.273321Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T09:19:47.273363Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:47.273369Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.273374Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.273389Z node 3 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T09:19:47.273410Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:47.273417Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.273423Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.273428Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.273431Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.273477Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:0:0] Marker# BPP01 2024-11-21T09:19:47.273488Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:2:0] Marker# BPP01 2024-11-21T09:19:47.273500Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:47.273507Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureNone [GOOD] Test command err: 2024-11-21T09:19:47.271818Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:47.271873Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271877Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271879Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271881Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271883Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271885Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271887Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271890Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271892Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271894Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271896Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271898Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271900Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271902Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271904Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271906Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271908Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271910Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.271914Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.271923Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.271928Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.271931Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.271934Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.271936Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.271938Z node 3 :BS_PROXY_PUT 
DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.271941Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:47.271943Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:47.271946Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:47.271948Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:47.271951Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:47.271953Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:47.273960Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:47.273985Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.273990Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.273992Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.273995Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.273997Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.273999Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274001Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274003Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274006Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274009Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274012Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274014Z node 3 :BS_PROXY_PUT 
DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274016Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274018Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274020Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274022Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274024Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274026Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274029Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.274038Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.274041Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.274077Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:47.274081Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.274083Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2024-11-21T09:19:47.274085Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274088Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274090Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274092Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274094Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.274096Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274099Z node 3 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274101Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274103Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274105Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274108Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274110Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274112Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274114Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274116Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274118Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.274120Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.274124Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.274126Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.274139Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:47.274147Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T09:19:47.274154Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T09:19:47.274161Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T09:19:47.274188Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 
2024-11-21T09:19:47.274194Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2024-11-21T09:19:47.274205Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:47.274211Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> test.py::test[insert_monotonic-from_empty--ForceBlocks] [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_3_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42MultiPutAllOk >> test.py::test[pg_catalog-tables-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-tables-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-tables-default.txt-Results] >> test.py::test[join-order_of_qualified--Debug] [GOOD] >> test.py::test[join-order_of_qualified--ForceBlocks] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Analyze] >> TDSProxyFaultTolerancePatchTest::mirror3dc >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] >> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk >> test.py::test[join-lookupjoin_semi_subq--Analyze] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Debug] >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck >> test.py::test[pg-tpcds-q09-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--Debug] >> TDSProxyPutTest::TestBlock42MultiPutAllOk [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe >> test.py::test[distinct-distinct_columns_after_group-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Debug] >> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk [GOOD] >> TDSProxyGetTest::TestMirror32GetBlobCrcCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] Test command err: 2024-11-21T09:19:47.061524Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [2:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:47.061581Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061585Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061587Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061589Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061591Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061593Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061595Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061597Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061599Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061601Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061603Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061605Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061607Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061609Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061611Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061613Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061615Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061617Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.061621Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.061631Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.061635Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.061638Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.061640Z node 2 :BS_PROXY_PUT 
DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.061643Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.061645Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.061648Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:47.061650Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:47.061652Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:47.061654Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:47.061658Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:47.061660Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:47.064885Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:47.064931Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.064938Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.064942Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.064945Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.064948Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.064952Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.064956Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064960Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064964Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064968Z node 2 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064970Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064972Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064974Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064976Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064978Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064980Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064982Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064984Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.064988Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.064998Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.065001Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.065033Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:47.065057Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:47.065063Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T09:19:47.065069Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T09:19:47.065077Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T09:19:47.065103Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# 
[0:1:0:6:0] Marker# BPP01 2024-11-21T09:19:47.065107Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.065110Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065113Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065115Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065117Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065119Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065122Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T09:19:47.065124Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065126Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065128Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065130Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065132Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065134Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.065136Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.065140Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.065142Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.065162Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2024-11-21T09:19:47.065166Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.065168Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present 
Marker# BPG51 2024-11-21T09:19:47.065170Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065172Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065174Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065176Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:47.065180Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T09:19:47.065182Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 error Marker# BPG50 2024-11-21T09:19:47.065185Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2024-11-21T09:19:47.065193Z node 2 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:47.065198Z node 2 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe [GOOD] >> test.py::test[pg_catalog-tables-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-order_by_with_same_projection_lambda-default.txt-Debug] >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block >> TDSProxyGetTest::TestMirror32GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone >> TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst >> TBlobStorageProxySequenceTest::TestGivenBlock42PutWhenPartialGetThenSingleDiskRequestOk >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Stripe [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Analyze] [GOOD] >> test.py::test[join-inmem_with_set_key-off-Debug] [GOOD] >> test.py::test[join-inmem_with_set_key-off-ForceBlocks] [SKIPPED] >> test.py::test[aggregate-list_after_group-default.txt-Debug] >> test.py::test[join-inmem_with_set_key-off-Plan] [GOOD] >> test.py::test[join-inmem_with_set_key-off-Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Analyze] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors >> test.py::test[aggr_factory-log_histogram-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] >> 
TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst [GOOD] >> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk >> TBlobStorageProxySequenceTest::TestGivenBlock42PutWhenPartialGetThenSingleDiskRequestOk [GOOD] >> TDSProxyLooksLikeLostTheBlob::TDSProxyErrorRegressionBlock42 [GOOD] >> TDSProxyPatchTest::NaiveOk_ErasureNone >> TPersQueueTest::SameOffset [GOOD] >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified--Plan] [GOOD] >> test.py::test[join-order_of_qualified--Results] >> TPersQueueTest::SchemeOperationsTest >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block >> TDSProxyPatchTest::NaiveOk_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_0_VdiskErrors |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[insert_monotonic-from_empty--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[key_filter-yql-14157--Debug] [GOOD] >> test.py::test[key_filter-yql-14157--Plan] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[pg_duplicated-order_by_with_same_projection_lambda-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-order_by_with_same_projection_lambda-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-order_by_with_same_projection_lambda-default.txt-Results] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> test.py::test[insert-select_subquery--Debug] [GOOD] >> test.py::test[insert-select_subquery--Plan] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 [GOOD] >> test.py::test[insert-select_subquery--Plan] [GOOD] >> test.py::test[insert-select_subquery--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] Test command err: 2024-11-21T09:19:48.917055Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:48.917132Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:48.917139Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:48.917145Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:48.917149Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 
1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:48.917154Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:48.917157Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:48.920135Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:48.920175Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:48.920181Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:48.920233Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T09:19:48.920242Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:48.920246Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:48.920279Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T09:19:48.920323Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:48.920333Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T09:19:48.920350Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:48.920359Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> test.py::test[distinct-distinct_columns_after_group-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] >> test.py::test[join-inner_grouped_by_expr--Analyze] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Debug] >> test.py::test[pg_duplicated-order_by_with_same_projection_lambda-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Analyze] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Debug] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3Plus2 [GOOD] Test command err: 2024-11-21T09:19:49.056494Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:49.056554Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056558Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056560Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056562Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056564Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056566Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056568Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056570Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056572Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056574Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056576Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056578Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056580Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056583Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056585Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056587Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056589Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 
7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056591Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.056595Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:49.056604Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:49.056608Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:49.056612Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:49.056614Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:49.056616Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:49.056618Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:49.056621Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:49.056623Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:49.056626Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:49.056628Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:49.056632Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:49.056634Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:49.058678Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:49.058701Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:49.058706Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:49.058708Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:49.058710Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] 
restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:49.058712Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:49.058714Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:49.058717Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058719Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058721Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058724Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058727Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058729Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058731Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058733Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058735Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058737Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058739Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058741Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:49.058744Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:49.058752Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:49.058755Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:49.058787Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:49.058795Z node 3 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:49.058801Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T09:19:49.058807Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T09:19:49.058816Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T09:19:49.058838Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2024-11-21T09:19:49.058850Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:49.058855Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Debug] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2024-11-21T09:17:07.336930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:07.336972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:07.336981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002375/r3tmp/tmpmasli3/pdisk_1.dat 2024-11-21T09:17:07.408950Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14635, node 1 2024-11-21T09:17:07.500528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:07.500550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:07.500553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:07.500615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:07.504792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.580643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.580671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.591865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25829 2024-11-21T09:17:07.989423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.754359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.754380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.787084Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:08.787794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.836464Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:08.844879Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:08.844902Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:08.850614Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:08.850716Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:08.850728Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:08.850731Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:08.850735Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:08.850738Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:08.850742Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:08.850746Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:08.850818Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:09.023238Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:09.023267Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:09.024415Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:09.026561Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:09.026662Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:09.027339Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:09.030459Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:09.030471Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:09.030478Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:09.032089Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:09.032109Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:09.033185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:09.034386Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:09.034407Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:09.036475Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:09.048060Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:09.069810Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:09.180465Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:09.335915Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:10.052381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.052414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.055135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:10.076309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:10.076354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:10.076392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:10.076410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:10.076427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:10.076439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:10.076452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:10.076465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:10.076478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:10.076493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:10.076506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:10.076520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:10.080642Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:10.080663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:10.080675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:10.080680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:10.080692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:10.080697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:10.080704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... olumn count# 0 2024-11-21T09:19:43.444520Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 2 2024-11-21T09:19:43.444524Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 1 2024-11-21T09:19:43.444530Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 1 2024-11-21T09:19:43.444534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:19:43.444551Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:43.444776Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:19:43.444887Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:43.444894Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:43.445001Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:19:43.445040Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:43.445049Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:43.445291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:19:43.498974Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:43.499028Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:43.499160Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7608:5694], server id = [2:7609:5695], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:43.499190Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7608:5694], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:43.499359Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:43.499366Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:43.499407Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:43.499431Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:43.499472Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:43.499502Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7608:5694], server id = [2:7609:5695], tablet id = 72075186224037899 2024-11-21T09:19:43.499505Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:43.499936Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:43.502742Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7626:5712]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:43.502791Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:43.502796Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7626:5712], StatRequests.size() = 1 2024-11-21T09:19:43.524327Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTE3ZjFmNjItNzIyZjM2ZWYtM2FkNTczYWEtNjI3ODlkNmY=, TxId: 2024-11-21T09:19:43.524354Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTE3ZjFmNjItNzIyZjM2ZWYtM2FkNTczYWEtNjI3ODlkNmY=, TxId: 2024-11-21T09:19:43.524519Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:43.535404Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7636:5718] 2024-11-21T09:19:43.535486Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7560:5666], server id = [2:7636:5718], tablet id = 72075186224037897, status = OK 2024-11-21T09:19:43.535507Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7636:5718], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:19:43.535532Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7637:5719] 2024-11-21T09:19:43.535573Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7637:5719], schemeshard id = 72075186224037889 
2024-11-21T09:19:43.546894Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:43.546921Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:43.725031Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7645:5724] 2024-11-21T09:19:43.725296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2671:3168] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:43.725307Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2671:3168] 2024-11-21T09:19:43.725326Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:44.269740Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:44.269767Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:44.981551Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:44.981579Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:44.981779Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:44.993007Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:44.993112Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:44.993120Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:45.004251Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:46.398591Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:46.398626Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:46.398632Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:46.398702Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:46.398844Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:46.398879Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:46.410080Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:47.789162Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:47.789189Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:47.789194Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:49.089250Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:49.089317Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:49.089326Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:49.089564Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:49.100671Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:49.100790Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:49.100804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:49.100929Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:49.112080Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:49.112140Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:49.112279Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7810:5819], server id = [2:7811:5820], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:49.112313Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7810:5819], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:49.112425Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:49.112432Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:49.112462Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:49.112480Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:49.112531Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:49.112579Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7810:5819], server id = [2:7811:5820], tablet id = 72075186224037899 2024-11-21T09:19:49.112583Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:49.113196Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:49.136597Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yzc1MGM0NmQtM2Y3MmZiZjktY2E3NGFjNjAtN2VhYzQ0Mjk=, TxId: 2024-11-21T09:19:49.136622Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yzc1MGM0NmQtM2Y3MmZiZjktY2E3NGFjNjAtN2VhYzQ0Mjk=, TxId: 2024-11-21T09:19:49.136811Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:49.147782Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:49.147801Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2671:3168] >> test.py::test[insert-select_subquery--Results] [GOOD] >> test.py::test[insert-yql-13083-existig-Debug] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Debug] >> test.py::test[pg-tpcds-q63-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Debug] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[join-inner_grouped_by_expr--Debug] [GOOD] >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Results] >> test.py::test[join-premap_common_inner_both_sides--Analyze] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Debug] >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Plan] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> test.py::test[pg_duplicated-qualified_star_proj_two_times-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Debug] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Plan] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Results] [SKIPPED] >> test.py::test[produce-discard_process_with_lambda-default.txt-Debug] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Analyze] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> 
ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Debug] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Plan] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi--Analyze] >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_Erasure4Plus2Block >> test.py::test[pg-tpcds-q63-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::MovedError_ErasureMirror3dc >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T09:19:50.919947Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.919953Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.919956Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.920062Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.920230Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.921210Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921283Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T09:19:50.921545Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921547Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921549Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.921601Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.921712Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.921767Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921812Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.921849Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:50.921993Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921995Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.921997Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.922109Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.922252Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.922285Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.922314Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.922464Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.922553Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.922588Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.922594Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:50.922770Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.922772Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.922775Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.922822Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.922945Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.922981Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.923004Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T09:19:50.923224Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.923242Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:50.923289Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:50.923299Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:50.923320Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.923325Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:50.923330Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.923368Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.923375Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:50.923378Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:50.923381Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.923396Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.923409Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T09:19:50.923412Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T09:19:50.923415Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.923425Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.923428Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T09:19:50.923431Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T09:19:50.923433Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.923440Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T09:19:50.923663Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.923666Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.923668Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.923710Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.923780Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.923809Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.923830Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T09:19:50.923896Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.923910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:50.923944Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:50.923957Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:50.923974Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.923978Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2024-11-21T09:19:50.923981Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:50.923985Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:50.923989Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.924007Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.924014Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T09:19:50.924016Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T09:19:50.924018Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T09:19:50.924020Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T09:19:50.924022Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.924031Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2024-11-21T09:19:50.924195Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.924197Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.924198Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.924294Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.924378Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.924409Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.924449Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.924574Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.924601Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.924635Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T09:19:50.924644Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:50.924665Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.924669Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:50.924671Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T09:19:50.924674Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T09:19:50.924677Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T09:19:50.924680Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T09:19:50.924694Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:50.924709Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> test.py::test[insert-yql-13083-existig-Debug] [GOOD] >> test.py::test[insert-yql-13083-existig-Plan] [GOOD] >> test.py::test[insert-yql-13083-existig-Results] >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Debug] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] >> TDSProxyPatchTest::MovedError_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_2_2_0_VdiskErrors >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T09:19:50.854293Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.854305Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.854309Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.855214Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:19:50.855239Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.855242Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.866914Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005494s 2024-11-21T09:19:50.867071Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.868793Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.868816Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869148Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869152Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869156Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.869228Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T09:19:50.869242Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869245Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869259Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008581s 2024-11-21T09:19:50.869351Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.869433Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.869448Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869601Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869603Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869605Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.869745Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:50.869752Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869754Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869769Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.153944s 2024-11-21T09:19:50.869820Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.869839Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.869845Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869972Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869973Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.869975Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.870022Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:50.870028Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.870029Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.870040Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.230558s 2024-11-21T09:19:50.870089Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.870128Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.870134Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.870254Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.870256Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.870257Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.870305Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.870388Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.871935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.871983Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T09:19:50.871986Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.871988Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.871997Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.287741s 2024-11-21T09:19:50.872036Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:50.872278Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.872280Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.872282Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.872325Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.872396Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.872436Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.872534Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.972906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.972992Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:50.973013Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.973017Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:50.973035Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:51.073227Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:51.073307Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:51.073606Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.073609Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.073612Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:51.073668Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:51.073754Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:51.073800Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.073856Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:51.174051Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.174115Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:51.174137Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:51.174143Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T09:19:51.174170Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:51.174200Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:51.174237Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:51.174255Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:51.174287Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T09:19:50.887665Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.887669Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.887672Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.887753Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.887860Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.888852Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.888930Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.889156Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.889213Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.889276Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T09:19:50.889288Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.889313Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.889318Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T09:19:50.889325Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:19:50.889328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:19:50.889593Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.889596Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.889598Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.889653Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:50.889741Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.889787Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.889818Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T09:19:50.889893Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.889910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:50.889948Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:50.889958Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:50.889997Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.890003Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:50.890008Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.890056Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.890062Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:50.890064Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:50.890066Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.890078Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.890092Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T09:19:50.890095Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2024-11-21T09:19:50.890096Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.890102Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.890105Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T09:19:50.890107Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T09:19:50.890110Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:50.890119Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T09:19:50.890366Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890368Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890370Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.890439Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.890509Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.891002Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891042Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T09:19:50.891143Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:19:50.891159Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T09:19:50.891219Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (5-8) 2024-11-21T09:19:50.891234Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T09:19:50.891255Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.891259Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.891272Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.891276Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:50.891277Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.891282Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.891285Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:50.891287Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T09:19:50.891292Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T09:19:50.891296Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T09:19:50.891298Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:51.196865Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). 
Partition stream id: 1 2024-11-21T09:19:51.212973Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:51.212979Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:51.212981Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:51.213080Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:51.213209Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:51.213267Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T09:19:51.213328Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T09:19:51.234528Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T09:19:51.234590Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:51.234601Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:51.234605Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:51.234607Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T09:19:51.234611Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T09:19:51.234613Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T09:19:51.234615Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T09:19:51.234617Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T09:19:51.234621Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T09:19:51.234623Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T09:19:51.234634Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T09:19:51.234701Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T09:19:51.236036Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T09:19:51.237739Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.237743Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.237745Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:51.237813Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:51.237939Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:51.237985Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.238019Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T09:19:51.238082Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T09:19:51.238237Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.238239Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.238241Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:51.238282Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:51.238356Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:51.238369Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.238465Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.238489Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:51.238508Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:51.238515Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:19:51.238539Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_2_2_0_VdiskErrors [GOOD] >> TPersQueueTest::DirectReadCleanCache [FAIL] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Debug] >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T09:19:50.879597Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.879601Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.879604Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.879718Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.879834Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.879844Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880025Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880027Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880029Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.880073Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:50.880120Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T09:19:50.880125Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880274Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880277Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880278Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.880336Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:19:50.880344Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880347Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.880458Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T09:19:50.881863Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.881865Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.881867Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.881926Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T09:19:50.881935Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.881937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.881943Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T09:19:50.882096Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:50.882099Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:50.882100Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.882151Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.882238Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.883255Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:50.883313Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.883908Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T09:19:50.884306Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T09:19:50.884503Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.884513Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:19:50.884516Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:19:50.884518Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T09:19:50.884522Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T09:19:50.884524Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T09:19:50.884527Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T09:19:50.884529Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T09:19:50.884544Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T09:19:50.884546Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T09:19:50.884548Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T09:19:50.884550Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T09:19:50.884552Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T09:19:50.884554Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T09:19:50.884556Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T09:19:50.884558Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T09:19:50.884577Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T09:19:50.884579Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T09:19:50.884581Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T09:19:50.884583Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T09:19:50.884585Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T09:19:50.884586Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T09:19:50.884589Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T09:19:50.884592Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T09:19:50.884594Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T09:19:50.884595Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T09:19:50.884597Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T09:19:50.884599Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T09:19:50.884601Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T09:19:50.884603Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T09:19:50.884604Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T09:19:50.884606Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T09:19:50.884613Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T09:19:50.884615Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T09:19:50.884617Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T09:19:50.884619Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T09:19:50.884621Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T09:19:50.884623Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T09:19:50.884625Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T09:19:50.884627Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T09:19:50.884629Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T09:19:50.884630Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T09:19:50.884632Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T09:19:50.884634Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T09:19:50.884635Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T09:19:50.884639Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T09:19:50.884640Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T09:19:50.884642Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T09:19:50.884644Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T09:19:50.884646Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T09:19:50.884654Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:19:50.884727Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T09:19:50.884756Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T09:19:50.884759Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T09:19:50.884761Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T09:19:50.884764Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T09:19:50.884766Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T09:19:50.884768Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T09:19:50.884769Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T09:19:50.884771Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T09:19:50.884774Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T09:19:50.884775Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T09:19:50.884777Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T09:19:50.884779Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T09:19:50.884781Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T09:19:50.884782Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T09:19:50.884784Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T09:19:50.884786Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T09:19:50.884789Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T09:19:50.884791Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T09:19:50.884793Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T09:19:50.884795Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T09:19:50.884796Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T09:19:50.884798Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T09:19:50.884800Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T09:19:50.884802Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T09:19:50.884803Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T09:19:50.884805Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T09:19:50.884807Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T09:19:50.884809Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T09:19:50.884811Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T09:19:50.884813Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T09:19:50.884814Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T09:19:50.884816Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T09:19:50.884821Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T09:19:50.884824Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T09:19:50.884826Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T09:19:50.884828Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T09:19:50.884830Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T09:19:50.884831Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T09:19:50.884833Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T09:19:50.884835Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T09:19:50.884837Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T09:19:50.884839Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T09:19:50.884840Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T09:19:50.884842Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T09:19:50.884844Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T09:19:50.884845Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T09:19:50.884847Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T09:19:50.884849Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T09:19:50.884851Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T09:19:50.884852Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T09:19:50.884855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T09:19:50.884882Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885114Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885116Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885118Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.885192Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:50.885384Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.885420Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885501Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.985733Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.985816Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:50.985849Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.985855Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:50.985886Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:51.186127Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:51.286361Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:51.286432Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T09:19:51.286495Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T09:19:51.286771Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.286774Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.286776Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:51.286822Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:51.286907Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:51.286928Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.287012Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:51.387263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:51.387328Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T09:19:51.387352Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:51.387358Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T09:19:51.387388Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T09:19:51.387437Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T09:19:51.387475Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T09:19:51.387515Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T09:19:51.387542Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Stripe [GOOD] Test command err: 2024-11-21T09:19:47.581489Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [2:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:47.581536Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581539Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581541Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581544Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581546Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581548Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581550Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581552Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581554Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581556Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581558Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581560Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581562Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581564Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581566Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581568Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T09:19:47.581570Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581574Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.581579Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.581590Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.581596Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.581601Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.581604Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.581609Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:47.581611Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:47.581614Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:47.581616Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:47.581618Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:47.581620Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:47.581623Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:47.581625Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:47.583581Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:47.583603Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.583607Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583609Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent 
Marker# BPG51 2024-11-21T09:19:47.583611Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583613Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583616Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583618Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583620Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583624Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583629Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583633Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583636Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583640Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583644Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583647Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583651Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583653Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583656Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583659Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:47.583666Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:47.583669Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:47.583695Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS 
MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:47.583699Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.583701Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2024-11-21T09:19:47.583703Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583705Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583707Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583709Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583711Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583713Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583717Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583719Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583721Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583723Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583725Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583727Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583729Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583731Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583733Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583735Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583737Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# 
EBS_FULL Marker# BPG55 2024-11-21T09:19:47.583741Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:47.583743Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:47.583768Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:47.583772Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:47.583774Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2024-11-21T09:19:47.583776Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 error Marker# BPG50 2024-11-21T09:19:47.583778Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583780Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583782Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583784Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583786Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583789Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583791Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583793Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583795Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583797Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583799Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:47.583801Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583803Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T09:19:47.583805Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583807Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:47.583810Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2024-11-21T09:19:47.583818Z node 2 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:47.583823Z node 2 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> test.py::test[produce-discard_process_with_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] >> TDSProxyFaultTolerancePatchTest::block42 [GOOD] >> TDSProxyPatchTest::MovedError_ErasureNone >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusErrorWith_2_2_0_VdiskErrors [GOOD] Test command err: 2024-11-21T09:19:51.400363Z node 7 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [7:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:51.400432Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:51.400438Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:51.400443Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:51.400445Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:51.400448Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:51.400450Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:51.402811Z node 7 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:51.402839Z node 7 :BS_PROXY_PUT 
DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:51.402844Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:51.402875Z node 7 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T09:19:51.402879Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:51.402882Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:51.402894Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T09:19:51.402923Z node 7 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:51.402927Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:51.402929Z node 7 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:51.402935Z node 7 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T09:19:51.402946Z node 7 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:51.402952Z node 7 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TPut3dcStrategy failed the Fail Model check" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2024-11-21T09:17:05.927221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:05.927260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:05.927269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002565/r3tmp/tmpLzeKx3/pdisk_1.dat 2024-11-21T09:17:06.009137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7631, node 1 2024-11-21T09:17:06.101441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:06.101460Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:06.101462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:06.101532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:06.106599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.182438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.182468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.193855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3911 2024-11-21T09:17:06.593725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.389779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.389805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.422884Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.423970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.482163Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.489497Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.489522Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.495780Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.495885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.495898Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.495902Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.495906Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.495911Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.495914Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.495918Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.495999Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.670340Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.670382Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.671499Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:07.673722Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:07.673836Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:07.674648Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T09:17:07.679340Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.679357Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.679367Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T09:17:07.681120Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.681143Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.682353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.683827Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.683853Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.686190Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.698009Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.720116Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.834267Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:08.001510Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:17:08.718232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.317397Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:09.434627Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T09:17:09.434646Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:09.434655Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2485:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:17:09.434790Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2486:2903] 2024-11-21T09:17:09.434883Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2486:2903], schemeshard id = 72075186224037899 2024-11-21T09:17:10.110202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2610:3186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.110247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.113224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T09:17:10.146379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:10.146421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:10.146449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:10.146463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:10.146477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:10.146488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:10.146499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:10.146511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:10.146524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:10.146535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:10.146549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:10.146565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2695:3001];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:10.151617Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:10.151645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstr ... uest id = 1, ReplyToActorId = [2:8541:6393], StatRequests.size() = 1 2024-11-21T09:19:45.246115Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2NmM2IxNDQtMzJiYTNjNWEtYzhjYjk1NDMtOWFhNTc4ODk=, TxId: 2024-11-21T09:19:45.246148Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2NmM2IxNDQtMzJiYTNjNWEtYzhjYjk1NDMtOWFhNTc4ODk=, TxId: 2024-11-21T09:19:45.246319Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:45.257684Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:19:45.257705Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:45.362231Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:19:45.362259Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:19:45.455444Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8543:6395], schemeshard count = 1 2024-11-21T09:19:45.941807Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T09:19:45.941832Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037899 2024-11-21T09:19:45.941945Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2024-11-21T09:19:45.953285Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:19:46.672290Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:46.672321Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T09:19:46.673148Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:46.685221Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:46.685386Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:46.685397Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2024-11-21T09:19:46.696462Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:48.105076Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:48.105105Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:48.105112Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:48.105121Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2024-11-21T09:19:48.105126Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:19:48.105420Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:48.117015Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:48.117059Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:48.117284Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:48.117309Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:48.117508Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:48.117543Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:48.117737Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:48.129212Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:48.129241Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:48.129307Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:48.129487Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8696:6490], server id = [2:8697:6491], tablet id = 72075186224037905, status = OK 2024-11-21T09:19:48.129524Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8696:6490], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T09:19:48.129833Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:19:48.129851Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:48.129893Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:48.129926Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:48.130029Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2024-11-21T09:19:48.130087Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8696:6490], server id = [2:8697:6491], tablet id = 72075186224037905 2024-11-21T09:19:48.130093Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:48.130746Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:48.134626Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8714:6508]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:48.134675Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:48.134682Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8714:6508], StatRequests.size() = 1 2024-11-21T09:19:48.162255Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTA5ZjU4ZDItZTAwMjM4Y2UtYmZkOGQyNWItZjAxOTc0MmY=, TxId: 2024-11-21T09:19:48.162282Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTA5ZjU4ZDItZTAwMjM4Y2UtYmZkOGQyNWItZjAxOTc0MmY=, TxId: 2024-11-21T09:19:48.162430Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:48.174055Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:19:48.174082Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:48.914520Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:48.914545Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:49.560493Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:49.560522Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:49.560528Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:51.056653Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:51.056708Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:51.067064Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:51.067133Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2024-11-21T09:19:51.067139Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:19:51.067366Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:51.078675Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:51.078804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:51.078825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:51.078977Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:51.090058Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:51.090120Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:51.090268Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8821:6574], server id = [2:8822:6575], tablet id = 72075186224037905, status = OK 2024-11-21T09:19:51.090295Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8821:6574], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T09:19:51.090436Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:19:51.090447Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:51.090468Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:51.090490Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:51.090553Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T09:19:51.090580Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8821:6574], server id = [2:8822:6575], tablet id = 72075186224037905 2024-11-21T09:19:51.090584Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:51.091096Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:51.104345Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OThjYWRhYWQtNjFmZDY5N2UtMzJhNjliMTItYmVlZTFlMDc=, TxId: 2024-11-21T09:19:51.104376Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OThjYWRhYWQtNjFmZDY5N2UtMzJhNjliMTItYmVlZTFlMDc=, TxId: 2024-11-21T09:19:51.104534Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:51.116093Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:19:51.116121Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3176:3340] >> DemoTx::Scenario_5 [GOOD] >> test.py::test[aggregate-list_nullable--Analyze] [GOOD] >> test.py::test[aggregate-list_nullable--Debug] >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--Debug] >> test.py::test[aggr_factory-multi--Analyze] [GOOD] >> test.py::test[aggr_factory-multi--Debug] >> TDSProxyPatchTest::MovedError_ErasureNone [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureMirror3dc >> Compression::WriteRAW >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Debug] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Plan] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TFstClassSrcIdPQTest::TestTableCreated >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureMirror3dc [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors >> test.py::test[pg-tpcds-q67-default.txt-Analyze] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Debug] >> test.py::test[join-mergejoin_big_primary-off-Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_big_primary-off-Plan] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] [GOOD] >> test.py::test[join-mergejoin_force_align1--Analyze] [SKIPPED] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Debug] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Plan] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> test.py::test[join-mergejoin_force_align1--Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Analyze] >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_0_0_0_VdiskErrors [GOOD] Test command err: 2024-11-21T09:19:52.395101Z node 25 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [25:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:52.395231Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:52.395239Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:52.395245Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:52.395249Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut 
part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:52.395254Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:52.395258Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:52.398761Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:52.398818Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T09:19:52.398832Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T09:19:52.398849Z node 25 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:52.398858Z node 25 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> test.py::test[aggregate-list_nullable--Debug] [GOOD] >> test.py::test[aggregate-list_nullable--ForceBlocks] >> test.py::test[insert_monotonic-from_empty--Debug] [GOOD] >> test.py::test[insert_monotonic-from_empty--Plan] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure4Plus2Block [GOOD] Test command err: 2024-11-21T09:19:48.738484Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:48.738526Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738530Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738532Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738534Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738536Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738538Z node 3 
:BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738540Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738542Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738544Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738546Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738548Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738550Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738552Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738554Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738556Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738558Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738560Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738562Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.738565Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:48.738573Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:48.738577Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:48.738580Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:48.738582Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:48.738585Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 
2024-11-21T09:19:48.738587Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:48.738590Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:48.738592Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:48.738595Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:48.738597Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:48.738601Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:48.738603Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:48.740531Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:48.740552Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:48.740556Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740558Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740560Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740562Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740564Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740567Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740569Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740571Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740574Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740576Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T09:19:48.740578Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740580Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740582Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740584Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740586Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740588Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740590Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740593Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:48.740600Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:48.740602Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:48.740631Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:48.740634Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:48.740636Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2024-11-21T09:19:48.740639Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740641Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740643Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740645Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740647Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740649Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T09:19:48.740652Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740654Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740656Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740658Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740660Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740662Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740664Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740666Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740668Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740670Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740672Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:48.740675Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:48.740678Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:48.740688Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:48.740696Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T09:19:48.740702Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T09:19:48.740709Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T09:19:48.740734Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 
}}} from# [0:1:0:6:0] Marker# BPP01 2024-11-21T09:19:48.740738Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T09:19:48.740741Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2024-11-21T09:19:48.740746Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:48.740749Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:48.740753Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:48.740756Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T09:19:48.740760Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T09:19:48.740763Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740767Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T09:19:48.740770Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740774Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740776Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740778Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:48.740781Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2024-11-21T09:19:48.740789Z node 3 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:48.740794Z node 3 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> test.py::test[pg-tpcds-q67-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Debug] >> test.py::test[join-premap_common_inner_both_sides--Results] 
[GOOD] >> test.py::test[join-pullup_null_column--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2024-11-21T09:17:07.969839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:07.969879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:07.969887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002339/r3tmp/tmpe4Slo5/pdisk_1.dat 2024-11-21T09:17:08.058506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62268, node 1 2024-11-21T09:17:08.155607Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:08.155629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:08.155633Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:08.155731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:08.162478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.238677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.238711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.250285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22998 2024-11-21T09:17:08.650709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:09.489681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:09.489710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:09.522730Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:09.523610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:09.571065Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:09.579974Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:09.579996Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:09.586974Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:09.587128Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:09.587150Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:09.587171Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:09.587178Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:09.587183Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:09.587189Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:09.587196Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:09.587304Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:09.762836Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:09.762863Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:09.763894Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:09.765780Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:09.765871Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:09.766544Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:09.770797Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:09.770814Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:09.770824Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:09.772901Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:09.772927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:09.774161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:09.775633Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:09.775661Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:09.778321Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:09.790341Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:09.812459Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:09.931267Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:10.097612Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:10.816177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.816239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:10.819475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:10.842920Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:10.842965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:10.843003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:10.843016Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:10.843032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:10.843045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:10.843060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:10.843073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:10.843087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:10.843101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:10.843113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:10.843130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2224:2807];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:10.847671Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:10.847694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:10.847713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:10.847718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:10.847731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:10.847736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:10.847745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... 09:19:41.292340Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:41.304035Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:41.304179Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:41.304190Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:41.315559Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:42.691785Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:42.691815Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:42.691821Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:42.691830Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:42.691835Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.692082Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:42.703486Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:42.703522Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:42.703641Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:42.703655Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... 
unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2024-11-21T09:19:42.703948Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:42.703973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:42.704061Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:42.704170Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:2671:3168] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2024-11-21T09:19:42.704176Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2671:3168] 2024-11-21T09:19:42.715494Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:42.715524Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:42.715541Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Complete 2024-11-21T09:19:42.715582Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:42.715745Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7563:5667], server id = [2:7564:5668], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:42.715776Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7563:5667], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:42.716037Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:42.716055Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:42.716145Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:42.716196Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:42.716311Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:42.716372Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7563:5667], server id = [2:7564:5668], tablet id = 72075186224037899 2024-11-21T09:19:42.716379Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:42.717143Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:42.721829Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7581:5685]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:42.721903Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:42.721911Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7581:5685], StatRequests.size() = 1 2024-11-21T09:19:42.758124Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjQ2YjlmM2ItOGRmNGI3ZDItZGI0OTBkZGYtNTAzMzMxODc=, TxId: 2024-11-21T09:19:42.758146Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjQ2YjlmM2ItOGRmNGI3ZDItZGI0OTBkZGYtNTAzMzMxODc=, TxId: 2024-11-21T09:19:42.758581Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:42.769747Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:42.769767Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:43.442098Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:43.442119Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:44.112892Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:44.112932Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:44.112937Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:45.477924Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:45.477997Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:45.498673Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:45.498740Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T09:19:45.498744Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.498956Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:45.510160Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:45.510279Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:45.510293Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:45.510418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:45.521472Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:45.521526Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:45.521662Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7676:5739], server id = [2:7677:5740], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:45.521694Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7676:5739], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:45.521870Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:45.521878Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:45.521910Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:45.521938Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:45.521989Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:45.522029Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7676:5739], server id = [2:7677:5740], tablet id = 72075186224037899 2024-11-21T09:19:45.522032Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:45.522758Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:45.535912Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2M1Y2Y4N2EtZjFhMTc2ZTItYTViZDIwNWItOTY2MjY2ZGM=, TxId: 2024-11-21T09:19:45.535935Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2M1Y2Y4N2EtZjFhMTc2ZTItYTViZDIwNWItOTY2MjY2ZGM=, TxId: 2024-11-21T09:19:45.536153Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:45.547150Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:45.547169Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2671:3168] 2024-11-21T09:19:46.237745Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:46.237772Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:48.214338Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:49.507492Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:19:49.528161Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:49.528191Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T09:19:50.872532Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:50.872563Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:19:52.377906Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:52.377993Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:52.409114Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:52.409149Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-grouping_sets--Debug] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Debug] >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> test.py::test[produce-process_and_filter-default.txt-Debug] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Plan] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Results] >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[insert_monotonic-overlaping_fail--Debug] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--Plan] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--Results] [SKIPPED] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Debug] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::InitRootAgain >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::Boot >> TSchemeShardTest::MkRmDir >> test.py::test[produce-process_and_filter-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_nullable--Plan] [GOOD] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[join-pullup_null_column--Analyze] [GOOD] >> test.py::test[join-pullup_null_column--Debug] >> test.py::test[produce-process_row_and_columns-default.txt-Debug] [SKIPPED] >> test.py::test[produce-process_row_and_columns-default.txt-Plan] [SKIPPED] >> test.py::test[produce-process_row_and_columns-default.txt-Results] [SKIPPED] >> test.py::test[produce-process_streaming_count-default.txt-Debug] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] 
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> test.py::test[join-mergejoin_with_reverse_key_order-off-Debug] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Plan] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [GOOD] >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> test.py::test[pg-tpcds-q67-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Debug] [GOOD] >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-percentiles_containers--Analyze] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Plan] [GOOD] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> test.py::test[join-pullup_null_column--Debug] [GOOD] >> test.py::test[join-pullup_null_column--ForceBlocks] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2024-11-21T09:17:06.149338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:06.149386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:06.149395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002527/r3tmp/tmp4WfMlt/pdisk_1.dat 2024-11-21T09:17:06.226056Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7463, node 1 2024-11-21T09:17:06.318882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:06.318902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:06.318906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:06.319003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:06.325099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.401509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.401546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.412923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1738 2024-11-21T09:17:06.812046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.558876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.558903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.591600Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:07.592424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.638054Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:07.645464Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:07.645489Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:07.650842Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:07.650975Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:07.650999Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:07.651005Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:07.651011Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:07.651017Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:07.651022Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:07.651028Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:07.651148Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:07.825881Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.825914Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:07.826973Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:07.828530Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:07.828617Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:07.829482Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:07.834327Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:07.834347Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:07.834357Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:07.836270Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.836292Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.837684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:07.839380Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:07.839413Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:07.842110Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:07.854003Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:07.875957Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:07.989229Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:08.154989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:08.862061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.862093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:08.864732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:08.885408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:08.885447Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:08.885479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:08.885504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:08.885523Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:08.885538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:08.885551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:08.885564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:08.885577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:08.885590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:08.885604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:08.885617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:08.889670Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:08.889691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:08.889711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:08.889716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:08.889730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:08.889735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:08.889744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId ... (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:46.473205Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7931:5908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:46.473247Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:46.473251Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7931:5908], StatRequests.size() = 1 2024-11-21T09:19:46.495734Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmRhYzA4ZTMtNDQ1OTQxYjgtN2M4YjhkNWMtODk0MDVkMjc=, TxId: 2024-11-21T09:19:46.495752Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmRhYzA4ZTMtNDQ1OTQxYjgtN2M4YjhkNWMtODk0MDVkMjc=, TxId: 2024-11-21T09:19:46.495892Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:46.507342Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T09:19:46.507368Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:19:47.210814Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:47.210851Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:47.870736Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:47.870774Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is column table. 
2024-11-21T09:19:47.870940Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:47.881996Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:47.882113Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:47.882120Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 5], AnalyzedShards 1 2024-11-21T09:19:47.914072Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:49.321948Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:49.322029Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:49.342802Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:49.342866Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:49.342874Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:49.343122Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:49.354527Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:49.354561Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:49.354702Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:49.354712Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:49.354838Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:49.354860Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:49.354905Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:49.376537Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:49.376570Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:49.376634Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:49.376846Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8036:5972], server id = [2:8037:5973], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:49.376884Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8036:5972], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:49.377075Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:49.377085Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:49.377129Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8036:5972], server id = [2:8037:5973], tablet id = 72075186224037899 2024-11-21T09:19:49.377134Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:49.377155Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:49.377186Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:49.377264Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:49.378109Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:49.391600Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWFhOGYzODMtM2UyZDdjYzQtMmUxMmI0N2EtOTc0MjlkZTI=, TxId: 2024-11-21T09:19:49.391628Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWFhOGYzODMtM2UyZDdjYzQtMmUxMmI0N2EtOTc0MjlkZTI=, TxId: 2024-11-21T09:19:49.391801Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:49.403201Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:49.403230Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3102:3323] 2024-11-21T09:19:50.055433Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2024-11-21T09:19:50.055462Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:50.594149Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:50.594190Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:50.594194Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:51.910986Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:53.165651Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:19:53.175966Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:53.175994Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T09:19:53.176022Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:53.176026Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:53.176029Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:54.410242Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:54.410298Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is column table. 
2024-11-21T09:19:54.410302Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T09:19:54.410472Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:54.432311Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:54.432444Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:54.432459Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:54.432601Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:54.443830Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:54.443903Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2024-11-21T09:19:54.444042Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8217:6077], server id = [2:8218:6078], tablet id = 72075186224037900, status = OK 2024-11-21T09:19:54.444070Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8217:6077], path = { OwnerId: 72075186224037889 LocalId: 5 } 2024-11-21T09:19:54.444191Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:19:54.444198Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:54.444243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:54.444273Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:54.444340Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:54.444389Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8217:6077], server id = [2:8218:6078], tablet id = 72075186224037900 2024-11-21T09:19:54.444392Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:54.444985Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:54.478737Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTgwZjdmM2MtNGVhMGNkOGQtZGEwYTY5Zi0yYTI4ZjUyZQ==, TxId: 2024-11-21T09:19:54.478769Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTgwZjdmM2MtNGVhMGNkOGQtZGEwYTY5Zi0yYTI4ZjUyZQ==, TxId: 2024-11-21T09:19:54.478982Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:54.490142Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T09:19:54.490162Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3102:3323] >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk [GOOD] >> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> test.py::test[produce-process_streaming_count-default.txt-Debug] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Plan] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Results] >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TSchemeShardTest::CreateAlterTableWithCodec >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Analyze] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] [GOOD] >> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-Debug] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-off-Plan] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> test.py::test[lineage-grouping_sets--Debug] [GOOD] >> test.py::test[lineage-grouping_sets--Plan] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test.py::test[aggregate-percentiles_containers--Analyze] [GOOD] >> test.py::test[aggregate-percentiles_containers--Debug] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> test.py::test[join-pullup_null_column--ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column--Plan] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Debug] >> KqpLocks::Invalidate >> KqpTx::RollbackInvalidated >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] Test command err: 2024-11-21T09:19:55.433823Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:19:55.433898Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433905Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433908Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433910Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433913Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433915Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433917Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433919Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433921Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433923Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433925Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433927Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433929Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433931Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433933Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433935Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433936Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433938Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:19:55.433943Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:19:55.433953Z node 3 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:19:55.433958Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:19:55.433962Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:19:55.433964Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:19:55.433966Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:19:55.433968Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:19:55.433972Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T09:19:55.433974Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T09:19:55.433976Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T09:19:55.433978Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T09:19:55.433982Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T09:19:55.433984Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T09:19:55.436790Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T09:19:55.436841Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:19:55.436847Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:19:55.436853Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T09:19:55.436864Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T09:19:55.436870Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# 
BPP01 2024-11-21T09:19:55.436882Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:19:55.436888Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> KqpSinkLocks::DifferentKeyUpdateOlap >> test.py::test[dq-precompute_parallel_mix--Analyze] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Debug] >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ |95.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> KqpSinkLocks::EmptyRangeAlreadyBroken >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> KqpLocks::Invalidate [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken >> KqpTx::RollbackInvalidated [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Debug] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Plan] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-Analyze] >> KqpSinkTx::SnapshotROInteractive1 >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Analyze] >> test.py::test[aggregate-percentiles_containers--Debug] [GOOD] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] >> KqpTx::MixEnginesOldNew >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardTest::CopyTableOmitFollowers >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-Debug] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 4394, MsgBus: 18076 2024-11-21T09:19:56.036559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659870763079243:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.036597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002861/r3tmp/tmpJjFRLJ/pdisk_1.dat 
2024-11-21T09:19:56.102871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4394, node 1 2024-11-21T09:19:56.136275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:56.136302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:56.137368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:56.144472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:56.144487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:56.144488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:56.144521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18076 TClient is connected to server localhost:18076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:56.214657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.226766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.291140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.300097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.312148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.335789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870763080572:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.335813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.439494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.444827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.451831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.458519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.466115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.472960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.481866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870763081076:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.481885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.481944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870763081081:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.482392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:56.486108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659870763081083:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:19:56.689530Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659870763081400:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:56.689704Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQzNTA5NzMtZTRlZDY4MTgtZDk2YzMyMjEtZjgxNTUzODU=, ActorId: [1:7439659870763081378:2454], ActorState: ExecuteState, TraceId: 01jd70as8fam2xhgt68txcfy89, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jd70as8e4ctfnmvzjc6gr8qq 2024-11-21T09:19:56.690525Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQzNTA5NzMtZTRlZDY4MTgtZDk2YzMyMjEtZjgxNTUzODU=, ActorId: [1:7439659870763081378:2454], ActorState: ReadyState, TraceId: 01jd70as8j1z4912zt4qxh2m5h, Create QueryResponse for error on request, msg: >> test.py::test[dq-precompute_parallel_mix--Debug] [GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Plan] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Results] >> test.py::test[lineage-list_literal4-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-alerts-default.txt-Debug] >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> test.py::test[dq-precompute_parallel_mix--Results] [GOOD] >> test.py::test[dq-wrong_script_segf--Analyze] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Debug] [SKIPPED] >> test.py::test[dq-wrong_script_segf--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Plan] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[expr-as_table_emptylist2-default.txt-Analyze] >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Debug] >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> test.py::test[join-pullup_null_column-off-Analyze] [GOOD] >> test.py::test[join-pullup_null_column-off-Debug] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2024-11-21T09:17:04.900959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:04.901006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:04.901015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025f2/r3tmp/tmpSEYazn/pdisk_1.dat 2024-11-21T09:17:04.974824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12043, node 1 2024-11-21T09:17:05.068044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:05.068064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:05.068066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:05.068135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:05.073914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:05.149675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:05.149705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:05.161407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19308 2024-11-21T09:17:05.562981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:06.342656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.342687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.375992Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:06.376658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.428580Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:06.439597Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:06.439623Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:06.447644Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:06.447803Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:06.447818Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:06.447822Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:06.447826Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:06.447830Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:06.447834Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:06.447839Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:06.447919Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:06.622150Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.622194Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1757:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:06.623038Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:06.624567Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:06.625248Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1805:2577] 2024-11-21T09:17:06.625582Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1805:2577], schemeshard id = 72075186224037889 2024-11-21T09:17:06.629955Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:06.629977Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:06.629988Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:06.630986Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:06.631010Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:06.632637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:06.634261Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:06.634304Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:06.637467Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:06.649494Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:06.671646Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:06.789543Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:06.945881Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:07.538638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2139:3019], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.538680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:07.541647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:07.584443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:07.584510Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:07.584554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:07.584571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:07.584588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:07.584607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:07.584621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:07.584634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:07.584647Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:07.584669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:07.584688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:07.584707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2226:2809];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:07.590362Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:17:07.590394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:17:07.590427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:17:07.590434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:17:07.590454Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:17:07.590461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:17:07.590473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranule ... 224037897] TTxResolve::Execute 2024-11-21T09:19:52.254252Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037889, LocalPathId: 4], AnalyzedShards 1 2024-11-21T09:19:52.276014Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:53.789041Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:53.789091Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:53.789098Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. All the force traversal operations sent the requests. 2024-11-21T09:19:53.789108Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:53.789113Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:53.789358Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:53.800574Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableRequest::Complete. Send 1 events. 2024-11-21T09:19:53.800610Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:53.800746Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:53.800760Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2024-11-21T09:19:53.801074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:53.801237Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7592:5701] 2024-11-21T09:19:53.801374Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_ENQUEUED 2024-11-21T09:19:53.801620Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7594:5702]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037889
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037889
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037889, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037889
    [OwnerId: 72075186224037889, LocalPathId: 4], [OwnerId: 72075186224037889, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Traversal
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 2024-11-21T09:19:53Z
TraversalPathId: [OwnerId: 72075186224037889, LocalPathId: 4]
TraversalIsColumnTable: 1
TraversalStartKey: 
GlobalTraversalRound: 2
TraversalRound: 1
HiveRequestRound: 1
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2024-11-21T09:19:53.801820Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute 2024-11-21T09:19:53.801831Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2024-11-21T09:19:53.813017Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:53.813069Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyzeTableResponse::Complete. 2024-11-21T09:19:53.813105Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T09:19:53.813273Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7597:5705], server id = [2:7598:5706], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:53.813312Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7597:5705], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:53.813516Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:53.813526Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:53.813576Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:53.813603Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:53.813662Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:19:53.813709Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7597:5705], server id = [2:7598:5706], tablet id = 72075186224037899 2024-11-21T09:19:53.813713Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:53.814207Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:53.817252Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7615:5723]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:19:53.817305Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:19:53.817310Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7615:5723], StatRequests.size() = 1 2024-11-21T09:19:53.839414Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2ViODgyMDQtMjZkNWRjOWItNjM5YWU1ZDItNWE5NGY3Mjk=, TxId: 2024-11-21T09:19:53.839440Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2ViODgyMDQtMjZkNWRjOWItNjM5YWU1ZDItNWE5NGY3Mjk=, TxId: 2024-11-21T09:19:53.839575Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:53.851251Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:53.851279Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T09:19:54.638155Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:19:54.638187Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:19:55.363967Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:19:55.363999Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T09:19:55.364004Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:19:56.809061Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:19:56.809126Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:19:56.829858Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:19:56.829940Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:19:56.829954Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:56.830160Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:19:56.841482Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:19:56.841621Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:19:56.841642Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:19:56.841783Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T09:19:56.863719Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:19:56.863782Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:19:56.863936Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7714:5780], server id = [2:7715:5781], tablet id = 72075186224037899, status = OK 2024-11-21T09:19:56.863968Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7714:5780], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:19:56.864078Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:19:56.864084Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:19:56.864109Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:19:56.864132Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:19:56.864199Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:19:56.864261Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7714:5780], server id = [2:7715:5781], tablet id = 72075186224037899 2024-11-21T09:19:56.864266Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:19:56.864894Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:19:56.878356Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWY5OGM0ZWYtMzBhMWVkOWItYzgxMTJmYTAtYTYzOTQ3MGQ=, TxId: 2024-11-21T09:19:56.878384Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWY5OGM0ZWYtMzBhMWVkOWItYzgxMTJmYTAtYTYzOTQ3MGQ=, TxId: 2024-11-21T09:19:56.878458Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2024-11-21T09:19:54.000000Z 2024-11-21T09:19:56.878530Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:19:56.889928Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:19:56.889951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:888:2714] 2024-11-21T09:19:56.890249Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7734:5796] 2024-11-21T09:19:56.890351Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION >> KqpTx::MixEnginesOldNew [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Debug] >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 19940, MsgBus: 20569 2024-11-21T09:19:56.036522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659870363323578:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.036542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0028ce/r3tmp/tmp9Msj01/pdisk_1.dat 2024-11-21T09:19:56.106105Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19940, node 1 2024-11-21T09:19:56.136249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:56.136273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:56.137302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:56.144472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:56.144486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:56.144488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:56.144517Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20569 TClient is connected to server localhost:20569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:56.214676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:56.226785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.291097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.300228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.312393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.335706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870363324912:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.335725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.439473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.444778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.451638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.458649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.466105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.472927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.481820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870363325416:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.481840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.481848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870363325421:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.482367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:56.485962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659870363325423:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:19:56.732972Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDJkMDU1MjQtZjUzOGFhODktNWE2MWJlZWUtOGEyNjc3ZmM=, ActorId: [1:7439659870363325717:2454], ActorState: ExecuteState, TraceId: 01jd70as9m3c4v27drn2kr34j7, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 9604, MsgBus: 9601 2024-11-21T09:19:56.887159Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659872296975454:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.887423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0028ce/r3tmp/tmpqtDKW9/pdisk_1.dat 2024-11-21T09:19:56.899686Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9604, node 2 2024-11-21T09:19:56.905338Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:56.905350Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:56.905351Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:56.905381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9601 TClient is connected to server localhost:9601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:56.989669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:56.989692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:56.989984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.990707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:56.997778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.005970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:57.021415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.034313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.143812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659876591944291:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.143851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.148743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.155508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.165949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.172937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.180118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.186686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.195641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659876591944791:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.195666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.195675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659876591944796:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.196264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:57.200479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659876591944798:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:19:57.416502Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzU3NWViYjctNWM4NDQwOGYtNzA5NGU0Y2YtZjFmOGIyNTQ=, ActorId: [2:7439659876591945084:2454], ActorState: ExecuteState, TraceId: 01jd70asynfd96c6fq6mwnq081, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> test.py::test[expr-as_table_emptylist2-default.txt-Analyze] [GOOD] >> test.py::test[expr-as_table_emptylist2-default.txt-Debug] >> test.py::test[match_recognize-alerts-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Results] >> KqpSinkLocks::UncommittedRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 7246, MsgBus: 4480 2024-11-21T09:19:57.155758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659876469520168:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:57.155881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002807/r3tmp/tmpjOgOFp/pdisk_1.dat 2024-11-21T09:19:57.207094Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7246, node 1 2024-11-21T09:19:57.215829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:57.215843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:57.215845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:57.215880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4480 TClient is connected to server localhost:4480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:57.256332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.257104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:57.257126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:19:57.258144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:57.258843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.318381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.331659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.341823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.379708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659876469521717:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.379734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.403157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.408702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.418110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.472502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.481135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.488081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.496543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659876469522215:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.496566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.496579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659876469522220:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.497187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:57.500834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659876469522224:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> test.py::test[aggr_factory-multi--Debug] [GOOD] >> test.py::test[aggr_factory-multi--ForceBlocks] >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> test.py::test[match_recognize-alerts-default.txt-Results] [GOOD] >> test.py::test[match_recognize-permute-default.txt-Debug] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_containers--Plan] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] >> test.py::test[join-pullup_null_column-off-Debug] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] [SKIPPED] >> test.py::test[join-pullup_null_column-off-Plan] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [GOOD] >> test.py::test[join-star_join_inners-off-Analyze] >> test.py::test[produce-reduce_by_struct-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Plan] [GOOD] >> KqpSinkTx::SnapshotRO >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardTest::ConsistentCopyTablesForBackup >> test.py::test[expr-as_table_emptylist2-default.txt-Debug] [GOOD] >> test.py::test[expr-as_table_emptylist2-default.txt-ForceBlocks] >> KqpLocks::InvalidateOnCommit >> KqpSinkTx::OlapDeferredEffects >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen >> test.py::test[pg-tpcds-q70-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] >> test.py::test[match_recognize-permute-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-permute-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-permute-default.txt-Results] >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> test.py::test[join-star_join_inners-off-Analyze] [GOOD] >> test.py::test[join-star_join_inners-off-Debug] >> test.py::test[expr-as_table_emptylist2-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-as_table_emptylist2-default.txt-Plan] [GOOD] >> test.py::test[expr-as_table_emptylist2-default.txt-Results] >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail >> test.py::test[match_recognize-permute-default.txt-Results] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Debug] >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> 
TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TPersQueueTest::CacheHead [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite >> test.py::test[expr-as_table_emptylist2-default.txt-Results] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Analyze] |95.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[produce-reduce_by_struct-default.txt-Plan] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Analyze] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> test.py::test[join-star_join_inners-off-Debug] [GOOD] >> test.py::test[join-star_join_inners-off-ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_inners-off-Plan] [GOOD] >> test.py::test[join-star_join_inners-off-Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Analyze] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> KqpLocks::MixedTxFail [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] >> test.py::test[expr-cast_reverse_list-default.txt-Analyze] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:54.176103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:54.176132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:54.176138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:54.176146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:54.176148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:54.176155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:54.187324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:54.187337Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:54.189809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:54.190569Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:54.190595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:54.191737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:54.192163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:54.193936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.194016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:54.194929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:54.198699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.198703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:54.198715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.200670Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:19:54.214754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.215657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:54.215691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:54.216460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:54.216475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:54.216479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:54.216846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:54.217194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.219070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.219506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:54.219832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:54.222314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:54.223497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:54.223577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.223605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:54.224051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T09:19:54.224058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.224083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:54.224147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:54.224161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:54.224165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:54.224177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:54.224184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:54.224193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:54.224197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:54.224201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:54.224506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:54.224527Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:54.224531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.224542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:00.460759Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:00.460783Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 24us result status StatusSuccess 2024-11-21T09:20:00.460834Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:00.460877Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:00.460892Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 16us result status StatusSuccess 2024-11-21T09:20:00.460941Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:00.460980Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T09:20:00.460990Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 11us result status StatusSuccess 2024-11-21T09:20:00.461015Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:00.461047Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:00.461059Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 13us result status StatusSuccess 2024-11-21T09:20:00.461086Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" 
LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSinkLocks::InvalidateOlapOnCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail [GOOD] Test command err: Trying to start YDB, gRPC: 10374, MsgBus: 7214 2024-11-21T09:19:58.929022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659879068758557:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:58.929162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00278c/r3tmp/tmphEcut8/pdisk_1.dat 2024-11-21T09:19:58.975947Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10374, node 1 2024-11-21T09:19:58.983225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:58.983236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:58.983237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:58.983273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7214 TClient is connected to server localhost:7214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:59.022376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:59.025162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:59.030457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:59.030476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:59.031600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:59.085070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:59.099272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:59.107438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:59.171797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659883363727405:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.171828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.193151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.198783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.210370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.264873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.319339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.329493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.337919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659883363727922:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.337947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.337953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659883363727927:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.338586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:19:59.342046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659883363727929:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:19:59.564643Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjIwZGQ0NmUtZjJmZDI1ODctYzlkZmYyZC0yZTM5NjUyYw==, ActorId: [1:7439659883363728213:2454], ActorState: ExecuteState, TraceId: 01jd70aw2abhcfc2y7bz8qbj0k, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 15416, MsgBus: 9286 2024-11-21T09:19:59.781149Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659883349262812:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:59.781278Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00278c/r3tmp/tmpVIeFYc/pdisk_1.dat 2024-11-21T09:19:59.787261Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15416, node 2 2024-11-21T09:19:59.796125Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:59.796134Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:59.796135Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:59.796159Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9286 TClient is connected to server localhost:9286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:59.882539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:59.882565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:59.883297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.883548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:20:00.009242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659887644230694:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:00.009263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:00.009275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659887644230721:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:00.009800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:00.011031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659887644230723:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:00.115687Z node 2 :FLAT ... fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:20:00.247024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:20:00.247027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:20:00.247036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:20:00.247039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:20:00.247039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:20:00.247044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:20:00.247051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:20:00.247052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:20:00.247054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:20:00.247055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:20:00.247062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:20:00.247062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:20:00.247065Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:20:00.247072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:20:00.247079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:20:00.247084Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:20:00.247108Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:20:00.247117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:20:00.247130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:20:00.247138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:20:00.247147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:20:00.247155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:20:00.247168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:20:00.247176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:20:00.247185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:20:00.247192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:20:00.247233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:20:00.247243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:20:00.247250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:20:00.247258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:20:00.247270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:20:00.247279Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:20:00.247286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:20:00.247294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:20:00.247300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:20:00.247308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:20:00.247313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:20:00.247319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:20:00.247550Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:20:00.247563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:20:00.247577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:20:00.247586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:20:00.247596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:20:00.247602Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:20:00.247616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:20:00.247625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:20:00.247634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2024-11-21T09:20:00.247642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:20:00.373583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037936;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] Col2: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"2;1;"}}]}; 2024-11-21T09:20:00.376622Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MThhNTliZDYtN2IxYjllYTktNjgyNjRiYzUtODJiNWY0MDY=, ActorId: [2:7439659887644233451:2658], ActorState: ExecuteState, TraceId: 01jd70awvb8cm096q60rg2ckwa, Create QueryResponse for error on request, msg: 2024-11-21T09:20:00.377151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7439659887644231278:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:111;event=abort;tx_id=281474976715665;problem=finished; 2024-11-21T09:20:00.377541Z node 2 :TX_COLUMNSHARD WARN: fline=manager.cpp:128;event=abort;tx_id=281474976715665;problem=finished; 2024-11-21T09:20:00.379238Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037936;tx_state=complete;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Debug] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> test.py::test[join-yql-14829_leftonly--Analyze] [GOOD] >> test.py::test[join-yql-14829_leftonly--Debug] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Debug] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Plan] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed >> test.py::test[expr-cast_reverse_list-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-ForceBlocks] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-ForceBlocks] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[pg-with-default.txt-Analyze] >> KqpTx::RollbackManyTx >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> test.py::test[join-yql-14829_leftonly--Debug] [GOOD] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkLocks::EmptyRange >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> 
test.py::test[optimizers-yql-12620_stage_multiuse--Debug] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Results] >> test.py::test[pg-with-default.txt-Analyze] [GOOD] >> test.py::test[pg-with-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2024-11-21T09:17:07.139732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:17:07.139784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:17:07.139794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00238d/r3tmp/tmp6ejF8S/pdisk_1.dat 2024-11-21T09:17:07.212850Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32279, node 1 2024-11-21T09:17:07.303803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:17:07.303821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:17:07.303824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:17:07.303886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:17:07.308230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:17:07.383301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:07.383329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:07.394495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14411 2024-11-21T09:17:07.792592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:17:08.535423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.535450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.567988Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:17:08.568727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.621130Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:17:08.628873Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:17:08.628902Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:17:08.634670Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:17:08.634788Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:17:08.634802Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:17:08.634806Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:17:08.634810Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:17:08.634814Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:17:08.634818Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:17:08.634822Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:17:08.634904Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:17:08.809039Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.809064Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:17:08.810058Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:17:08.811861Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:17:08.811943Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:17:08.812831Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:17:08.817296Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:17:08.817314Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:17:08.817325Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:17:08.818959Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:17:08.818980Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:17:08.820117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:17:08.821539Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:17:08.821564Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:17:08.823416Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:17:08.835036Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:17:08.857186Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:17:08.969323Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:17:09.124252Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:17:09.852795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.852830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:17:09.855653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:17:09.896163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:09.896258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:09.896305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:09.896328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:09.896355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:09.896383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:17:09.896405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:17:09.896427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:17:09.896450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:17:09.896472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:17:09.896492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:17:09.896513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:17:09.901703Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:17:09.901735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:17:09.901763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:17:09.901776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:17:09.901789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:17:09.901804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... [72075186224037897] Subscribed for config changes 2024-11-21T09:20:01.510206Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:20:01.510213Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:20:01.510226Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:20:01.510345Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:20:01.510354Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:20:01.510525Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:20:01.544163Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:20:01.544235Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:20:01.544451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8449:6327], server id = [2:8454:6332], tablet id = 72075186224037899, status = OK 2024-11-21T09:20:01.544580Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8449:6327], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.544920Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:20:01.544994Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8450:6328], server id = [2:8455:6333], tablet id = 72075186224037900, status = OK 2024-11-21T09:20:01.545009Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8450:6328], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545037Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8451:6329], server id = [2:8456:6334], tablet id = 72075186224037901, status = OK 2024-11-21T09:20:01.545044Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8451:6329], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545148Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8449:6327], server id = [2:8454:6332], tablet id = 72075186224037899 2024-11-21T09:20:01.545154Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.545250Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8452:6330], server id = [2:8457:6335], tablet id = 72075186224037902, status = OK 2024-11-21T09:20:01.545261Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8452:6330], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545284Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8453:6331], server id = [2:8458:6336], tablet id = 72075186224037903, status = OK 2024-11-21T09:20:01.545290Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8453:6331], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545310Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:20:01.545450Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T09:20:01.545471Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8459:6337], server id = [2:8460:6338], tablet id = 72075186224037904, status = OK 2024-11-21T09:20:01.545480Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8459:6337], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545574Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T09:20:01.545620Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T09:20:01.545672Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8450:6328], server id = [2:8455:6333], tablet id = 72075186224037900 2024-11-21T09:20:01.545676Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.545708Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8451:6329], server id = [2:8456:6334], tablet id = 72075186224037901 2024-11-21T09:20:01.545712Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.545744Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T09:20:01.545770Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8452:6330], server id = [2:8457:6335], tablet id = 72075186224037902 2024-11-21T09:20:01.545774Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.545798Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8461:6339], server id = [2:8463:6341], tablet id = 72075186224037905, status = OK 2024-11-21T09:20:01.545810Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8461:6339], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545827Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8453:6331], server id = [2:8458:6336], tablet id = 72075186224037903 2024-11-21T09:20:01.545830Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.545890Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8462:6340], server id = [2:8466:6344], tablet id = 72075186224037906, status = OK 2024-11-21T09:20:01.545899Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8462:6340], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545924Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8464:6342], server id = [2:8467:6345], tablet id = 72075186224037907, status = OK 2024-11-21T09:20:01.545929Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8464:6342], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.545941Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8459:6337], server id = [2:8460:6338], tablet id = 72075186224037904 2024-11-21T09:20:01.545944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.546039Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8465:6343], server id = [2:8468:6346], tablet id = 72075186224037908, status = OK 2024-11-21T09:20:01.546048Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8465:6343], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:01.546064Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:20:01.546138Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T09:20:01.546149Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T09:20:01.546186Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T09:20:01.546191Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:20:01.546229Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:20:01.546255Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:20:01.546302Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:20:01.546332Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8461:6339], server id = [2:8463:6341], tablet id = 72075186224037905 2024-11-21T09:20:01.546335Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.546883Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8462:6340], server id = [2:8466:6344], tablet id = 72075186224037906 2024-11-21T09:20:01.546894Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.546926Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:20:01.546992Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8464:6342], server id = [2:8467:6345], tablet id = 72075186224037907 2024-11-21T09:20:01.546996Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.547099Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8465:6343], server id = [2:8468:6346], tablet id = 72075186224037908 2024-11-21T09:20:01.547106Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:01.550768Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8485:6363]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:01.550829Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:20:01.550837Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8485:6363], StatRequests.size() = 1 2024-11-21T09:20:01.588742Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDdkZGE5NzAtZWY0ZjJjY2QtNDM2NjlmNzgtYzhiYzY3ODg=, TxId: 2024-11-21T09:20:01.588770Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDdkZGE5NzAtZWY0ZjJjY2QtNDM2NjlmNzgtYzhiYzY3ODg=, TxId: 2024-11-21T09:20:01.588946Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:20:01.599930Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8495:6369] 2024-11-21T09:20:01.599977Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8401:6299], server id = [2:8495:6369], tablet id = 72075186224037897, status = OK 2024-11-21T09:20:01.600021Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:8495:6369], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2024-11-21T09:20:01.600072Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:8496:6370] 2024-11-21T09:20:01.600092Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:8496:6370], schemeshard id = 72075186224037889 2024-11-21T09:20:01.611577Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:20:01.611600Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T09:20:01.770357Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8503:6375]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:01.770492Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:20:01.770498Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:20:01.771148Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T09:20:01.771159Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:20:01.771165Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T09:20:01.772622Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> test.py::test[expr-cast_reverse_list-default.txt-Results] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Analyze] >> test.py::test[aggr_factory-multi--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-multi--Plan] [GOOD] >> test.py::test[aggr_factory-multi--Results] >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::SnapshotROInteractive2 >> test.py::test[bigdate-bitcast_timestamp64-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Analyze] >> KqpTx::RollbackManyTx [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> test.py::test[optimizers-yql-12620_stage_multiuse--Debug] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Plan] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--Plan] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 1745, MsgBus: 30124 2024-11-21T09:20:01.978956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659893965990705:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.978970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0026c5/r3tmp/tmpp1stwB/pdisk_1.dat 2024-11-21T09:20:02.018681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1745, node 1 2024-11-21T09:20:02.029005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:02.029016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:02.029018Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:02.029042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30124 TClient is 
connected to server localhost:30124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:02.071630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.074348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.079133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:02.079151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:02.080263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:02.132186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.144188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.157740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.204780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659898260959533:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.204811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.225306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.230733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.240890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.247831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.302070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.311025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.319005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659898260960036:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.319042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.319058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659898260960041:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.319547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:02.324033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659898260960043:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |95.5%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[pg-with-default.txt-Debug] [GOOD] >> test.py::test[pg-with-default.txt-ForceBlocks] |95.5%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::SnapshotROInteractive2 >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> test.py::test[expr-cast_utf8-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Debug] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Debug] >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> test.py::test[pg-with-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-with-default.txt-Plan] [GOOD] >> test.py::test[pg-with-default.txt-Results] >> KqpSinkTx::OlapLocksAbortOnCommit >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> KqpTx::SnapshotROInteractive2 [GOOD] |95.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[expr-cast_utf8-default.txt-Analyze] [GOOD] >> test.py::test[pg-with-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Analyze] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-8125--Analyze] >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 6063, MsgBus: 29767 2024-11-21T09:20:03.593262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659901805559282:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:03.593394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002699/r3tmp/tmpW1z3yK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6063, node 1 2024-11-21T09:20:03.641717Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:03.647346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:03.647356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:03.647358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:03.647396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29767 TClient is connected to server localhost:29767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:03.690058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.694202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:03.694231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:03.695359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:03.701923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.761555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.774832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.782989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.823374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659901805560833:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.823398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.842033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.896735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.951191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.957074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.962873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.970107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.978398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659901805561351:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.978421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.978420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659901805561356:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.979120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:03.983473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659901805561358:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:54.176103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:54.176124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:54.176133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:54.176159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:54.176162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:54.176167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:54.187346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:54.187362Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:54.189400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:54.189884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:54.189908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:54.191988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:54.192169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:54.193936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.194010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:54.194897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2024-11-21T09:19:54.198691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.198695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:54.198709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.200682Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:19:54.214624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.215657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:54.215691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:54.216461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:54.216475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:54.216479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:54.216830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:54.217104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 
72057594046678944 2024-11-21T09:19:54.219067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.219473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:54.219828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:54.222301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:54.223504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:54.223585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.223609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:54.224007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.224039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:54.224113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:54.224127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:54.224130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2024-11-21T09:19:54.224137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:54.224142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:54.224150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:54.224154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:54.224156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:54.224376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:54.224392Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:54.224395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.224403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
T09:20:03.683419Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:20:03.683440Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:20:03.683463Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:03.683466Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:20:03.683469Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T09:20:03.683547Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:03.683552Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:20:03.683562Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:20:03.683565Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:20:03.683569Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:20:03.683572Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:20:03.683575Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:20:03.683578Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:20:03.683602Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:20:03.683607Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:20:03.683609Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T09:20:03.683611Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:20:03.683696Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:03.683706Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:03.683709Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:20:03.683712Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:20:03.683715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:20:03.684075Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:03.684111Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:03.684116Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:20:03.684121Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:20:03.684127Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:03.684152Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:20:03.684800Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:20:03.685129Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:20:03.685193Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:20:03.685198Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:20:03.685255Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:20:03.685270Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:20:03.685274Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:455:2414] TestWaitNotification: OK eventTxId 102 2024-11-21T09:20:03.685348Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:03.685394Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 74us result status StatusSuccess 2024-11-21T09:20:03.685455Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:03.685490Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:03.685501Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 13us result status StatusSuccess 2024-11-21T09:20:03.685546Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:04.102250Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:04.102312Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 73us result status StatusSuccess 2024-11-21T09:20:04.102414Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:04.254493Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 >> test.py::test[bigdate-table_explicit_cast-default.txt-Debug] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::WriteNonExistingTopic >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::LocksAbortOnCommit >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Analyze] [GOOD] >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Debug] >> test.py::test[join-yql-8125--Analyze] [GOOD] >> test.py::test[join-yql-8125--Debug] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> KqpTx::SnapshotRO >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected >> KqpTx::RollbackRoTx >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> KqpTx::RollbackTx >> test.py::test[optimizers-yql-5978_fill_multi_usage--Debug] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Plan] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-ForceBlocks] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> 
TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> KqpTx::SnapshotRO [GOOD] >> KqpTx::RollbackRoTx [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 9422, MsgBus: 18193 2024-11-21T09:20:05.454673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659908893586584:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:05.454803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025ff/r3tmp/tmpoRiiL1/pdisk_1.dat 2024-11-21T09:20:05.495829Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9422, node 1 2024-11-21T09:20:05.509373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:05.509382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:05.509383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:05.509407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18193 TClient is connected to server localhost:18193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:05.555676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:05.555703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:05.556711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:05.580662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.586627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:05.647559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.664590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.674720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.735819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908893588151:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.735842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.763550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.768462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.775435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.782482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.790012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.796723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.805163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908893588642:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.805206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.805211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908893588647:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.805770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:05.810090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659908893588649:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:06.015844Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzhhMWFkZmItZGRkMGZlMDEtNzdlODM4YS01ODZhYTJh, ActorId: [1:7439659908893588930:2454], ActorState: ExecuteState, TraceId: 01jd70b2bpdhzzecpkkaq8zd2g, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Plan] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] >> KqpTx::RollbackTx [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 8486, MsgBus: 28597 2024-11-21T09:20:05.671306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659908532083414:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:05.671421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025cc/r3tmp/tmpeR1FKb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8486, node 1 2024-11-21T09:20:05.726850Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:05.731339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:05.731350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:05.731352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:05.731379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28597 TClient is connected to server localhost:28597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:05.772495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:05.772519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:05.773605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:05.777189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:05.789263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.850900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.864270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.874408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.926756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908532084961:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.926785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.947570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.957984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.964934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.972062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.978615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.985957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.994723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908532085452:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.994746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659908532085457:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.994746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.995327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:05.999037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659908532085459:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:06.204089Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjUwZTc0MjUtNzkwNmVjZDEtNDAzNWMyN2EtY2JiMjA5YzA=, ActorId: [1:7439659912827053042:2454], ActorState: ReadyState, TraceId: 01jd70b2hvc8d4n3m2pq4vwnq3, Create QueryResponse for error on request, msg: >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Results] >> test.py::test[join-yql-8125--Debug] [GOOD] >> test.py::test[join-yql-8125--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx [GOOD] Test command err: Trying to start YDB, gRPC: 1510, MsgBus: 31153 2024-11-21T09:20:05.864446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659912047498663:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:05.864462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0025af/r3tmp/tmp9TBNkn/pdisk_1.dat 2024-11-21T09:20:05.904021Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1510, node 1 2024-11-21T09:20:05.913887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:05.913901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:05.913903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:05.913936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31153 TClient is connected to server localhost:31153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:05.965337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:05.965368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:05.966493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:05.986304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.997059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:06.011346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:06.024768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:06.035700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:06.100931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659916342467500:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:06.100952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:06.121080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.126445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.132919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.139586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.146849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.154197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:06.162255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659916342467994:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:06.162296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:06.162307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659916342467999:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:06.162935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:06.166993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659916342468001:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:06.362413Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OThmMTdhMjgtMjYzMDQ0MjUtYWJmY2ZjMTQtN2Y4YmE2OTA=, ActorId: [1:7439659916342468285:2454], ActorState: ReadyState, TraceId: 01jd70b2ps2e7ddrbpg79peek4, Create QueryResponse for error on request, msg: >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> test.py::test[pg_duplicated-order_by_duplicated_proj_on_column_ref-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Analyze] >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> KqpSinkTx::InvalidateOnError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:54.176117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:54.176135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:54.176142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:54.176154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:54.176156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:54.176162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:54.187282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:54.187299Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:54.189384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:54.189936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:54.189959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:54.191945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T09:19:54.192165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:54.193931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.194007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:54.194811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:54.198698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.198702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:54.198715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.200656Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:19:54.214624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.215660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:54.215691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:54.216497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:54.216508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:54.216511Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:54.216836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:54.217153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.219052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.219471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:54.219815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:54.222302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:54.223502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:54.223573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.223605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:54.224003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:19:54.224039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:54.224113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:54.224127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:54.224130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:54.224137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:54.224142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:54.224150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:54.224154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:54.224156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:54.224390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:54.224406Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:54.224408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.224417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
oseLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 35 } } 2024-11-21T09:20:06.776918Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:06.776950Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 35 } } 2024-11-21T09:20:06.776966Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 35 } } 2024-11-21T09:20:06.776971Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T09:20:06.776996Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.777002Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2024-11-21T09:20:06.777485Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.777536Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.777544Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:06.777556Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T09:20:06.777591Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:06.777988Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:20:06.778021Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T09:20:06.778286Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:06.778314Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 51539609707 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:06.778322Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:20:06.778399Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T09:20:06.778422Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:20:06.779759Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:06.779770Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:20:06.779827Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:06.779833Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:20:06.779948Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.779957Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:20:06.780082Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:06.780095Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:20:06.780103Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:20:06.780108Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:20:06.780113Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:20:06.780127Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:20:06.780262Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 270 } } 2024-11-21T09:20:06.780270Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:06.780287Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 270 } } 2024-11-21T09:20:06.780299Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 270 } } 2024-11-21T09:20:06.780447Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:20:06.780453Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:06.780465Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:20:06.780470Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:20:06.780477Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:20:06.780486Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:06.780491Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.780495Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:20:06.780500Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:20:06.780984Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:20:06.781263Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.781288Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.781303Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:20:06.781309Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:20:06.781322Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:20:06.781326Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:20:06.781331Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:20:06.781343Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:340:2315] message: TxId: 102 2024-11-21T09:20:06.781349Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:20:06.781354Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:20:06.781358Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:20:06.781381Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:20:06.781742Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:20:06.781754Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:389:2363] TestWaitNotification: OK eventTxId 102 >> test.py::test[optimizers-yt_shuffle_by_keys--Debug] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys--Plan] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpLocks::DifferentKeyUpdate >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Analyze] >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> KqpTx::TooManyTx >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> test.py::test[produce-process_streaming_inline_bash-default.txt-Analyze] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Debug] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_pull--Analyze] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> test.py::test[join-yql-8125--ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Plan] [GOOD] >> test.py::test[join-yql-8125--Results] >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] |95.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> KqpTx::TooManyTx [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::NestedDirs >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> test.py::test[aggr_factory-some-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Debug] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 10400, MsgBus: 9656 2024-11-21T09:19:56.087843Z node 
1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659870202232583:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.087990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a00/r3tmp/tmpMcnqOG/pdisk_1.dat 2024-11-21T09:19:56.133442Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10400, node 1 2024-11-21T09:19:56.144464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:56.144475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:56.144477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:56.144508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9656 2024-11-21T09:19:56.188969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:56.188997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:56.190031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:56.214661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.323013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870202233200:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.323012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870202233189:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.323034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.324355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:56.326010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659870202233203:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:19:56.463381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.473875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:56.473909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:56.473929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:56.473943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:56.473960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:56.473976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:56.473993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:56.474003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:19:56.474033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:19:56.474051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:19:56.474068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:19:56.474087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439659870202233411:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:19:56.474143Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:56.474171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:56.474212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:56.474239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:56.474262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:56.474287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:56.474324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:56.474349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:19:56.474377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:19:56.474401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:19:56.474427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:19:56.474452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439659870202233406:2312];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:19:56.474872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:19:56.474890Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:19:56.474902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:19:56.474919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:19:56.474942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstr ... line=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.965826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.966621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:56.998364Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659870202241109:3252] TxId: 281474976710665. Ctx: { TraceId: 01jd70ash984w8ybnh7ys0pc2p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE5OGQ1NC0zODgxYjNhNC04NGY4YzIzNS1kODFkMzJmMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:19:57.032471Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659874497208592:3255] TxId: 281474976710666. Ctx: { TraceId: 01jd70ask3503ga4gg6jxgzpem, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA0MmJhMWItNWFlMDBjMC03YWRiMmM2ZS1iYjUwMmE5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T09:19:57.036359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037895;tx_state=complete;fline=interaction.h:353;batch=Group: [ 2 ] Name: [ 5061756C ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976710665}],"starts":[{"inc":{"count_not_include":1},"id":281474976710665}],"finishes":[]},"p":{"include":1,"pk":"1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976710665}]},"p":{"include":2147483647,"pk":"1;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710666}],"finishes":[{"inc":{"count_include":1},"id":281474976710666}]},"p":{"include":0,"pk":"2;Paul;"}}]}; 2024-11-21T09:19:57.039856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:01.088170Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659870202232583:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.088230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11720, MsgBus: 8798 2024-11-21T09:20:02.255776Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659898449869249:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:02.255797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002a00/r3tmp/tmpuqkXpz/pdisk_1.dat 2024-11-21T09:20:02.263710Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11720, node 2 2024-11-21T09:20:02.273835Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:02.273850Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:02.273852Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:02.273894Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8798 TClient is connected to server localhost:8798 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:02.355999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:02.356026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:02.357183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:02.357758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.507104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659898449869849:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.507124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.507134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659898449869860:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.507615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:02.508923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659898449869863:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:02.612423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.667951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.859174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.097442Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2024-11-21T09:20:03.097484Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T09:20:03.097501Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" } 2024-11-21T09:20:03.097535Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659902744844869:2931], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [2:7439659902744844797:2931]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 6]`. ShardID=72075186224037888, Sink=[2:7439659902744844869:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:03.098421Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659902744844862:2931], SessionActorId: [2:7439659902744844797:2931], Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7439659902744844797:2931]. isRollback=0 2024-11-21T09:20:03.098446Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q0Y2ZmNjMtYzAxMTA2ODAtOTNhNTgzNDYtYWFiNThlMWY=, ActorId: [2:7439659902744844797:2931], ActorState: ExecuteState, TraceId: 01jd70azgmewm1eh77wpx1kxz1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439659902744844863:2931] from: [2:7439659902744844862:2931] 2024-11-21T09:20:03.098479Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659902744844863:2931] TxId: 281474976715667. Ctx: { TraceId: 01jd70azgmewm1eh77wpx1kxz1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q0Y2ZmNjMtYzAxMTA2ODAtOTNhNTgzNDYtYWFiNThlMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:03.099151Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q0Y2ZmNjMtYzAxMTA2ODAtOTNhNTgzNDYtYWFiNThlMWY=, ActorId: [2:7439659902744844797:2931], ActorState: ExecuteState, TraceId: 01jd70azgmewm1eh77wpx1kxz1, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table `/Root/Test`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:07.256244Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659898449869249:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.256279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[bigdate-tz_table_pull--Analyze] [GOOD] >> test.py::test[bigdate-tz_table_pull--Debug] >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 18004, MsgBus: 31368 2024-11-21T09:19:56.312009Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659870896666824:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.312154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00284d/r3tmp/tmpX4NRO7/pdisk_1.dat 2024-11-21T09:19:56.362566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18004, node 1 2024-11-21T09:19:56.371469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:56.371482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:56.371484Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:56.371510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31368 TClient is connected to server localhost:31368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:19:56.413421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:56.413448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:56.414554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:56.441489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:56.542451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870896667419:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.542469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.542594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659870896667446:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:56.543186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:56.544356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659870896667448:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:19:56.638723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.696900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:19:56.889634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.023209Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmU4MzMxM2YtMjRiODJjYWMtNDc4ZjZhZmMtNjNlZDIyN2Q=, ActorId: [1:7439659870896674908:2931], ActorState: ExecuteState, TraceId: 01jd70asjhf546fcfa8tcj9sde, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:01.312138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659870896666824:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.312179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14828, MsgBus: 29893 2024-11-21T09:20:02.228327Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659898379579376:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:02.228367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00284d/r3tmp/tmptkXpcr/pdisk_1.dat 2024-11-21T09:20:02.234863Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14828, node 2 2024-11-21T09:20:02.244288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:02.244300Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:02.244302Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:02.244339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29893 TClient is connected to server localhost:29893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:02.329553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:02.329574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:02.330276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:02.330646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:02.452622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659898379579964:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.452641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659898379579991:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.452647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.453237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:02.454516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659898379579993:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:02.525690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.535137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:02.535172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:02.535193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:02.535211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:02.535228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:02.535247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self ... =disabled; 2024-11-21T09:20:02.952239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952308Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.952337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977796Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.977918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:02.978717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.061323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037891;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E32 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":1,"pk":"10;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647,"pk":"10;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_not_include":1},"id":281474976715666}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_include":1},"id":281474976715666}],"finishes":[{"inc":{"count_include":1},"id":281474976715666}]},"p":{"include":0,"pk":"11;Session2;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715666}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2024-11-21T09:20:03.066412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067341Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.067452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.068330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:03.100178Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGFhOWI0NGMtMmM4MGI2YjUtNjk5NmUyZTUtNGZlM2I3NDI=, ActorId: [2:7439659898379587929:3252], ActorState: ExecuteState, TraceId: 01jd70azfw9dqf677cg7c9w86r, Create QueryResponse for error on request, msg: tx has effects, but locks are broken 2024-11-21T09:20:03.101163Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037896;self_id=[2:7439659898379580214:2321];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101174Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[2:7439659898379580198:2315];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101182Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[2:7439659898379580197:2314];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101194Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[2:7439659898379580195:2312];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101199Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037893;self_id=[2:7439659898379580200:2317];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101204Z node 2 :TX_COLUMNSHARD ERROR: 
tablet_id=72075186224037892;self_id=[2:7439659898379580196:2313];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101204Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037895;self_id=[2:7439659898379580202:2319];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101209Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037894;self_id=[2:7439659898379580201:2318];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101214Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037897;self_id=[2:7439659898379580204:2320];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101214Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7439659898379580199:2316];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:03.101242Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659902674555688:3252], SessionActorId: [2:7439659898379587929:3252], Got BAD REQUEST for table. ShardID=72075186224037888, Sink=[2:7439659902674555688:3252].{
: Fatal: only single operation is supported } 2024-11-21T09:20:03.101331Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659902674555688:3252], SessionActorId: [2:7439659898379587929:3252], Bad request. {
: Fatal: only single operation is supported }. statusCode=BAD_REQUEST. subIssues=
: Fatal: only single operation is supported . sessionActorId=[2:7439659898379587929:3252]. isRollback=0 2024-11-21T09:20:03.101356Z node 2 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=2&id=OGFhOWI0NGMtMmM4MGI2YjUtNjk5NmUyZTUtNGZlM2I3NDI=, ActorId: [2:7439659898379587929:3252], ActorState: ReadyState, got TEvKqpBuffer::TEvError in ReadyState, status: BAD_REQUEST send to: [0:0:0] from: [2:7439659902674555688:3252]: Old error.
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001
: Error: tx has effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:07.228791Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659898379579376:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.228826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::TooManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 17506, MsgBus: 26646 2024-11-21T09:20:07.809060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659919855285954:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.809183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00256f/r3tmp/tmp0qHMXr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17506, node 1 2024-11-21T09:20:07.854103Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:07.857590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:07.857600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:07.857601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:07.857640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26646 TClient is connected to server localhost:26646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:07.910032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:07.910056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:07.911060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:07.931939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:07.940589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.000266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.016195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.024364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.045280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659924150254799:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.045305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.066193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.121224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.127774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.182258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.190980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.198895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.206141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659924150255317:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.206164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659924150255322:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.206167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.206664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:08.211274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659924150255324:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:08.368507Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBjZDk3NTQtNWQxMWNlZi0xNDE4ODYxYi1lOGRkMDJmZg==, ActorId: [1:7439659924150255594:2454], ActorState: ReadyState, TraceId: 01jd70b4nd78yznb5qjh1kcdan, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:780: Too many transactions, current active: 2 MaxTxPerSession: 2 >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> test.py::test[produce-process_streaming_inline_bash-default.txt-Debug] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon >> KqpLocks::EmptyRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 27893, MsgBus: 18329 2024-11-21T09:19:56.982252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659873239837204:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:56.982270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002819/r3tmp/tmp5q3mb5/pdisk_1.dat 2024-11-21T09:19:57.028364Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27893, node 1 2024-11-21T09:19:57.038417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:57.038427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:57.038429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:57.038452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18329 TClient is connected to server localhost:18329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:57.080701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:57.082952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:57.082971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:57.083975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:57.203288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659877534805099:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.203312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659877534805111:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.203319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:57.203857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:57.204993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659877534805114:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:19:57.314844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.372267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:19:57.447716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:01.982607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659873239837204:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.982648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1252, MsgBus: 18016 2024-11-21T09:20:02.885953Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659895883911562:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:02.885970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002819/r3tmp/tmpXWkeps/pdisk_1.dat 2024-11-21T09:20:02.892674Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1252, node 2 2024-11-21T09:20:02.901744Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:02.901758Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:02.901760Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:02.901789Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18016 TClient is connected to server localhost:18016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:02.986257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:02.986286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:02.987349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:02.987569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:03.100224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659900178879454:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.100246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659900178879466:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.100252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:03.100856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:03.102243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659900178879468:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:03.201687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.208313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:03.396480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:07.886452Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659895883911562:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.886484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRange [GOOD] Test command err: Trying to start YDB, gRPC: 25973, MsgBus: 8554 2024-11-21T09:20:07.524133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659919139271968:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.524266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00257a/r3tmp/tmpyIC5UV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25973, node 1 2024-11-21T09:20:07.572568Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:07.576592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:07.576607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:07.576609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:07.576638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8554 TClient is connected to server localhost:8554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:07.619920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:07.622659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:07.626436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:07.626472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:07.627521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:07.680752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:07.699388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:07.708633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:07.757310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659919139273529:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.757329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.788342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.793898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.806172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.812645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.819878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.826949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.835163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659919139274019:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.835185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.835202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659919139274024:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.835770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:07.840080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659919139274026:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 9645, MsgBus: 7778 2024-11-21T09:20:08.196996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659923796079814:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:08.197154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00257a/r3tmp/tmpISxZiN/pdisk_1.dat 2024-11-21T09:20:08.207793Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9645, node 2 2024-11-21T09:20:08.213438Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:08.213448Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:08.213450Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:08.213478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7778 TClient is connected to server localhost:7778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:08.297181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:08.297207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:08.298345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:08.299061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.306346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.314913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:08.332123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.342197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.457115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659923796081348:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.457136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.462270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.468104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.477817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.484771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.539630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.548148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.556855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659923796081867:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.556881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.556965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659923796081872:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.557545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:08.561260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659923796081874:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:08.790074Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWI5Y2M2YjEtZWFiMWZkYTgtZDk0NDBmMjYtMTJmYmVkM2M=, ActorId: [2:7439659923796082160:2454], ActorState: ExecuteState, TraceId: 01jd70b529062ycdftvk2wqp4f, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:18:51.483013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:18:51.483029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:51.483032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:18:51.483035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:18:51.483038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:18:51.483041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:18:51.483047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:18:51.483103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:18:51.489650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:18:51.489661Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:18:51.490892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:18:51.490946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:18:51.490974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:18:51.492443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:18:51.492497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:18:51.492553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:51.492686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:51.493127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:51.493301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:51.493307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:51.493314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:18:51.493318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:51.493324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:18:51.493350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:18:51.494132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:18:51.504869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:18:51.504922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.504978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:18:51.505011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:18:51.505017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:18:51.505480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:18:51.505490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:18:51.505493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:18:51.505700Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505705Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:18:51.505878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.505886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:51.505890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:18:51.506235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:18:51.506493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:18:51.506522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:18:51.506640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:18:51.506656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:18:51.506660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:51.506692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:18:51.506696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:18:51.506715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:18:51.506722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:18:51.506935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:18:51.506940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:18:51.506963Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:18:51.506966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:18:51.507019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:18:51.507023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:18:51.507031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:18:51.507033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:51.507037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:18:51.507040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:18:51.507043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:18:51.507045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:18:51.507051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:18:51.507054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:18:51.507057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... tToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:08.628285Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:08.628327Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 50us result status StatusSuccess 2024-11-21T09:20:08.628446Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 
0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:08.638723Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:20:08.638755Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:700:2600] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T09:20:08.638790Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180808626064 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732180808626064 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732180808626064 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:20:08.639470Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T09:20:08.639494Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:700:2600] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpTx::RollbackByIdle >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-Analyze] >> KqpSinkTx::DeferredEffects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 
72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:54.176103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:54.176125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:54.176135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:54.176146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:54.176150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:54.176157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:54.187328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:54.187344Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:54.189414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:54.189975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:54.189996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:54.191781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:54.192167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:54.193922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.194039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:54.194866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:54.198699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.198703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:54.198715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.200680Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 
2024-11-21T09:19:54.217264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:54.217322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:54.217402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:54.217408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:54.218039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:54.218054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:54.218059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:54.218409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:54.218791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.218807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.219079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.219470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:54.219840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:54.222282Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:54.223487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:54.223574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.223616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:54.224012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.224041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:54.224126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224131Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:54.224138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:54.224141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:54.224147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:54.224152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:54.224159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:54.224162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:54.224164Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:54.224396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.224410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:54.224412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:54.224416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.224425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... athType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:08.944560Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:08.944581Z node 15 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex" took 23us result status StatusSuccess 2024-11-21T09:20:08.944691Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 
419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:08.944740Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:08.944765Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" took 24us result status StatusSuccess 2024-11-21T09:20:08.944842Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[bigdate-tz_table_pull--Debug] [GOOD] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] >> KqpSinkMvcc::OltpNamedStatement >> test.py::test[aggr_factory-some-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Plan] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:55.382603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:55.382622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:55.382625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:55.382628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:55.382638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:55.382640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:55.382646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:55.382701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:55.389612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:55.389624Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:55.391377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:55.391900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:55.391924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:55.392995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:55.393135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:55.393202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:55.393258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:55.393916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:55.394112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:55.394119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:55.394145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:55.394150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:55.394153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:55.394162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.394974Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:19:55.405015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:55.405065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:55.405137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:55.405141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:55.405631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:55.405640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:55.405643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:55.405880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:55.405888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:55.406129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.406136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.406139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:55.406151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:55.406528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:55.406792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:55.406821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:55.406925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:55.406940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:55.406950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:55.406980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:55.406986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:55.407007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:55.407017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:55.407300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:55.407305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:55.407325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:55.407328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:55.407372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:55.407376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:55.407383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:55.407386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:55.407389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:55.407392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:55.407397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:55.407399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:55.407405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:55.407409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:55.407411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:55.407609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:55.407617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:55.407621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:55.407623Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:55.407626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:55.407633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d: 2], 18446744073709551615 2024-11-21T09:20:09.344410Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:20:09.344415Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:20:09.344418Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:20:09.344420Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:20:09.344422Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:20:09.344455Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:20:09.344459Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:20:09.344461Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:20:09.344463Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:20:09.344465Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:09.344469Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T09:20:09.344801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:09.344809Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:09.344811Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:09.344813Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:09.345069Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T09:20:09.345108Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:20:09.345145Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:20:09.345186Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:20:09.345200Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409548 2024-11-21T09:20:09.345296Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:09.345314Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:20:09.345413Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T09:20:09.345438Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2024-11-21T09:20:09.345509Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:20:09.345529Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:20:09.345561Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:20:09.345585Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:20:09.345598Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:20:09.345653Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:20:09.345656Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:20:09.345662Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 
2024-11-21T09:20:09.346153Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:20:09.346162Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:20:09.346284Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:20:09.346287Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:20:09.346316Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:20:09.346320Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:20:09.346370Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:20:09.346374Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:20:09.346401Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T09:20:09.346432Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T09:20:09.346436Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T09:20:09.346477Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T09:20:09.346491Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:20:09.346496Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:527:2482] TestWaitNotification: OK eventTxId 103 2024-11-21T09:20:09.346546Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:09.346562Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 22us result status StatusPathDoesNotExist 2024-11-21T09:20:09.346583Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T09:20:09.346625Z node 15 :HIVE INFO: 
[72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:20:09.346636Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:20:09.346643Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T09:20:09.346651Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T09:20:09.346694Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:09.346706Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2024-11-21T09:20:09.346742Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpTx::SnapshotROInteractive1 >> KqpTx::RollbackByIdle [GOOD] >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkTx::OlapSnapshotRO >> test.py::test[json-combination/unwrapped-default.txt-Analyze] [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-Debug] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_assume--Analyze] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_pull--Plan] [GOOD] >> test.py::test[bigdate-tz_table_pull--Results] >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackByIdle [GOOD] Test command err: Trying to start YDB, gRPC: 4541, MsgBus: 21986 2024-11-21T09:20:09.643258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659927637366682:2053];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T09:20:09.643272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00253f/r3tmp/tmpqtv0qF/pdisk_1.dat 2024-11-21T09:20:09.682132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4541, node 1 2024-11-21T09:20:09.691880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:09.691898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:09.691900Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:09.691941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21986 TClient is connected to server localhost:21986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:09.744239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:09.744271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:09.745320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:09.766106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.775729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.836182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.850162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.858156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:09.896252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659927637368231:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.896280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.917511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.923576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.934168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.940672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.947990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.955090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.963077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659927637368721:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.963102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.963122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659927637368726:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.963583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:09.968249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659927637368728:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:10.141079Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI4Y2I4MzQtMjg1YWM5ZGMtYzk1MzQ4MTYtYTk4NGFiMTI=, ActorId: [1:7439659931932336307:2454], ActorState: ReadyState, TraceId: 01jd70b6cwbnwfkctpx575871k, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 7343, MsgBus: 15391 2024-11-21T09:19:58.102355Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659880443907634:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:58.102368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027da/r3tmp/tmp7XLb9C/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7343, node 1 2024-11-21T09:19:58.148871Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:58.152528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:58.152542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:58.152544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:58.152571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15391 TClient is connected to server localhost:15391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:58.194598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:58.203305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:58.203326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:58.204417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:58.356229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659880443908244:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.356249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659880443908253:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.356254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.356833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:58.358276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659880443908258:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:19:58.440925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:58.500105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:19:58.566601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:03.102522Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659880443907634:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:03.102560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19079, MsgBus: 9417 2024-11-21T09:20:04.028995Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659905553894238:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:04.029174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027da/r3tmp/tmpubqqrT/pdisk_1.dat 2024-11-21T09:20:04.035560Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19079, node 2 2024-11-21T09:20:04.045014Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:04.045030Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:04.045031Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:04.045086Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9417 TClient is connected to server localhost:9417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:04.129298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:04.129326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:04.130305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:04.131083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:04.253156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659905553894846:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.253173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659905553894816:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.253243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.253906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:04.255346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659905553894853:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:04.330985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:04.344138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439659905553895072:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:04.344161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7439659905553895069:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:04.344167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439659905553895072:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:04.344171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7439659905553895069:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:04.344192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439659905553895072:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:04.344195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7439659905553895069:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:04.344234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7439659905553895069:2315];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:04.344239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=7207518 ... 
09:20:04.846688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846788Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.846982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847407Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847472Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847878Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.847991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848045Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038062;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848181Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.848252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.874690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.874719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.874742Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.874749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.874779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.875149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.875188Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.875190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.876198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:04.893657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T09:20:05.545227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439659905553895964:2423];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T09:20:05.548690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=cb848048-a7e911ef-a59a9456-8b09150d;fline=with_appended.cpp:80;portions=4,;task_id=cb848048-a7e911ef-a59a9456-8b09150d; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:09.029460Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659905553894238:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:09.029495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Results] >> KqpTx::SnapshotROInteractive1 [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-Debug] [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-ForceBlocks] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Analyze] >> KqpSinkTx::LocksAbortOnCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 24147, MsgBus: 16928 2024-11-21T09:20:10.117145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659932237743864:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:10.117159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002523/r3tmp/tmp0jFC8J/pdisk_1.dat 2024-11-21T09:20:10.164063Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24147, node 1 2024-11-21T09:20:10.173618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:10.173634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:10.173636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:10.173674Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16928 TClient is connected to server localhost:16928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:10.217113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:10.217137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:10.218331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:10.247210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:10.254777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:10.314350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:10.332944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:10.340828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:10.402117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659932237745421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.402144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.421847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.427466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.437941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.492241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.500505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.507922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.516229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659932237745926:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.516250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.516251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659932237745931:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.516707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:10.520747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659932237745933:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> test.py::test[produce-reduce_with_assume--Analyze] [GOOD] >> test.py::test[produce-reduce_with_assume--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 25223, MsgBus: 27329 2024-11-21T09:19:59.045953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659882569585096:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:59.045991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002750/r3tmp/tmpRq3BTQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25223, node 1 2024-11-21T09:19:59.097687Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:59.100841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:59.100854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:59.100856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:59.100888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27329 TClient is connected to server localhost:27329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:59.146633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:59.146661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:59.147810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:59.175817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:19:59.267719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659882569585493:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.267779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659882569585474:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.267788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:59.268437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:59.269909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659882569585503:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:19:59.371439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.380368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:59.380402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:59.380426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:59.380443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:59.380465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:59.380487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:59.380507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:59.380523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:19:59.380539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:19:59.380554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:19:59.380569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:19:59.380585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659882569585707:2314];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:19:59.380679Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:59.380712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:59.380736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:59.380750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:59.380764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:59.380786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:59.380801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:59.380814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:19:59.380829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:19:59.380843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:19:59.380856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:19:59.380866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659882569585722:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:19:59.381154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:19:59.381164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:19:59.381172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:19:59.381175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:19:59.381184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=ab ... 667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037895;tx_state=complete;fline=interaction.h:353;batch=Group: [ 1 ] Name: [ 536572676579 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_include":2},"id":281474976715665}],"finishes":[{"inc":{"count_include":2},"id":281474976715665}]},"p":{"include":0,"pk":"1;Sergey;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647}}]}; 2024-11-21T09:19:59.923238Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659882569593936:3252] TxId: 281474976715666. Ctx: { TraceId: 01jd70awcnfx1sr0t4q1ntef5q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY2NDk1ZTEtZTJmOTU4OGUtYzQ1YmIxODQtZTg0NWQzMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T09:19:59.945793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.945873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.946806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:19:59.947621Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659882569594173:3252] TxId: 281474976715668. Ctx: { TraceId: 01jd70aweac1wzsg0jcestkvev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY2NDk1ZTEtZTJmOTU4OGUtYzQ1YmIxODQtZTg0NWQzMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root WAIT_INDEXATION: 0 2024-11-21T09:20:00.394444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659882569585708:2315];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=6; 2024-11-21T09:20:00.396756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=c8728ddc-a7e911ef-81eb41ef-21293694;fline=with_appended.cpp:80;portions=3,;task_id=c8728ddc-a7e911ef-81eb41ef-21293694; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:04.046356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659882569585096:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:04.046389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2213, MsgBus: 26726 2024-11-21T09:20:05.231495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659911581344848:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:05.231682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002750/r3tmp/tmpTMXgzj/pdisk_1.dat 2024-11-21T09:20:05.238214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2213, node 2 2024-11-21T09:20:05.247348Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:05.247361Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:05.247362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:05.247407Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26726 TClient is connected to server localhost:26726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:05.331680Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:05.331718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:05.332725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:05.333399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:05.455059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659911581345442:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.455079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659911581345450:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.455086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.455702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:05.457189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659911581345456:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:05.513470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.569386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.660857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.938327Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715667; 2024-11-21T09:20:05.940883Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659911581353392:2931], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7439659911581353083:2931]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[2:7439659911581353392:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:05.940987Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659911581353381:2931], SessionActorId: [2:7439659911581353083:2931], Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7439659911581353083:2931]. isRollback=0 2024-11-21T09:20:05.941019Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y0NGU1NzAtZjkyZWZkZWMtMzA0MjNkNmItMmIzMTFiY2Q=, ActorId: [2:7439659911581353083:2931], ActorState: ExecuteState, TraceId: 01jd70b29da2vv44dsqx7hnsr3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439659911581353382:2931] from: [2:7439659911581353381:2931] 2024-11-21T09:20:05.941067Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659911581353382:2931] TxId: 281474976715667. Ctx: { TraceId: 01jd70b29da2vv44dsqx7hnsr3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y0NGU1NzAtZjkyZWZkZWMtMzA0MjNkNmItMmIzMTFiY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:05.941811Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y0NGU1NzAtZjkyZWZkZWMtMzA0MjNkNmItMmIzMTFiY2Q=, ActorId: [2:7439659911581353083:2931], ActorState: ExecuteState, TraceId: 01jd70b29da2vv44dsqx7hnsr3, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:10.231842Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659911581344848:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:10.231876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestDoubleFailure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 61074, MsgBus: 5061 2024-11-21T09:19:58.693029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659879443611009:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:58.693204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027b5/r3tmp/tmp14FJlk/pdisk_1.dat 2024-11-21T09:19:58.742076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61074, node 1 2024-11-21T09:19:58.753827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:58.753841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:58.753842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:58.753878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5061 TClient is connected to server localhost:5061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:19:58.794211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:58.794252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:58.795357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:58.825479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:19:58.948829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659879443611620:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.948855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.948868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659879443611630:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:58.949547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:19:58.957302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659879443611634:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:19:59.066663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.124987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.309881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:59.500653Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZhNjM0ZDctNzcyNzE4MTEtY2RiNTFjNGYtNTJkODRhNTY=, ActorId: [1:7439659883738586403:2931], ActorState: ExecuteState, TraceId: 01jd70aw03djbyg07fwwt82a54, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:03.693368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659879443611009:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:03.693401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8178, MsgBus: 5119 2024-11-21T09:20:04.815052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659907435928044:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:04.815069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0027b5/r3tmp/tmpo3zNPa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8178, node 2 2024-11-21T09:20:04.830295Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:04.831510Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:04.831522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:04.831524Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:04.831560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5119 TClient is connected to server localhost:5119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:04.915606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:04.915632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:04.916668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:04.917408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:04.918230Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:05.102913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659911730895923:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.102933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.102973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659911730895954:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:05.103523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:05.104833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659911730895956:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:05.196790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:05.205627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439659911730896164:2317];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:05.205667Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439659911730896164:2317];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:05.205690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439659911730896164:2317];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:05.205708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439659911730896164:2317];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:05.205726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439659911730896164:2317];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:05.205743Z ... 09:20:05.700191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700325Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700420Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700486Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700491Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038045;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700502Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700583Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700658Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038051;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.700674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729352Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.729454Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.730152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:05.818782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T09:20:06.456419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439659911730897216:2433];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T09:20:06.458732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=cc0f8990-a7e911ef-b2b88e53-d73be109;fline=with_appended.cpp:80;portions=3,;task_id=cc0f8990-a7e911ef-b2b88e53-d73be109; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:09.815398Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659907435928044:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:09.815491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T09:19:44.915604Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.915613Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, 
ReadSizeServerDelta = 0 2024-11-21T09:19:44.915618Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.915751Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.916960Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.917001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917101Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.917244Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917289Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.917333Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.917344Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:44.917549Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917553Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917555Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.917618Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.917718Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.917751Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917804Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.917877Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.917911Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.917951Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.917959Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:44.918152Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.918155Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.918158Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.918207Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.918292Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.918316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.918356Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T09:19:44.918668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.918725Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.918763Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.918771Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:44.918995Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.918999Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.919002Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.919053Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.919116Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.919158Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.919190Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.920872Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921003Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.921040Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.921050Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:44.921284Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921289Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921292Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.921362Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.921482Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.921524Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921554Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.921616Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921641Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.921666Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.921672Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2024-11-21T09:19:44.921801Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921806Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.921809Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.921863Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.921988Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.922022Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922054Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.922097Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922127Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.922164Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.922168Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:44.922296Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922298Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922300Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.922361Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:44.922457Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.922479Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922507Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.922644Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922682Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.922706Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.922711Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:44.922867Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922869Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.922871Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:44.922922Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:44.923018Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:44.923031Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.923052Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:44.923564Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:44.923585Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:44.923592Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:44.923595Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:44.926606Z :ReadSession INFO: Random seed for debugging is 1732180784926604 2024-11-21T09:19:44.999846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659818306422019:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:45.000019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:45.005332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659826336106610:2263];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:45.005368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existen ... dget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:58.766581Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T09:19:58.766603Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: 2024-11-21T09:19:58.766636Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 grpc read done: success# 1, data# { read { } } Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T09:19:58.764000Z WriteTime: 2024-11-21T09:19:58.764000Z Ip: "ipv6:[::1]:52182" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:52182" } } } 2024-11-21T09:19:58.766667Z :DEBUG: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] [dc1] Commit offsets [2, 3). Partition stream id: 1 2024-11-21T09:19:58.766663Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 got read request: guid# 85588936-bbeca060-e558005d-9cc9933b 2024-11-21T09:19:58.766737Z :DEBUG: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T09:19:58.766818Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T09:19:58.766865Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T09:19:58.766873Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T09:19:58.766876Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T09:19:58.766972Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:19:58.766983Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:19:58.767027Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_6864456160278481499_v1 2024-11-21T09:19:58.767052Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:19:58.767637Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:19:58.767658Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:19:58.767659Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T09:19:58.767742Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T09:19:58.767761Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T09:19:58.767771Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T09:19:58.767966Z :DEBUG: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T09:19:58.864628Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0] Write session will now close 2024-11-21T09:19:58.864656Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0] Write session: aborting 2024-11-21T09:19:58.864928Z :INFO: [] MessageGroupId [test-message-group-id] 
SessionId [test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:19:58.864946Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0] Write session: destroy 2024-11-21T09:19:58.865073Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0 grpc read done: success: 0 data: 2024-11-21T09:19:58.865089Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0 grpc read failed 2024-11-21T09:19:58.865097Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0 grpc closed 2024-11-21T09:19:58.865104Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|d08ec842-9faa9e56-4401ad4c-fed63dea_0 is DEAD 2024-11-21T09:19:58.865395Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:19:58.865604Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:19:58.865623Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659878435967043:2605] destroyed 2024-11-21T09:19:58.865655Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:20:00.045311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:20:00.045327Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:01.540423Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:20:01.543423Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T09:20:06.540646Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:20:08.766730Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T09:20:08.865278Z :INFO: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:08.865307Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T09:20:08.865323Z :INFO: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16327 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:08.865359Z :NOTICE: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:20:08.865371Z :DEBUG: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] [dc1] Abort session to cluster 2024-11-21T09:20:08.865676Z :NOTICE: [/Root] [/Root] [cf0fa103-17b6ee0e-e40db951-171a6655] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:20:08.866015Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 grpc read done: success# 0, data# { } 2024-11-21T09:20:08.866033Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 grpc read failed 2024-11-21T09:20:08.866041Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 grpc closed 2024-11-21T09:20:08.866053Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6864456160278481499_v1 is DEAD 2024-11-21T09:20:08.866453Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:08.866471Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_6864456160278481499_v1 2024-11-21T09:20:08.866481Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659852666162732:2505] destroyed 2024-11-21T09:20:08.866496Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_6864456160278481499_v1 2024-11-21T09:20:08.866599Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659852666162729:2502] disconnected; active server actors: 1 2024-11-21T09:20:08.866621Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659852666162729:2502] client user disconnected session shared/user_1_1_6864456160278481499_v1 2024-11-21T09:20:09.336722Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:09.336731Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:09.336736Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:09.336831Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:09.336955Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:09.337026Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:09.337076Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:20:09.337317Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:09.337347Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:09.337411Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T09:20:09.337431Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:09.337439Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:09.337444Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T09:20:09.337478Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:09.337488Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:03.587014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:03.587031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:03.587034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:03.587037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:03.587040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:03.587043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:03.587048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:03.587107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:03.594229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:03.594244Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:03.595737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:03.595801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:03.595828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:03.597560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:03.597614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:03.597677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.597836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:03.598250Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:03.598419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:03.598425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:03.598433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:03.598437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:03.598444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:03.598469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:03.599225Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:03.609236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:03.609287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.609327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:03.609359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:03.609364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.609793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.609809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:03.609841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.609847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:03.609850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:03.609854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:03.610080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T09:19:03.610085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:03.610088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:03.610296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.610301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.610305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.610310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.610667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:03.610974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:03.611004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:03.611119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:03.611135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:03.611139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.611171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:03.611175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:03.611194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:03.611202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:03.611477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:03.611482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:03.611505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T09:19:03.611508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:03.611560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:03.611564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:03.611571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:03.611573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.611577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:03.611580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:03.611583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:03.611585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:03.611592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:03.611595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:03.611597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... rTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:10.957703Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:10.957733Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 36us result status StatusSuccess 2024-11-21T09:20:10.957807Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:10.967991Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1090:2863] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:20:10.968012Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1036:2863] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T09:20:10.968033Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1090:2863] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180810956103 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732180810956103 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732180810956103 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:20:10.968633Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1090:2863] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T09:20:10.968646Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1036:2863] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> test.py::test[json-combination/unwrapped-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-Plan] [GOOD] >> test.py::test[json-combination/unwrapped-default.txt-Results] >> test.py::test[aggr_factory-some-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Analyze] >> test.py::test[binding-table_filter_binding-default.txt-Analyze] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Debug] >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> 
TPersQueueTest::WriteAfterAlter >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] >> TBlobStorageProxyTest::TestPartialGetBlock >> test.py::test[json-combination/unwrapped-default.txt-Results] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Analyze] >> TBlobStorageProxyTest::TestDoubleGroups >> test.py::test[produce-reduce_with_assume--Debug] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::TestWriteStat >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> test.py::test[aggregate-agg_full_table_list-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Debug] >> test.py::test[binding-table_filter_binding-default.txt-Debug] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] >> test.py::test[json-json_value/on_empty-default.txt-Analyze] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Debug] >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkTx::Interactive >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:14.669853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:14.669872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:14.669876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:14.669879Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:14.669896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:14.669898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:14.669904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:14.669971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:14.679567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:14.679584Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:14.681227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:14.681295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:14.681321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:14.683936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:14.683994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:14.685759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:14.686496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:14.687938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:14.689624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:14.689633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:14.689644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:14.689649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:14.689653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:14.689692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:14.690615Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:14.703020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T09:19:14.703819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.703881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:14.703915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:14.703920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.704507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:14.704526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:14.704559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.704567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:14.704570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:14.704573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:14.704871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.704889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:14.704892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:14.705135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.705141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.705145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:14.705150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:14.705609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:14.705895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:14.706913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:14.707064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:14.707083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:14.707088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:14.707148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:14.707152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:14.707175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:14.707183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:14.707453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:14.707458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:14.707485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:14.707489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:14.707549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:14.707553Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:14.707561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:14.707564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:14.707567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:14.707571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:14.707574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:14.707576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:14.707583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:14.707587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:14.707589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2024-11-21T09:20:12.003655Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:20:12.003668Z node 238 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 4], Generation: 2, ActorId:[238:623:2551], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:20:12.003686Z node 238 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2024-11-21T09:20:12.003690Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2024-11-21T09:20:12.003705Z node 238 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2024-11-21T09:20:12.003709Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [238:720:2621], at schemeshard: 72075186234409546, txId: 0, path id: 1 2024-11-21T09:20:12.003801Z node 238 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2024-11-21T09:20:12.004062Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1007:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:4 msg type: 268697640 2024-11-21T09:20:12.004081Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1007, partId: 0, tablet: 72075186233409546 2024-11-21T09:20:12.004139Z node 238 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 4 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 1007 2024-11-21T09:20:12.004151Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 1007, at schemeshard: 72057594046678944 2024-11-21T09:20:12.004155Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1007, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:12.004167Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1007:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1007 2024-11-21T09:20:12.004172Z node 238 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1007:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2024-11-21T09:20:12.004176Z node 238 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1007:0 138 -> 240 2024-11-21T09:20:12.004304Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 1007 2024-11-21T09:20:12.004314Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:20:12.004580Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1007:0, at schemeshard: 72057594046678944 2024-11-21T09:20:12.004601Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1007:0, at schemeshard: 72057594046678944 2024-11-21T09:20:12.004607Z node 238 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1007:0 ProgressState 2024-11-21T09:20:12.004617Z node 238 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1007:0 progress is 1/1 2024-11-21T09:20:12.004621Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2024-11-21T09:20:12.004626Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1007, ready parts: 1/1, is published: true 2024-11-21T09:20:12.004631Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2024-11-21T09:20:12.004636Z node 238 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1007:0 2024-11-21T09:20:12.004639Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1007:0 2024-11-21T09:20:12.004648Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1007 2024-11-21T09:20:12.005008Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: send EvNotifyTxCompletion 2024-11-21T09:20:12.005016Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1007 2024-11-21T09:20:12.005081Z node 238 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1007, at schemeshard: 72057594046678944 2024-11-21T09:20:12.005096Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: got EvNotifyTxCompletionResult 2024-11-21T09:20:12.005100Z node 238 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: satisfy waiter [238:857:2738] TestWaitNotification: OK eventTxId 1007 2024-11-21T09:20:12.005176Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:12.005197Z node 238 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 29us result status StatusSuccess 2024-11-21T09:20:12.005257Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:12.005313Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:12.005324Z node 238 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 12us result status StatusSuccess 2024-11-21T09:20:12.005357Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:12.005401Z node 238 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:4 2024-11-21T09:20:12.005447Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2024-11-21T09:20:12.005459Z node 238 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/ServerLess0" took 13us result status StatusSuccess 2024-11-21T09:20:12.005492Z node 238 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 
SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 4 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> test.py::test[aggregate-agg_full_table_list-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_assume--Plan] [GOOD] >> test.py::test[produce-reduce_with_assume--Results] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 12057, MsgBus: 13493 2024-11-21T09:20:01.072182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659895089074598:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.072410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002735/r3tmp/tmpt6XMIn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12057, node 1 2024-11-21T09:20:01.125295Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:01.127168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:01.127178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:01.127179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:01.127208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13493 TClient is connected to server localhost:13493 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:01.173302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:01.173325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:01.174397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:01.203875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:01.314932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659895089075210:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:01.314961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659895089075221:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:01.314968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:01.315535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:01.316901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659895089075224:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:01.422261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:01.433578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:01.433584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:01.433611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:01.433611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:01.433663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:01.433667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:01.433686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:01.433687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:01.433713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:01.433717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:01.433731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:01.433731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:01.433742Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:01.433745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:01.433762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:01.433762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:01.433784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:01.433784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:01.433809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:01.433809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:01.433826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:01.433827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:01.433838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659895089075431:2316];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:01.433841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659895089075442:2317];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:01.435798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439659895089075450:2319];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:01.435818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7439659895089075450:2319];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:01.435840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439659895089075450:2319];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:01.435854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439659895089075450:2319];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstr ... ed; 2024-11-21T09:20:08.054916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.054989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055148Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055240Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055334Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038082;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.055383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080177Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.080270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.081124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.158564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037891;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E32 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665},{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665},{"inc":{"count_not_include":1},"id":281474976715666}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665},{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_include":1},"id":281474976715666}],"finishes":[{"inc":{"count_include":1},"id":281474976715666}]},"p":{"include":0,"pk":"11;Session2;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665},{"inc":{"count_not_include":1},"id":281474976715666}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2024-11-21T09:20:08.164595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164641Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164667Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;commit_tx_id=281474976715668;commit_lock_id=281474976715666;fline=manager.cpp:89;broken_lock_id=281474976715665; 2024-11-21T09:20:08.164727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164970Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.164993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.165009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.166094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:08.174404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E31 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"11;Session1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2024-11-21T09:20:08.174677Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7439659920927088574:2316];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2024-11-21T09:20:08.174782Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659925222064103:3200], SessionActorId: [2:7439659925222063650:3200], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[2:7439659925222064103:3200].{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T09:20:08.174802Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659925222064103:3200], SessionActorId: [2:7439659925222063650:3200], Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7439659925222063650:3200]. isRollback=0 2024-11-21T09:20:08.174814Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhhYTFjNmUtYzY5OTQyMzYtNDgzODZlZGQtMTM4ZjI1ODE=, ActorId: [2:7439659925222063650:3200], ActorState: ExecuteState, TraceId: 01jd70b4f57hh3rfday94b7cz7, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439659925222064122:3200] from: [2:7439659925222064103:3200] 2024-11-21T09:20:08.174833Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659925222064122:3200] TxId: 281474976715670. Ctx: { TraceId: 01jd70b4f57hh3rfday94b7cz7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzhhYTFjNmUtYzY5OTQyMzYtNDgzODZlZGQtMTM4ZjI1ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T09:20:08.174889Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzhhYTFjNmUtYzY5OTQyMzYtNDgzODZlZGQtMTM4ZjI1ODE=, ActorId: [2:7439659925222063650:3200], ActorState: ExecuteState, TraceId: 01jd70b4f57hh3rfday94b7cz7, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:12.250793Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659920927087751:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:12.250833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[json-json_value/on_empty-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-ForceBlocks] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs |95.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> test.py::test[produce-reduce_with_assume--Results] [GOOD] >> test.py::test[produce-reduce_with_python_presort-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort-default.txt-Results] [SKIPPED] >> test.py::test[sampling-bind_join_left-default.txt-Analyze] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> test.py::test[blocks-add_uint32--Analyze] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> test.py::test[json-json_value/on_empty-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Plan] |95.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[json-json_value/on_empty-default.txt-Plan] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Results] >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Analyze] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Debug] >> test.py::test[json-json_value/on_empty-default.txt-Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Analyze] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |95.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[aggregate-agg_full_table_list-default.txt-Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T09:19:50.883097Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732180790883084 2024-11-21T09:19:50.987597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659847858193992:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:50.987755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:51.032130Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004226/r3tmp/tmpKk2HZo/pdisk_1.dat 2024-11-21T09:19:51.039647Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:51.042532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:51.072993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10975, node 1 2024-11-21T09:19:51.088044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:51.088065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:51.089470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:51.114563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004226/r3tmp/yandextNfxa4.tmp 2024-11-21T09:19:51.114580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004226/r3tmp/yandextNfxa4.tmp 2024-11-21T09:19:51.114646Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004226/r3tmp/yandextNfxa4.tmp 2024-11-21T09:19:51.114692Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration 2024-11-21T09:19:51.136373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:51.136394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:51.137324Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:51.137573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:51.150542Z INFO: TTestServer started on Port 30618 GrpcPort 10975 TClient is connected to server localhost:30618 PQClient connected to localhost:10975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:51.173415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T09:19:51.275751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659852153162193:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:51.275779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:51.275845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659852153162205:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:51.276446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T09:19:51.276737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659852153162237:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:51.276762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:51.279594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659852153162207:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T09:19:51.363640Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659851334182390:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:51.363642Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659852153162309:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:51.363729Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGE2M2Y1MjItM2I1NjVjMDYtOTgxZDk2NS05NzcyNmFiNQ==, ActorId: [2:7439659851334182350:2277], ActorState: ExecuteState, TraceId: 01jd70akzge5bfr3syne8kerwq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:51.365242Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:51.365516Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgxODM2ZmUtNzYzYmY1YzEtOWFkOTdlZi1kMDEwNjViOA==, ActorId: [1:7439659852153162190:2299], ActorState: ExecuteState, TraceId: 01jd70akz9de2hxr6pw6mfgnmt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:51.365694Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:51.383426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:51.451792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:51.518384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10975", true, true, 1000); 2024-11-21T09:19:51.591554Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70am7bcqhj1g4vbxg13qeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVjYmY4MzQtYjE2M2QxNDktNTNjMDUxOWYtMzM3OGVmY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439659852153162683:2941] 2024-11-21T09:19:55.987813Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659847858193992:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:55.987847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:19:57.545846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:10975 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T09:19:57.561379Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:10975 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadR ... quest 2024-11-21T09:20:14.494187Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:13619 2024-11-21T09:20:14.494530Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T09:20:14.494658Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T09:20:14.494671Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T09:20:14.494758Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T09:20:14.494781Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:59600 2024-11-21T09:20:14.494786Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:59600 proto=v1 topic=test-topic durationSec=0 2024-11-21T09:20:14.494788Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:20:14.495082Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T09:20:14.495106Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T09:20:14.495112Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, 
$CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:20:14.495113Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:20:14.495117Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:20:14.495521Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:20:14.510185Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T09:20:14.510229Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439659946986453301:2481] connected; active server actors: 1 2024-11-21T09:20:14.510236Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T09:20:14.510239Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T09:20:14.510282Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439659946986453301:2481] disconnected; active server actors: 1 2024-11-21T09:20:14.510287Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439659946986453301:2481] disconnected no session 2024-11-21T09:20:14.522313Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:20:14.522329Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:20:14.522333Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439659946986453271:2481] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T09:20:14.522341Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:20:14.522547Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:14.522578Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T09:20:14.522566Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439659946986453321:2481], now have 1 active actors on pipe 2024-11-21T09:20:14.522635Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:14.522649Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:14.522677Z node 6 :PERSQUEUE INFO: new Cookie src|838f3525-89e6a242-4df098aa-a9c73f50_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T09:20:14.522716Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T09:20:14.522741Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:14.522889Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:14.522900Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:14.522916Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:14.522987Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|838f3525-89e6a242-4df098aa-a9c73f50_0 2024-11-21T09:20:14.523303Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732180814523 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:14.523332Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|838f3525-89e6a242-4df098aa-a9c73f50_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T09:20:14.523431Z :INFO: [] MessageGroupId [src] SessionId [src|838f3525-89e6a242-4df098aa-a9c73f50_0] Write session: close. 
Timeout = 0 ms 2024-11-21T09:20:14.523441Z :INFO: [] MessageGroupId [src] SessionId [src|838f3525-89e6a242-4df098aa-a9c73f50_0] Write session will now close 2024-11-21T09:20:14.523449Z :DEBUG: [] MessageGroupId [src] SessionId [src|838f3525-89e6a242-4df098aa-a9c73f50_0] Write session: aborting 2024-11-21T09:20:14.523610Z :INFO: [] MessageGroupId [src] SessionId [src|838f3525-89e6a242-4df098aa-a9c73f50_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:14.523628Z :DEBUG: [] MessageGroupId [src] SessionId [src|838f3525-89e6a242-4df098aa-a9c73f50_0] Write session: destroy 2024-11-21T09:20:14.523810Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|838f3525-89e6a242-4df098aa-a9c73f50_0 grpc read done: success: 0 data: 2024-11-21T09:20:14.523822Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|838f3525-89e6a242-4df098aa-a9c73f50_0 grpc read failed 2024-11-21T09:20:14.523826Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|838f3525-89e6a242-4df098aa-a9c73f50_0 grpc closed 2024-11-21T09:20:14.523831Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|838f3525-89e6a242-4df098aa-a9c73f50_0 is DEAD 2024-11-21T09:20:14.524055Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:14.524153Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:14.524168Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439659946986453321:2481] destroyed 2024-11-21T09:20:14.524177Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:20:14.529808Z :INFO: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Starting read session 2024-11-21T09:20:14.529823Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Starting session to cluster null (localhost:13619) 2024-11-21T09:20:14.530066Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:14.530071Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:14.530077Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T09:20:14.530135Z :ERROR: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T09:20:14.530152Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:14.530155Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:14.530167Z :INFO: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T09:20:14.530202Z :NOTICE: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:20:14.530207Z :DEBUG: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T09:20:14.530215Z :INFO: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:14.530219Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T09:20:14.530224Z :INFO: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:14.530232Z :NOTICE: [/Root] [/Root] [9baa3c57-11b46fed-54ac4af7-541422b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> test.py::test[blocks-add_uint32--Analyze] [GOOD] >> test.py::test[blocks-add_uint32--Debug] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TBlobStorageProxyTest::TestBlock >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSinkTx::ExplicitTcl >> TBlobStorageProxyTest::TestVPutVGet >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Analyze] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Debug] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 11599, MsgBus: 11107 2024-11-21T09:20:01.862862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659891269513710:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:01.863014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002706/r3tmp/tmprujYgW/pdisk_1.dat 2024-11-21T09:20:01.903515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11599, node 1 2024-11-21T09:20:01.915713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:01.915726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:01.915728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:01.915761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11107 TClient is connected to server localhost:11107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:01.963644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:01.963671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:01.964803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:01.988640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:02.089153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659895564481591:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.089173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.089215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659895564481618:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:02.089747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:02.090942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659895564481620:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:02.169025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.226806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.306353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:02.565173Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2024-11-21T09:20:02.566278Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659895564489310:2931], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7439659895564489242:2931]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[1:7439659895564489310:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:02.567680Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659895564489303:2931], SessionActorId: [1:7439659895564489242:2931], Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439659895564489242:2931]. isRollback=0 2024-11-21T09:20:02.567716Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNkMDljY2YtMzY5ZGM5YjMtZmQwNTU5OWQtNGI2YjcyNjg=, ActorId: [1:7439659895564489242:2931], ActorState: ExecuteState, TraceId: 01jd70ayzx0f8hyrmkvahxcjbn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439659895564489304:2931] from: [1:7439659895564489303:2931] 2024-11-21T09:20:02.567771Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659895564489304:2931] TxId: 281474976715666. Ctx: { TraceId: 01jd70ayzx0f8hyrmkvahxcjbn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNkMDljY2YtMzY5ZGM5YjMtZmQwNTU5OWQtNGI2YjcyNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:02.568624Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNkMDljY2YtMzY5ZGM5YjMtZmQwNTU5OWQtNGI2YjcyNjg=, ActorId: [1:7439659895564489242:2931], ActorState: ExecuteState, TraceId: 01jd70ayzx0f8hyrmkvahxcjbn, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:06.863340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659891269513710:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:06.863370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20293, MsgBus: 22139 2024-11-21T09:20:07.815313Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659918198130479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.815519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002706/r3tmp/tmpsVWsvD/pdisk_1.dat 2024-11-21T09:20:07.823092Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20293, node 2 2024-11-21T09:20:07.832517Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:07.832531Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:07.832532Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:07.832571Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22139 TClient is connected to server localhost:22139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:07.917176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:07.917215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:07.917550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:07.918185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:08.043585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659922493098384:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.043599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659922493098376:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.043654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.044105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:08.045243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659922493098390:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:08.129957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.135134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.327880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.473182Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2024-11-21T09:20:10.474063Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659931083041585:2931], SessionActorId: [2:7439659922493105911:2931], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[2:7439659931083041585:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:10.474078Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659931083041585:2931], SessionActorId: [2:7439659922493105911:2931], Transaction locks invalidated.{
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7439659922493105911:2931]. isRollback=0 2024-11-21T09:20:10.474092Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q1YTIzNDktNWFlMzViMjktYzYxODFlMmYtYzM0ZWNmNjY=, ActorId: [2:7439659922493105911:2931], ActorState: ExecuteState, TraceId: 01jd70b6prcrg535z411hq7vg2, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439659931083041586:2931] from: [2:7439659931083041585:2931] 2024-11-21T09:20:10.474147Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659931083041586:2931] TxId: 281474976715666. Ctx: { TraceId: 01jd70b6prcrg535z411hq7vg2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q1YTIzNDktNWFlMzViMjktYzYxODFlMmYtYzM0ZWNmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T09:20:10.474236Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Q1YTIzNDktNWFlMzViMjktYzYxODFlMmYtYzM0ZWNmNjY=, ActorId: [2:7439659922493105911:2931], ActorState: ExecuteState, TraceId: 01jd70b6prcrg535z411hq7vg2, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2024-11-21T09:20:10.475032Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715666; 2024-11-21T09:20:10.475052Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1732180810521 : 281474976715666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:12.815841Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659918198130479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:12.815881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestSingleFailureMirror >> test.py::test[sampling-bind_join_left-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] >> test.py::test[blocks-add_uint32--Debug] [GOOD] >> test.py::test[blocks-add_uint32--ForceBlocks] >> TBlobStorageProxyTest::TestCollectGarbagePersistence |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Debug] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 18054, MsgBus: 27580 2024-11-21T09:20:04.244763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659907115750977:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:04.244866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/002659/r3tmp/tmpqlyxXh/pdisk_1.dat 2024-11-21T09:20:04.280925Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18054, node 1 2024-11-21T09:20:04.292490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:04.292505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:04.292506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:04.292540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27580 TClient is connected to server localhost:27580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:04.345871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:04.345895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:04.346955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:04.366311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:04.475450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659907115751599:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.475473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659907115751588:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.475526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:04.476132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:04.477495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659907115751602:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:04.557544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:04.575117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:04.575117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:04.575141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:04.575141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:04.575173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:04.575177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:04.575196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:04.575205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:04.575209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:04.575221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:04.575225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:04.575237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:04.575243Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:04.575251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:04.575286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:04.575287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:04.575298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:04.575327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:04.575341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:04.575353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:04.575360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439659907115751818:2319];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:04.575369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:04.575385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:04.575406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659907115751817:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:04.575695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:20:04.575707Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:20:04.575715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:20:04.575721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:20:04.575723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;eve ... d=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392393Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038031;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.392498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415691Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.415793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.416521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.447957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=7;fline=restore.cpp:23;event=merge_data_problems;write_id=2;tablet_id=7;message=Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]}; 2024-11-21T09:20:11.448009Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037993;self_id=[2:7439659935815818456:2368];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037993;event=TEvWriteBlobsResult;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]};tx_id=281474976715665; 2024-11-21T09:20:11.448044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439659935815818456:2368];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037993;event=TEvWriteBlobsResult;tablet_id=72075186224037993;local_tx_no=11;tx_info=TTxWrite;tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:11.448081Z node 2 :TX_COLUMNSHARD_SCAN WARN: Scan [2:7439659935815825453:3211] got AbortExecution txId: 281474976715665 scanId: 1 gen: 1 tablet: 72075186224037993 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2024-11-21T09:20:11.448130Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659935815825449:3208], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [0:0:0]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037993, Sink=[2:7439659935815825449:3208].{
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2024-11-21T09:20:11.448145Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659935815825446:3208], TxId: 281474976715665, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjliYjEyMjctZTZkNGNhYzQtODEwZjMzZC02ZWM4ZTYxMw==. CustomerSuppliedId : . TraceId : 01jd70b7mte77d3223nwcah7n7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Sink[0] fatal error: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } } 2024-11-21T09:20:11.448155Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659935815825446:3208], TxId: 281474976715665, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjliYjEyMjctZTZkNGNhYzQtODEwZjMzZC02ZWM4ZTYxMw==. CustomerSuppliedId : . TraceId : 01jd70b7mte77d3223nwcah7n7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST DEFAULT_ERROR: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } }. 2024-11-21T09:20:11.448420Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjliYjEyMjctZTZkNGNhYzQtODEwZjMzZC02ZWM4ZTYxMw==, ActorId: [2:7439659935815825423:3200], ActorState: ExecuteState, TraceId: 01jd70b7mte77d3223nwcah7n7, Create QueryResponse for error on request, msg:
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} }
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} 2024-11-21T09:20:11.454251Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjliYjEyMjctZTZkNGNhYzQtODEwZjMzZC02ZWM4ZTYxMw==, ActorId: [2:7439659935815825423:3200], ActorState: ExecuteState, TraceId: 01jd70b7ns74s2zx0efasjg75w, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jd70b7mtcx871dcp3yz6g863, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:15.671227Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659931520849622:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:15.671271Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> test.py::test[blocks-add_uint32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint32--Plan] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T09:19:50.882015Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.882019Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.882021Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.882105Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.883986Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.884020Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.884073Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.884150Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.884181Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.884254Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.884268Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:50.884409Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.884412Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.884414Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.884455Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:50.884868Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.884899Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.884935Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.884992Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885018Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.885045Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.885049Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:50.885205Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885207Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885209Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.885284Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.885433Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.885485Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885532Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.885781Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.885822Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.885849Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.885855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:50.886014Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.886016Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.886018Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.886058Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.886149Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.886168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.886187Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.888594Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890154Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.890190Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T09:19:50.890196Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:50.890394Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890396Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890398Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.890448Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.890549Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.890569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890604Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.890638Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890664Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.890684Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.890688Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:50.890760Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890762Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890763Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.890815Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.890923Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.890951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.890969Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.891001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891020Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.891037Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.891041Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:50.891152Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891154Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891155Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.891197Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:19:50.891296Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.891323Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891345Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.891458Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891485Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.891503Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.891506Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T09:19:50.891638Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891640Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891642Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:19:50.891675Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:19:50.891748Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:19:50.891771Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.891789Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:19:50.892322Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:19:50.892345Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:19:50.892353Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:19:50.892356Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T09:19:50.896769Z :ReadSession INFO: Random seed for debugging is 1732180790896765 2024-11-21T09:19:50.987749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659847701060704:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:50.987773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:50.991582Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659844654958453:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:50.991727Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existen ... eadSizeServerDelta = 0 2024-11-21T09:20:04.847093Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T09:20:04.847112Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". 
Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T09:20:04.844000Z WriteTime: 2024-11-21T09:20:04.844000Z Ip: "ipv6:[::1]:45048" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:45048" } } } 2024-11-21T09:20:04.847187Z :DEBUG: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] [dc1] Commit offsets [2, 3). Partition stream id: 1 2024-11-21T09:20:04.847182Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T09:20:04.847212Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 got read request: guid# ce5850ac-4987c616-a77ca66-4acfc334 2024-11-21T09:20:04.847283Z :DEBUG: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:04.847351Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T09:20:04.847411Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T09:20:04.847419Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T09:20:04.847425Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T09:20:04.847539Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:04.847553Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:04.847587Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_15478784578920269134_v1 2024-11-21T09:20:04.847618Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T09:20:04.848420Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:20:04.848437Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T09:20:04.848439Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:20:04.848519Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T09:20:04.848536Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T09:20:04.848546Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T09:20:04.848674Z :DEBUG: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T09:20:04.944420Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0] Write session will now close 2024-11-21T09:20:04.944446Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0] Write session: aborting 2024-11-21T09:20:04.944650Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:04.944660Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0] Write session: destroy 2024-11-21T09:20:04.944863Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0 grpc read done: success: 0 data: 2024-11-21T09:20:04.944879Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0 grpc read failed 2024-11-21T09:20:04.944885Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0 grpc closed 2024-11-21T09:20:04.944891Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|947a7604-c32df99b-7bf2e6f9-8ce62765_0 is DEAD 2024-11-21T09:20:04.945187Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:04.945364Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:04.945393Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659907830605530:2605] destroyed 2024-11-21T09:20:04.945411Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T09:20:06.072050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:20:06.072064Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:07.620449Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:20:07.620888Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T09:20:12.620606Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:20:14.846767Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T09:20:14.944890Z :INFO: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:14.944931Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T09:20:14.944940Z :INFO: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16330 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:14.944968Z :NOTICE: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T09:20:14.944980Z :DEBUG: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] [dc1] Abort session to cluster 2024-11-21T09:20:14.945297Z :NOTICE: [/Root] [/Root] [467fc77b-61adcbe6-2d5fe035-a6c289ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:20:14.945552Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 grpc read done: success# 0, data# { } 2024-11-21T09:20:14.945571Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 grpc read failed 2024-11-21T09:20:14.945578Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 grpc closed 2024-11-21T09:20:14.945597Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15478784578920269134_v1 is DEAD 2024-11-21T09:20:14.945829Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:14.945850Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_15478784578920269134_v1 2024-11-21T09:20:14.945903Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439659882060801225:2505] destroyed 2024-11-21T09:20:14.945923Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_15478784578920269134_v1 2024-11-21T09:20:14.945989Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659882060801222:2502] disconnected; active server actors: 1 2024-11-21T09:20:14.945999Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439659882060801222:2502] client user disconnected session shared/user_1_1_15478784578920269134_v1 2024-11-21T09:20:15.350243Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:15.350250Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:15.350254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:15.350338Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:15.350442Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:15.350505Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:15.350567Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:20:15.350945Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:15.350977Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:15.351032Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T09:20:15.351049Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:15.351059Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:15.351063Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T09:20:15.351103Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:15.351115Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T09:19:44.804108Z :ReadSession INFO: Random seed for debugging is 1732180784804104 2024-11-21T09:19:44.898813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659821373900000:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:44.898837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:44.903192Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659818169480656:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:44.903314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042d3/r3tmp/tmpWTMiwH/pdisk_1.dat 2024-11-21T09:19:44.930009Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:44.931350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:44.963013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6808, node 1 2024-11-21T09:19:45.001311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:45.001340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:45.002932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:45.013109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0042d3/r3tmp/yandexvrxHQF.tmp 2024-11-21T09:19:45.013121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0042d3/r3tmp/yandexvrxHQF.tmp 2024-11-21T09:19:45.013179Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0042d3/r3tmp/yandexvrxHQF.tmp 2024-11-21T09:19:45.013237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:45.029736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:45.029768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:45.031361Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:45.031606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:45.055067Z INFO: TTestServer started on Port 5721 GrpcPort 6808 TClient is connected to server localhost:5721 PQClient connected to localhost:6808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:45.076965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T09:19:45.148356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659825668868013:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.148363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659822464448239:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.148381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.148446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.148481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659822464448251:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.148515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659825668868026:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.150422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T09:19:45.151458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659825668868072:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.151471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:45.153060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659825668868028:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T09:19:45.153069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659822464448253:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T09:19:45.267144Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659825668868130:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:45.267144Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659822464448290:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:45.267250Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmE0ZWY4ZjgtYjIxMmE4YjEtNTQ3YWI1MjAtMjFjNWNhODI=, ActorId: [2:7439659822464448237:2277], ActorState: ExecuteState, TraceId: 01jd70adzv8d2dgsa7kgfj420b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:45.268537Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:45.268671Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2M3Mzc2ZC0yNWQzYTIyMi03MjA2ODM5LTVmNTJlNzRm, ActorId: [1:7439659825668868011:2299], ActorState: ExecuteState, TraceId: 01jd70adzvf7pjx1wmytc0m0tt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:45.268834Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:45.279637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:45.348701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:45.412252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:6808", true, true, 1000); 2024-11-21T09:19:45.501087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd70ae8f9jkdjatrrqpgk8x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhNTI1NWUtODhhNTg2YzQtYTdlMjRlYTktM2QzNmYxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439659825668868499:2929] 2024-11-21T09:19:49.898950Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659821373900000:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:49.898980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024- ... n cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2024-11-21T09:20:17.013473Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 941eb9a7-c8ffee74-3983fd04-ab5ff05a has messages 1 2024-11-21T09:20:17.013543Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 read done: guid# 941eb9a7-c8ffee74-3983fd04-ab5ff05a, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2024-11-21T09:20:17.013565Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 response to read: guid# 941eb9a7-c8ffee74-3983fd04-ab5ff05a 2024-11-21T09:20:17.013686Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 Process answer. Aval parts: 0 2024-11-21T09:20:17.013897Z :DEBUG: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.013932Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T09:20:17.013979Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T09:20:17.013983Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 got read request: guid# cc717429-e8030b14-93c3c194-81bfea43 2024-11-21T09:20:17.014034Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T09:20:17.014048Z :DEBUG: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T09:20:15.908000Z WriteTime: 2024-11-21T09:20:15.909000Z Ip: "ipv6:[::1]:40076" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:40076" } } } } 2024-11-21T09:20:17.014085Z :INFO: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] Closing read session. 
Close timeout: 3.000000s 2024-11-21T09:20:17.014093Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T09:20:17.014100Z :INFO: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1232 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:17.014287Z :INFO: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:17.014294Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T09:20:17.014298Z :INFO: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1232 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:17.014331Z :NOTICE: [/Root] [/Root] [bb74b259-96f8ff3-5b8b53cd-1a4ba75e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:20:17.014361Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 grpc read done: success# 0, data# { } 2024-11-21T09:20:17.014370Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 grpc read failed 2024-11-21T09:20:17.014374Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 grpc closed 2024-11-21T09:20:17.014388Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9895977601420046281_v1 is DEAD 2024-11-21T09:20:17.014539Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:17.014553Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_9895977601420046281_v1 2024-11-21T09:20:17.014567Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439659951647136940:2497] destroyed 2024-11-21T09:20:17.014584Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_9895977601420046281_v1 2024-11-21T09:20:17.014634Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439659951647136937:2494] disconnected; active server actors: 1 2024-11-21T09:20:17.014648Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439659951647136937:2494] client user disconnected session shared/user_7_1_9895977601420046281_v1 2024-11-21T09:20:17.377358Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.377367Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.377371Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:17.377441Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:17.377551Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:17.377611Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.377672Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T09:20:17.377909Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.377912Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.377915Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:17.377980Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:17.378087Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:17.378155Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.378192Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T09:20:17.378368Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:17.378567Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T09:20:17.378584Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T09:20:17.378621Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:17.378628Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:20:17.378632Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T09:20:17.378640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T09:20:17.384806Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.384810Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.384813Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:17.384884Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:17.385015Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:17.385068Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.385133Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:20:17.385304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.385357Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:17.385392Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:17.385400Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:17.385410Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2024-11-21T09:20:17.388188Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.388193Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.388196Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:17.388268Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:17.388374Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:17.388421Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.388554Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:17.388587Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:17.388601Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:17.388616Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-add_uint8--Analyze] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2024-11-21T09:20:11.805853Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004238/r3tmp/tmp4k2zlc//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:11.805991Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004238/r3tmp/tmp4k2zlc//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T09:20:11.807161Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 
LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:11.807250Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:16.757419Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004238/r3tmp/tmpn4OB9e//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:16.757567Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004238/r3tmp/tmpn4OB9e//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T09:20:16.757924Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:16.757967Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Plan] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> TBlobStorageProxyTest::TestNormal |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] [GOOD] >> test.py::test[sampling-read-dynamic-Analyze] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2024-11-21T09:20:17.143178Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004220/r3tmp/tmpV2TsR4//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:17.143298Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004220/r3tmp/tmpV2TsR4//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T09:20:17.144388Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:17.144440Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> test.py::test[blocks-add_uint8--Analyze] [GOOD] >> test.py::test[blocks-add_uint8--Debug] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> KqpSinkTx::Interactive [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Analyze] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 20690, MsgBus: 2464 2024-11-21T09:20:07.363681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659918487507003:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:07.363696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00258a/r3tmp/tmpvSDxlE/pdisk_1.dat 2024-11-21T09:20:07.408969Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20690, node 1 2024-11-21T09:20:07.413546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:07.413558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:07.413560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:07.413597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2464 TClient is connected to server localhost:2464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:07.455313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:07.464757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:07.464786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:07.465812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:07.587299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659918487507618:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.587317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659918487507607:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.587357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:07.587901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:07.589575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659918487507621:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:07.660846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.720480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.799069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:07.930941Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Operation is aborting because an duplicate key;tx_id=3; 2024-11-21T09:20:07.931001Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2024-11-21T09:20:07.931033Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_BAD_REQUEST Issues: { message: "Operation is aborting because an duplicate key" } 2024-11-21T09:20:07.931100Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659918487515051:2931], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7439659918487515027:2931]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[1:7439659918487515051:2931].{
: Fatal: Operation is aborting because an duplicate key } 2024-11-21T09:20:07.931234Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659918487515044:2931], SessionActorId: [1:7439659918487515027:2931], Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key }. statusCode=BAD_REQUEST. subIssues=
: Fatal: Operation is aborting because an duplicate key . sessionActorId=[1:7439659918487515027:2931]. isRollback=0 2024-11-21T09:20:07.931267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZkMTJkMTItN2QwYTQ0MzItMjJkMDFhNzMtMzE0ZWM3MzU=, ActorId: [1:7439659918487515027:2931], ActorState: ExecuteState, TraceId: 01jd70b46v4fjfezj38vbhxazt, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7439659918487515045:2931] from: [1:7439659918487515044:2931] 2024-11-21T09:20:07.931330Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439659918487515045:2931] TxId: 281474976710664. Ctx: { TraceId: 01jd70b46v4fjfezj38vbhxazt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkMTJkMTItN2QwYTQ0MzItMjJkMDFhNzMtMzE0ZWM3MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key } 2024-11-21T09:20:07.932393Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZkMTJkMTItN2QwYTQ0MzItMjJkMDFhNzMtMzE0ZWM3MzU=, ActorId: [1:7439659918487515027:2931], ActorState: ExecuteState, TraceId: 01jd70b46v4fjfezj38vbhxazt, Create QueryResponse for error on request, msg:
: Error: Bad request. Table `/Root/KV`. {
: Fatal: Operation is aborting because an duplicate key };
: Fatal: Operation is aborting because an duplicate key 2024-11-21T09:20:07.939662Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZkMTJkMTItN2QwYTQ0MzItMjJkMDFhNzMtMzE0ZWM3MzU=, ActorId: [1:7439659918487515027:2931], ActorState: ExecuteState, TraceId: 01jd70b47x0b7bat2p5qybqcnz, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jd70b46v2zakqb7vc0sx7gwy, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:12.363929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659918487507003:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:12.363974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22829, MsgBus: 10831 2024-11-21T09:20:13.070318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659944133707127:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:13.070521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00258a/r3tmp/tmpIwqHBo/pdisk_1.dat 2024-11-21T09:20:13.076418Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22829, node 2 2024-11-21T09:20:13.085497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:13.085509Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:13.085510Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:13.085533Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10831 TClient is connected to server localhost:10831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:13.171923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:13.171957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:13.172260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:13.173023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:13.173133Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:13.294193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659944133707726:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:13.294212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659944133707738:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:13.294217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:13.294818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:13.296366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659944133707741:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:13.376284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:13.382451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:13.459482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:18.070656Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659944133707127:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:18.070710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[sampling-read-dynamic-Analyze] [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2024-11-21T09:20:16.293509Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00420a/r3tmp/tmprIpQ0b//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:16.296161Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2024-11-21T09:20:18.098228Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/004214/r3tmp/tmpS95FAY//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:18.098793Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> test.py::test[blocks-add_uint8--Debug] [GOOD] >> test.py::test[blocks-add_uint8--ForceBlocks] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestGetMultipart >> test.py::test[key_filter-string_with_ff-default.txt-Analyze] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Debug] >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestPersistence |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |95.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[sampling-read-dynamic-Analyze] [GOOD] >> test.py::test[blocks-add_uint8--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint8--Plan] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 23454, MsgBus: 64548 2024-11-21T09:20:08.321792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659925068690576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:08.321899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002562/r3tmp/tmpoQ2Bov/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23454, node 1 2024-11-21T09:20:08.367147Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:08.370699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:08.370710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:08.370711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:08.370738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64548 TClient is connected to server localhost:64548 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:08.412574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:08.422711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:08.422730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:08.423792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:08.542319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659925068691197:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.542336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659925068691172:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.542376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:08.542805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:08.544021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659925068691201:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:08.622335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:08.629188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:08.629229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:08.629253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:08.629268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:08.629282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:08.629296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:08.629310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:08.629325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:08.629344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:08.629359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:08.629373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:08.629388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659925068691402:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:08.629718Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:08.629740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:08.629778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:08.629799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:08.629818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:08.629838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:08.629856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:08.629875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:08.629894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:08.629914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:08.629933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:08.629953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439659925068691403:2313];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:08.631215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925068691404:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:08.631231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439659925068691404:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:08.631255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925068691404:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:08.631274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925068691404:2314];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstr ... 4-11-21T09:20:09.247880Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037977;self_id=[1:7439659925068692425:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247885Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037978;self_id=[1:7439659925068692428:2443];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247887Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037988;self_id=[1:7439659925068692453:2452];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247889Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037979;self_id=[1:7439659925068692358:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247891Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037989;self_id=[1:7439659925068692351:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247896Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037980;self_id=[1:7439659925068692231:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247896Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037990;self_id=[1:7439659925068692357:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247901Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037991;self_id=[1:7439659925068692222:2418];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247905Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037993;self_id=[1:7439659925068692221:2417];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2024-11-21T09:20:09.247907Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037981;self_id=[1:7439659925068692424:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247911Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037994;self_id=[1:7439659925068692216:2415];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247919Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659929363667453:3252], SessionActorId: [1:7439659929363666497:3252], Got BAD REQUEST for table. ShardID=72075186224037898, Sink=[1:7439659929363667453:3252].{
: Fatal: only single operation is supported } 2024-11-21T09:20:09.247920Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037995;self_id=[1:7439659925068692217:2416];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247924Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037996;self_id=[1:7439659925068692247:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.247925Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439659929363667453:3252], SessionActorId: [1:7439659929363666497:3252], Bad request. {
: Fatal: only single operation is supported }. statusCode=BAD_REQUEST. subIssues=
: Fatal: only single operation is supported . sessionActorId=[1:7439659929363666497:3252]. isRollback=0 2024-11-21T09:20:09.247933Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037997;self_id=[1:7439659925068692245:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2024-11-21T09:20:09.248006Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=NDE1M2QyNWMtYmE2NGFlNzEtM2EzYjNhNWItZmZlMzFiYzE=, ActorId: [1:7439659929363666497:3252], ActorState: ReadyState, got TEvKqpBuffer::TEvError in ReadyState, status: BAD_REQUEST send to: [0:0:0] from: [1:7439659929363667453:3252]: Old error. WAIT_INDEXATION: 0 2024-11-21T09:20:09.805280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[1:7439659925068692971:2514];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T09:20:09.807673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=ce0e893a-a7e911ef-98b37d29-85793013;fline=with_appended.cpp:80;portions=4,;task_id=ce0e893a-a7e911ef-98b37d29-85793013; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:13.321956Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659925068690576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:13.322018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63447, MsgBus: 28419 2024-11-21T09:20:14.549513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659948402618479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:14.549814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002562/r3tmp/tmpQbv4yK/pdisk_1.dat 2024-11-21T09:20:14.557657Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63447, node 2 2024-11-21T09:20:14.567995Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:14.568009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:14.568012Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:14.568071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28419 TClient is connected to server localhost:28419 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:14.649727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:14.649767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:14.650832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:14.652085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:14.786593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659948402619071:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:14.786613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659948402619082:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:14.786620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:14.787234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:14.788924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659948402619085:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:14.874456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:14.930674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:15.027791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:19.549879Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659948402618479:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:19.549931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> test.py::test[key_filter-string_with_ff-default.txt-Debug] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-coalesce_bools--Analyze] >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> TBlobStorageProxyTest::TestNormalMirror [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2024-11-21T09:20:15.054539Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T09:20:15.056477Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:16.107446Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T09:20:16.108043Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:17.170814Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T09:20:17.171207Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:18.236522Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 4 2024-11-21T09:20:18.236911Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in 
StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:19.310582Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 5 2024-11-21T09:20:19.310855Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:20.388306Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00422f/r3tmp/tmp7aqD6b//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 6 2024-11-21T09:20:20.388560Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] Test command err: Trying to start YDB, gRPC: 15831, MsgBus: 20003 2024-11-21T09:20:09.784161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659929542231033:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:09.784390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002532/r3tmp/tmpdbaEo9/pdisk_1.dat 2024-11-21T09:20:09.838024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15831, node 1 2024-11-21T09:20:09.843460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:09.843475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:09.843476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:09.843505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20003 TClient is connected to server localhost:20003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:09.884987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.885344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:09.885370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:20:09.886453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:10.025735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659933837198942:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.025751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659933837198933:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.025765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.026360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:10.027635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659933837198947:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:10.109231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.167229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.231668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:14.784082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659929542231033:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:14.784116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5180, MsgBus: 29840 2024-11-21T09:20:15.744849Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659952311850299:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:15.744885Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002532/r3tmp/tmpS1jJvR/pdisk_1.dat 2024-11-21T09:20:15.752707Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5180, node 2 2024-11-21T09:20:15.765336Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:15.765348Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:15.765350Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:15.765383Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29840 TClient is connected to server localhost:29840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:15.845642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:15.845668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:15.846752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:15.847450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:16.003030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659956606818174:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.003051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.003062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659956606818193:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.003749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:16.005440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659956606818203:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:16.108364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:16.115586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:16.201834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:20.745292Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659952311850299:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:20.745323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> KqpSinkTx::ExplicitTcl [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] >> test.py::test[blocks-coalesce_bools--Analyze] [GOOD] >> test.py::test[blocks-coalesce_bools--Debug] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::ExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 32043, MsgBus: 17820 2024-11-21T09:20:09.721667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659927987785632:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:09.721780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002535/r3tmp/tmpux4Mu9/pdisk_1.dat 2024-11-21T09:20:09.767857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32043, node 1 2024-11-21T09:20:09.774665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:09.774675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:09.774677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:09.774703Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17820 TClient is connected to server localhost:17820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:09.820857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.822505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:09.822527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:09.823676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:09.966200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659927987786255:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.966256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659927987786229:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.966261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.966782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:09.968177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659927987786258:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:10.067320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.126067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.318192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:14.721793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659927987785632:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:14.721833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14349, MsgBus: 11827 2024-11-21T09:20:15.900550Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659952976237401:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:15.900598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002535/r3tmp/tmp1c2Jfm/pdisk_1.dat 2024-11-21T09:20:15.909676Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14349, node 2 2024-11-21T09:20:15.920944Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:15.920958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:15.920960Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:15.921020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11827 TClient is connected to server localhost:11827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:16.000850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:16.000879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:16.001935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:16.003179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:16.152931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659957271205282:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.152961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659957271205308:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.152967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:16.153626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:16.155302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659957271205311:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:20:16.231934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:16.238739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:16.330871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:16.583858Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWI4NTg4YzktODMxNDU2ZmMtMjhmM2JjZDEtMzlmMWRkODY=, ActorId: [2:7439659957271212950:2931], ActorState: ReadyState, TraceId: 01jd70bcp6dqjtnrw5zr4xm1bb, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:20.901142Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659952976237401:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:20.901187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Plan] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TTabletResolver::NodeProblem >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> BootstrapperTest::KeepExistingTablet >> TTabletPipeTest::TestRewriteSameNode >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> test.py::test[blocks-coalesce_bools--Debug] [GOOD] >> test.py::test[blocks-coalesce_bools--ForceBlocks] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TPersQueueTest::WriteAfterAlter [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[library-library_via_http--Analyze] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] 
|95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TResourceBroker::TestRealUsage >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> TTabletPipeTest::TestSendAfterOpen >> TTabletResolver::NodeProblem [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TTabletPipeTest::TestRewriteSameNode [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 12173, MsgBus: 14196 2024-11-21T09:20:10.334980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659931292650343:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:10.334994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00251c/r3tmp/tmpJh3Peq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12173, node 1 2024-11-21T09:20:10.380611Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:10.384118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:10.384131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:10.384132Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:10.384163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14196 TClient is connected to server localhost:14196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:10.424993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:10.436189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:10.436237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:10.437270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:10.599709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659931292650941:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.599731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.599766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659931292650968:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:10.600344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:10.601982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659931292650970:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:10.687680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:10.704112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:10.704152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:10.704181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:10.704197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:10.704242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:10.704262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:10.704279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:10.704297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:10.704316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:10.704333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:10.704351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:10.704369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439659931292651187:2314];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:10.704407Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:10.704429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:10.704473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:10.704502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:10.704527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:10.704551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:10.704575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:10.704605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:10.704630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:10.704653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:10.704678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:10.704702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439659931292651173:2312];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:10.704731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:20:10.704745Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:20:10.704753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:20:10.704762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:20:10.704772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=ab ... 09:20:17.544655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.544975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545082Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545147Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038075;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545291Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545324Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545500Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545665Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545721Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038029;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545919Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.545977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.546222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.573942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.573948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.573983Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.573992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.574017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.574047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.574080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.574112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.574979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:17.650289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T09:20:18.202784Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439659958462615971:2453];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T09:20:18.204631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=d30fe4ba-a7e911ef-a2571948-af496295;fline=with_appended.cpp:80;portions=3,;task_id=d30fe4ba-a7e911ef-a2571948-af496295; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:21.550616Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659958462613632:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:21.550721Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2024-11-21T09:20:22.622534Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622594Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:201:2134] CurrentLeaderTablet: [1:202:2135] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.622598Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T09:20:22.622604Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T09:20:22.622624Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622639Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 
TabletID: 234 Cookie: 0 CurrentLeader: [1:207:2138] CurrentLeaderTablet: [1:208:2139] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.622642Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T09:20:22.622645Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2024-11-21T09:20:22.622733Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622736Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T09:20:22.622749Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622752Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2024-11-21T09:20:22.622765Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2024-11-21T09:20:22.622769Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:201:2134] by NodeId 2024-11-21T09:20:22.622773Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622788Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:217:2092] CurrentLeaderTablet: [2:218:2093] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.622790Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T09:20:22.622793Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T09:20:22.622810Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:207:2138] by NodeId 2024-11-21T09:20:22.622819Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622839Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.622844Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T09:20:22.622849Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T09:20:22.622960Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2024-11-21T09:20:22.622965Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 
123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622968Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T09:20:22.622981Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.622984Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T09:20:22.622998Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2024-11-21T09:20:22.623001Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:217:2092] by NodeId 2024-11-21T09:20:22.623004Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.623018Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:235:2092] CurrentLeaderTablet: [3:236:2093] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.623020Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T09:20:22.623023Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T09:20:22.623039Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.623042Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T09:20:22.623055Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2024-11-21T09:20:22.623059Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.623061Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T09:20:22.623076Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:223:2094] by NodeId 2024-11-21T09:20:22.623079Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T09:20:22.623092Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T09:20:22.623096Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T09:20:22.623098Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 
1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> TResourceBroker::TestRandomQueue [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> test.py::test[library-library_via_http--Analyze] [GOOD] >> test.py::test[library-library_via_http--Debug] >> TTabletPipeTest::TestPipeWithVersionInfo >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2024-11-21T09:20:23.196710Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:23.196770Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196776Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.196781Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:23.196785Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.196791Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:23.196797Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196799Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196802Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:23.196806Z 
node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196808Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.196812Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:23.196814Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.196817Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196819Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196821Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:23.196822Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.196825Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196827Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.196829Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:23.196831Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.196834Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-6 (6 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:23.196835Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196837Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:23.196839Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.196857Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:23.196861Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 800.000000 2024-11-21T09:20:23.196864Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-2 (2 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:23.196866Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196869Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:23.196873Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-4 (4 by [1:97:2132]) 2024-11-21T09:20:23.196875Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.196878Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [1:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:23.196881Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 800.000000 to 280.000000 (remove task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:23.196884Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 0.000000 to 280.000000 2024-11-21T09:20:23.196886Z node 1 :RESOURCE_BROKER DEBUG: Allocate 
resources {400, 400} for task task-4 (4 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:23.196888Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196890Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 280.000000 to 1054.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:23.196892Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:23.196894Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T09:20:23.196897Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:23.196911Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1054.000000 to 560.000000 (remove task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:23.196914Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 280.000000 to 560.000000 2024-11-21T09:20:23.196916Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-6 (6 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:23.196918Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.196921Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 560.000000 to 1308.000000 (insert task task-6 (6 by [1:97:2132])) 2024-11-21T09:20:23.196923Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:23.427685Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:23.427755Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=2 resources={193, 476} 2024-11-21T09:20:23.427760Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.427765Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {193, 476} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:23.427768Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.427774Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 952.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:23.427780Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [2:97:2132]) priority=1 resources={138, 267} 2024-11-21T09:20:23.427782Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:23.427785Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427788Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-3 (3 by [2:97:2132]) priority=4 resources={370, 326} 2024-11-21T09:20:23.427791Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.427794Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427796Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427799Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-4 (4 
by [2:97:2132]) priority=0 resources={23, 476} 2024-11-21T09:20:23.427801Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.427803Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427805Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427808Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [2:97:2132]) priority=2 resources={403, 125} 2024-11-21T09:20:23.427810Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:23.427812Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427814Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427816Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.427819Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-6 (6 by [2:97:2132]) priority=4 resources={335, 212} 2024-11-21T09:20:23.427821Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-6 (6 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.427824Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427825Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427827Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.427830Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-7 (7 by [2:97:2132]) priority=2 resources={442, 136} 2024-11-21T09:20:23.427832Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.427834Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427835Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427837Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.427843Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-8 (8 by [2:97:2132]) priority=1 resources={437, 61} 2024-11-21T09:20:23.427847Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-8 (8 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.427850Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427853Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427856Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:23.427860Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-9 (9 by [2:97:2132]) priority=2 resources={145, 65} 2024-11-21T09:20:23.427863Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-9 (9 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.427867Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:23.427869Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default blocked by an earlier queue 2024-11-21T09:20:23.427872Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by 
an earlier queue 2024-11-21T09:20:23.427877Z node 2 :RESOURC ... 7.426000 to 678189.247600 (insert task task-817 (817 by [2:97:2132])) 2024-11-21T09:20:23.575717Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-821 (821 by [2:97:2132]) 2024-11-21T09:20:23.575722Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-817 (817 by [2:97:2132]) (release resources {231, 453}) 2024-11-21T09:20:23.575727Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 678189.247600 to 677291.220400 (remove task task-817 (817 by [2:97:2132])) 2024-11-21T09:20:23.575731Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 675964.836400 to 677291.220400 2024-11-21T09:20:23.575734Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {62, 458} for task task-821 (821 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575738Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-821 (821 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575742Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 677291.220400 to 679471.300400 (insert task task-821 (821 by [2:97:2132])) 2024-11-21T09:20:23.575746Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-836 (836 by [2:97:2132]) 2024-11-21T09:20:23.575750Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-821 (821 by [2:97:2132]) (release resources {62, 458}) 2024-11-21T09:20:23.575755Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 677291.220400 to 680407.452400 2024-11-21T09:20:23.575759Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {57, 308} for task task-836 (836 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575762Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-836 (836 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575766Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 679471.300400 to 681885.606000 (insert task task-836 (836 by [2:97:2132])) 2024-11-21T09:20:23.575770Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-847 (847 by [2:97:2132]) 2024-11-21T09:20:23.575774Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-836 (836 by [2:97:2132]) (release resources {57, 308}) 2024-11-21T09:20:23.575779Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 680407.452400 to 682506.903600 2024-11-21T09:20:23.575782Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {127, 318} for task task-847 (847 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575786Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-847 (847 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575792Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 681885.606000 to 684060.778800 (insert task task-847 (847 by [2:97:2132])) 2024-11-21T09:20:23.575795Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-859 (859 by [2:97:2132]) 2024-11-21T09:20:23.575800Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-847 (847 by [2:97:2132]) (release resources {127, 318}) 2024-11-21T09:20:23.575805Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 684060.778800 to 683061.622800 (remove task task-847 (847 by [2:97:2132])) 2024-11-21T09:20:23.575809Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 
682506.903600 to 683061.622800 2024-11-21T09:20:23.575812Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {223, 414} for task task-859 (859 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575815Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-859 (859 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575819Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 683061.622800 to 684982.251600 (insert task task-859 (859 by [2:97:2132])) 2024-11-21T09:20:23.575823Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_default due to exceeded limits 2024-11-21T09:20:23.575828Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-859 (859 by [2:97:2132]) (release resources {223, 414}) 2024-11-21T09:20:23.575832Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 683061.622800 to 686292.810000 2024-11-21T09:20:23.575836Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {220, 14} for task task-873 (873 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575839Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-873 (873 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.575844Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 684982.251600 to 687322.410000 (insert task task-873 (873 by [2:97:2132])) 2024-11-21T09:20:23.575848Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-905 (905 by [2:97:2132]) 2024-11-21T09:20:23.575855Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-873 (873 by [2:97:2132]) (release resources {220, 14}) 2024-11-21T09:20:23.575860Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 687322.410000 to 686372.538000 (remove task task-873 (873 by [2:97:2132])) 2024-11-21T09:20:23.575864Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 686292.810000 to 686372.538000 2024-11-21T09:20:23.575867Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {365, 127} for task task-905 (905 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575871Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-905 (905 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.575875Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 686372.538000 to 687870.060000 (insert task task-905 (905 by [2:97:2132])) 2024-11-21T09:20:23.575878Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-907 (907 by [2:97:2132]) 2024-11-21T09:20:23.575883Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-905 (905 by [2:97:2132]) (release resources {365, 127}) 2024-11-21T09:20:23.575888Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 686372.538000 to 688418.144000 2024-11-21T09:20:23.575891Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {97, 416} for task task-907 (907 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575895Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-907 (907 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575899Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 687870.060000 to 690168.672000 (insert task task-907 (907 by [2:97:2132])) 2024-11-21T09:20:23.575904Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-919 (919 by [2:97:2132]) 2024-11-21T09:20:23.575909Z node 2 :RESOURCE_BROKER 
DEBUG: Finish task task-907 (907 by [2:97:2132]) (release resources {97, 416}) 2024-11-21T09:20:23.575913Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 688418.144000 to 690764.883200 2024-11-21T09:20:23.575917Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {463, 35} for task task-919 (919 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575920Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-919 (919 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.575924Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 690168.672000 to 692804.490800 (insert task task-919 (919 by [2:97:2132])) 2024-11-21T09:20:23.575928Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-930 (930 by [2:97:2132]) 2024-11-21T09:20:23.575932Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-919 (919 by [2:97:2132]) (release resources {463, 35}) 2024-11-21T09:20:23.575937Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 690764.883200 to 693084.328000 2024-11-21T09:20:23.575940Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {249, 390} for task task-930 (930 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575943Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-930 (930 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.575948Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 692804.490800 to 694758.520000 (insert task task-930 (930 by [2:97:2132])) 2024-11-21T09:20:23.575952Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-946 (946 by [2:97:2132]) 2024-11-21T09:20:23.575956Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-930 (930 by [2:97:2132]) (release resources {249, 390}) 2024-11-21T09:20:23.575961Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 694758.520000 to 694729.036000 (remove task task-930 (930 by [2:97:2132])) 2024-11-21T09:20:23.575965Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 693084.328000 to 694729.036000 2024-11-21T09:20:23.575968Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {212, 349} for task task-946 (946 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575972Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-946 (946 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.575976Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 694729.036000 to 696231.132000 (insert task task-946 (946 by [2:97:2132])) 2024-11-21T09:20:23.575980Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-987 (987 by [2:97:2132]) 2024-11-21T09:20:23.575985Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-946 (946 by [2:97:2132]) (release resources {212, 349}) 2024-11-21T09:20:23.575989Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 694729.036000 to 697463.381200 2024-11-21T09:20:23.575993Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {300, 264} for task task-987 (987 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.575996Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-987 (987 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T09:20:23.576000Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 696231.132000 to 
698808.461200 (insert task task-987 (987 by [2:97:2132])) 2024-11-21T09:20:23.576004Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-990 (990 by [2:97:2132]) 2024-11-21T09:20:23.576009Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-987 (987 by [2:97:2132]) (release resources {300, 264}) 2024-11-21T09:20:23.576014Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 698808.461200 to 698392.181200 (remove task task-987 (987 by [2:97:2132])) 2024-11-21T09:20:23.576018Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 697463.381200 to 698392.181200 2024-11-21T09:20:23.576022Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {163, 317} for task task-990 (990 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:23.576025Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-990 (990 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:23.576029Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 698392.181200 to 699752.745200 (insert task task-990 (990 by [2:97:2132])) 2024-11-21T09:20:23.576034Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-990 (990 by [2:97:2132]) (release resources {163, 317}) 2024-11-21T09:20:23.576039Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 698392.181200 to 700155.462000 >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics >> test.py::test[blocks-coalesce_bools--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_bools--Plan] [GOOD] >> test.py::test[blocks-coalesce_bools--Results] >> TResourceBroker::TestResubmitTask >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TResourceBroker::TestUpdateCookie [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 17974, MsgBus: 24072 2024-11-21T09:20:09.412620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659925941127056:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:09.412633Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00255a/r3tmp/tmpfVoGZP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17974, node 1 2024-11-21T09:20:09.465116Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:09.465362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:09.465370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:09.465371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:09.465393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24072 TClient is connected to server localhost:24072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:09.510378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:09.513491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:09.513511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:09.514574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:09.652565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659925941127645:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.652585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659925941127671:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.652591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:09.653207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T09:20:09.654363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659925941127674:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T09:20:09.756848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:20:09.766383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:09.766417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:09.766449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:09.766470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:09.766488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:09.766514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:09.766532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:09.766553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:09.766566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:09.766578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:09.766596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:09.766612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439659925941127880:2316];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:20:09.766770Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:20:09.766793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:20:09.766832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:20:09.766853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:20:09.766868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:20:09.766887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:20:09.766906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:20:09.766949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:20:09.766964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:20:09.766974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:20:09.766979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:20:09.766991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:20:09.766997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:20:09.767001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:20:09.767011Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439659925941127882:2317];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:20:09.767016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:20:09.767021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NA ... lanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7439659956151899081:2880];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7439659956151899059:2875];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7439659956151899081:2880];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7439659956151899059:2875];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439659956151898921:2843];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439659956151898921:2843];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7439659956151898927:2846];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038030;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7439659956151898910:2839];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7439659956151898927:2846];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439659956151899021:2861];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484751Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038024;self_id=[2:7439659956151898910:2839];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439659956151899021:2861];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439659956151899041:2866];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.484794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439659956151899041:2866];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439659956151899065:2877];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439659956151899108:2881];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439659956151899065:2877];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439659956151899108:2881];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439659956151898868:2829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439659956151898868:2829];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7439659956151899182:2891];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7439659956151899167:2887];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485724Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7439659956151899182:2891];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485731Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038026;self_id=[2:7439659956151899167:2887];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7439659956151899032:2863];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7439659956151899235:2892];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7439659956151899032:2863];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.485780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7439659956151899235:2892];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.486231Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7439659956151899044:2868];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.486269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7439659956151899044:2868];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.648658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;tx_state=complete;fline=columnshard_impl.cpp:718;event=skip_indexation;reason=disabled; 2024-11-21T09:20:18.736316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037993;tx_state=complete;fline=interaction.h:353;batch=Key: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":2},"id":281474976715665}],"finishes":[{"inc":{"count_include":2},"id":281474976715665}]},"p":{"include":0,"pk":"1;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"4000000001;"}}]}; 2024-11-21T09:20:18.743566Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224038022;self_id=[2:7439659956151899235:2892];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224038022;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2024-11-21T09:20:18.745139Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659964741838108:3200], SessionActorId: [2:7439659956151901722:3200], Got LOCKS BROKEN for table. ShardID=72075186224038022, Sink=[2:7439659964741838108:3200].{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T09:20:18.745169Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439659964741838108:3200], SessionActorId: [2:7439659956151901722:3200], Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7439659956151901722:3200]. isRollback=0 2024-11-21T09:20:18.745205Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGFkZjhmOTAtYWE2N2ZmYWQtZWE0OWE3M2UtZDgyMmVkZDM=, ActorId: [2:7439659956151901722:3200], ActorState: ExecuteState, TraceId: 01jd70beps4mg7jm7bs0wm0w36, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439659964741839170:3200] from: [2:7439659964741838108:3200] 2024-11-21T09:20:18.745240Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439659964741839170:3200] TxId: 281474976715669. Ctx: { TraceId: 01jd70beps4mg7jm7bs0wm0w36, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFkZjhmOTAtYWE2N2ZmYWQtZWE0OWE3M2UtZDgyMmVkZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T09:20:18.745332Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGFkZjhmOTAtYWE2N2ZmYWQtZWE0OWE3M2UtZDgyMmVkZDM=, ActorId: [2:7439659956151901722:3200], ActorState: ExecuteState, TraceId: 01jd70beps4mg7jm7bs0wm0w36, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T09:20:20.649990Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659951856925943:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:20.650056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Analyze] >> TResourceBrokerInstant::TestErrors >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TTabletPipeTest::TestConnectReject >> test.py::test[library-library_via_http--Debug] [GOOD] >> TResourceBroker::TestCounters >> TResourceBroker::TestOverusage >> test.py::test[library-library_via_http--ForceBlocks] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |95.6%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] Test command err: 2024-11-21T09:20:24.184376Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.184465Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.184473Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184480Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.184485Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184494Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.184502Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.184507Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184511Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-2 (2 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.184515Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184520Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 800.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.184526Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.184529Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184534Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:24.184548Z node 1 :RESOURCE_BROKER DEBUG: Update task task-2 (2 by [1:97:2132]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2024-11-21T09:20:24.184552Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184556Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:24.184563Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {200, 200}) 2024-11-21T09:20:24.184568Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 40.000000 (remove task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.184574Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 40.000000 2024-11-21T09:20:24.184578Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-2 (2 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.184582Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184587Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 804.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.184591Z node 1 
:RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:24.184596Z node 1 :RESOURCE_BROKER DEBUG: Update task task-2 (2 by [1:97:2132]) (priority=5 type=compaction0 resources={200, 200} resubmit=1) 2024-11-21T09:20:24.184600Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184604Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-2 (2 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.184608Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184614Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 422.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.184618Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-3 (3 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.184622Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.184626Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 422.000000 to 804.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.424873Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.424957Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:24.424964Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.424971Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.424975Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.424984Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:24.425013Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [2:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.425017Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425021Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:24.425027Z node 2 :RESOURCE_BROKER DEBUG: Update cookie for task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:24.425033Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:24.425038Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 0.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:24.425043Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-2 (2 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.425046Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425050Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 380.000000 (insert task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:24.425057Z node 2 :RESOURCE_BROKER DEBUG: Submitted new 
compaction0 task task-3 (3 by [2:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.425060Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425064Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-3 (3 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.425067Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425071Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 380.000000 to 760.000000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:24.425076Z node 2 :RESOURCE_BROKER DEBUG: Update cookie for task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:24.425080Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [2:97:2132]) (release resources {200, 200}) 2024-11-21T09:20:24.425085Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 760.000000 to 380.000000 (remove task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:24.425091Z node 2 :RESOURCE_BROKER DEBUG: Update task task-2 (2 by [2:97:2132]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2024-11-21T09:20:24.425095Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425098Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-2 (2 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.425101Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.425105Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 720.000000 (insert task task-2 (2 by [2:97:2132])) >> TPipeCacheTest::TestIdleRefresh >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> TResourceBroker::TestCounters [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2024-11-21T09:20:23.632611Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:20:23.632633Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:20:23.632709Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T09:20:23.632714Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 7090319362426798975 2024-11-21T09:20:23.632735Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T09:20:23.632738Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16229357239031646724 2024-11-21T09:20:23.632857Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2024-11-21T09:20:23.632862Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T09:20:23.632929Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2024-11-21T09:20:23.632932Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) >> TResourceBroker::TestChangeTaskType >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] Test command err: 2024-11-21T09:20:21.530953Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:0:0:0:0:104:4] barrier# {Soft# {Gen# 0 Step# 0} Hard# } |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TResourceBrokerInstant::TestMerge [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2024-11-21T09:20:23.815674Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:6:2053] 2024-11-21T09:20:23.815735Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:7:2054] worker 0 2024-11-21T09:20:23.815742Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:8:2055] worker 1 2024-11-21T09:20:23.815747Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:9:2056] worker 2 2024-11-21T09:20:23.815751Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:10:2057] worker 3 2024-11-21T09:20:23.815755Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:11:2058] worker 4 2024-11-21T09:20:23.815760Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:12:2059] worker 5 2024-11-21T09:20:23.815767Z node 1 :TABLET_AGGREGATOR INFO: 
aggregator new request V2 Initiator [1:6:2053] self [1:13:2060] worker 6 2024-11-21T09:20:23.815772Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:14:2061] worker 7 2024-11-21T09:20:23.815777Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:15:2062] worker 8 2024-11-21T09:20:23.815781Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:16:2063] worker 9 Sending message to [1:8:2055] from [1:6:2053] id 1 Sending message to [1:9:2056] from [1:6:2053] id 2 Sending message to [1:10:2057] from [1:6:2053] id 3 Sending message to [1:11:2058] from [1:6:2053] id 4 Sending message to [1:12:2059] from [1:6:2053] id 5 Sending message to [1:13:2060] from [1:6:2053] id 6 Sending message to [1:14:2061] from [1:6:2053] id 7 Sending message to [1:15:2062] from [1:6:2053] id 8 Sending message to [1:16:2063] from [1:6:2053] id 9 Sending message to [1:7:2054] from [1:6:2053] id 10 2024-11-21T09:20:23.873000Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:16:2063] 2024-11-21T09:20:23.873042Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:7:2054] 2024-11-21T09:20:23.873050Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:8:2055] 2024-11-21T09:20:23.873055Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:9:2056] 2024-11-21T09:20:23.873060Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:10:2057] 2024-11-21T09:20:23.873069Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:11:2058] 2024-11-21T09:20:23.873076Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:12:2059] 2024-11-21T09:20:23.873080Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:13:2060] 2024-11-21T09:20:23.873087Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:14:2061] 2024-11-21T09:20:23.873092Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:15:2062] 2024-11-21T09:20:23.873097Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:14:2061] 2024-11-21T09:20:23.873313Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:14:2061] 2024-11-21T09:20:23.877404Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:6:2053] 2024-11-21T09:20:23.880934Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:15:2062] 2024-11-21T09:20:23.881189Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:15:2062] 2024-11-21T09:20:23.885541Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:6:2053] 2024-11-21T09:20:23.889823Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:16:2063] 2024-11-21T09:20:23.890107Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:16:2063] 2024-11-21T09:20:23.894424Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:6:2053] 2024-11-21T09:20:23.898745Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:6:2053] 2024-11-21T09:20:23.898794Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:6:2053] 2024-11-21T09:20:23.899758Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:7:2054] 2024-11-21T09:20:23.900045Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:7:2054] 2024-11-21T09:20:23.904953Z 
node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] 2024-11-21T09:20:23.909063Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:8:2055] 2024-11-21T09:20:23.909352Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:8:2055] 2024-11-21T09:20:23.912871Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:6:2053] 2024-11-21T09:20:23.915447Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:9:2056] 2024-11-21T09:20:23.915652Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:9:2056] 2024-11-21T09:20:23.918504Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:6:2053] 2024-11-21T09:20:23.921160Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:10:2057] 2024-11-21T09:20:23.921355Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:10:2057] 2024-11-21T09:20:23.925163Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:6:2053] 2024-11-21T09:20:23.927797Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:11:2058] 2024-11-21T09:20:23.928014Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:11:2058] 2024-11-21T09:20:23.930935Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:6:2053] 2024-11-21T09:20:23.933396Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:12:2059] 2024-11-21T09:20:23.933581Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:12:2059] 2024-11-21T09:20:23.937980Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:6:2053] 2024-11-21T09:20:23.942370Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:13:2060] 2024-11-21T09:20:23.942648Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:13:2060] 2024-11-21T09:20:23.946845Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:6:2053] 2024-11-21T09:20:23.950855Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:6:2053] 2024-11-21T09:20:23.950913Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:6:2053] 2024-11-21T09:20:23.952296Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:6:2053] 2024-11-21T09:20:23.952339Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:6:2053] 2024-11-21T09:20:23.953517Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:6:2053] 2024-11-21T09:20:23.953555Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:6:2053] 2024-11-21T09:20:23.954900Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:6:2053] 2024-11-21T09:20:23.954928Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:6:2053] 2024-11-21T09:20:23.955840Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:6:2053] 2024-11-21T09:20:23.955868Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:6:2053] 2024-11-21T09:20:23.956841Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:6:2053] 2024-11-21T09:20:23.956876Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:6:2053] 2024-11-21T09:20:23.958477Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 
4 [1:6:2053] 2024-11-21T09:20:23.958508Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:6:2053] 2024-11-21T09:20:23.959392Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:6:2053] 2024-11-21T09:20:23.959417Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:6:2053] 2024-11-21T09:20:23.960359Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:6:2053] 2024-11-21T09:20:23.960386Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:6:2053] 2024-11-21T09:20:23.961257Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:6:2053] Initiator [1:5:2052] TEST 2 10 duration 0.170984s 2024-11-21T09:20:24.020797Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:6:2053] 2024-11-21T09:20:24.020873Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:7:2054] worker 0 2024-11-21T09:20:24.020879Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:8:2055] worker 1 2024-11-21T09:20:24.020883Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:9:2056] worker 2 2024-11-21T09:20:24.020888Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:10:2057] worker 3 2024-11-21T09:20:24.020892Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:11:2058] worker 4 2024-11-21T09:20:24.020896Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:12:2059] worker 5 2024-11-21T09:20:24.020912Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:13:2060] worker 6 2024-11-21T09:20:24.020917Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:14:2061] worker 7 2024-11-21T09:20:24.020921Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:15:2062] worker 8 2024-11-21T09:20:24.020925Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:16:2063] worker 9 2024-11-21T09:20:24.020929Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:17:2064] worker 10 2024-11-21T09:20:24.020933Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:18:2065] worker 11 2024-11-21T09:20:24.020937Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:19:2066] worker 12 2024-11-21T09:20:24.020941Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:20:2067] worker 13 2024-11-21T09:20:24.020945Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:21:2068] worker 14 2024-11-21T09:20:24.020950Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:22:2069] worker 15 2024-11-21T09:20:24.020953Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:23:2070] worker 16 2024-11-21T09:20:24.020958Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:24:2071] worker 17 2024-11-21T09:20:24.020962Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:25:2072] worker 18 2024-11-21T09:20:24.020966Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:26:2073] worker 19 Sending message to [2:8:2055] from [2:6:2053] id 1 Sending message to 
[2:9:2056] from [2:6:2053] id 2 Sending message to [2:10:2057] from [2:6:2053] id 3 Sending message to [2:11:2058] from [2:6:2053] id 4 Sending message to [2:12:2059] from [2:6:2053] id 5 Sending message to [2:13:2060] from [2:6:2053] id 6 Sending message to [2:14:2061] from [2:6:2053] id 7 Sending message to [2:15:2062] from [2:6:2053] id 8 Sending message to [2:16:2063] from [2:6:2053] id ... response node 15 [2:6:2053] 2024-11-21T09:20:24.162441Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 16 [2:6:2053] 2024-11-21T09:20:24.162445Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 16 [2:6:2053] 2024-11-21T09:20:24.162454Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 17 [2:6:2053] 2024-11-21T09:20:24.162458Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 17 [2:6:2053] 2024-11-21T09:20:24.162466Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:6:2053] 2024-11-21T09:20:24.162471Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:6:2053] 2024-11-21T09:20:24.162478Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:6:2053] 2024-11-21T09:20:24.162482Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:6:2053] 2024-11-21T09:20:24.162489Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:6:2053] 2024-11-21T09:20:24.162493Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:6:2053] 2024-11-21T09:20:24.162498Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:6:2053] 2024-11-21T09:20:24.162539Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:6:2053] 2024-11-21T09:20:24.163557Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:6:2053] 2024-11-21T09:20:24.163599Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:6:2053] 2024-11-21T09:20:24.164885Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:6:2053] 2024-11-21T09:20:24.164941Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:6:2053] 2024-11-21T09:20:24.166092Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:6:2053] 2024-11-21T09:20:24.166137Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:6:2053] 2024-11-21T09:20:24.167531Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:6:2053] 2024-11-21T09:20:24.167573Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:6:2053] 2024-11-21T09:20:24.168696Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:6:2053] 2024-11-21T09:20:24.168735Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:6:2053] 2024-11-21T09:20:24.169864Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:6:2053] 2024-11-21T09:20:24.169902Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:6:2053] 2024-11-21T09:20:24.171650Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:6:2053] 2024-11-21T09:20:24.171693Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:6:2053] 2024-11-21T09:20:24.172853Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:6:2053] 2024-11-21T09:20:24.172893Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:6:2053] 2024-11-21T09:20:24.174013Z node 2 :TABLET_AGGREGATOR 
INFO: aggregator actor got response node 10 [2:6:2053] 2024-11-21T09:20:24.174047Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:6:2053] 2024-11-21T09:20:24.175166Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:6:2053] Initiator [2:5:2052] TEST 2 20 duration 0.176872s 2024-11-21T09:20:24.231571Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 [3:6:2053] 2024-11-21T09:20:24.231611Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:6:2053] self [3:7:2054] worker 0 Sending message to [3:7:2054] from [3:6:2053] id 1 Sending message to [3:7:2054] from [3:6:2053] id 2 Sending message to [3:7:2054] from [3:6:2053] id 3 Sending message to [3:7:2054] from [3:6:2053] id 4 Sending message to [3:7:2054] from [3:6:2053] id 5 Sending message to [3:7:2054] from [3:6:2053] id 6 Sending message to [3:7:2054] from [3:6:2053] id 7 Sending message to [3:7:2054] from [3:6:2053] id 8 Sending message to [3:7:2054] from [3:6:2053] id 9 Sending message to [3:7:2054] from [3:6:2053] id 10 2024-11-21T09:20:24.285695Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:7:2054] 2024-11-21T09:20:24.285710Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:7:2054] 2024-11-21T09:20:24.285713Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:7:2054] 2024-11-21T09:20:24.285734Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:7:2054] 2024-11-21T09:20:24.285739Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:7:2054] 2024-11-21T09:20:24.285743Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:7:2054] 2024-11-21T09:20:24.285747Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:7:2054] 2024-11-21T09:20:24.285751Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:7:2054] 2024-11-21T09:20:24.285755Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:7:2054] 2024-11-21T09:20:24.285759Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:7:2054] 2024-11-21T09:20:24.285814Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:7:2054] 2024-11-21T09:20:24.286094Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:7:2054] 2024-11-21T09:20:24.290824Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:7:2054] 2024-11-21T09:20:24.291115Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:7:2054] 2024-11-21T09:20:24.296087Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:7:2054] 2024-11-21T09:20:24.296382Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:7:2054] 2024-11-21T09:20:24.302277Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:7:2054] 2024-11-21T09:20:24.302548Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:7:2054] 2024-11-21T09:20:24.307547Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:7:2054] 2024-11-21T09:20:24.307772Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:7:2054] 2024-11-21T09:20:24.316868Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:7:2054] 2024-11-21T09:20:24.317094Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:7:2054] 2024-11-21T09:20:24.321103Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:7:2054] 
2024-11-21T09:20:24.321322Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:7:2054] 2024-11-21T09:20:24.326445Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:7:2054] 2024-11-21T09:20:24.326684Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:7:2054] 2024-11-21T09:20:24.331583Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [3:7:2054] 2024-11-21T09:20:24.331788Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:7:2054] 2024-11-21T09:20:24.337431Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:7:2054] 2024-11-21T09:20:24.337644Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:7:2054] 2024-11-21T09:20:24.352274Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] 2024-11-21T09:20:24.387628Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:6:2053] 2024-11-21T09:20:24.388058Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:6:2053] 2024-11-21T09:20:24.399608Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:6:2053] Initiator [3:5:2052] TEST 2 1 duration 0.186517s 2024-11-21T09:20:24.439406Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:5:2052] self [4:6:2053] worker 0 Sending message to [4:6:2053] from [4:6:2053] id 1 Sending message to [4:6:2053] from [4:6:2053] id 2 Sending message to [4:6:2053] from [4:6:2053] id 3 Sending message to [4:6:2053] from [4:6:2053] id 4 Sending message to [4:6:2053] from [4:6:2053] id 5 Sending message to [4:6:2053] from [4:6:2053] id 6 Sending message to [4:6:2053] from [4:6:2053] id 7 Sending message to [4:6:2053] from [4:6:2053] id 8 Sending message to [4:6:2053] from [4:6:2053] id 9 Sending message to [4:6:2053] from [4:6:2053] id 10 2024-11-21T09:20:24.484459Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:6:2053] 2024-11-21T09:20:24.484476Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:6:2053] 2024-11-21T09:20:24.484480Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:6:2053] 2024-11-21T09:20:24.484485Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:6:2053] 2024-11-21T09:20:24.484503Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:6:2053] 2024-11-21T09:20:24.484508Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:6:2053] 2024-11-21T09:20:24.484513Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:6:2053] 2024-11-21T09:20:24.484516Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:6:2053] 2024-11-21T09:20:24.484520Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:6:2053] 2024-11-21T09:20:24.484524Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:6:2053] 2024-11-21T09:20:24.484577Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:6:2053] 2024-11-21T09:20:24.484870Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:6:2053] 2024-11-21T09:20:24.489035Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:6:2053] 2024-11-21T09:20:24.489330Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:6:2053] 2024-11-21T09:20:24.493879Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:6:2053] 2024-11-21T09:20:24.494150Z node 4 
:TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:6:2053] 2024-11-21T09:20:24.500098Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:6:2053] 2024-11-21T09:20:24.500402Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:6:2053] 2024-11-21T09:20:24.504509Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:6:2053] 2024-11-21T09:20:24.504742Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:6:2053] 2024-11-21T09:20:24.512060Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:6:2053] 2024-11-21T09:20:24.512306Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:6:2053] 2024-11-21T09:20:24.515695Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:6:2053] 2024-11-21T09:20:24.515938Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:6:2053] 2024-11-21T09:20:24.519959Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:6:2053] 2024-11-21T09:20:24.520197Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:6:2053] 2024-11-21T09:20:24.523815Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:6:2053] 2024-11-21T09:20:24.524031Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:6:2053] 2024-11-21T09:20:24.527924Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:6:2053] 2024-11-21T09:20:24.528137Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:6:2053] 2024-11-21T09:20:24.542189Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:6:2053] Initiator [4:5:2052] TEST 2 1 duration 0.140013s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T09:19:53.541077Z :ReadSession INFO: Random seed for debugging is 1732180793541072 2024-11-21T09:19:53.611867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659860121460819:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:53.612092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:53.614081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659860212727424:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:53.614254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0041d5/r3tmp/tmpfkhEUr/pdisk_1.dat 2024-11-21T09:19:53.636175Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:53.638103Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:53.664354Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24271, node 1 2024-11-21T09:19:53.668937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0041d5/r3tmp/yandexie4S0z.tmp 2024-11-21T09:19:53.668959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/jptk/0041d5/r3tmp/yandexie4S0z.tmp 2024-11-21T09:19:53.669009Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0041d5/r3tmp/yandexie4S0z.tmp 2024-11-21T09:19:53.669040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:53.672166Z INFO: TTestServer started on Port 14093 GrpcPort 24271 TClient is connected to server localhost:14093 PQClient connected to localhost:24271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:53.688318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T09:19:53.712258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:53.712283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:53.713609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:53.734122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:53.734147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:53.735653Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:53.735917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:53.856066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659860121461725:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:53.856068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659860121461698:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:53.856086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:53.856544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659860121461756:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:53.856566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:53.856674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T09:19:53.860483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659860121461727:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T09:19:53.875442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:53.882030Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659860212727727:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:53.882105Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjczNGYzYjYtZjVlOTMzYjUtZGVkMjg3OWUtNDM1OGQyYTc=, ActorId: [2:7439659860212727695:2277], ActorState: ExecuteState, TraceId: 01jd70apgn9ty7g5ztr2pr5caa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:53.882580Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:53.932525Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659860121461896:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:53.932649Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODVkZWMwNjQtZWRhM2NiMWYtYWJlZGZkMTQtMmJkNTc0NDU=, ActorId: [1:7439659860121461695:2299], ActorState: ExecuteState, TraceId: 01jd70apfzftmedjb8zmd6rtrj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:53.932863Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:53.938601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:54.001592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24271", true, true, 1000); 2024-11-21T09:19:54.078021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd70appf5f9k8hh098wvphqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RmNmU1NTktNGFmNzE3MDMtOTVmNmU4ZGYtZjczODVjMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659864416429497:2935] 2024-11-21T09:19:58.611961Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659860121460819:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:58.611991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:58.614508Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659860212727424:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:58.614532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:20:00.099401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:24271 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T09:20:00.110795Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24271 MetaRequest { CmdCreateTopic ... r shared/user session shared/user_7_1_15074884130054417241_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2024-11-21T09:20:24.564574Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid c9e02d20-43a7e212-a20d5035-40be311 has messages 1 2024-11-21T09:20:24.564616Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 read done: guid# c9e02d20-43a7e212-a20d5035-40be311, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2024-11-21T09:20:24.564634Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 response to read: guid# c9e02d20-43a7e212-a20d5035-40be311 2024-11-21T09:20:24.564735Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 Process answer. Aval parts: 0 2024-11-21T09:20:24.564947Z :DEBUG: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.565012Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T09:20:24.565045Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T09:20:24.565092Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T09:20:24.565107Z :DEBUG: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:24.565114Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 got read request: guid# eb613e9d-d0629538-96778534-138fa9a8 DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T09:20:23.458000Z WriteTime: 2024-11-21T09:20:23.459000Z Ip: "ipv6:[::1]:45522" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:45522" } } } } 2024-11-21T09:20:24.565149Z :INFO: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] Closing read session. 
Close timeout: 3.000000s 2024-11-21T09:20:24.565157Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T09:20:24.565164Z :INFO: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1230 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:24.565393Z :INFO: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:24.565398Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T09:20:24.565401Z :INFO: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1231 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:24.565390Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 grpc read done: success# 0, data# { } 2024-11-21T09:20:24.565398Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 grpc read failed 2024-11-21T09:20:24.565402Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 grpc closed 2024-11-21T09:20:24.565416Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_15074884130054417241_v1 is DEAD 2024-11-21T09:20:24.565418Z :NOTICE: [/Root] [/Root] [6d0899ae-bd45dc19-bf10980c-b7c6350a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:20:24.565504Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:24.565513Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_15074884130054417241_v1 2024-11-21T09:20:24.565524Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439659988265057367:2508] destroyed 2024-11-21T09:20:24.565554Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_15074884130054417241_v1 2024-11-21T09:20:24.565637Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439659988265057364:2505] disconnected; active server actors: 1 2024-11-21T09:20:24.565644Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439659988265057364:2505] client user disconnected session shared/user_7_1_15074884130054417241_v1 2024-11-21T09:20:24.881991Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.881998Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.882002Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:24.882077Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:24.882184Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:24.882250Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.882306Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T09:20:24.882520Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.882523Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.882525Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:24.882565Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:24.882631Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:24.882663Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.882688Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:20:24.882854Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T09:20:24.882984Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T09:20:24.882999Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T09:20:24.883025Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:24.883029Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T09:20:24.883032Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2024-11-21T09:20:24.883037Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T09:20:24.883397Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.883400Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.883401Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:24.883448Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T09:20:24.883531Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:24.883574Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.883605Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T09:20:24.883687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.883729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:24.883758Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:24.883765Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T09:20:24.883773Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2024-11-21T09:20:24.884221Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.884228Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.884231Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:20:24.884299Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T09:20:24.884387Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T09:20:24.884423Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.884519Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:20:24.884554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T09:20:24.884569Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T09:20:24.884584Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TResourceBroker::TestErrors >> TResourceBroker::TestChangeTaskType [GOOD] >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Analyze] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Debug] >> TPipeCacheTest::TestTabletNode [GOOD] >> BootstrapperTest::RestartUnavailableTablet >> TResourceBrokerInstant::Test [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] Test command err: 2024-11-21T09:20:24.817621Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.817715Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:24.817725Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.817733Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.817738Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.817746Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.817755Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T09:20:24.817759Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817763Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-2 (2 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.817766Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817770Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.817776Z 
node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T09:20:24.817780Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817783Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-3 (3 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.817787Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817791Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 400.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.817796Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T09:20:24.817800Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817805Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-4 (4 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.817808Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817813Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 600.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:24.817818Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [1:97:2132]) priority=5 resources={250, 250} 2024-11-21T09:20:24.817822Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.817826Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T09:20:24.817832Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-6 (6 by [1:97:2132]) priority=5 resources={250, 250} 2024-11-21T09:20:24.817835Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817839Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T09:20:24.817842Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T09:20:24.817847Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-7 (7 by [1:97:2132]) priority=5 resources={150, 150} 2024-11-21T09:20:24.817852Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817856Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T09:20:24.817859Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T09:20:24.817890Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {200, 200}) 2024-11-21T09:20:24.817896Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 0.000000 (remove task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.817900Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T09:20:24.817904Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T09:20:24.817909Z node 1 :RESOURCE_BROKER DEBUG: Finish task 
task-2 (2 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T09:20:24.817914Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 600.000000 to 400.000000 (remove task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.817918Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-5 (5 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.817921Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.817926Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 475.000000 (insert task task-5 (5 by [1:97:2132])) 2024-11-21T09:20:24.817930Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-6 (6 by [1:97:2132]) 2024-11-21T09:20:24.817949Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T09:20:24.817954Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 200.000000 (remove task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.817957Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-6 (6 by [1:97:2132]) 2024-11-21T09:20:24.817961Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T09:20:24.817965Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 0.000000 (remove task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:24.817968Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-6 (6 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.817972Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.817976Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 425.000000 (insert task task-6 (6 by [1:97:2132])) 2024-11-21T09:20:24.817979Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-7 (7 by [1:97:2132]) 2024-11-21T09:20:24.818011Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-5 (5 by [1:97:2132]) (release resources {250, 250}) 2024-11-21T09:20:24.818016Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 475.000000 to 0.000000 (remove task task-5 (5 by [1:97:2132])) 2024-11-21T09:20:24.818019Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {150, 150} for task task-7 (7 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.818023Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-7 (7 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.818027Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 425.000000 to 680.000000 (insert task task-7 (7 by [1:97:2132])) 2024-11-21T09:20:24.818034Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-6 (6 by [1:97:2132]) (release resources {250, 250}) 2024-11-21T09:20:24.818039Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 680.000000 to 255.000000 (remove task task-6 (6 by [1:97:2132])) 2024-11-21T09:20:24.818058Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-7 (7 by [1:97:2132]) (release resources {150, 150}) 2024-11-21T09:20:24.818062Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 255.000000 to 
0.000000 (remove task task-7 (7 by [1:97:2132])) 2024-11-21T09:20:24.818069Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1000 (1000 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:24.818072Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1000 (1000 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.818076Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1000 (1000 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.818079Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1000 (1000 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.818083Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 900.000000 (insert task task-1000 (1000 by [1:97:2132])) 2024-11-21T09:20:24.818090Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818094Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818097Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818103Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-2 (2 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818106Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818109Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818114Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-3 (3 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818118Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818121Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818126Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-4 (4 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818129Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818133Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818138Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-5 (5 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818141Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818144Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818149Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-6 (6 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818152Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818156Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818161Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-7 (7 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818164Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818169Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 
(1 by [1:97:2132]) 2024-11-21T09:20:24.818174Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-8 (8 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818178Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-8 (8 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818181Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818187Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-9 (9 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818191Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-9 (9 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818195Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818200Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-10 (10 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T09:20:24.818235Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-10 (10 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818239Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T09:20:24.818243Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1000 (1000 by [1:97:2132]) (release resources {500, 500}) 2024-11-21T09:20:24.818250Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1500.000000 2024-11-21T09:20:24.818254Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-1 (1 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818258Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818262Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 2.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.818266Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-2 (2 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818270Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818273Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 2.000000 to 4.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:24.818277Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-3 (3 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818280Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818284Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 4.000000 to 6.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.818288Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-4 (4 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818292Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818296Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 6.000000 to 8.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:24.818299Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-5 (5 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818303Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [1:97:2132]) to queue 
queue_default 2024-11-21T09:20:24.818307Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 8.000000 to 10.000000 (insert task task-5 (5 by [1:97:2132])) 2024-11-21T09:20:24.818310Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-6 (6 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818315Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818319Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 10.000000 to 12.000000 (insert task task-6 (6 by [1:97:2132])) 2024-11-21T09:20:24.818323Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-7 (7 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818327Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-7 (7 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818331Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 12.000000 to 14.000000 (insert task task-7 (7 by [1:97:2132])) 2024-11-21T09:20:24.818334Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-8 (8 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818338Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-8 (8 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818342Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 14.000000 to 16.000000 (insert task task-8 (8 by [1:97:2132])) 2024-11-21T09:20:24.818345Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-9 (9 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818349Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-9 (9 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818353Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 16.000000 to 18.000000 (insert task task-9 (9 by [1:97:2132])) 2024-11-21T09:20:24.818356Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-10 (10 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:24.818360Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-10 (10 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:24.818364Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 18.000000 to 20.000000 (insert task task-10 (10 by [1:97:2132])) 2024-11-21T09:20:24.818376Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818380Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 0.000000 to 20.000000 2024-11-21T09:20:24.818384Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818389Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818394Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818398Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-5 (5 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818403Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-6 (6 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818407Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-7 (7 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818412Z node 1 :RESOURCE_BROKER DEBUG: 
Finish task task-8 (8 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818416Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-9 (9 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:24.818421Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-10 (10 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T09:20:25.058027Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.058085Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.058090Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.058094Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.058097Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.058104Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.058109Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.058111Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.058114Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.058117Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.058119Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.058121Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.058129Z node 2 :RESOURCE_BROKER DEBUG: Update task task-3 (3 by [2:97:2132]) (priority=5 type=compaction1 resources={400, 400} resubmit=0) 2024-11-21T09:20:25.058132Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.058136Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [2:97:2132]) 2024-11-21T09:20:25.058138Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.058142Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:25.058146Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 80.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.058149Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 2024-11-21T09:20:25.058151Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-3 (3 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.058153Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.058156Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:25.058158Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) >> 
TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation >> test.py::test[library-library_via_http--ForceBlocks] [GOOD] >> test.py::test[library-library_via_http--Plan] [GOOD] >> test.py::test[library-library_via_http--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2024-11-21T09:20:24.743914Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.743992Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=0 resources={100, 100} 2024-11-21T09:20:24.744000Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.744007Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.744011Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.744021Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.744042Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=0 resources={100500, 100500} 2024-11-21T09:20:24.744046Z node 1 :RESOURCE_BROKER DEBUG: SubmitTask failed for task 1 to [1:97:2132]: task with the same ID has been already submitted 2024-11-21T09:20:24.744057Z node 1 :RESOURCE_BROKER DEBUG: FinishTask failed for task 2 to [1:97:2132]: cannot finish unknown task 2024-11-21T09:20:24.744062Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task 2024-11-21T09:20:24.986611Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.986679Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=0 resources={100, 200} 2024-11-21T09:20:24.986686Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.986693Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {100, 200} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.986698Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.986705Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:24.986716Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [2:97:2132]) priority=0 resources={100, 100} 2024-11-21T09:20:24.986718Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.986720Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-2 (2 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.986723Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.986725Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 600.000000 (insert task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:24.986731Z node 2 :RESOURCE_BROKER DEBUG: Update task task-1 
(1 by [2:97:2132]) (priority=0 type=compaction0 resources={200, 300} resubmit=0) 2024-11-21T09:20:24.986733Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.986735Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 200.000000 to 800.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:24.986738Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [2:97:2132]) (release resources {100, 100}) 2024-11-21T09:20:24.986741Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 600.000000 (remove task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:24.986747Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [2:97:2132]) priority=0 resources={10, 20} 2024-11-21T09:20:24.986749Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.986751Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {10, 20} for task task-3 (3 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.986753Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.986756Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 40.000000 (insert task task-3 (3 by [2:97:2132])) |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] Test command err: 2024-11-21T09:20:24.848557Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:24.848658Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:24.848668Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.848676Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.848681Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.848690Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 100.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:24.848700Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={410, 410} 2024-11-21T09:20:24.848704Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.848709Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 due to exceeded limits 2024-11-21T09:20:24.848714Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [1:97:2132]) priority=5 resources={550, 550} 2024-11-21T09:20:24.848718Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.848723Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:24.848726Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:24.848743Z node 1 :RESOURCE_BROKER DEBUG: 
Finish task task-1 (1 by [1:97:2132]) (release resources {50, 50}) 2024-11-21T09:20:24.848749Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2024-11-21T09:20:24.848754Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {550, 550} for task task-3 (3 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:24.848758Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:24.848762Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1100.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.848766Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:24.848773Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [1:97:2132]) (release resources {550, 550}) 2024-11-21T09:20:24.848778Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1100.000000 to 550.000000 (remove task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:24.848782Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 0.000000 to 550.000000 2024-11-21T09:20:24.848786Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {410, 410} for task task-2 (2 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:24.848790Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:24.848795Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 920.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:25.087977Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.088055Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.088063Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.088072Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.088077Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.088088Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.088096Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [2:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:25.088099Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.088103Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.088107Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [2:98:2133]) priority=5 resources={200, 200} 2024-11-21T09:20:25.088109Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:98:2133]) to queue queue_compaction0 2024-11-21T09:20:25.088113Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.088115Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.088118Z node 2 
:RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [2:98:2133]) priority=5 resources={200, 200} 2024-11-21T09:20:25.088120Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [2:98:2133]) to queue queue_compaction1 2024-11-21T09:20:25.088123Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.088124Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.088135Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {500, 500}) 2024-11-21T09:20:25.088140Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 100.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.088143Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2024-11-21T09:20:25.088146Z node 2 :RESOURCE_BROKER DEBUG: Removing task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.088149Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-4 (4 by [2:98:2133]) from queue queue_compaction1 2024-11-21T09:20:25.088151Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [2:98:2133]) to queue queue_compaction1 2024-11-21T09:20:25.088154Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 400.000000 (insert task task-4 (4 by [2:98:2133])) 2024-11-21T09:20:25.088156Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-3 (3 by [2:98:2133]) from queue queue_compaction0 2024-11-21T09:20:25.088159Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:98:2133]) to queue queue_compaction0 2024-11-21T09:20:25.088161Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 500.000000 (insert task task-3 (3 by [2:98:2133])) >> TTabletResolver::TabletResolvePriority [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] Test command err: 2024-11-21T09:20:25.183007Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.183096Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=0 resources={100, 100} 2024-11-21T09:20:25.183102Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.183107Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.183110Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.183118Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:25.183132Z node 1 :RESOURCE_BROKER DEBUG: Update task task-1 (1 by [1:97:2132]) (priority=0 type=compaction0 resources={80, 70} resubmit=0) 2024-11-21T09:20:25.183136Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.183140Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 160.000000 (insert task task-1 (1 by [1:97:2132])) 
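The TResourceBrokerInstant traces above exercise the instant-task bookkeeping: a task books a {cpu, memory} pair when submitted, a resubmit with an already-known ID is rejected ("task with the same ID has been already submitted"), UpdateTask swaps the booked pair in place, and FinishTask releases whatever is currently booked or fails for an unknown ID ("cannot finish unknown task"). The following is a minimal standalone sketch of only that accounting, inferred from these log lines; the class and member names (TToyInstantBroker, TResources, Booked) are invented for illustration and are not the actual NKikimr resource broker API, and queue weighting / planned-usage scoring is deliberately left out.

// Editor's illustrative sketch (not YDB source): toy model of the instant-task
// bookkeeping shown in the TResourceBrokerInstant traces above.
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

struct TResources {
    std::uint64_t Cpu = 0;
    std::uint64_t Memory = 0;
};

class TToyInstantBroker {
public:
    bool SubmitTask(std::uint64_t taskId, TResources res, std::string& err) {
        if (Tasks.count(taskId)) {
            err = "task with the same ID has been already submitted";
            return false;
        }
        Tasks[taskId] = res;
        Booked.Cpu += res.Cpu;
        Booked.Memory += res.Memory;
        return true;
    }

    // Replaces the booked resources of a known task in place (cf. "Update task ... resubmit=0").
    bool UpdateTask(std::uint64_t taskId, TResources res, std::string& err) {
        auto it = Tasks.find(taskId);
        if (it == Tasks.end()) {
            err = "cannot update unknown task";
            return false;
        }
        Booked.Cpu = Booked.Cpu - it->second.Cpu + res.Cpu;
        Booked.Memory = Booked.Memory - it->second.Memory + res.Memory;
        it->second = res;
        return true;
    }

    // Releases whatever the task currently has booked (cf. "Finish task ... (release resources {...})").
    bool FinishTask(std::uint64_t taskId, std::string& err) {
        auto it = Tasks.find(taskId);
        if (it == Tasks.end()) {
            err = "cannot finish unknown task";
            return false;
        }
        Booked.Cpu -= it->second.Cpu;
        Booked.Memory -= it->second.Memory;
        Tasks.erase(it);
        return true;
    }

    TResources BookedNow() const { return Booked; }

private:
    std::unordered_map<std::uint64_t, TResources> Tasks;
    TResources Booked;
};

int main() {
    TToyInstantBroker broker;
    std::string err;
    broker.SubmitTask(1, {100, 100}, err);        // task-1 books {100, 100}
    broker.SubmitTask(1, {100500, 100500}, err);  // rejected: duplicate ID
    std::cout << err << "\n";
    broker.UpdateTask(1, {80, 70}, err);          // shrink the booking in place
    broker.FinishTask(2, err);                    // rejected: unknown task
    std::cout << err << "\n";
    broker.FinishTask(1, err);                    // releases the current {80, 70}
    std::cout << broker.BookedNow().Cpu << "\n";  // prints 0
}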
2024-11-21T09:20:25.183149Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {80, 70}) 2024-11-21T09:20:25.183155Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 160.000000 to 0.000000 (remove task task-1 (1 by [1:97:2132])) >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TResourceBroker::TestExecutionStat [GOOD] >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2024-11-21T09:20:25.087075Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 >> TTabletPipeTest::TestSendBeforeBootTarget >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:19:54.176114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:54.176134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:54.176141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:54.176153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:54.176156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:54.176162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:54.176242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:54.187309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:54.187329Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:54.189440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:54.190036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:54.190065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:54.191656Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:54.192152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:54.193928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.194013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:54.194815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.198684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:54.198689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.198694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:54.198709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.200596Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:19:54.214642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.215659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:54.215692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:54.215697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:54.216443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:54.216463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T09:19:54.216467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:54.216813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:54.216827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:54.217123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217131Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.217137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.219083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.219669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:54.220033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:54.222338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:54.223633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:54.223681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:54.223740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:54.223773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.223785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:54.224294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T09:19:54.224353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:19:54.224479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:54.224490Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:54.224507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:54.224513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:54.224529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:54.224535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:54.224541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:54.224556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:54.224565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:54.224570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:19:54.225009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.225040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:19:54.225049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:19:54.225056Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:19:54.225064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:54.225084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
okerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:25.491347Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:25.491365Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 19us result status StatusSuccess 2024-11-21T09:20:25.491442Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:25.491558Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2024-11-21T09:20:25.491583Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 26us result status StatusSuccess 2024-11-21T09:20:25.491707Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 4 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 
IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> TTabletPipeTest::TestTwoNodes >> TResourceBroker::TestQueueWithConfigure >> test.py::test[library-library_via_http--Results] [GOOD] >> test.py::test[library-package--Analyze] [SKIPPED] >> test.py::test[library-package--Debug] [SKIPPED] >> test.py::test[library-package--ForceBlocks] [SKIPPED] >> test.py::test[library-package--Plan] [SKIPPED] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[library-package_override--Analyze] [SKIPPED] >> test.py::test[library-package_override--Debug] [SKIPPED] >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestShutdown >> TTabletPipeTest::TestPipeConnectToHint >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] Test command err: 2024-11-21T09:20:25.320115Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.320185Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.320191Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:25.320196Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:25.320199Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:25.320223Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 800.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:25.320236Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.320238Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320242Z node 1 :RESOURCE_BROKER DEBUG: Not enough 
resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.320245Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.320247Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320250Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.320253Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-4 (4 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.320255Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320257Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.320260Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.320262Z node 1 :RESOURCE_BROKER DEBUG: SubmitTask failed for task 2 to [1:97:2132]: task with the same ID has been already submitted 2024-11-21T09:20:25.320271Z node 1 :RESOURCE_BROKER DEBUG: RemoveQueuedTask failed for task 1 to [1:97:2132]: cannot remove in-fly task 2024-11-21T09:20:25.320275Z node 1 :RESOURCE_BROKER DEBUG: RemoveQueuedTask failed for task 5 to [1:97:2132]: cannot remove unknown task 2024-11-21T09:20:25.320279Z node 1 :RESOURCE_BROKER DEBUG: RemoveQueuedTask failed for task 2 to [1:98:2133]: cannot remove unknown task 2024-11-21T09:20:25.320282Z node 1 :RESOURCE_BROKER DEBUG: Removing task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.320284Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:25.320288Z node 1 :RESOURCE_BROKER DEBUG: UpdateTask failed for task 2 to [1:97:2132]: cannot update unknown task 2024-11-21T09:20:25.320292Z node 1 :RESOURCE_BROKER DEBUG: UpdateTask failed for task 4 to [1:98:2133]: cannot update unknown task 2024-11-21T09:20:25.320296Z node 1 :RESOURCE_BROKER DEBUG: Update task task-4 (4 by [1:97:2132]) (priority=4 type=compaction0 resources={250, 250} resubmit=0) 2024-11-21T09:20:25.320298Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320300Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-4 (4 by [1:97:2132]) 2024-11-21T09:20:25.320303Z node 1 :RESOURCE_BROKER DEBUG: Update task task-3 (3 by [1:97:2132]) (priority=6 type=compaction0 resources={250, 250} resubmit=0) 2024-11-21T09:20:25.320305Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320307Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-4 (4 by [1:97:2132]) 2024-11-21T09:20:25.320312Z node 1 :RESOURCE_BROKER DEBUG: FinishTask failed for task 5 to [1:97:2132]: cannot finish unknown task 2024-11-21T09:20:25.320316Z node 1 :RESOURCE_BROKER DEBUG: FinishTask failed for task 2 to [1:98:2133]: cannot finish unknown task 2024-11-21T09:20:25.320319Z node 1 :RESOURCE_BROKER DEBUG: FinishTask failed for task 3 to [1:97:2132]: cannot finish queued task 2024-11-21T09:20:25.320323Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {400, 400}) 2024-11-21T09:20:25.320327Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 0.000000 to 1600.000000 
2024-11-21T09:20:25.320329Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-4 (4 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.320331Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320334Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 500.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T09:20:25.320337Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 due to exceeded limits 2024-11-21T09:20:25.320340Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {250, 250}) 2024-11-21T09:20:25.320342Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 500.000000 2024-11-21T09:20:25.320345Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-3 (3 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.320346Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.320349Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 500.000000 to 1000.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:25.561285Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.561350Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [2:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.561356Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:25.561362Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1 (1 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:25.561365Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:25.561371Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561376Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561379Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561382Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561385Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561387Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561389Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561391Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561394Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561396Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561398Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561400Z node 2 :RESOURCE_BROKER DEBUG: Skip queue 
queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561403Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561406Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561408Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561410Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561413Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-6 (6 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561415Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561417Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561418Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561421Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-7 (7 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561423Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561425Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561427Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561438Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {500, 500}) 2024-11-21T09:20:25.561442Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 1000.000000 to 0.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561447Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-2 (2 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561449Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561452Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 100.000000 (insert task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:25.561454Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-3 (3 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.561456Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561459Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 100.000000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:25.561461Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-4 (4 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561463Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561465Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 100.000000 to 200.000000 (insert task task-4 (4 by [2:97:2132])) 2024-11-21T09:20:25.561467Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-5 (5 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.561469Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task 
task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561472Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 100.000 ... R DEBUG: Updated real resource usage for queue queue_compaction1 from 1050.000000 to 1100.000000 2024-11-21T09:20:25.561844Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-1 (1 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561846Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561848Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-1 (1 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561850Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561853Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1100.000000 to 1160.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561856Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {50, 50}) 2024-11-21T09:20:25.561859Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1160.000000 to 1150.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561861Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 1100.000000 to 1150.000000 2024-11-21T09:20:25.561864Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-1 (1 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561866Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561868Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-1 (1 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561870Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561872Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1150.000000 to 1207.500000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561875Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {50, 50}) 2024-11-21T09:20:25.561878Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1207.500000 to 1200.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561880Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 1150.000000 to 1200.000000 2024-11-21T09:20:25.561883Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-1 (1 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561885Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561887Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-1 (1 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561889Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561891Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1200.000000 to 1255.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561894Z node 2 :RESOURCE_BROKER DEBUG: Finish task 
task-1 (1 by [2:97:2132]) (release resources {50, 50}) 2024-11-21T09:20:25.561896Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1255.000000 to 1250.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561899Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 1200.000000 to 1250.000000 2024-11-21T09:20:25.561902Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-1 (1 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561903Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561905Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-1 (1 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561907Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561910Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1250.000000 to 1302.500000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561912Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {50, 50}) 2024-11-21T09:20:25.561916Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1302.500000 to 1300.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561918Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 1250.000000 to 1300.000000 2024-11-21T09:20:25.561921Z node 2 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [2:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:25.561923Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:25.561926Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1 (1 by [2:97:2132]) from queue queue_default 2024-11-21T09:20:25.561927Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_default 2024-11-21T09:20:25.561930Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 950.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561933Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561935Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561937Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561940Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561942Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561944Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 300.000000 to 1300.000000 2024-11-21T09:20:25.561946Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561948Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561950Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [2:97:2132]) priority=5 resources={50, 
50} 2024-11-21T09:20:25.561952Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561954Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561956Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561959Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561960Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561962Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561964Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561967Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-6 (6 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561969Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561970Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561972Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561975Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-7 (7 by [2:97:2132]) priority=5 resources={50, 50} 2024-11-21T09:20:25.561977Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.561979Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:25.561982Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.561984Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {500, 500}) 2024-11-21T09:20:25.561987Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 950.000000 to 0.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:25.561989Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-2 (2 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.561991Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.561993Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1300.000000 to 1350.000000 (insert task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:25.561996Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-3 (3 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.561998Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.562000Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 300.000000 to 1400.000000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:25.562002Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-4 (4 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.562004Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.562006Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource 
usage for queue queue_compaction1 from 1350.000000 to 1400.000000 (insert task task-4 (4 by [2:97:2132])) 2024-11-21T09:20:25.562009Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-6 (6 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.562010Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.562013Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1400.000000 to 1450.000000 (insert task task-6 (6 by [2:97:2132])) 2024-11-21T09:20:25.562015Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-5 (5 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.562017Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.562019Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 1400.000000 to 1500.000000 (insert task task-5 (5 by [2:97:2132])) 2024-11-21T09:20:25.562021Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {50, 50} for task task-7 (7 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.562023Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-7 (7 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.562026Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 1500.000000 to 1600.000000 (insert task task-7 (7 by [2:97:2132])) >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Debug] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] Test command err: 2024-11-21T09:20:25.886052Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:25.886143Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886153Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886162Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:25.886167Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886177Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:25.886186Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886190Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886196Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886201Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [1:97:2132]) priority=5 resources={400, 400} 
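The TResourceBroker traces above follow a simple per-task life cycle: "Submitted" → "Assigning waiting task" to a queue → either "Allocate resources" (the task becomes in-fly) or "Not enough resources" (the task waits), and "Finish task" releases the resources so a waiting task can start. The toy model below replays that life cycle so the traces are easier to follow; it is a deliberately simplified sketch (single shared capacity pool, one FIFO of waiting tasks, no priorities or planned-usage weighting) and is not the actual YDB TResourceBroker.

```python
from collections import deque

# Toy re-enactment of the broker life cycle seen in the traces above.
# Assumptions: one shared {cpu, memory} pool, FIFO waiting order, no priorities.
CAPACITY = (1000, 1000)


class ToyBroker:
    def __init__(self):
        self.in_fly = {}        # task_id -> (queue, (cpu, mem))
        self.waiting = deque()  # (task_id, queue, (cpu, mem))
        self.used = [0, 0]

    def _fits(self, res):
        return all(u + r <= c for u, r, c in zip(self.used, res, CAPACITY))

    def submit(self, task_id, queue, res):
        print(f"Submitted task-{task_id} to {queue} resources={res}")
        self.waiting.append((task_id, queue, res))
        self._schedule()

    def finish(self, task_id):
        queue, res = self.in_fly.pop(task_id)
        self.used = [u - r for u, r in zip(self.used, res)]
        print(f"Finish task-{task_id} (release resources {res})")
        self._schedule()

    def _schedule(self):
        # Start waiting tasks in FIFO order while they fit into the pool.
        while self.waiting and self._fits(self.waiting[0][2]):
            task_id, queue, res = self.waiting.popleft()
            self.used = [u + r for u, r in zip(self.used, res)]
            self.in_fly[task_id] = (queue, res)
            print(f"Allocate resources {res} for task-{task_id} from {queue}")
        if self.waiting:
            print(f"Not enough resources to start task-{self.waiting[0][0]}")


broker = ToyBroker()
broker.submit(1, "queue_default", (500, 500))      # starts immediately
broker.submit(2, "queue_compaction1", (600, 600))  # waits: pool is too full
broker.finish(1)                                   # releasing task-1 lets task-2 start
```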
2024-11-21T09:20:25.886204Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886208Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886213Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-4 (4 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886217Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886220Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886225Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886229Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886232Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886237Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-6 (6 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886240Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886243Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886248Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-7 (7 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886251Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886255Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886259Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-8 (8 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886263Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-8 (8 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886266Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886271Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-9 (9 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886297Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-9 (9 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886303Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886309Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-10 (10 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886312Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-10 (10 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886316Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886321Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-11 (11 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886324Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-11 (11 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886327Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886331Z node 1 :RESOURCE_BROKER DEBUG: 
Submitted new compaction0 task task-12 (12 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886333Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-12 (12 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886336Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886341Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-13 (13 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886345Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-13 (13 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886348Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886353Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-14 (14 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886357Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-14 (14 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886360Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886365Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-15 (15 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886368Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-15 (15 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886372Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886377Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-16 (16 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886380Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-16 (16 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886383Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886388Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-17 (17 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886391Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-17 (17 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886395Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886399Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-18 (18 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886403Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-18 (18 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886406Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886411Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-19 (19 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886415Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-19 (19 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886419Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886425Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-20 (20 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886428Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-20 (20 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886431Z node 1 :RESOURCE_BROKER DEBUG: Not enough 
resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886436Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-21 (21 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886439Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-21 (21 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886442Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886447Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-22 (22 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886450Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-22 (22 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886454Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886458Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-23 (23 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886461Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-23 (23 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886464Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886470Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-24 (24 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886477Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-24 (24 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886480Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886485Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-25 (25 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886489Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-25 (25 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886492Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886498Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-26 (26 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886501Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-26 (26 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886504Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886508Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-27 (27 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886512Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-27 (27 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886515Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886521Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-28 (28 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886524Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-28 (28 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.886527Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.886533Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-29 (29 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.886537Z node 1 :RESOURCE_BR ... 
(78 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887145Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-78 (78 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887149Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887154Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-79 (79 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887157Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-79 (79 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887162Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887168Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-80 (80 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887171Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-80 (80 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887174Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887179Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-81 (81 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887182Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-81 (81 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887186Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887191Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-82 (82 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887194Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-82 (82 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887197Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887202Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-83 (83 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887205Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-83 (83 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887209Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887214Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-84 (84 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887218Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-84 (84 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887221Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887226Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-85 (85 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887229Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-85 (85 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887233Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887238Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-86 (86 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887241Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-86 (86 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887244Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by 
[1:97:2132]) 2024-11-21T09:20:25.887249Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-87 (87 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887252Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-87 (87 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887256Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887261Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-88 (88 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887264Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-88 (88 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887268Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887273Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-89 (89 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887276Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-89 (89 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887281Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887286Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-90 (90 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887290Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-90 (90 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887293Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887298Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-91 (91 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887302Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-91 (91 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887305Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887310Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-92 (92 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887313Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-92 (92 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887317Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887322Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-93 (93 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887326Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-93 (93 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887329Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887334Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-94 (94 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887337Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-94 (94 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887341Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887346Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-95 (95 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887349Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-95 (95 by [1:97:2132]) to queue 
queue_compaction0 2024-11-21T09:20:25.887353Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887359Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-96 (96 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887362Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-96 (96 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887365Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887370Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-97 (97 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887374Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-97 (97 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887377Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887382Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-98 (98 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887388Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-98 (98 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887392Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887397Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-99 (99 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887401Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-99 (99 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887405Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887411Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-100 (100 by [1:97:2132]) priority=5 resources={400, 400} 2024-11-21T09:20:25.887414Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-100 (100 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:25.887417Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887431Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-0 (0 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T09:20:25.887436Z node 1 :RESOURCE_BROKER DEBUG: Use ID 101 for submitted task 2024-11-21T09:20:25.887440Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-0 (0 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.887444Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-0 (0 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.887449Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-0 (0 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.887457Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-0 (0 by [1:97:2132])) 2024-11-21T09:20:25.887461Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:25.887467Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-102 (102 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T09:20:25.887471Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-102 (102 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.887474Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-102 (102 by 
[1:97:2132]) 2024-11-21T09:20:25.887477Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:25.887486Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-0 (0 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T09:20:25.887492Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 10.000000 (remove task task-0 (0 by [1:97:2132])) 2024-11-21T09:20:25.887497Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 0.000000 to 10.000000 2024-11-21T09:20:25.887503Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 (in-fly consumption {400, 400}) 2024-11-21T09:20:25.887507Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-102 (102 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:25.887510Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-102 (102 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:25.887515Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 10.000000 to 200.500000 (insert task task-102 (102 by [1:97:2132])) 2024-11-21T09:20:25.887519Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: LabeledCountersByGroup { Group: "group1/group2" LabeledCounter { Value: 39 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "/" } CounterNames: "value1" LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } >> TTabletPipeTest::TestShutdown [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2024-11-21T09:20:20.049204Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0041fe/r3tmp/tmpg9YjWq//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T09:20:20.049576Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:22.175490Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0041fe/r3tmp/tmpg9YjWq//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T09:20:22.175770Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:23.212518Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0041fe/r3tmp/tmpg9YjWq//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T09:20:23.212804Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:24.235928Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0041fe/r3tmp/tmpg9YjWq//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T09:20:24.236255Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in 
StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T09:20:25.262921Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0041fe/r3tmp/tmpg9YjWq//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T09:20:25.263135Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2024-11-21T09:20:26.112442Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:26.112523Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T09:20:26.112532Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:26.112539Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1 (1 by [1:97:2132]) from queue queue_default 2024-11-21T09:20:26.112543Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:26.112553Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:26.112562Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:26.112566Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.112570Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [1:97:2132]) 2024-11-21T09:20:26.112576Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T09:20:26.112580Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.112585Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [1:97:2132]) 2024-11-21T09:20:26.112588Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T09:20:26.112799Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_compaction1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2024-11-21T09:20:26.112839Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:26.112847Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 0.000000 to 173218082611.200012 2024-11-21T09:20:26.112852Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue 
queue_default from 0.000000 to 500.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:26.112855Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.112859Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.112864Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-2 (2 by [1:97:2132]) from queue queue_compaction0 2024-11-21T09:20:26.112868Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.112872Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:26.112877Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-3 (3 by [1:97:2132]) from queue queue_compaction1 2024-11-21T09:20:26.112881Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.112886Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:26.112895Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2024-11-21T09:20:26.112947Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2024-11-21T09:20:26.112955Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2024-11-21T09:20:26.112985Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown1" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2024-11-21T09:20:26.112992Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2024-11-21T09:20:26.113020Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default1" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: 
"compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2024-11-21T09:20:26.113026Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" 2024-11-21T09:20:26.113054Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2024-11-21T09:20:26.113081Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:26.113086Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 0.000000 to 173218082611.200012 2024-11-21T09:20:26.113090Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 500.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T09:20:26.113094Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.113100Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 6928723304.448001 2024-11-21T09:20:26.113104Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T09:20:26.113107Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_default 2024-11-21T09:20:26.113113Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 500.000000 to 173218083411.200012 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T09:20:26.113117Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2024-11-21T09:20:26.352941Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T09:20:26.353000Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=5 resources={500, 0} 2024-11-21T09:20:26.353005Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.353012Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {500, 0} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:26.353016Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.353024Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T09:20:26.353031Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [2:97:2132]) priority=5 resources={500, 0} 2024-11-21T09:20:26.353034Z node 2 :RESOURCE_BROKER DEBUG: Assigning 
waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.353038Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T09:20:26.353049Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {500, 0}) 2024-11-21T09:20:26.353055Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 2024-11-21T09:20:26.353059Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {500, 0} for task task-2 (2 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:26.353062Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.353066Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1000.000000 (insert task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:26.353071Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [2:97:2132]) (release resources {500, 0}) 2024-11-21T09:20:26.353076Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 1000.000000 to 500.000000 (remove task task-2 (2 by [2:97:2132])) 2024-11-21T09:20:26.353080Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 0.000000 to 500.000000 2024-11-21T09:20:26.353085Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [2:97:2132]) priority=5 resources={250, 0} 2024-11-21T09:20:26.353088Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.353091Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {250, 0} for task task-3 (3 by [2:97:2132]) from queue queue_compaction1 2024-11-21T09:20:26.353094Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T09:20:26.353098Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 500.000000 to 987.500000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T09:20:26.353104Z node 2 :RESOURCE_BROKER DEBUG: Submitted new scan task task-4 (4 by [2:97:2132]) priority=5 resources={0, 800} 2024-11-21T09:20:26.353107Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [2:97:2132]) to queue queue_scan 2024-11-21T09:20:26.353112Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 500.000000 to 750.000000 (in-fly consumption {250, 0}) 2024-11-21T09:20:26.353115Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-4 (4 by [2:97:2132]) 2024-11-21T09:20:26.353120Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [2:97:2132]) priority=5 resources={250, 0} 2024-11-21T09:20:26.353123Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [2:97:2132]) to queue queue_compaction0 2024-11-21T09:20:26.353127Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction1 from 750.000000 to 1000.000000 (in-fly consumption {250, 0}) 2024-11-21T09:20:26.353135Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-4 (4 by [2:97:2132]) 2024-11-21T09:20:26.353138Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {250, 0} for task task-5 (5 by [2:97:2132]) from queue queue_compaction0 2024-11-21T09:20:26.353141Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [2:97:2132]) to queue 
queue_compaction0 2024-11-21T09:20:26.353145Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 1500.000000 (insert task task-5 (5 by [2:97:2132])) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2024-11-21T09:20:26.142019Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2024-11-21T09:20:26.142182Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2024-11-21T09:20:26.142188Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.170556s 2024-11-21T09:20:26.265051Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Plan] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] >> TPipeCacheTest::TestAutoConnect [GOOD] |95.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[library-package_override--Debug] [SKIPPED] Test command err: 127.0.0.1 - - [21/Nov/2024 09:20:23] "GET /lib2.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:23] "GET /lib1.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:24] "GET /lib2.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:24] "GET /lib1.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:25] "GET /lib2.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:25] "GET /lib1.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:25] "GET /lib2.sql HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 09:20:25] "GET /lib1.sql HTTP/1.1" 200 - |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> YdbProxy::DescribePath >> test.py::test[blocks-date_equals_scalar--Analyze] >> YdbProxy::CopyTable >> YdbProxy::ReadTopic >> YdbProxy::DropTable >> PartitionEndWatcher::EmptyPartition [GOOD] >> YdbProxy::CreateTable >> YdbProxy::MakeDirectory >> YdbProxy::RemoveDirectory >> YdbProxy::ListDirectory >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TOlap::Decimal >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::AlterTable [GOOD] >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> YdbProxy::DropTopic [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> YdbProxy::OAuthToken [GOOD] >> YdbProxy::DescribeTopic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2024-11-21T09:20:27.872373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660005426200597:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.872395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00433d/r3tmp/tmpcwNTEL/pdisk_1.dat 2024-11-21T09:20:27.941410Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27861 TServer::EnableGrpc on GrpcPort 29350, node 1 2024-11-21T09:20:27.972581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.972614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.973669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2024-11-21T09:20:27.998435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.081802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:28.172808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.231113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:126:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:161:2057] recipient: [1:159:2166] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:161:2057] recipient: [1:159:2166] Leader for TabletID 9437185 is [1:165:2170] sender: [1:166:2057] recipient: [1:159:2166] Leader for TabletID 9437185 is [1:165:2170] sender: [1:201:2057] recipient: [1:14:2061] >> YdbProxy::CreateCdcStream [GOOD] >> TOlap::Decimal [GOOD] >> YdbProxy::StaticCreds [GOOD] >> test.py::test[blocks-date_equals_scalar--Analyze] [GOOD] >> test.py::test[blocks-date_equals_scalar--Debug] >> YdbProxy::DescribeTable [GOOD] >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2024-11-21T09:20:27.866302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660006839558858:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.866534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004315/r3tmp/tmpBNQip6/pdisk_1.dat 2024-11-21T09:20:27.933103Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13144 TServer::EnableGrpc on GrpcPort 9370, node 1 2024-11-21T09:20:27.966447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.966491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.967607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.080742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.305188Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660011307490930:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.305318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004315/r3tmp/tmpGvwcHp/pdisk_1.dat 2024-11-21T09:20:28.318776Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5202 TServer::EnableGrpc on GrpcPort 15843, node 2 2024-11-21T09:20:28.339213Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.339226Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.339230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.339275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.405266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.405290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.406315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.408006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2024-11-21T09:20:27.862559Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660004823792979:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.862575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004341/r3tmp/tmpszSUtk/pdisk_1.dat 2024-11-21T09:20:27.934065Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12936 TServer::EnableGrpc on GrpcPort 7696, node 1 2024-11-21T09:20:27.962545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.962574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.963597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.080814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.309029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660011147242750:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.309057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004341/r3tmp/tmph6PMzP/pdisk_1.dat 2024-11-21T09:20:28.319586Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5211 TServer::EnableGrpc on GrpcPort 3110, node 2 2024-11-21T09:20:28.344387Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.344406Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.344408Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.344464Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.409526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.409549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.410638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.411218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.469304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.471063Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2024-11-21T09:20:28.471073Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T09:20:28.476018Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2024-11-21T09:20:28.476062Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2024-11-21T09:20:27.859641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660003872165793:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.859666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430f/r3tmp/tmpGpyb8D/pdisk_1.dat TClient is connected to server localhost:29770 TServer::EnableGrpc on GrpcPort 61030, node 1 2024-11-21T09:20:27.953131Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:27.960610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.960639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.961706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:28.081528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:28.293034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660010735536493:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.293212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430f/r3tmp/tmpgmYt8U/pdisk_1.dat 2024-11-21T09:20:28.302538Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27557 TServer::EnableGrpc on GrpcPort 28853, node 2 2024-11-21T09:20:28.331426Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.331456Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.331458Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.331512Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.393286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.393315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.394377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.395050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.536976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2024-11-21T09:20:27.859644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660004646402897:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.859664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004307/r3tmp/tmp8p059O/pdisk_1.dat TClient is connected to server localhost:26880 TServer::EnableGrpc on GrpcPort 61946, node 1 2024-11-21T09:20:27.960427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.960452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.961508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.965277Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:27.998404Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.082340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.134218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.302368Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660008692315703:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.302389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004307/r3tmp/tmpH8F56R/pdisk_1.dat 2024-11-21T09:20:28.312908Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1622 TServer::EnableGrpc on GrpcPort 1399, node 2 2024-11-21T09:20:28.330058Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.330070Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.330072Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.330120Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.402776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.402803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.403869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.405128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.533084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2024-11-21T09:20:27.859650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660003967129958:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.859671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00432b/r3tmp/tmpE7MXSs/pdisk_1.dat TClient is connected to server localhost:28636 TServer::EnableGrpc on GrpcPort 17608, node 1 2024-11-21T09:20:27.953113Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:27.960699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.960727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.961819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.080776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.126874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.198327Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T09:20:28.325092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660010478766095:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.325287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00432b/r3tmp/tmpbFYQ3X/pdisk_1.dat 2024-11-21T09:20:28.333959Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14328 TServer::EnableGrpc on GrpcPort 11647, node 2 2024-11-21T09:20:28.353648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.353667Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.353669Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.353709Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.425568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.425599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.426658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.427858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
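The NET_CLASSIFIER warnings that recur in each test above follow the same fallback chain: the distributable config arrives empty, the classifier then tries a local file, and when that also fails it reports "got bad distributable configuration" and the test proceeds without it. The following is only a minimal C++ sketch of that kind of fallback logic; the names (TNetClassifierConfig, LoadDistributableConfig, LoadFromFile, InitNetClassifier) are invented for the illustration and are not the YDB implementation.

#include <iostream>
#include <optional>
#include <string>

// Hypothetical stand-ins used only for this sketch; not YDB types.
struct TNetClassifierConfig { std::string Data; };

// Stubbed sources: in a real system these would query the cluster / read a file.
std::optional<TNetClassifierConfig> LoadDistributableConfig() { return std::nullopt; }
std::optional<TNetClassifierConfig> LoadFromFile(const std::string&) { return std::nullopt; }

// Mirrors the order of the NET_CLASSIFIER messages in the log: distributable
// config first, then a local file, then "got bad distributable configuration".
TNetClassifierConfig InitNetClassifier(const std::string& filePath) {
    if (auto cfg = LoadDistributableConfig()) return *cfg;
    std::cerr << "distributable config is empty, broken or outdated, will use file\n";
    if (auto cfg = LoadFromFile(filePath)) return *cfg;
    std::cerr << "failed to initialize from file\n";
    std::cerr << "got bad distributable configuration\n";
    return {}; // proceed without a classifier config, as the tests do
}

int main() {
    InitNetClassifier("/nonexistent/netdata.tsv");
}

Both stubs fail on purpose here, so running the sketch prints the same sequence of messages the tests emit before continuing.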
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:20:28.405528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:28.405552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:28.405555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:28.405559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:28.405569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:28.405572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:28.405578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:28.405643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:28.416076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:28.416093Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:28.418303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:28.418961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:28.418993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:28.421151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:28.421369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:28.433142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:28.434167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:28.438927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:28.442142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:28.442158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:28.442186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:28.442192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:28.442196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T09:20:28.442208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.443627Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:20:28.457118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:28.458100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.458169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:28.458203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:28.458208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.458889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:28.458929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:28.458966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.458972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:28.458975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:28.458979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:28.459342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.459360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:28.459362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:28.459714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.459739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.459743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:28.459748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:28.460160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:28.460685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:28.461603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:28.461792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:28.461815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:28.461821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:28.461870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:28.461875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:28.461896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:28.461905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:28.462361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:28.462367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:28.462399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:28.462402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:20:28.462453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.462458Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:28.462468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:28.462471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:28.462474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:28.462477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:28.462480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:28.462482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:28.462490Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:28.462494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:28.462497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:20:28.462715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:28.462726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:28.462730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:20:28.462733Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:20:28.462736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:28.462744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 1-21T09:20:28.528548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2024-11-21T09:20:28.528623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:28.528638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:28.528644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2024-11-21T09:20:28.528685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2024-11-21T09:20:28.528713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:28.528723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T09:20:28.529183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:28.529190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:28.529227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:20:28.529261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:28.529266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:20:28.529271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:20:28.529347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.529353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:20:28.529360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T09:20:28.529449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:20:28.529458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:20:28.529462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:20:28.529466Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:20:28.529471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:28.529699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:20:28.529709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:20:28.529713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:20:28.529716Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:20:28.529720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:20:28.529730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:20:28.529904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T09:20:28.530129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:20:28.530298Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:20:28.541060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T09:20:28.541099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:28.541124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T09:20:28.541134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T09:20:28.541218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2024-11-21T09:20:28.541224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:20:28.541234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:20:28.541831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.541897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.541914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:20:28.541921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:20:28.541934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:20:28.541938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:20:28.541944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:20:28.541954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 101 2024-11-21T09:20:28.541959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:20:28.541963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:20:28.541966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:20:28.541989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:20:28.542309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:20:28.542318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:364:2344] TestWaitNotification: OK eventTxId 101 2024-11-21T09:20:28.542435Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:28.542489Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 61us result status StatusSuccess 2024-11-21T09:20:28.542642Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2024-11-21T09:20:27.866236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660002866852947:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.866256Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430e/r3tmp/tmp86fMVU/pdisk_1.dat 2024-11-21T09:20:27.936859Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8561 TServer::EnableGrpc on GrpcPort 16380, node 1 2024-11-21T09:20:27.966141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.966164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.967222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.082168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.091763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.319962Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660008121507201:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.320009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430e/r3tmp/tmpneEOlM/pdisk_1.dat 2024-11-21T09:20:28.329601Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22491 TServer::EnableGrpc on GrpcPort 4300, node 2 2024-11-21T09:20:28.357016Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.357029Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.357030Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.357053Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.420332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.420362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.421512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.421687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.437153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732180828469 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732180828469 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... 
(TRUNCATED) >> YdbProxy::CreateTopic >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpImmediateEffects::Delete >> KqpImmediateEffects::DeleteAfterUpsert >> KqpEffects::InsertAbort_Params_Duplicates |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> KqpImmediateEffects::UpsertDuplicates >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> YdbProxy::AlterTopic [GOOD] >> TOlap::CreateDropStandaloneTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T09:16:34.203227Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.203238Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.203242Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T09:16:34.203387Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:16:34.203404Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.203408Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.204054Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008808s 2024-11-21T09:16:34.204199Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T09:16:34.204222Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.204224Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.204242Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008398s 2024-11-21T09:16:34.204348Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T09:16:34.204358Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.204361Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T09:16:34.204376Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007182s 2024-11-21T09:16:34.220630Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732180594220619 2024-11-21T09:16:34.307208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659001999846301:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.307276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004624/r3tmp/tmpxAPOIH/pdisk_1.dat 2024-11-21T09:16:34.341525Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:16:34.342746Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659003831456687:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:16:34.342858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:16:34.346233Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:16:34.372654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31279, node 1 2024-11-21T09:16:34.404654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/004624/r3tmp/yandex8QzXeV.tmp 2024-11-21T09:16:34.404668Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/004624/r3tmp/yandex8QzXeV.tmp 2024-11-21T09:16:34.404719Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/004624/r3tmp/yandex8QzXeV.tmp 2024-11-21T09:16:34.404764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:16:34.408097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.408133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.410034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:16:34.439479Z INFO: TTestServer started on Port 17598 GrpcPort 31279 2024-11-21T09:16:34.442351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:16:34.442379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:16:34.443814Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:16:34.444060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17598 PQClient connected to localhost:31279 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:16:34.461935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T09:16:34.618140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001999846982:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.618177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.618257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001999846994:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.619088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T09:16:34.619640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659001999847025:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.619667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:16:34.623136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659001999846996:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T09:16:34.695799Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659003831456782:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:34.695951Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM2MzQ2YjMtMjFiN2RmOWUtZTJhYjVjMjktMzNhNDQ2ZTI=, ActorId: [2:7439659003831456752:2277], ActorState: ExecuteState, TraceId: 01jd704kz49kpncvak2f2cr7gn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:34.696676Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:34.708116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.720705Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659001999847155:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:16:34.720811Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTgzMGMwMTItOTE3MzZmYTktNDY4NGVhZGItNGY4MmEyYTU=, ActorId: [1:7439659001999846964:2299], ActorState: ExecuteState, TraceId: 01jd704kxrfa5gggp9xj189j28, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:16:34.721089Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:16:34.778710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T09:16:34.844548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:31279", true, true, 1000); 2024-11-21T09:16:34.878491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd704m5cdz8jmnd0675yj12j, Database: , ... o 9 partNo 0 2024-11-21T09:20:28.618966Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T09:20:28.618968Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T09:20:28.618970Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T09:20:28.619006Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732180828618 2024-11-21T09:20:28.619039Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T09:20:28.619756Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T09:20:28.619766Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T09:20:28.619776Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T09:20:28.619779Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619782Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T09:20:28.619783Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619785Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T09:20:28.619787Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619790Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T09:20:28.619791Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619795Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T09:20:28.619797Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619800Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T09:20:28.619801Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619803Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T09:20:28.619804Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619810Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T09:20:28.619811Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T09:20:28.619814Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T09:20:28.619815Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:28.619818Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T09:20:28.619820Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T09:20:28.619827Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:20:28.619837Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:20:28.619845Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T09:20:28.619872Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T09:20:28.619880Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T09:20:28.619908Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T09:20:28.619914Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:20:28.619925Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732180828618 queuesize 0 startOffset 0 2024-11-21T09:20:28.619985Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { } 2024-11-21T09:20:28.619994Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 1 2024-11-21T09:20:28.619997Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 2 2024-11-21T09:20:28.620004Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 3 2024-11-21T09:20:28.620005Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 4 2024-11-21T09:20:28.620007Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 5 2024-11-21T09:20:28.620009Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 6 2024-11-21T09:20:28.620013Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 7 2024-11-21T09:20:28.620014Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 8 2024-11-21T09:20:28.620016Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 9 2024-11-21T09:20:28.620018Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: acknoledged message 10 2024-11-21T09:20:28.620078Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: close. 
Timeout = 0 ms 2024-11-21T09:20:28.620083Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session will now close 2024-11-21T09:20:28.620088Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: aborting 2024-11-21T09:20:28.620225Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:28.620230Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0] Write session: destroy 2024-11-21T09:20:28.620334Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0 grpc read done: success: 0 data: 2024-11-21T09:20:28.620362Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0 grpc read failed 2024-11-21T09:20:28.620369Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0 grpc closed 2024-11-21T09:20:28.620374Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|eb77296a-459ef9bc-6506dda1-2901f1af_0 is DEAD 2024-11-21T09:20:28.620679Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:28.620699Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:28.620711Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439660007804029144:2642] destroyed 2024-11-21T09:20:28.620723Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> KqpEffects::InsertAbort_Select_Success >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2024-11-21T09:20:27.859677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660003620919284:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.859702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004350/r3tmp/tmp4dOX05/pdisk_1.dat 2024-11-21T09:20:27.932579Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9924 TServer::EnableGrpc on GrpcPort 29405, node 1 2024-11-21T09:20:27.960465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.960488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.961607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.081803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.131151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.500949Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660007953078617:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.500989Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004350/r3tmp/tmpTirUI4/pdisk_1.dat 2024-11-21T09:20:28.508051Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10172 TServer::EnableGrpc on GrpcPort 17136, node 2 2024-11-21T09:20:28.524384Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.524398Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.524400Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.524440Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.601484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:28.601510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:28.602575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:28.603222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:28.726555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.733773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:28.963463Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660011103732901:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:28.963657Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004350/r3tmp/tmp5zSqIB/pdisk_1.dat 2024-11-21T09:20:28.974354Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16016 TServer::EnableGrpc on GrpcPort 20212, node 3 2024-11-21T09:20:28.991725Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:28.991739Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:28.991740Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:28.991770Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.063870Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.063898Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.064947Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.066137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:29.127795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> YdbProxy::DescribeConsumer [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir |95.7%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpInplaceUpdate::SingleRowStr >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets >> TPersQueueTest::EventBatching [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> TPersQueueTest::CreateTopicWithMeteringMode >> KqpEffects::InsertAbort_Literal_Conflict ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2024-11-21T09:20:29.131027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660014030622599:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.131186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f8/r3tmp/tmpiGFYGo/pdisk_1.dat TClient is connected to server localhost:13848 TServer::EnableGrpc on GrpcPort 62987, node 1 2024-11-21T09:20:29.189960Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:29.197481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.197496Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.197497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.197529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:29.226417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.231384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.231414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.232547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.480132Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660013446243486:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.480152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0042f8/r3tmp/tmpzRmb9u/pdisk_1.dat 2024-11-21T09:20:29.489553Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7948 TServer::EnableGrpc on GrpcPort 10336, node 2 2024-11-21T09:20:29.516301Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.516316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.516318Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.516360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.580097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.580125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.581201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.583400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpEffects::InsertAbort_Params_Duplicates [GOOD] >> KqpEffects::InsertAbort_Params_Conflict >> KqpImmediateEffects::UpdateOn >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KqpImmediateEffects::UpdateAfterInsert >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> KqpWrite::ProjectReplace >> KqpImmediateEffects::Replace >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates >> test.py::test[blocks-date_equals_scalar--Debug] [GOOD] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 9981, MsgBus: 10523 2024-11-21T09:20:29.252881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660012774674163:2248];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.252971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004719/r3tmp/tmpQGypfH/pdisk_1.dat 2024-11-21T09:20:29.328928Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9981, node 1 2024-11-21T09:20:29.352306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.352348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.353392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.369333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.369344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.369346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.369372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10523 TClient is connected to server localhost:10523 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.451535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.461721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:29.528977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.543550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.551703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.577473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660012774675499:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.577497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.734288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.743677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.798483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.806994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.814232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.822977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660012774676015:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.823002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660012774676020:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.823006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.823681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.827137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660012774676022:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.030099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::SingleRowStr [GOOD] >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict [GOOD] >> KqpEffects::DeletePkPrefixWithIndex >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2024-11-21T09:20:29.911800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660015294017876:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.911920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmp3qXWF8/pdisk_1.dat 2024-11-21T09:20:29.961654Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10675, node 1 2024-11-21T09:20:29.977872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.977884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.977886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.977920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:29.997085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.998177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:29.998197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.998844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:29.998901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:29.998909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:20:29.999303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:29.999312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:20:29.999445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:20:29.999634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.000615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180830044, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:30.000628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:20:30.000711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:20:30.001106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.001161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.001176Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:20:30.001193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:20:30.001205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:20:30.001222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:20:30.001671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:20:30.001691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:20:30.001695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:30.001705Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:20:30.010677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.010752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:30.011431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2024-11-21T09:20:30.011483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.011536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.011553Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:30.011601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:20:30.011732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:30.011753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:30.011762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:30.011821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:30.011824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:30.011826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:20:30.012089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.012105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.012470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180830058, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:30.012483Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180830058, at schemeshard: 72057594046644480 2024-11-21T09:20:30.012504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T09:20:30.012824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.012868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.012882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 
281474976710658:0 ProgressState 2024-11-21T09:20:30.012895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T09:20:30.012917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T09:20:30.012929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T09:20:30.013045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:30.013058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:30.013061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:20:30.013088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:30.013094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:30.013095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:20:30.013100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T09:20:30.013501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Backup "/Root" to "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/"Create temporary directory "/Root/~backup_20241121T092030"2024-11-21T09:20:30.014439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/~backup_20241121T092030, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.014479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:30.014841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/~backup_20241121T092030 2024-11-21T09:20:30 ... 
44480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T09:20:30.046292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T09:20:30.046300Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2024-11-21T09:20:30.046332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T09:20:30.046339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T09:20:30.046340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:20:30.051015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180830100, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:30.051030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710663:0, step: 1732180830100, at schemeshard: 72057594046644480 2024-11-21T09:20:30.051060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710663:0 progress is 1/1 2024-11-21T09:20:30.051084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710663:0 2024-11-21T09:20:30.051101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710663, publications: 2, subscribers: 1 2024-11-21T09:20:30.051487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.051541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.051784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T09:20:30.051794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T09:20:30.051797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 11 2024-11-21T09:20:30.051824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T09:20:30.051830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T09:20:30.051831Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:20:30.051838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710663, subscribers: 1 2024-11-21T09:20:30.052351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/" to "/Root"Process "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/dir"Restore empty directory "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/dir" to "/Root/dir"2024-11-21T09:20:30.057738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.057776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:30.058402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710664, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2024-11-21T09:20:30.058482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.058535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.058564Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:30.058670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710664, at schemeshard: 72057594046644480 2024-11-21T09:20:30.058731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T09:20:30.058743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T09:20:30.058749Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2024-11-21T09:20:30.058809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T09:20:30.058816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T09:20:30.058818Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:20:30.065051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180830114, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:30.065070Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180830114, at schemeshard: 72057594046644480 2024-11-21T09:20:30.065113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 128 -> 240 2024-11-21T09:20:30.065559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.065620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.065637Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710664:0 ProgressState 2024-11-21T09:20:30.065648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2024-11-21T09:20:30.065657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2024-11-21T09:20:30.065672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710664, publications: 2, subscribers: 1 2024-11-21T09:20:30.066009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T09:20:30.066020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T09:20:30.066024Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2024-11-21T09:20:30.066054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T09:20:30.066058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T09:20:30.066059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:20:30.066065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 Restore ACL "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/jptk/001f40/r3tmp/tmpuhIVa2/dir/permissions.pb"2024-11-21T09:20:30.068409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/dir, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.068449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:30.068459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.068471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2024-11-21T09:20:30.068492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2024-11-21T09:20:30.068496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 2, subscribers: 0 2024-11-21T09:20:30.068939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/dir, set owner:root@builtin 2024-11-21T09:20:30.068972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:30.069018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:30.069329Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710665 2024-11-21T09:20:30.069358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710665 2024-11-21T09:20:30.069362Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 4 2024-11-21T09:20:30.069441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976710665 2024-11-21T09:20:30.069448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2024-11-21T09:20:30.069449Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T09:20:30.069455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 Restore completed successfully ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 7627, MsgBus: 3399 2024-11-21T09:20:29.757816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660013001935796:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.757933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004704/r3tmp/tmpIGnzfJ/pdisk_1.dat 2024-11-21T09:20:29.813477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7627, node 1 2024-11-21T09:20:29.828353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.828365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.828367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.828399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3399 2024-11-21T09:20:29.858243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.858266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.859325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.876664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.885761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.946153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.961031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.969625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.033190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017296904650:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.033226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.062406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.069274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.080223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.094699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.101025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.108493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.124649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017296905145:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.124670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.124735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017296905150:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.125424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.128301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660017296905152:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.281912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpImmediateEffects::UpdateOn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 4837, MsgBus: 14427 2024-11-21T09:20:29.981886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660013560862177:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.981912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004700/r3tmp/tmpizqNwg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4837, node 1 2024-11-21T09:20:30.048255Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:30.055123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.055138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.055140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.055175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14427 2024-11-21T09:20:30.082041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.082065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.083133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.102169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.112550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.128010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.144493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.155510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.270969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017855831007:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.270996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.299834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.306365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.319197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.332611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.339414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.347109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.365177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017855831522:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.365202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.365210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017855831527:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.366123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.373671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660017855831529:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.529851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.582326Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T09:20:30.582352Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2024-11-21T09:20:30.582385Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key sets: 1 2024-11-21T09:20:30.582421Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:20:30.582438Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T09:20:30.582484Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T09:20:30.582512Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710675. Shard resolve complete, resolved shards: 1 2024-11-21T09:20:30.582518Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T09:20:30.582524Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2024-11-21T09:20:30.582538Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {18446744073709551615, 1732180830618} 2024-11-21T09:20:30.582607Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. 
Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T09:20:30.582616Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439660017855832027:2454], 2024-11-21T09:20:30.582619Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01jd70btb91vkp8nvbcgtr3c8a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7439660017855832027:2454], 2024-11-21T09:20:30.582621Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832023:2454] TxId: 281474976710675. Ctx: { TraceId: 01 ... on: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:20:30.607734Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T09:20:30.607769Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T09:20:30.607811Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710677. Shard resolve complete, resolved shards: 1 2024-11-21T09:20:30.607818Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T09:20:30.607824Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2024-11-21T09:20:30.607831Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {18446744073709551615, 1732180830618} 2024-11-21T09:20:30.607901Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T09:20:30.607910Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439660017855832057:2454], 2024-11-21T09:20:30.607916Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7439660017855832057:2454], 2024-11-21T09:20:30.607918Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T09:20:30.608011Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439660017855832057:2454], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T09:20:30.608018Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439660017855832057:2454], 2024-11-21T09:20:30.608022Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7439660017855832057:2454], 2024-11-21T09:20:30.608451Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. 
Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439660017855832057:2454], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 250 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 36 FinishTimeMs: 1732180830608 OutputRows: 1 OutputBytes: 22 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 22 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 23 WaitInputTimeUs: 212 HostName: "ghrun-qcxhsi27zq" NodeId: 1 StartTimeMs: 1732180830607 } MaxMemoryUsage: 1048576 } 2024-11-21T09:20:30.608463Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439660017855832057:2454] 2024-11-21T09:20:30.608500Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:20:30.608512Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832053:2454] TxId: 281474976710677. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000250s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2024-11-21T09:20:30.608605Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710678. Resolved key sets: 0 2024-11-21T09:20:30.608632Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {18446744073709551615, 1732180830618} 2024-11-21T09:20:30.608669Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037919, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976710675 DataShard: 72075186224037919 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 16 HasWrites: true } SendingShards: 72075186224037919 ReceivingShards: 72075186224037919 Op: Commit, immediate: 1 2024-11-21T09:20:30.608681Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. 
Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T09:20:30.608697Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T09:20:30.608705Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037919 not finished yet: Executing 2024-11-21T09:20:30.608710Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037919 (Executing), 2024-11-21T09:20:30.608717Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T09:20:30.609925Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037919, status: COMPLETE, error: 2024-11-21T09:20:30.609950Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:20:30.609962Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439660017855832060:2454] TxId: 281474976710678. Ctx: { TraceId: 01jd70btc389qnshyhkvaphrzj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUwYjNjY2UtZTI2MjM4OTgtMTI2YzMzNS1mMWU5ZGFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpEffects::InsertAbort_Params_Conflict [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 27120, MsgBus: 23853 2024-11-21T09:20:30.094280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660016014919884:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.094456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046fa/r3tmp/tmpcEgFHi/pdisk_1.dat 2024-11-21T09:20:30.161588Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27120, node 1 2024-11-21T09:20:30.173249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.173262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.173263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.173295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23853 2024-11-21T09:20:30.194464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.194487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.195529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.220185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.231014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.245363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.263022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.273188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.372127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016014921420:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.372153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.408956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.414834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.422723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.430388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.437491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.451362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.459974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016014921930:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.460020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.460034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016014921935:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.460675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.464101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660016014921939:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.669626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr [GOOD] Test command err: Trying to start YDB, gRPC: 21186, MsgBus: 4164 2024-11-21T09:20:29.981279Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660015339580739:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.981292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046ff/r3tmp/tmp3sOuo4/pdisk_1.dat 2024-11-21T09:20:30.031521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21186, node 1 2024-11-21T09:20:30.050701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.050719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.050720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.050752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4164 2024-11-21T09:20:30.081688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.081723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.082818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.110224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.117139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.177563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.193113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.200946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.247526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019634549572:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.247564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.278913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.333404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.345904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.352830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.407159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.416062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.424614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019634550089:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.424645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019634550094:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.424648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.425189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.429044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660019634550096:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.577572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 7343, MsgBus: 14734 2024-11-21T09:20:29.253039Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660014928748334:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.253088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004714/r3tmp/tmpdhe392/pdisk_1.dat 2024-11-21T09:20:29.324422Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7343, node 1 2024-11-21T09:20:29.352426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.352447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.353535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.370771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.370781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.370782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.370803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14734 TClient is connected to server localhost:14734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.451560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.461232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:29.479740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.535731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.591569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.654642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014928749682:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.654673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.733920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.744111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.750549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.758503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.764972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.776149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014928750184:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014928750189:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.777497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.785457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660014928750191:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 30496, MsgBus: 28579 2024-11-21T09:20:30.313840Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660017618128115:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.314053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004714/r3tmp/tmpYt96Dl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30496, node 2 2024-11-21T09:20:30.328507Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:30.329019Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.329029Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.329031Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.329061Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28579 TClient is connected to server localhost:28579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.413787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.413806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.414979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.416613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.425147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.432338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.445352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.458747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.573504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017618129642:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.573526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.578254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.583902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.591020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.598025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.604563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.612200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.620577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017618130142:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.620597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.620620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017618130147:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.621143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.625103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660017618130149:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpWrite::ProjectReplace [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 16977, MsgBus: 19077 2024-11-21T09:20:30.302688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660016930306423:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.302706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f0/r3tmp/tmp2BIM3B/pdisk_1.dat 2024-11-21T09:20:30.352559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16977, node 1 2024-11-21T09:20:30.371771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.371786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.371788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.371820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19077 2024-11-21T09:20:30.403096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.403120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.404170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.431528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.439301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.499852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.518070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.529389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.582291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016930307964:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.582328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.615808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.622048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.633224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.640491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.646647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.654153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.662723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016930308457:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.662746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.662781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660016930308462:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.663386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.667186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660016930308464:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.815303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 26425, MsgBus: 11389 2024-11-21T09:20:29.253413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660014853596949:2235];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.253474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004721/r3tmp/tmpq5rmsE/pdisk_1.dat 2024-11-21T09:20:29.323871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26425, node 1 2024-11-21T09:20:29.352290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.352320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.353377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.368874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.368888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.368889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.368941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11389 TClient is connected to server localhost:11389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.451539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.461222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:29.525783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.544147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.555699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.577964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014853598293:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.577991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.733928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.743705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.750948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.758528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.764796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.776165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014853598806:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014853598811:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.777538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.785483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660014853598813:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:29.990555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20783, MsgBus: 25150 2024-11-21T09:20:30.326823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660016385000301:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.327021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004721/r3tmp/tmpcynHaU/pdisk_1.dat 2024-11-21T09:20:30.338026Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20783, node 2 2024-11-21T09:20:30.346617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.346631Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.346633Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.346666Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25150 TClient is connected to server localhost:25150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.426993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.427022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.428078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.429793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.432528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.444841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.460637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.473448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.601109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660016385001824:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.601143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.606837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.615657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.626169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.634247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.647281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.661113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.669667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660016385002338:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.669694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.669702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660016385002343:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.670260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.674564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660016385002345:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.861209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 17749, MsgBus: 28015 2024-11-21T09:20:29.253099Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660014084881838:2063];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.253177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00471b/r3tmp/tmpkISAcg/pdisk_1.dat 2024-11-21T09:20:29.325983Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17749, node 1 2024-11-21T09:20:29.353035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.353070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.354113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.368875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.368887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.368889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.368947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28015 TClient is connected to server localhost:28015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.461547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.472189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:29.533828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.544690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.599367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.617620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014084883352:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.617653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.733844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.744101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.750568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.758509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.765176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.776140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014084883857:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014084883862:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.777491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.785429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660014084883864:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.050007Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660018379851484:2466], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jd70bsrr0mh2w9xpehjvv111. SessionId : ydb://session/3?node_id=1&id=NTRmYzY2M2UtODQ1MzdkOTEtZWY3NTk2NTUtODExM2E2NDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:20:30.050154Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660018379851485:2467], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd70bsrr0mh2w9xpehjvv111. SessionId : ydb://session/3?node_id=1&id=NTRmYzY2M2UtODQ1MzdkOTEtZWY3NTk2NTUtODExM2E2NDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660018379851481:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:30.051357Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTRmYzY2M2UtODQ1MzdkOTEtZWY3NTk2NTUtODExM2E2NDQ=, ActorId: [1:7439660014084884157:2454], ActorState: ExecuteState, TraceId: 01jd70bsrr0mh2w9xpehjvv111, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4740, MsgBus: 21685 2024-11-21T09:20:30.347417Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660017978962203:2060];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00471b/r3tmp/tmpoY3T2y/pdisk_1.dat 2024-11-21T09:20:30.354769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 4740, node 2 2024-11-21T09:20:30.365754Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.365766Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.365768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.365821Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:20:30.366135Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21685 TClient is connected to server localhost:21685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.447176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.447212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.448307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.449477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.456690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.465925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.482794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.494386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.691254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017978963742:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.691280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.694748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.700617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.709994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.716865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.723860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.730752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.740400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017978964232:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.740431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.740498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017978964237:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.741053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.744411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660017978964239:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.923919Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660017978964555:2466], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd70btnm3c7f17785fgn7e70. SessionId : ydb://session/3?node_id=2&id=NThmZDRkODItYTUzN2Q1MjEtNjcxOWZkNTYtMTc0ZTFhOWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:30.923978Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660017978964556:2467], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd70btnm3c7f17785fgn7e70. SessionId : ydb://session/3?node_id=2&id=NThmZDRkODItYTUzN2Q1MjEtNjcxOWZkNTYtMTc0ZTFhOWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660017978964552:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:30.924215Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NThmZDRkODItYTUzN2Q1MjEtNjcxOWZkNTYtMTc0ZTFhOWE=, ActorId: [2:7439660017978964522:2454], ActorState: ExecuteState, TraceId: 01jd70btnm3c7f17785fgn7e70, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace [GOOD] Test command err: Trying to start YDB, gRPC: 1277, MsgBus: 31165 2024-11-21T09:20:30.511931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660019357495558:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.511951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046ec/r3tmp/tmpKkDnvi/pdisk_1.dat 2024-11-21T09:20:30.578546Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1277, node 1 2024-11-21T09:20:30.592726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.592742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.592744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.592784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31165 2024-11-21T09:20:30.612291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.612315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.613372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.652079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.658272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.718882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.735496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.745751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.809432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019357497108:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.809453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.834800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.889490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.898904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.905829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.912531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.919849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.928626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019357497614:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.928656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.928660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019357497619:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.929237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.933054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660019357497621:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpImmediateEffects::UpsertExistingKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 31763, MsgBus: 22097 2024-11-21T09:20:29.253091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660011659387168:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.253165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00471f/r3tmp/tmpTbqDK3/pdisk_1.dat 2024-11-21T09:20:29.327687Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31763, node 1 2024-11-21T09:20:29.352434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.352454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.353534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.368874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.368918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.368920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.368955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22097 TClient is connected to server localhost:22097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.451550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.461221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:20:29.527199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.540728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.549375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.577199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011659388486:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.577221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.733879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.744128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.798725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.807041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.814207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.822248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011659389003:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.822290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.822291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011659389008:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.822853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.827145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660011659389010:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:29.991473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62349, MsgBus: 2283 2024-11-21T09:20:30.326938Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660017314624774:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.327148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00471f/r3tmp/tmpD5FSqM/pdisk_1.dat 2024-11-21T09:20:30.336018Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62349, node 2 2024-11-21T09:20:30.344853Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.344865Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.344867Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.344910Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2283 TClient is connected to server localhost:2283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.427109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.427137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.428261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.429423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.440994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.448926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.464957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.473371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.608107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017314626303:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.608134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.612559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.617324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.625654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.632613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.640263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.646579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.655243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017314626809:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.655273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.655332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660017314626814:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.655974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.660347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660017314626817:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.829385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.835533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.842828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpEffects::UpdateOn_Params >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpEffects::InsertAbort_Params_Success >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration >> KqpImmediateEffects::ConflictingKeyW1RWR2 |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpImmediateEffects::Upsert >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpImmediateEffects::UpdateAfterUpsert >> KqpWrite::Insert >> KqpImmediateEffects::InsertExistingKey >> KqpImmediateEffects::ForceImmediateEffectsExecution >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate >> KqpWrite::UpsertNullKey >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> KqpEffects::InsertAbort_Select_Conflict >> BackupRestoreS3::RestoreIndexTableSplitBoundaries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates [GOOD] Test command err: Trying to start YDB, gRPC: 31409, MsgBus: 19570 2024-11-21T09:20:29.716926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660014951664919:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.716960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004710/r3tmp/tmp1zXfMx/pdisk_1.dat 2024-11-21T09:20:29.762883Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31409, node 1 2024-11-21T09:20:29.776383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.776400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.776402Z node 
1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.776435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19570 TClient is connected to server localhost:19570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:29.816870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.816908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.818043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.842923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.853328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.914726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.931941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.940871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.997576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660014951666453:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.997605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.033298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.038505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.045652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.051798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.058820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.067740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.085342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019246634243:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.085382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.085439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019246634248:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.086282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.093531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660019246634250:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.250070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64347, MsgBus: 4880 2024-11-21T09:20:30.608974Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660015902741131:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.609158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004710/r3tmp/tmpoWp6iz/pdisk_1.dat 2024-11-21T09:20:30.622941Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64347, node 2 2024-11-21T09:20:30.629249Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.629262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.629263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.629300Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4880 TClient is connected to server localhost:4880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.709333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.709380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.710398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.712096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.716552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.724761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.740491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.752776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.867165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660015902742661:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.867194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.871731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.877997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.932313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.940980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.947688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.955150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.963548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660015902743177:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.963573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.963577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660015902743182:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.964142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.968130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660015902743184:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.142883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.191900Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660020197710892:2481], TxId: 281474976715675, task: 1. Ctx: { TraceId : 01jd70btxv68ts619rpj6wyh7s. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Zjc2YTRlM2YtYzg0ZjJhZjMtYWIwZGU4ZjYtN2NkMzlkYzA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:20:31.191962Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660020197710894:2482], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Zjc2YTRlM2YtYzg0ZjJhZjMtYWIwZGU4ZjYtN2NkMzlkYzA=. TraceId : 01jd70btxv68ts619rpj6wyh7s. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660020197710889:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:31.193098Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Zjc2YTRlM2YtYzg0ZjJhZjMtYWIwZGU4ZjYtN2NkMzlkYzA=, ActorId: [2:7439660020197710764:2454], ActorState: ExecuteState, TraceId: 01jd70btxv68ts619rpj6wyh7s, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 23161, MsgBus: 12172 2024-11-21T09:20:29.292502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660011644338494:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.292782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004724/r3tmp/tmpNCYair/pdisk_1.dat 2024-11-21T09:20:29.341389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23161, node 1 2024-11-21T09:20:29.368875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.368893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.368895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.368941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:20:29.392657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.392692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.393699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12172 TClient is connected to server localhost:12172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:29.452965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.461554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:29.529497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.548553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.557550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:29.577237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011644340027:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.577269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.728013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.734098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.743787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.750913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.758571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.764787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.776146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011644340540:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660011644340545:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.776162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.777492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:29.785193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660011644340547:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:29.988112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.089552Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDg0M2JlZDEtYWQ5ZjI5N2MtODUxMGExY2UtMmFiMzI1OQ==, ActorId: [1:7439660015939308355:2490], ActorState: ExecuteState, TraceId: 01jd70bsw19wa4fsb7y0bx1wfc, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T09:20:30.092453Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDg0M2JlZDEtYWQ5ZjI5N2MtODUxMGExY2UtMmFiMzI1OQ==, ActorId: [1:7439660015939308355:2490], ActorState: ReadyState, TraceId: 01jd70bswc6x2nbrf443trn53f, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5970, MsgBus: 10125 2024-11-21T09:20:30.374863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660018974033781:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.374886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004724/r3tmp/tmpGUQVic/pdisk_1.dat 2024-11-21T09:20:30.384950Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5970, node 2 2024-11-21T09:20:30.391827Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.391844Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.391845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.391882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10125 TClient is connected to server localhost:10125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:30.474427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.474449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.475609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.477778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.479616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.493851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.511706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:30.522197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.665534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660018974035108:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.665569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.669596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.675271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.681965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.689017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.696041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.702666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.711375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660018974035620:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.711402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.711405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660018974035625:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.711991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.716403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660018974035627:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.897657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpWrite::Insert [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] >> KqpImmediateEffects::Upsert [GOOD] >> KqpWrite::UpsertNullKey [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpImmediateEffects::InsertExistingKey [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate [GOOD] >> KqpInplaceUpdate::SingleRowArithm >> KqpEffects::UpdateOn_Select [GOOD] >> KqpEffects::InsertAbort_Select_Conflict [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 12966, MsgBus: 28580 2024-11-21T09:20:31.782833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660020486974208:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.783030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046dc/r3tmp/tmpbUvRmF/pdisk_1.dat 2024-11-21T09:20:31.825317Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12966, node 1 2024-11-21T09:20:31.840063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.840080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.840081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.840127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28580 TClient is connected to server localhost:28580 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:31.880792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.883005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.883024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.883418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.884130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.940951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.958634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.971258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.016480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024781943037:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.016505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.038660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.043553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.054072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.061132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.115790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.124081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.132143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024781943542:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.132173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.132176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024781943547:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.132817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.137185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660024781943549:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:32.321078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd [GOOD] >> KqpInplaceUpdate::SingleRowArithm [GOOD] >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> test.py::test[blocks-date_equals_scalar--ForceBlocks] [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpInplaceUpdate::BigRow >> KqpEffects::InsertRevert_Literal_Success >> TPersQueueTest::CheckKillBalancer [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> test.py::test[blocks-date_equals_scalar--Plan] [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] >> test.py::test[blocks-date_equals_scalar--Results] >> TPersQueueTest::CheckDeleteTopic >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 61268, MsgBus: 18408 2024-11-21T09:20:32.295579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660024749831283:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.295745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046a2/r3tmp/tmpCEW4AX/pdisk_1.dat 2024-11-21T09:20:32.332928Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61268, node 1 2024-11-21T09:20:32.349622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.349641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.349643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.349687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18408 TClient is connected to server localhost:18408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.396000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.396027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.397075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.417940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.423237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.483489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.498180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.509990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.554147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024749832816:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.554173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.580448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.586017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.640463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.694573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.704917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.711571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.720193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024749833335:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.720236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.720263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660024749833340:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.720800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.725379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660024749833342:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:32.879706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 27918, MsgBus: 62073 2024-11-21T09:20:30.115565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660019523763428:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.115768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f1/r3tmp/tmp4SN8pL/pdisk_1.dat 2024-11-21T09:20:30.168763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27918, node 1 2024-11-21T09:20:30.184513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.184530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.184532Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.184588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62073 2024-11-21T09:20:30.215649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.215678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.216744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.229861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.241494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.303618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.322717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.333254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.410884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019523764962:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.410911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.441020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.446442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.500528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.506862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.514108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.521654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.529282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019523765477:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.529307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.529310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019523765482:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.529938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.534545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660019523765484:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.739130Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660019523765800:2467], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWQzOTFjNzItMWYwNzZmMmUtY2VkYTUwOWItOTYyNDk5Yjg=. CustomerSuppliedId : . TraceId : 01jd70btfw2tk3d5ybyx1k9wfw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:30.739225Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660019523765801:2468], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd70btfw2tk3d5ybyx1k9wfw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWQzOTFjNzItMWYwNzZmMmUtY2VkYTUwOWItOTYyNDk5Yjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660019523765797:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:30.740416Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQzOTFjNzItMWYwNzZmMmUtY2VkYTUwOWItOTYyNDk5Yjg=, ActorId: [1:7439660019523765768:2454], ActorState: ExecuteState, TraceId: 01jd70btfw2tk3d5ybyx1k9wfw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 18411, MsgBus: 23673 2024-11-21T09:20:30.984949Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660019744448774:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.984969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f1/r3tmp/tmpz9MrQ3/pdisk_1.dat 2024-11-21T09:20:30.998540Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18411, node 2 2024-11-21T09:20:31.003060Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.003072Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.003074Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.003110Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23673 TClient is connected to server localhost:23673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:31.085509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.085542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.086620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.087310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.096565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:31.104245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.119128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.130763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.226835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660024039417603:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.226856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.231401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.238341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.248856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.256028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.263046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.269852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.278933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660024039418095:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.278953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.278960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660024039418100:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.279570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.283367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660024039418102:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.433704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 4911, MsgBus: 6245 2024-11-21T09:20:30.710869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660017596256160:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.711032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046e6/r3tmp/tmpcH2O6K/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4911, node 1 2024-11-21T09:20:30.763893Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:30.766169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.766183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.766185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.766221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6245 TClient is connected to server localhost:6245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.810174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.811222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.811241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.812378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.821253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.882729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.900783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.910248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.952689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017596257699:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.952730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.988128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.994196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.003628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.057898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.111981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.123408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.179396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660021891225516:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.179420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.179426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660021891225521:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.179980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.185344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660021891225523:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:31.360173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.370135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.382754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 14468, MsgBus: 19121 2024-11-21T09:20:30.036561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660017459578946:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.036586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046fd/r3tmp/tmp0wK0FF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14468, node 1 2024-11-21T09:20:30.091992Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:30.097466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.097478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.097479Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.097502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19121 TClient is connected to server localhost:19121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:30.136733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.136762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.137864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:30.162866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.175777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.189490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.207009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.216280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.338041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017459580485:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.338063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.370380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.377545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.388331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.394905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.401853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.409017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.417559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017459581000:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.417589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017459581005:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.417591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.418202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.422213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660017459581007:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.594744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7284, MsgBus: 14490 2024-11-21T09:20:30.890679Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660017679796225:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.891002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046fd/r3tmp/tmpGJlPkx/pdisk_1.dat 2024-11-21T09:20:30.901055Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7284, node 2 2024-11-21T09:20:30.910829Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.910842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.910844Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.910883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14490 TClient is connected to server localhost:14490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.990798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.990827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.991884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.993205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.998566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:31.005628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.019842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.033138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.136479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021974765055:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.136505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.141174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.146910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.157991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.165140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.172357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.179653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.188388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021974765555:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.188409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.188588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021974765560:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.189251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.192311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660021974765562:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.355773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.439955Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmM2MmE0YmQtYjIyM2RlZmMtY2EwN2M0NDgtZjVlZmJmZDc=, ActorId: [2:7439660021974765843:2456], ActorState: ExecuteState, TraceId: 01jd70bv644n2w07bgwghdyeze, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 2148, MsgBus: 14114 2024-11-21T09:20:32.837183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660024988437808:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.837218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004689/r3tmp/tmpu3mMlY/pdisk_1.dat 2024-11-21T09:20:32.875575Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2148, node 1 2024-11-21T09:20:32.894191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.894207Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.894209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.894240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14114 TClient is connected to server localhost:14114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:32.937474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.937495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.938610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.939083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.941818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.955499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.971073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.978824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.084756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029283406637:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.084783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.111103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.115954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.124494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.131941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.139117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.145724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.154336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029283407140:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.154346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029283407145:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.154353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.154808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.159282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660029283407147:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Upsert [GOOD] Test command err: Trying to start YDB, gRPC: 2579, MsgBus: 19528 2024-11-21T09:20:32.263640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660026042511273:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.263764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046b2/r3tmp/tmpYxfOny/pdisk_1.dat 2024-11-21T09:20:32.301382Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2579, node 1 2024-11-21T09:20:32.319018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.319028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.319029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.319055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19528 TClient is connected to server localhost:19528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:32.363559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.363589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.364652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.385474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.397083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.457493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.471443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.481732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.506504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026042512807:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.506523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.534821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.540234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.551180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.605630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.613954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.621347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.629366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026042513301:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.629386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.629410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026042513306:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.629947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.634143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660026042513308:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:32.772306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5144, MsgBus: 5855 2024-11-21T09:20:30.484856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660019106332885:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.484906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046eb/r3tmp/tmpHONf8H/pdisk_1.dat 2024-11-21T09:20:30.528139Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5144, node 1 2024-11-21T09:20:30.546279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.546293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.546295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.546326Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5855 TClient is connected to server localhost:5855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:30.584751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.584779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.585916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.591246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.602598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.664463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.681531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.689569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.753508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019106334421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.753532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.781169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.787330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.794055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.801090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.808073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.815023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.823015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019106334914:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.823035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.823045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660019106334919:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.823537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.828297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660019106334921:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:31.028863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25327, MsgBus: 63441 2024-11-21T09:20:31.344380Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660021887888770:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.344636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046eb/r3tmp/tmpy4WgCU/pdisk_1.dat 2024-11-21T09:20:31.353313Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25327, node 2 2024-11-21T09:20:31.362632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.362645Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.362647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.362686Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63441 TClient is connected to server localhost:63441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:31.444496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.444526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.445569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.446235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.448178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:31.460263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.477059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.489376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.636738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021887890297:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.636768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.642860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.649555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.704335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.711290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.717709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.725017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.733219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021887890811:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.733238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.733244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660021887890816:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.733765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.738232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660021887890818:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.936417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.942280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.952173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 18347, MsgBus: 14420 2024-11-21T09:20:32.264637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660027188917387:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.264653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046be/r3tmp/tmpwalZFg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18347, node 1 2024-11-21T09:20:32.315867Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:32.322842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.322854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.322855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.322894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14420 TClient is connected to server localhost:14420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:32.364625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.364649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.365662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:32.366616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.377857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.437933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.451962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.461761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.506186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027188918921:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.506210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.530465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.536552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.543630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.551170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.558050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.564830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.573395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027188919413:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.573422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.573435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027188919418:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.573970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.578449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660027188919420:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:32.733677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21164, MsgBus: 19017 2024-11-21T09:20:32.922904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660025234699684:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.923055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046be/r3tmp/tmpPR3X8h/pdisk_1.dat 2024-11-21T09:20:32.936097Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21164, node 2 2024-11-21T09:20:32.940710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.940720Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.940722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.940745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19017 TClient is connected to server localhost:19017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.023083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.023107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.024240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.025350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.029445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.041513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.057086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.069908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.170077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660029529668518:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.170101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.173976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.179021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.187768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.194920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.202293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.208653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.217192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660029529669010:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.217211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660029529669015:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.217214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.217604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.222262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660029529669017:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.384943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 3284, MsgBus: 15214 2024-11-21T09:20:31.983918Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660021452427910:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.983933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046d2/r3tmp/tmpMQKDhD/pdisk_1.dat 2024-11-21T09:20:32.037128Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3284, node 1 2024-11-21T09:20:32.050863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.050882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.050884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.050929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15214 TClient is connected to server localhost:15214 2024-11-21T09:20:32.084135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.084160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:20:32.085268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.091595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.095122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.154626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.167479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.180381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.241155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660025747396738:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.241185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.282626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.288855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.299197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.353949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.362276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.369301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.377383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660025747397254:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.377409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660025747397259:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.377408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.378038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.382591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660025747397261:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 64266, MsgBus: 32532 2024-11-21T09:20:32.838304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660028417046430:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.838320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046d2/r3tmp/tmpIWY4ig/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64266, node 2 2024-11-21T09:20:32.852225Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:32.853570Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.853581Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.853582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.853609Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32532 TClient is connected to server localhost:32532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.938570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.938600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.939703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.940304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.948298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.957786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.973930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.986052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.093228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032712015261:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.093248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.096615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.151069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.159587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.166495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.173750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.180378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.189017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032712015777:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.189044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032712015782:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.189044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.189509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.194196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660032712015784:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::Insert [GOOD] Test command err: Trying to start YDB, gRPC: 63875, MsgBus: 29697 2024-11-21T09:20:32.146422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660026080777610:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.146463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046c9/r3tmp/tmpEqOOjA/pdisk_1.dat 2024-11-21T09:20:32.180698Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63875, node 1 2024-11-21T09:20:32.198266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.198276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.198278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.198302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29697 TClient is connected to server localhost:29697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.246573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.246600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.247739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.266976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.275557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.335707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.352572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.363171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.398337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026080779144:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.398365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.429922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.436452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.446362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.452689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.459851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.467119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.475426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026080779636:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.475447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660026080779641:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.475454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.476020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.480303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660026080779643:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:32.640704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.682476Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660026080780032:2474], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01jd70bwc6fbfma5nt98ytdtes. SessionId : ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:32.682562Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660026080780033:2475], TxId: 281474976715673, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=. TraceId : 01jd70bwc6fbfma5nt98ytdtes. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660026080780029:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:32.683658Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=, ActorId: [1:7439660026080779925:2454], ActorState: ExecuteState, TraceId: 01jd70bwc6fbfma5nt98ytdtes, Create QueryResponse for error on request, msg:
: Error: Conflict with existing key., code: 2012 2024-11-21T09:20:32.711024Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660026080780083:2486], TxId: 281474976715676, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd70bwdc750sc0wmmf59j9s0. SessionId : ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:20:32.711079Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660026080780084:2487], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=. TraceId : 01jd70bwdc750sc0wmmf59j9s0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660026080780080:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:32.711219Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjljMjg2NGQtOWM4MjMxZmEtNTFiNzQyNWEtY2FjZjdiZTQ=, ActorId: [1:7439660026080779925:2454], ActorState: ExecuteState, TraceId: 01jd70bwdc750sc0wmmf59j9s0, Create QueryResponse for error on request, msg:
: Error: Duplicated keys found., code: 2012 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 28333, MsgBus: 17523 2024-11-21T09:20:32.939045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660024773532255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.939061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004686/r3tmp/tmpBxsyHN/pdisk_1.dat 2024-11-21T09:20:32.986333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28333, node 1 2024-11-21T09:20:32.997017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.997029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.997030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.997059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17523 TClient is connected to server localhost:17523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.037624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.039347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.039365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.040417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.050439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.111006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.128727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.139708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.208860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029068501101:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.208879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.247796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.302239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.314136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.320516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.327932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.335472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.343312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029068501609:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.343338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.343347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660029068501614:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.343975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.348175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660029068501616:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:33.486526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29706, MsgBus: 6978 2024-11-21T09:20:33.790314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660030301718889:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:33.790590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004686/r3tmp/tmp6mGD5o/pdisk_1.dat 2024-11-21T09:20:33.796946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29706, node 2 2024-11-21T09:20:33.805149Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:33.805159Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:33.805161Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:33.805187Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6978 TClient is connected to server localhost:6978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.890778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.890804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.891864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.892488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.899610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.905745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.918747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.930666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.997241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660030301720418:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.997266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.000768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.005646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.013577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.020477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.027534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.034830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.042793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660034596688204:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.042816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660034596688209:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.042824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.043251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:34.048046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660034596688211:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:34.226013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 3426, MsgBus: 10344 2024-11-21T09:20:32.823481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660026548455495:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.823515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00468f/r3tmp/tmp4FuUKp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3426, node 1 2024-11-21T09:20:32.880312Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:32.880784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.880793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.880794Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.880827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10344 TClient is connected to server localhost:10344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.921395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.923490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.923508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.924633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.933911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.995085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.009553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.020427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.100827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660030843424325:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.100864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.133076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.138609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.193082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.202119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.209149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.215561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.224338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660030843424841:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.224356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.224366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660030843424846:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.224792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.229263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660030843424848:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:33.370204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::UpdateOn_Literal ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm [GOOD] Test command err: Trying to start YDB, gRPC: 17815, MsgBus: 10519 2024-11-21T09:20:33.201833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660028894964436:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:33.201922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004684/r3tmp/tmpcPcLui/pdisk_1.dat 2024-11-21T09:20:33.241511Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17815, node 1 2024-11-21T09:20:33.252402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:33.252412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:33.252413Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:33.252439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10519 TClient is connected to server localhost:10519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.301358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.301380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.302481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.321692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.324871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.385570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.398825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.407554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.431446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028894965762:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.431470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.452607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.457183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.467396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.521656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.575912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.586882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.594957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028894966281:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.594980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028894966286:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.594980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.595431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.599838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660028894966288:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.750702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 27107, MsgBus: 1158 2024-11-21T09:20:32.562405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660027961709732:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.562670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004693/r3tmp/tmpYA1uRP/pdisk_1.dat 2024-11-21T09:20:32.603134Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27107, node 1 2024-11-21T09:20:32.617418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.617434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.617436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.617460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1158 TClient is connected to server localhost:1158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.662587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.662611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.663699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.687998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.691211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.750939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.764013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.776542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.797036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027961711264:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.797059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.830753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.837115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.844975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.852031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.906835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.915105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.923155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027961711759:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.923178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027961711764:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.923181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.923661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.927815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660027961711766:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.077610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20086, MsgBus: 25523 2024-11-21T09:20:33.434313Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660032344357980:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:33.434449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004693/r3tmp/tmpJXmtrb/pdisk_1.dat 2024-11-21T09:20:33.447066Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20086, node 2 2024-11-21T09:20:33.451553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:33.451562Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:33.451563Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:33.451583Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25523 TClient is connected to server localhost:25523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.534567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.534594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.535622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.536294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.537819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.545297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.560883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.573845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.687579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032344359508:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.687599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.691474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.696031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.705584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.712728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.719340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.726624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.735044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032344360011:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.735059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.735074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032344360016:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.735521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.739908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660032344360018:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.893316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 3578, MsgBus: 8001 2024-11-21T09:20:31.961711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660023631268387:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.961909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046d7/r3tmp/tmpir63NV/pdisk_1.dat 2024-11-21T09:20:32.004116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3578, node 1 2024-11-21T09:20:32.018837Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.018848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.018849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.018875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8001 TClient is connected to server localhost:8001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.061657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.061680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.062779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.083573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.093968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.155199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.169396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.180752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.204678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027926237219:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.204704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.229611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.235590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.242963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.250191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.304691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.312987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.321481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027926237714:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.321502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660027926237719:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.321509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.322026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.326143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660027926237721:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 13256, MsgBus: 7510 2024-11-21T09:20:32.820596Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660027877015485:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.820640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046d7/r3tmp/tmpB4BAos/pdisk_1.dat 2024-11-21T09:20:32.827961Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13256, node 2 2024-11-21T09:20:32.837893Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.837907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.837910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.837938Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7510 TClient is connected to server localhost:7510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.920797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.920821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.921903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.923033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.931826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.938770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.959542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.972493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.101720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032171984316:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.101744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.105629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.111117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.165046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.173825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.180436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.187760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.196694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032171984832:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.196712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.196729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660032171984837:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.197198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.200985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660032171984839:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.378664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.427981Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660032171985250:2481], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzcyMWU4N2UtMzQxMjVkYmItM2Y0ZGIyZmMtOTM3MzEzYzk=. CustomerSuppliedId : . TraceId : 01jd70bx3m9q45ste39qb066dm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:33.428068Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660032171985251:2482], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzcyMWU4N2UtMzQxMjVkYmItM2Y0ZGIyZmMtOTM3MzEzYzk=. CustomerSuppliedId : . TraceId : 01jd70bx3m9q45ste39qb066dm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439660032171985247:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:33.429289Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzcyMWU4N2UtMzQxMjVkYmItM2Y0ZGIyZmMtOTM3MzEzYzk=, ActorId: [2:7439660032171985122:2454], ActorState: ExecuteState, TraceId: 01jd70bx3m9q45ste39qb066dm, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 6368, MsgBus: 20215 2024-11-21T09:20:30.374249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660018505527717:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.374519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046ee/r3tmp/tmp0MlqmX/pdisk_1.dat 2024-11-21T09:20:30.437978Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6368, node 1 2024-11-21T09:20:30.450951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.450967Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.450969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.451005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20215 2024-11-21T09:20:30.474126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.474148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.475276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.513948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.516477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:30.517673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.531441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.547212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.556818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.656577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018505529255:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.656605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.695727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.701159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.709984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.764289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.773199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.779606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.788937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018505529771:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.788963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.789007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018505529776:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.789545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.793487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660018505529778:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.974174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27517, MsgBus: 28043 2024-11-21T09:20:31.232625Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660020750317432:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.232779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046ee/r3tmp/tmpyRbXes/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27517, node 2 2024-11-21T09:20:31.247005Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:31.249145Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.249154Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.249156Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.249183Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28043 TClient is connected to server localhost:28043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:31.332827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.332864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.333971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.335688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.345484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:31.354283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.371539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.383397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.509889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020750318969:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.509921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.514215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.520654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.529105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.536068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.543430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.550184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.559447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020750319473:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.559466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020750319478:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.559468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.560053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.563570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660020750319480:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.732235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.816224Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q2ZGQxNGQtNzFhOWRlNDctZmE0ZTc1MDAtYWIyODg0Njg=, ActorId: [2:7439660020750319986:2490], ActorState: ExecuteState, TraceId: 01jd70bvj48ah0he698wywcq1h, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:20:29.455579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:29.455602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:29.455607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:29.455611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:29.455617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:29.455620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:29.455629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:29.455702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:29.466091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:29.466113Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:29.468914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:29.469651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:29.469683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:29.478061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:29.478957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:29.479099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:29.479233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:29.481138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:29.481400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:29.481411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:29.481440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:29.481445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:29.481449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:29.481461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.482796Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:20:29.496735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:29.496814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.496872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:29.496930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:29.496938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.497704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:29.497724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:29.497764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.497772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:29.497776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:29.497780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:29.498183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.498192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:29.498196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:29.498533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.498548Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.498553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:29.498559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:29.499079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:29.499445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:29.499486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:29.499642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:29.499663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:29.499672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:29.499716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:29.499722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:29.499746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:29.499757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:29.500182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:29.500188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:29.500245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:29.500252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:20:29.500322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:29.500327Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:29.500337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T09:20:29.500341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:29.500346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:29.500351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:29.500355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:29.500359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:29.500370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:29.500375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:29.500378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:20:29.500631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:29.500643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:29.500647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:20:29.500652Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:20:29.500656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:29.500670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:56 2024-11-21T09:20:31.925289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:56 tabletId 72075186233409601 2024-11-21T09:20:31.925330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:50 2024-11-21T09:20:31.925333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2024-11-21T09:20:31.925478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2024-11-21T09:20:31.925481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2024-11-21T09:20:31.925505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2024-11-21T09:20:31.925508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2024-11-21T09:20:31.925517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2024-11-21T09:20:31.925519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2024-11-21T09:20:31.925710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2024-11-21T09:20:31.925715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2024-11-21T09:20:31.925725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2024-11-21T09:20:31.925727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2024-11-21T09:20:31.925736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2024-11-21T09:20:31.925738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2024-11-21T09:20:31.925747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2024-11-21T09:20:31.925750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2024-11-21T09:20:31.925759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2024-11-21T09:20:31.925762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2024-11-21T09:20:31.925769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2024-11-21T09:20:31.925771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2024-11-21T09:20:31.925779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T09:20:31.925781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T09:20:31.925791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:20:31.925794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:20:31.925803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:20:31.925805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:20:31.925813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T09:20:31.925815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T09:20:31.926469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2024-11-21T09:20:31.926479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2024-11-21T09:20:31.926490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2024-11-21T09:20:31.926493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2024-11-21T09:20:31.926654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2024-11-21T09:20:31.926658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2024-11-21T09:20:31.926674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T09:20:31.926677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2024-11-21T09:20:31.926690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2024-11-21T09:20:31.926693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2024-11-21T09:20:31.926710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2024-11-21T09:20:31.926712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2024-11-21T09:20:31.926724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2024-11-21T09:20:31.926726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2024-11-21T09:20:31.926734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2024-11-21T09:20:31.926737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2024-11-21T09:20:31.926860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2024-11-21T09:20:31.926864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2024-11-21T09:20:31.926873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2024-11-21T09:20:31.926875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2024-11-21T09:20:31.926883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2024-11-21T09:20:31.926885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2024-11-21T09:20:31.926893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2024-11-21T09:20:31.926895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2024-11-21T09:20:31.926903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2024-11-21T09:20:31.926906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2024-11-21T09:20:31.926928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2024-11-21T09:20:31.926930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2024-11-21T09:20:31.926994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:26 2024-11-21T09:20:31.926997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2024-11-21T09:20:31.927044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T09:20:31.927047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T09:20:31.927371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2024-11-21T09:20:31.927377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2024-11-21T09:20:31.927390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2024-11-21T09:20:31.927392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2024-11-21T09:20:31.927401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2024-11-21T09:20:31.927403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2024-11-21T09:20:31.927419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2024-11-21T09:20:31.927421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2024-11-21T09:20:31.927450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2024-11-21T09:20:31.927452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2024-11-21T09:20:31.927478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2024-11-21T09:20:31.927480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2024-11-21T09:20:31.927490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2024-11-21T09:20:31.927492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2024-11-21T09:20:31.927598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2024-11-21T09:20:31.927602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2024-11-21T09:20:31.927652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2024-11-21T09:20:31.927657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2024-11-21T09:20:31.928235Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2024-11-21T09:20:31.928344Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:31.928377Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 44us result status StatusPathDoesNotExist 2024-11-21T09:20:31.928401Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest 
resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:20:31.928462Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2024-11-21T09:20:31.928468Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 6us result status StatusPathDoesNotExist 2024-11-21T09:20:31.928472Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 13218, MsgBus: 11164 2024-11-21T09:20:29.925293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660012843366938:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.925353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004702/r3tmp/tmpZXu5pZ/pdisk_1.dat 2024-11-21T09:20:29.968707Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13218, node 1 2024-11-21T09:20:29.992370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.992385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.992387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.992425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11164 2024-11-21T09:20:30.025327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.025356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:11164 2024-11-21T09:20:30.026433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.047647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.052424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:30.059515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:30.121495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.138913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.153144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.217270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017138335768:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.217292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.251822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.258588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.269556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.324362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.332309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.339270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.347682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017138336284:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.347706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.347765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660017138336289:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.348394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.352221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660017138336291:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:30.546453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8191, MsgBus: 8506 2024-11-21T09:20:30.794496Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660016502701404:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.794637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004702/r3tmp/tmpYXjxZw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8191, node 2 2024-11-21T09:20:30.808355Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:30.810071Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.810082Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.810084Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.810118Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8506 TClient is connected to server localhost:8506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.894737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.894767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.895895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:30.897037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.905579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.913552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.931477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.941831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.086506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020797670235:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.086527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.090873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.096478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.150872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.157929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.165171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.172402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.180162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020797670749:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.180198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.180291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660020797670755:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.180831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.185345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660020797670757:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.332988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.416381Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzU3YzJlZGEtOTVkNGMzNDctODAyNWYwMDYtMmYzNmYwYjE=, ActorId: [2:7439660020797671038:2456], ActorState: ExecuteState, TraceId: 01jd70bv5e9z37z288mxrwa07s, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 6960, MsgBus: 5928 2024-11-21T09:20:32.484975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660028363355174:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.485161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046a0/r3tmp/tmp296Phn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6960, node 1 2024-11-21T09:20:32.535593Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:32.544147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.544160Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.544161Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.544193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5928 TClient is connected to server localhost:5928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:32.585178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.585204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.586289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:32.588287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.601110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.661686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.675337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.684326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.722040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028363356704:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.722077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.751754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.757482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.812030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.823921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.878774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.887200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.895526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028363357222:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.895554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.895565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660028363357227:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.896281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.900282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660028363357229:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:33.087984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.158930Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660032658324950:2484], TxId: 281474976710676, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MzQ0MWUyLWM0YjExYTlmLTRmMTY0ODc2LTk5OGZhZWIw. TraceId : 01jd70bwv4cyaa7rqqr7yw5tsj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:33.158998Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660032658324951:2485], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jd70bwv4cyaa7rqqr7yw5tsj. SessionId : ydb://session/3?node_id=1&id=MzQ0MWUyLWM0YjExYTlmLTRmMTY0ODc2LTk5OGZhZWIw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660032658324947:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:33.159917Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzQ0MWUyLWM0YjExYTlmLTRmMTY0ODc2LTk5OGZhZWIw, ActorId: [1:7439660032658324809:2454], ActorState: ExecuteState, TraceId: 01jd70bwv4cyaa7rqqr7yw5tsj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 32675, MsgBus: 19826 2024-11-21T09:20:33.351021Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660030349146099:2058];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046a0/r3tmp/tmpW0k2Gp/pdisk_1.dat 2024-11-21T09:20:33.357589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 32675, node 2 2024-11-21T09:20:33.367789Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:33.367871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:33.367883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:33.367884Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:33.367917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19826 TClient is connected to server localhost:19826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:33.451067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:33.451106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:33.452133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:33.452795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.453583Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:33.464427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:33.472267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.486905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.496186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:33.607858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660030349147620:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.607891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.611881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.617263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.629141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.635604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.642744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.649976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:33.658874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660030349148121:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.658907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.658931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660030349148126:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:33.659498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:33.663044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660030349148128:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:33.829956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 19773, MsgBus: 6834 2024-11-21T09:20:30.111083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660018849783174:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.111097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f8/r3tmp/tmpxOtMeI/pdisk_1.dat 2024-11-21T09:20:30.155318Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19773, node 1 2024-11-21T09:20:30.172604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:30.172621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:30.172623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:30.172668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6834 TClient is connected to server localhost:6834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:30.211074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:30.211099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:30.212201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:30.213071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.216078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:30.230516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.246135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.255850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:30.373223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018849784705:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.373252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.397146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.402111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.409161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.416341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.422792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.430492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.438911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018849785208:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.438943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.438968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660018849785213:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:30.439577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:30.443264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660018849785215:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:30.623924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5592, MsgBus: 32163 2024-11-21T09:20:30.990743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660018282005750:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:30.990900Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f8/r3tmp/tmptIXJ9B/pdisk_1.dat 2024-11-21T09:20:31.004366Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5592, node 2 2024-11-21T09:20:31.010977Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.010991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.010993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.011037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32163 TClient is connected to server localhost:32163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:31.091212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.091239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.092354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.092942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.104452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:31.112280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.127561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.138057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.244239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660022576974583:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.244260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.250324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.256665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.262991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.317627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.326598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.340790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.349400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660022576975098:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.349421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.349449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660022576975103:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.350065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.353117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660022576975105:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.512686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.599665Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTJmMzU2YWEtMzllYzY2N2EtOTQyNmIxMGUtMmQ0MmE5MDY=, ActorId: [2:7439660022576975390:2456], ActorState: ExecuteState, TraceId: 01jd70bvb10h7264tj27gp85cp, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 1091, MsgBus: 8528 2024-11-21T09:20:31.146606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660023960962909:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:31.146769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046e1/r3tmp/tmpEfrZ0M/pdisk_1.dat 2024-11-21T09:20:31.184967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1091, node 1 2024-11-21T09:20:31.199828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:31.199840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:31.199841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:31.199881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8528 TClient is connected to server localhost:8528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:31.246846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:31.246870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:31.248007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:31.268804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.304187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.366472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.384984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.397195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:31.464755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660023960964467:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.464786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.499085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.505606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.560076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.571425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.578327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.585450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:31.593941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660023960964972:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.593979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.593982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660023960964977:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:31.594698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:31.598483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660023960964979:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:31.781846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21225, MsgBus: 16462 2024-11-21T09:20:32.009705Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660025681386873:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.009723Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046e1/r3tmp/tmpzkgUaB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21225, node 2 2024-11-21T09:20:32.024280Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:32.024508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:32.024517Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:32.024518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:32.024550Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16462 TClient is connected to server localhost:16462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:32.110090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:32.110118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:32.111157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:32.111947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.120586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:32.176293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.191322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.201994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:32.252920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660025681388406:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.252952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.257892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.264403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.318675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.327287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.333728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.340851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.349686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660025681388901:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.349713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.349714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660025681388906:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:32.350175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:32.353787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660025681388908:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:32.518184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.588175Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQ2ODE1ZjYtNzE2OWI5MmMtNTNjMzZhMDUtZTIwMDMzMGM=, ActorId: [2:7439660025681389193:2456], ActorState: ExecuteState, TraceId: 01jd70bwa12mm0v9cfwtfakvjy, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 61288, MsgBus: 10995 2024-11-21T09:20:34.671574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660034823968793:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:34.671609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00467f/r3tmp/tmpMOWDkT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61288, node 1 2024-11-21T09:20:34.713771Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:34.720570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:34.720579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:34.720580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:34.720603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10995 TClient is connected to server localhost:10995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:34.758832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.762044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:34.771780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:34.771810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:34.772890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:34.820362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.834262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.847645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.881631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660034823970333:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.881661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.902282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.908076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.916892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.923294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.977415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.986656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.995814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660034823970828:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.995843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.995855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660034823970833:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.996327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:35.000143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660034823970835:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:35.158044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] Test command err: 2024-11-21T09:20:29.127558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660012406619499:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.127617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f49/r3tmp/tmp3FKSPa/pdisk_1.dat 2024-11-21T09:20:29.193412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15775, node 1 2024-11-21T09:20:29.227672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:29.227722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:29.229350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:29.247678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:29.247695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:29.247696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:29.247757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:29.316637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.317871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:29.317893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.318759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:29.318830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:29.318853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:20:29.319298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:29.319310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:20:29.319384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:20:29.319702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.320635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180829365, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:29.320648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:20:29.321235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:20:29.321793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:29.321846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:29.321864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:20:29.321876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:20:29.321902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:20:29.321922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:20:29.322361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:20:29.322377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:20:29.322381Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:29.322395Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:20:29.418917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660012406620198:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.418947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:29.538422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.538577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:20:29.538795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:29.538810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.540850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T09:20:29.540928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:29.541014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:29.541035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:29.541483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:29.541498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:29.541502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:29.541540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:29.541549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:29.541550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:29.541594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:20:29.544929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:29.544965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:20:29.545490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:29.600041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:29.600055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:29.600093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T09:20:29.600668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:29.601546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180829645, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:29.601558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180829645 2024-11-21T09:20:29.601588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:20:29.601939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:29.602029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:29.602051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:29.602229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:29.602240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:29.602244Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId ... 46316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 100 } } 2024-11-21T09:20:34.709641Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037910 Status: COMPLETE TxId: 281474976710765 Step: 1732180834755 OrderId: 281474976710765 ExecLatency: 2 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037910 CpuTimeUsec: 88 } } 2024-11-21T09:20:34.709718Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:20:34.709740Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:20:34.709757Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:20:34.709905Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037902 Status: COMPLETE TxId: 281474976710765 Step: 1732180834755 OrderId: 281474976710765 ExecLatency: 2 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037902 CpuTimeUsec: 99 } } 2024-11-21T09:20:34.710056Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:20:34.710065Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:2, at schemeshard: 72057594046644480 2024-11-21T09:20:34.710070Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:2 129 -> 240 
2024-11-21T09:20:34.710179Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710765:0 ProgressState 2024-11-21T09:20:34.710191Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 2/3 2024-11-21T09:20:34.710311Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710765:2 ProgressState 2024-11-21T09:20:34.710324Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:2 progress is 3/3 2024-11-21T09:20:34.710335Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2024-11-21T09:20:34.710361Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:1 2024-11-21T09:20:34.710367Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:2 2024-11-21T09:20:34.710625Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2024-11-21T09:20:34.711143Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976710762, at schemeshard: 72057594046644480 2024-11-21T09:20:34.711393Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710766:0, path# /Root/table 2024-11-21T09:20:34.711422Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:34.711665Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T09:20:34.711688Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710766, status# StatusAccepted 2024-11-21T09:20:34.711734Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 ProgressState 2024-11-21T09:20:34.711961Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046644480 2024-11-21T09:20:34.712856Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180834762, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:34.712869Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 1732180834762 2024-11-21T09:20:34.712871Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710766:0 128 -> 240 2024-11-21T09:20:34.713153Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710766:0 ProgressState 2024-11-21T09:20:34.713167Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710766:0 progress is 1/1 2024-11-21T09:20:34.713172Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710766:0 2024-11-21T09:20:34.713390Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2024-11-21T09:20:34.713794Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710762 2024-11-21T09:20:34.791985Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439660036719736721:2420] [0] Resolve database: name# /Root 2024-11-21T09:20:34.792165Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439660036719736721:2420] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 
DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T09:20:34.792178Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439660036719736721:2420] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T09:20:34.792381Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439660036719736721:2420] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:9339" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732180834 } EndTime { seconds: 1732180834 } } 2024-11-21T09:20:34.793124Z node 13 :TX_PROXY DEBUG: actor# [13:7439660036719733290:2135] Handle TEvNavigate describe path /Root/table/byValue/indexImplTable 2024-11-21T09:20:34.793140Z node 13 :TX_PROXY DEBUG: Actor# [13:7439660036719736727:4846] HANDLE EvNavigateScheme /Root/table/byValue/indexImplTable 2024-11-21T09:20:34.793186Z node 13 :TX_PROXY DEBUG: Actor# [13:7439660036719736727:4846] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T09:20:34.793215Z node 13 :TX_PROXY DEBUG: Actor# [13:7439660036719736727:4846] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table/byValue/indexImplTable" Options { ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T09:20:34.793525Z node 13 :TX_PROXY DEBUG: Actor# [13:7439660036719736727:4846] Handle TEvDescribeSchemeResult Forward to# [13:7439660036719736725:2421] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 693 Record# Status: StatusSuccess Path: "/Root/table/byValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710764 CreateStep: 1732180834741 ParentPathId: 8 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "Value" Type: "Decimal(22,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 22 DecimalScale: 9 } IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Value" KeyColumnNames: "Key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 10 MaxPartitionsCount: 10 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 |95.8%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> KqpEffects::UpdateOn_Literal [GOOD] |95.8%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::Insert >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast >> KqpInplaceUpdate::Negative_SingleRowWithValueCast >> KqpImmediateEffects::InsertDuplicates >> KqpImmediateEffects::TxWithReadAtTheEnd >> KqpWrite::InsertRevert >> KqpWrite::CastValuesOptional >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Analyze] >> KqpEffects::InsertAbort_Literal_Success >> KqpInplaceUpdate::SingleRowPgNotNull >> KqpInplaceUpdate::SingleRowSimple >> KqpImmediateEffects::ReplaceDuplicates ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 9852, MsgBus: 20223 2024-11-21T09:20:34.691388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660033324743781:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:34.691596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004682/r3tmp/tmp0WjxOS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9852, node 1 2024-11-21T09:20:34.741274Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:34.741450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:34.741459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:34.741461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:34.741491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20223 TClient is connected to server localhost:20223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:34.778684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:34.782328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.791223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:34.791245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:34.792334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:34.839563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.851240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.861121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:34.900366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660033324745314:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.900388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:34.920588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:34.974549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.028314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.082083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.136424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.147725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.155652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660037619713132:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.155674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.155713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660037619713137:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.156142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:35.160851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660037619713139:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 13033, MsgBus: 23043 2024-11-21T09:20:35.538230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660040371592579:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:35.538425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004682/r3tmp/tmpLYCWnZ/pdisk_1.dat 2024-11-21T09:20:35.546411Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13033, node 2 2024-11-21T09:20:35.556765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:35.556777Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:35.556778Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:35.556805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23043 TClient is connected to server localhost:23043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:35.638720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:35.638762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:35.639835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:35.640491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:35.644380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:35.652619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:35.667917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:35.681624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:35.773598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660040371594108:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.773623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.779506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.785274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.792281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.798583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.853370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.862057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:35.870779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660040371594625:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.870807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.870822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660040371594630:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:35.871416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:35.875536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660040371594632:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpWrite::CastValues >> KqpInplaceUpdate::Negative_SingleRowListFromRange >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd >> KqpWrite::CastValuesOptional [GOOD] >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] >> KqpInplaceUpdate::SingleRowSimple [GOOD] >> KqpWrite::CastValues [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpImmediateEffects::InsertDuplicates [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpWrite::InsertRevert [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 26058, MsgBus: 24842 2024-11-21T09:20:36.479457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660041793853029:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.479656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00466e/r3tmp/tmpGZHAYe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26058, node 1 2024-11-21T09:20:36.533384Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.534115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.534128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.534129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.534159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24842 TClient is connected to server localhost:24842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.579784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.579811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:36.580868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.581153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.590879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.651746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.669389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.682008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.734083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660041793854561:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.734122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.758933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.764678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.772251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.778908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.786015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.793323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.801018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660041793855052:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.801043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.801066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660041793855057:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.801635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.806421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660041793855059:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey >> test.py::test[blocks-date_greater_or_equal--Analyze] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Debug] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast [GOOD] Test command err: Trying to start YDB, gRPC: 21383, MsgBus: 9975 2024-11-21T09:20:36.466592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660044120080600:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.466728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004678/r3tmp/tmpiReVoU/pdisk_1.dat 2024-11-21T09:20:36.513990Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21383, node 1 2024-11-21T09:20:36.526792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.526805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.526807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.526843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9975 TClient is connected to server localhost:9975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.566870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.566890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.567930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.571244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.581927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.640561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.657809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.667805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.711212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044120082134:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.711237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.743218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.748565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.757325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.764741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.772004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.778562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.786991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044120082625:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044120082630:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.792302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660044120082632:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:36.936725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] Test command err: Trying to start YDB, gRPC: 32382, MsgBus: 12922 2024-11-21T09:20:36.459489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660042661955524:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.459504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00467c/r3tmp/tmpefp6TA/pdisk_1.dat 2024-11-21T09:20:36.507498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32382, node 1 2024-11-21T09:20:36.521705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.521718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.521721Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.521742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12922 TClient is connected to server localhost:12922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.559539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.559565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.560658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.584468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.595959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.656646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.670950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.681555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.706619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042661957056:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.706637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.742627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.746998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.757859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.764622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.771803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.779076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.787020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042661957547:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042661957552:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.787512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.792256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660042661957554:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:36.921698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple [GOOD] Test command err: Trying to start YDB, gRPC: 64340, MsgBus: 19905 2024-11-21T09:20:36.545566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660045592875588:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.545599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00466c/r3tmp/tmpiwyaXC/pdisk_1.dat 2024-11-21T09:20:36.598638Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64340, node 1 2024-11-21T09:20:36.609171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.609184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.609186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.609218Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19905 TClient is connected to server localhost:19905 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:20:36.645940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.645961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:20:36.647109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.673532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.686655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.701846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.720072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.730814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.797563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045592877126:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.797588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.827408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.831951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.841861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.896424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.904976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.912272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.919590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045592877641:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.919615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.919619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045592877646:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.920107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.925024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660045592877648:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.051636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues [GOOD] Test command err: Trying to start YDB, gRPC: 61538, MsgBus: 12281 2024-11-21T09:20:36.600315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660044066823720:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.600330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004668/r3tmp/tmpNWxq75/pdisk_1.dat 2024-11-21T09:20:36.639665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61538, node 1 2024-11-21T09:20:36.655154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.655170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.655172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.655201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12281 TClient is connected to server localhost:12281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.698272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.700264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.700283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.701416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.709753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.769044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.783115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.793594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.839347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044066825254:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.839367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.862119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.866665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.876538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.883462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.890618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.897498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.905837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044066825746:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.905850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.905868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044066825751:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.906315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.911317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660044066825753:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 1542, MsgBus: 65024 2024-11-21T09:20:36.538820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660044876982292:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.538968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004662/r3tmp/tmp9cJWp3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1542, node 1 2024-11-21T09:20:36.582584Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.589631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.589641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.589643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.589666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65024 TClient is connected to server localhost:65024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.633216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.636297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.638732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.638749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.639905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.694909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.709715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.718937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.779173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044876983828:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.779194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.803620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.808567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.813282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.820658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.828357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.834706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.842832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044876984319:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.842854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.842857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660044876984324:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.843256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.848272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660044876984326:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:37.001650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] Test command err: Trying to start YDB, gRPC: 6075, MsgBus: 9698 2024-11-21T09:20:36.642627Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660042000578956:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.642865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00465f/r3tmp/tmp1d6tVp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6075, node 1 2024-11-21T09:20:36.694360Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.701720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.701735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.701738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.701779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9698 TClient is connected to server localhost:9698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.742825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.742846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.744054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.770436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.776267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.790200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.806129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.814432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.900541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042000580487:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.900565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.929220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.934859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.946965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.952337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.960958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.968199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.976142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042000580989:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.976167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.976191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042000580994:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.976889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.981162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660042000580996:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.111756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 2155, MsgBus: 14380 2024-11-21T09:20:36.519373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660042460739579:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.519533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004671/r3tmp/tmp1kdRy4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2155, node 1 2024-11-21T09:20:36.565005Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.571773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.571787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.571789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.571818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14380 TClient is connected to server localhost:14380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.613312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.619718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.619742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.620820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.626231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.687233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.702108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:36.710891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.773553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042460741112:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.773580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.805298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.810655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.821035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.827559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.881547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.891125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.899932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042460741628:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.899955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.900007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042460741633:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.900596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.904220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660042460741635:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:37.030627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 18798, MsgBus: 14577 2024-11-21T09:20:36.407215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660045172624315:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.407402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00467a/r3tmp/tmp4T3tJa/pdisk_1.dat 2024-11-21T09:20:36.457208Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18798, node 1 2024-11-21T09:20:36.471247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.471259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.471260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.471286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14577 TClient is connected to server localhost:14577 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.507401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.507429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.508593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.535062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.547682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.561877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.575637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.584175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.690045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045172625847:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.690076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.722168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.727769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.737001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.743838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.750989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.757796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.766777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045172626361:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.766788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045172626366:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.766805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.767314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.771377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660045172626368:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:36.922245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19896, MsgBus: 3631 2024-11-21T09:20:37.266652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660048898372720:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:37.266793Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00467a/r3tmp/tmpCSDriM/pdisk_1.dat 2024-11-21T09:20:37.272824Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19896, node 2 2024-11-21T09:20:37.281524Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:37.281534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:37.281536Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:37.281561Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3631 TClient is connected to server localhost:3631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:37.367192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:37.367217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:37.368293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:37.368483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.378405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:37.386050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.402008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.411248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.515883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048898374251:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.515908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.520623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.526599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.535116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.542153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.549180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.556142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.565331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048898374743:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.565356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.565362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048898374748:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.565971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:37.569203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660048898374750:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.723995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 21333, MsgBus: 19373 2024-11-21T09:20:36.457942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660042485862472:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.458278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004674/r3tmp/tmpZmkONR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21333, node 1 2024-11-21T09:20:36.507912Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.513234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.513248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.513250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.513281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19373 TClient is connected to server localhost:19373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.553494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.556451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.558237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.558253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.559441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.616452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.632278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.640476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.711110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042485864008:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.711145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.736601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.741343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.750498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.757699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.765002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.771792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.780155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042485864503:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.780173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.780202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042485864508:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.780727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.785284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660042485864510:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:20:36.952124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24708, MsgBus: 2106 2024-11-21T09:20:37.114054Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660048978861348:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:37.114246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004674/r3tmp/tmp64Yjob/pdisk_1.dat 2024-11-21T09:20:37.121532Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24708, node 2 2024-11-21T09:20:37.130204Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:37.130216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:37.130218Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:37.130248Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2106 TClient is connected to server localhost:2106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:37.214209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:37.214234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:37.215276Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:37.216482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.227475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:37.234851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.292452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.304686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.357082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048978862888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.357104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.361285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.366805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.422717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.430302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.437233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.444532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.453452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048978863385:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.453475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.453475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048978863390:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.453970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:37.456994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660048978863392:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.644562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates [GOOD] Test command err: Trying to start YDB, gRPC: 9057, MsgBus: 11744 2024-11-21T09:20:36.561163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660042154648743:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.561382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00466a/r3tmp/tmpQZqljG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9057, node 1 2024-11-21T09:20:36.615271Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.615540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.615553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.615555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.615576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11744 TClient is connected to server localhost:11744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.660644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.661220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.661245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:20:36.662307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.668586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.727806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.744402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.752809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.807616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042154650276:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.807638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.832244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.837665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.849196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.855579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.862529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.870120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.877919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042154650766:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.877943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.877948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660042154650771:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.878510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.883104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660042154650773:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 17537, MsgBus: 18713 2024-11-21T09:20:37.226594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660049883310524:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:37.226640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00466a/r3tmp/tmpsY3PPW/pdisk_1.dat 2024-11-21T09:20:37.234391Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17537, node 2 2024-11-21T09:20:37.242328Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:37.242344Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:37.242345Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:37.242381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18713 TClient is connected to server localhost:18713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:37.326835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:37.326861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:37.327956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:37.329183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.339836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.346811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:37.361819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.374752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.481191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660049883312053:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.481213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.486645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.541690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.549117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.556136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.563087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.570396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.578683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660049883312568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.578706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.578711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660049883312573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.579395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:37.583220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660049883312575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.800062Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660049883312892:2467], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd70c1cda5t63zf7ccbhwz74. SessionId : ydb://session/3?node_id=2&id=MTc5M2M5ZDAtZmI3MzgzYWQtOTA4ZGE5YmItODVjZDczYzI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:20:37.800125Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660049883312894:2468], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTc5M2M5ZDAtZmI3MzgzYWQtOTA4ZGE5YmItODVjZDczYzI=. CustomerSuppliedId : . TraceId : 01jd70c1cda5t63zf7ccbhwz74. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660049883312889:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:37.801002Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTc5M2M5ZDAtZmI3MzgzYWQtOTA4ZGE5YmItODVjZDczYzI=, ActorId: [2:7439660049883312860:2454], ActorState: ExecuteState, TraceId: 01jd70c1cda5t63zf7ccbhwz74, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 28528, MsgBus: 62019 2024-11-21T09:20:36.623419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660045540894432:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.623599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004665/r3tmp/tmp9x49jc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28528, node 1 2024-11-21T09:20:36.674448Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.682469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.682482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.682483Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.682510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62019 TClient is connected to server localhost:62019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:20:36.724304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.724325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.725439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.751245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.759665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
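Note on the KqpEffects::InsertAbort_Literal_Duplicates output above: the PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION entries reporting "Duplicated keys found., code: 2012" are the abort path that test exercises. A minimal YQL sketch of a statement that can produce this class of violation follows; the table name, columns and values are hypothetical and are not taken from the test source.

-- Hypothetical illustration (not the test's own schema or data):
-- listing the same primary key twice in a single literal INSERT is rejected
-- with KIKIMR_CONSTRAINT_VIOLATION "Duplicated keys found" (code 2012),
-- and the enclosing transaction is aborted.
CREATE TABLE demo_kv (
    id Uint64,
    value String,
    PRIMARY KEY (id)
);

INSERT INTO demo_kv (id, value) VALUES
    (1ul, "first"),
    (1ul, "second");   -- duplicate key within one INSERT statement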
2024-11-21T09:20:36.818983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.832273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.842750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.864751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045540895968:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.864778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.889734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.943942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.952081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.960732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.967723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.974764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.983055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045540896463:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.983076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.983094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045540896468:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.983659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.988144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660045540896470:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.155211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32472, MsgBus: 29627 2024-11-21T09:20:37.499642Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660046167364025:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:37.499886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004665/r3tmp/tmpI46zbO/pdisk_1.dat 2024-11-21T09:20:37.506232Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32472, node 2 2024-11-21T09:20:37.515590Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:37.515603Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:37.515605Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:37.515644Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29627 TClient is connected to server localhost:29627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:37.599860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:37.599889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:37.600973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:37.602126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.603930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:37.612752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.630224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.642226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.735781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660046167365556:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.735814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.739840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.745928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.759487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.766285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.773009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.780319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.788680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660046167366049:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.788700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.788702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660046167366054:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.789328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:37.793338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660046167366056:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.931056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TDSProxyFaultTolerancePatchTest::mirror3dc [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureNone ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 14797, MsgBus: 1404 2024-11-21T09:20:36.473610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660045450514241:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:36.473627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004675/r3tmp/tmpQ21BJZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14797, node 1 2024-11-21T09:20:36.519328Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:36.526686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:36.526697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:36.526699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:36.526727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1404 TClient is connected to server localhost:1404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:36.573845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:36.573865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:36.574978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:36.594716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:36.599534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.660133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.677069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.689297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:36.742014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045450515778:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.742035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.768877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.774633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.785769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.839594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.848584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.855560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:36.863891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045450516283:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.863914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.863920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660045450516288:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:36.864385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:36.868765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660045450516290:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.025365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.130346Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660049745484025:2484], TxId: 281474976715676, task: 1. Ctx: { TraceId : 01jd70c0qc65c0vajs8821nyz2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODJmMzJiZWMtYmM0OWViZTAtZGVjZGY4ODctMzg4NDAwMjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:20:37.130410Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660049745484026:2485], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd70c0qc65c0vajs8821nyz2. SessionId : ydb://session/3?node_id=1&id=ODJmMzJiZWMtYmM0OWViZTAtZGVjZGY4ODctMzg4NDAwMjc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439660049745484022:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:37.131194Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODJmMzJiZWMtYmM0OWViZTAtZGVjZGY4ODctMzg4NDAwMjc=, ActorId: [1:7439660049745483871:2454], ActorState: ExecuteState, TraceId: 01jd70c0qc65c0vajs8821nyz2, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28802, MsgBus: 21119 2024-11-21T09:20:37.326327Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660048382701426:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:37.326477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004675/r3tmp/tmpaIOy5B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28802, node 2 2024-11-21T09:20:37.339489Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:37.341766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:37.341779Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:37.341780Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:37.341812Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21119 TClient is connected to server localhost:21119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:37.426888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:37.426922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:37.427986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:37.428665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.436025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:20:37.445773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.460957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.473168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:37.579014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048382702957:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.579048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.583027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.588874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.643397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.654090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.660752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.667986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.676831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048382703475:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.676853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660048382703480:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.676859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:37.677438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:20:37.681168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660048382703482:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:20:37.873150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:20:37.936241Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660048382703999:2491], TxId: 281474976715677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ODA5YzBjZGUtZmI1MTYyYzUtYTVmNTM0NDUtYWIxOTM2ZWE=. CustomerSuppliedId : . TraceId : 01jd70c1gm7y334htcjvjr5rz2. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:20:37.936293Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660048382704000:2492], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd70c1gm7y334htcjvjr5rz2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODA5YzBjZGUtZmI1MTYyYzUtYTVmNTM0NDUtYWIxOTM2ZWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439660048382703996:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:20:37.936464Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODA5YzBjZGUtZmI1MTYyYzUtYTVmNTM0NDUtYWIxOTM2ZWE=, ActorId: [2:7439660048382703767:2454], ActorState: ExecuteState, TraceId: 01jd70c1gm7y334htcjvjr5rz2, Create QueryResponse for error on request, msg: >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TDSProxyPatchTest::SecuredErrorOnGet_ErasureNone [GOOD] >> TDSProxyPatchTest::MovedOk_Erasure4Plus2Block >> TDSProxyPatchTest::MovedOk_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_0_0_VdiskErrors [GOOD] Test command err: 2024-11-21T09:20:38.967043Z node 11 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [11:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:20:38.967140Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T09:20:38.967145Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T09:20:38.967150Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:20:38.967154Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:20:38.967159Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T09:20:38.967162Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T09:20:38.970236Z node 11 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T09:20:38.970263Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T09:20:38.970268Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T09:20:38.970302Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T09:20:38.970317Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] 
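Similarly, in the KqpImmediateEffects::InsertConflictTxAborted output above, the "Conflict with existing key., code: 2012" entries record the other expected abort path: an INSERT whose key is already present in the table. A minimal YQL sketch under the same assumptions (hypothetical table and values, not the test's own data):

-- Hypothetical illustration (not the test's own schema or data):
-- INSERT aborts when the key already exists; UPSERT would overwrite instead.
CREATE TABLE demo_kv (
    id Uint64,
    value String,
    PRIMARY KEY (id)
);

UPSERT INTO demo_kv (id, value) VALUES (1ul, "existing");  -- seed the row
INSERT INTO demo_kv (id, value) VALUES (1ul, "conflict");  -- fails with "Conflict with existing key" (code 2012)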
received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T09:20:38.970347Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T09:20:38.970365Z node 11 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T09:20:38.970372Z node 11 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TCdcStreamWithRebootsTests::DisableStream[PipeResets] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] >> TCdcStreamWithRebootsTests::Attributes[PipeResets] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots] >> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable |95.9%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-date_greater_or_equal--Debug] [GOOD] >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Plan] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> TCdcStreamWithRebootsTests::DisableStream[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DisableStream[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] 
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.689896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.689931Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.692052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.692148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.692173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699551Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700582Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.718548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.718601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.718692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.718699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.719265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.719274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.719277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.719642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.720059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.720071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.720076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:20:39.720081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.720639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.721022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.721053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.721174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.721234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.721239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.721260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.721267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.721539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.721563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.721607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.721617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.721619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.721622Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.721625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.721627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.721629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.721636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.721640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.721643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ToSchemeBoard Send, to populator: [12:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:20:42.696891Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:20:42.697067Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.697080Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:20:42.697090Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/2 2024-11-21T09:20:42.697093Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/2 2024-11-21T09:20:42.697098Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: false 2024-11-21T09:20:42.697151Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.697158Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 ProgressState at tablet: 72057594046678944 2024-11-21T09:20:42.697244Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:42.697255Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:42.697259Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:42.697283Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T09:20:42.697289Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:20:42.697436Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:42.697447Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 
2024-11-21T09:20:42.697451Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:42.697455Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:20:42.697459Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:20:42.697469Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/2, is published: true FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:20:42.697611Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 152 } } 2024-11-21T09:20:42.697617Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:42.697632Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 152 } } 2024-11-21T09:20:42.697642Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 152 } } 2024-11-21T09:20:42.697721Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T09:20:42.697727Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:42.697738Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T09:20:42.697742Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:20:42.697749Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 51539609867 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T09:20:42.697756Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.697759Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has 
been received, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.697763Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:20:42.697767Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:1 129 -> 240 2024-11-21T09:20:42.698449Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:42.698582Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:42.698606Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.698622Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.698638Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.698644Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:1 ProgressState 2024-11-21T09:20:42.698653Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:1 progress is 2/2 2024-11-21T09:20:42.698657Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2024-11-21T09:20:42.698662Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 2/2, is published: true 2024-11-21T09:20:42.698666Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 2/2 2024-11-21T09:20:42.698670Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:20:42.698673Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:20:42.698683Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:42.698689Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:20:42.698692Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:20:42.698703Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1004 2024-11-21T09:20:42.699156Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:20:42.699163Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:20:42.699213Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:20:42.699230Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:20:42.699234Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [12:715:2620] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:20:42.699293Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2024-11-21T09:20:42.699319Z node 12 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 34us result status StatusSuccess 2024-11-21T09:20:42.699403Z node 12 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateDisabled SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--Analyze] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets] [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] 
recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:02.569600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:02.569620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:02.569624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:02.569629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:02.569637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:02.569642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:02.569655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:02.569755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:02.577137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:02.577150Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:02.578486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:02.578540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:02.578569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:02.580101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:02.580156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:02.580236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:02.580393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:02.580829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:02.581018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:02.581025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:02.581033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:02.581037Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:02.581041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:02.581067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:02.581808Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:02.591257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:02.591302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.591335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:02.591363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:02.591367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.591735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:02.591751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:02.591778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.591786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:02.591789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:19:02.591791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:02.592019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.592025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:02.592027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:02.592221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.592227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.592233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:02.592238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:02.592595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:02.592869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:02.592896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:02.593014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:02.593029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:02.593033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:02.593061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:02.593065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:02.593082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:02.593089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:02.593336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:02.593342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:02.593362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:02.593367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:02.593414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:02.593417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:02.593424Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:02.593426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:02.593430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:02.593433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:02.593435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:02.593437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:02.593443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:02.593446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:02.593448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... meshard: 72057594046678944 2024-11-21T09:20:44.698574Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 40us result status StatusSuccess 2024-11-21T09:20:44.698690Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:44.699177Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:44.699207Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 36us result status StatusSuccess 2024-11-21T09:20:44.699281Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> test.py::test[blocks-date_greater_scalar--Analyze] [GOOD] >> test.py::test[blocks-date_greater_scalar--Debug] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.691847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.691865Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.693847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.693952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.693974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.696377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.696446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.696570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T09:20:39.699638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700857Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.716037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.716045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.719274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719999Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.720016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.720020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.720025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready 
parts: 1/1, is published: false 2024-11-21T09:20:39.720029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.720032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.720035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.720044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.720048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.720051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405252Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405258Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T09:20:45.405263Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:20:45.405301Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405305Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405309Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T09:20:45.405312Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T09:20:45.405345Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405376Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405379Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405383Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:20:45.405386Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:20:45.405425Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405481Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 
72057594046678944, cookie: 1004 2024-11-21T09:20:45.405485Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405489Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:20:45.405492Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:20:45.405523Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405526Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405532Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:20:45.405535Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:45.405543Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:20:45.405571Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.405575Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:45.405578Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:20:45.406143Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.406167Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:20:45.406181Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.406240Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:20:45.406280Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.406294Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.406299Z node 23 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:20:45.406309Z node 23 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T09:20:45.406313Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:20:45.406317Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T09:20:45.406322Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:20:45.406326Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:20:45.406330Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1004:0 2024-11-21T09:20:45.406353Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:20:45.406357Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:20:45.406360Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:20:45.406364Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:45.406367Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T09:20:45.406370Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T09:20:45.406374Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:20:45.406377Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T09:20:45.406380Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T09:20:45.406389Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T09:20:45.406474Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:20:45.406480Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T09:20:45.406491Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:20:45.406495Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:20:45.406500Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:20:45.406936Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.406958Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.406969Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.406978Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.406982Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:45.407423Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:20:45.407501Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:20:45.407507Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 1004 2024-11-21T09:20:45.407560Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:20:45.407577Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:20:45.407582Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [23:819:2713] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:20:45.407641Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:45.407670Z node 23 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 39us result status StatusPathDoesNotExist 2024-11-21T09:20:45.407709Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::Attributes[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.691633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.691653Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.693327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.693420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.693436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.695300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.695343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T09:20:39.700576Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.717550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.717608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.717684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.717691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.718302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.718314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.718319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.718744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.718765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.719205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.720242Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.720286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.720437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.720460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.720466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.720521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.720528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.720553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.720564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.720968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.720977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.721002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.721061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.721068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.721076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.721080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.721085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.721089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.721093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.721097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.721107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.721111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.721115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097481Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097484Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.097488Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:20:46.097491Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:20:46.097560Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097566Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097569Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.097571Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:20:46.097573Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:20:46.097682Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 164 } } 2024-11-21T09:20:46.097687Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:46.097697Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 164 } } 2024-11-21T09:20:46.097705Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 
TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 164 } } 2024-11-21T09:20:46.097780Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097787Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.097791Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.097793Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:20:46.097795Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:20:46.097801Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:20:46.097842Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.097845Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:46.097853Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.097857Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:20:46.097861Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.097867Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.097870Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.097872Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:20:46.097876Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T09:20:46.098986Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.099010Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.099028Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 
72057594046678944 2024-11-21T09:20:46.099037Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.099049Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.099103Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.099107Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T09:20:46.099114Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T09:20:46.099116Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:20:46.099122Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:20:46.099132Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T09:20:46.099135Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:20:46.099139Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:20:46.099142Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:20:46.099147Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:46.099150Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:20:46.099152Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:20:46.099160Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:46.099163Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:20:46.099164Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:20:46.099170Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:20:46.099566Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:20:46.099574Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:596:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:20:46.099637Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:46.099664Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 31us result status StatusSuccess 2024-11-21T09:20:46.099748Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeNewAndOldImages PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatDynamoDBStreamsJson VirtualTimestamps: false AwsRegion: "ru-central1" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::Attributes[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680686Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.689797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.689814Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.691628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.691767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.691795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700709Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.715996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.716004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719087Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062531Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062535Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.062539Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:20:46.062544Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:20:46.062625Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } 2024-11-21T09:20:46.062630Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:46.062644Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } 2024-11-21T09:20:46.062654Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 141 } } 2024-11-21T09:20:46.062707Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062714Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062717Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.062721Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:20:46.062724Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:20:46.062799Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.062803Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:20:46.062813Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.062817Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:20:46.062825Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.062832Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.062836Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.062839Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:20:46.062844Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T09:20:46.062936Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062943Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.062946Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.062949Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:20:46.062953Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:20:46.062960Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:20:46.064264Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.064294Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.064319Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.064336Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.064355Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.064360Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 
ProgressState 2024-11-21T09:20:46.064368Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T09:20:46.064372Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:20:46.064377Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:20:46.064387Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T09:20:46.064392Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:20:46.064397Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:20:46.064401Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:20:46.064413Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:46.064417Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:20:46.064419Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:20:46.064431Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:46.064434Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:20:46.064437Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:20:46.064445Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:20:46.064526Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.064992Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:20:46.065001Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:598:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:20:46.065069Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:46.065100Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 36us result status StatusSuccess 2024-11-21T09:20:46.065181Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 
ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "key" Value: "value" } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false UserAttributes { Key: "key" Value: "value" } AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.642299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.642318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.642324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.642328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.642340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.642344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.642352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.642412Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.650413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.650426Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.651831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.651925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.651941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.653685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.653746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.653849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.653968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.654498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.654689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.654699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.654708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.654714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.654717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.654739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.655566Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.666549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.666595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.666628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.666660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.666665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.667128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.667138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.667141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.667435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.667680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.667692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.667697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.668050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.668347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.668377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.668483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.668498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.668502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.668536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.668540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.668558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.668565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.668844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.668849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.668871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.668876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.668932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.668937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.668944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.668947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.668950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.668953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.668956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.668958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.668965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.668968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.668970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
09:20:46.193783Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:20:46.193820Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.193846Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.193850Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:46.193854Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T09:20:46.193857Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T09:20:46.193902Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.193906Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:46.193910Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:20:46.193914Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 6 2024-11-21T09:20:46.193947Z node 23 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.194004Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.194009Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:46.194012Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:20:46.194016Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:20:46.194046Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.194050Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:46.194055Z node 23 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:20:46.194058Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:46.194065Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2024-11-21T09:20:46.194111Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.194116Z node 23 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:20:46.194119Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2024-11-21T09:20:46.194486Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:46.194980Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:20:46.195006Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:46.195029Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:20:46.195035Z node 23 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:20:46.195045Z node 23 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 5/5 2024-11-21T09:20:46.195049Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2024-11-21T09:20:46.195054Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 5/5, is published: true 2024-11-21T09:20:46.195058Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2024-11-21T09:20:46.195063Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:20:46.195067Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:20:46.195094Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:20:46.195099Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:20:46.195102Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:20:46.195107Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:20:46.195110Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T09:20:46.195113Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T09:20:46.195117Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:46.195120Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T09:20:46.195123Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T09:20:46.195128Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:20:46.195131Z 
node 23 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:4 2024-11-21T09:20:46.195137Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:4 2024-11-21T09:20:46.195144Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T09:20:46.195241Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:20:46.195274Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:20:46.195279Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T09:20:46.195290Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:20:46.195296Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:20:46.195300Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:20:46.195329Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195358Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195371Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195387Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195398Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195402Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:20:46.195800Z node 23 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:20:46.195881Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:20:46.195887Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:20:46.195942Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:20:46.195959Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:20:46.195963Z node 23 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [23:828:2721] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:20:46.196028Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:46.196062Z node 23 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 42us result status StatusPathDoesNotExist 2024-11-21T09:20:46.196099Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.689797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.689814Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.691411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.691474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.691496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.695996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.716004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T09:20:39.716011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T09:20:39.719055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
78944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T09:20:46.151499Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T09:20:46.151577Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 142 } } 2024-11-21T09:20:46.151581Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T09:20:46.151591Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 142 } } 2024-11-21T09:20:46.151598Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 142 } } 2024-11-21T09:20:46.151845Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.151854Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.151856Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.151858Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T09:20:46.151861Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:20:46.151934Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.151938Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 2 2024-11-21T09:20:46.151945Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.151949Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:20:46.151953Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 111669152016 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:20:46.151961Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:46.151963Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:20:46.151966Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:20:46.151968Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T09:20:46.152079Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.152086Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.152088Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:20:46.152090Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T09:20:46.152092Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T09:20:46.152098Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T09:20:46.152734Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.152755Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.153300Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.153320Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:20:46.153330Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.153342Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:20:46.153393Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:20:46.153410Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:20:46.153414Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T09:20:46.153420Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T09:20:46.153422Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1003 ready parts: 4/4 2024-11-21T09:20:46.153426Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T09:20:46.153435Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:459:2424] message: TxId: 1003 2024-11-21T09:20:46.153440Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T09:20:46.153444Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:20:46.153447Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:20:46.153452Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:20:46.153455Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:20:46.153456Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:20:46.153459Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:20:46.153461Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:20:46.153463Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:20:46.153471Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:20:46.153473Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:20:46.153475Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:20:46.153480Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T09:20:46.153959Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:20:46.153971Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:653:2575] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:20:46.154041Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:46.154073Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 38us result status StatusSuccess 2024-11-21T09:20:46.154141Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 
PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] |95.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |95.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> test.py::test[blocks-date_greater_scalar--Debug] [GOOD] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] |95.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |95.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] |95.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T09:19:46.962585Z :WriteRAW INFO: Random seed for debugging is 1732180786962580 2024-11-21T09:19:47.030910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659833143352082:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:47.031034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:47.033208Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659835019231537:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:47.033468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:47.050138Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00422a/r3tmp/tmp3lQqnK/pdisk_1.dat 2024-11-21T09:19:47.055827Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:47.077954Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9345, node 1 2024-11-21T09:19:47.093389Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00422a/r3tmp/yandexvkZglb.tmp 2024-11-21T09:19:47.093399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00422a/r3tmp/yandexvkZglb.tmp 2024-11-21T09:19:47.093451Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00422a/r3tmp/yandexvkZglb.tmp 2024-11-21T09:19:47.093485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:47.096682Z INFO: TTestServer started on Port 63854 GrpcPort 9345 TClient is connected to server localhost:63854 PQClient connected to localhost:9345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:47.113231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2024-11-21T09:19:47.131249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:47.131272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:47.132647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:47.153144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:47.153172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:47.154502Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:47.154721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:47.283907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659835019231831:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:47.283926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659835019231823:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:47.283944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:47.285231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T09:19:47.288369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659835019231837:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T09:19:47.370225Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659833143353049:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:47.370287Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659835019231880:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:47.370351Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVjZDg4MDQtNDA0MzQzNDctZTdlNzI5MjktNTdkN2IxMjE=, ActorId: [2:7439659835019231821:2277], ActorState: ExecuteState, TraceId: 01jd70ag2kcyrrzmc59h0818c6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:47.370485Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzFiYjcyM2QtMzRjZmIzNmEtOGQ1NmZjZWYtZWUwNzlmYTE=, ActorId: [1:7439659833143353008:2299], ActorState: ExecuteState, TraceId: 01jd70ag4pf79y6m2mrrxcybmw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:47.370811Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:47.370812Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:47.371405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:19:47.433503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:47.496942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9345", true, true, 1000); 2024-11-21T09:19:47.524202Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd70ag9pbtbdgnd2vcer0v3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JmNThmMmEtNmIzNmJhMTctNTJiYzEwZDMtYTIwZWRkZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439659833143353423:2919] 2024-11-21T09:19:52.031467Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659833143352082:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:52.031496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:52.033534Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659835019231537:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:52.033567Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:19:53.543515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:9345 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T09:19:53.554322Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:9345 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" ... 
opic: "test-topic" message_group_id: "src" from ipv6:[::1]:43628 2024-11-21T09:20:47.035732Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:43628 proto=v1 topic=test-topic durationSec=0 2024-11-21T09:20:47.035734Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:20:47.036104Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T09:20:47.036137Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T09:20:47.036145Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:20:47.036147Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:20:47.036153Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:20:47.036684Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:20:47.053814Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T09:20:47.053912Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439660092026483954:2470] connected; active server actors: 1 2024-11-21T09:20:47.053935Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T09:20:47.053944Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T09:20:47.053994Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439660092026483954:2470] disconnected; active server actors: 1 2024-11-21T09:20:47.054002Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439660092026483954:2470] disconnected no session 2024-11-21T09:20:47.069052Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:20:47.069068Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:20:47.069071Z node 15 :PQ_PARTITION_CHOOSER DEBUG: 
TPartitionChooser [15:7439660092026483924:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T09:20:47.069080Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:20:47.069319Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:47.069364Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2024-11-21T09:20:47.069337Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439660092026483972:2470], now have 1 active actors on pipe 2024-11-21T09:20:47.069433Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:47.069443Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:47.069473Z node 16 :PERSQUEUE INFO: new Cookie src|ea0ee927-357334a1-980cb994-7f580818_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T09:20:47.069502Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T09:20:47.069525Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:47.069708Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:47.069714Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:47.069731Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:47.069814Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ea0ee927-357334a1-980cb994-7f580818_0 2024-11-21T09:20:47.070181Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732180847070 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:47.070220Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ea0ee927-357334a1-980cb994-7f580818_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T09:20:47.070348Z :INFO: [] MessageGroupId [src] SessionId [src|ea0ee927-357334a1-980cb994-7f580818_0] Write session: close. 
Timeout = 0 ms 2024-11-21T09:20:47.070363Z :INFO: [] MessageGroupId [src] SessionId [src|ea0ee927-357334a1-980cb994-7f580818_0] Write session will now close 2024-11-21T09:20:47.070368Z :DEBUG: [] MessageGroupId [src] SessionId [src|ea0ee927-357334a1-980cb994-7f580818_0] Write session: aborting 2024-11-21T09:20:47.070503Z :INFO: [] MessageGroupId [src] SessionId [src|ea0ee927-357334a1-980cb994-7f580818_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:47.070506Z :DEBUG: [] MessageGroupId [src] SessionId [src|ea0ee927-357334a1-980cb994-7f580818_0] Write session: destroy 2024-11-21T09:20:47.070667Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ea0ee927-357334a1-980cb994-7f580818_0 grpc read done: success: 0 data: 2024-11-21T09:20:47.070676Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ea0ee927-357334a1-980cb994-7f580818_0 grpc read failed 2024-11-21T09:20:47.070680Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ea0ee927-357334a1-980cb994-7f580818_0 grpc closed 2024-11-21T09:20:47.070684Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ea0ee927-357334a1-980cb994-7f580818_0 is DEAD 2024-11-21T09:20:47.070793Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:47.070904Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:47.070917Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439660092026483972:2470] destroyed 2024-11-21T09:20:47.070922Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:20:47.074794Z :INFO: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Starting read session 2024-11-21T09:20:47.074803Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Starting cluster discovery 2024-11-21T09:20:47.074829Z :INFO: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27624: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27624
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:27624. " 2024-11-21T09:20:47.074831Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Restart cluster discovery in 0.008551s 2024-11-21T09:20:47.084304Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Starting cluster discovery 2024-11-21T09:20:47.084384Z :INFO: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27624: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27624
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:27624. " 2024-11-21T09:20:47.084389Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Restart cluster discovery in 0.017100s 2024-11-21T09:20:47.102348Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Starting cluster discovery 2024-11-21T09:20:47.102396Z :INFO: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27624: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27624
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:27624. " 2024-11-21T09:20:47.102403Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Restart cluster discovery in 0.036741s 2024-11-21T09:20:47.139331Z :DEBUG: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Starting cluster discovery 2024-11-21T09:20:47.139401Z :NOTICE: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27624: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27624
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:27624. " } 2024-11-21T09:20:47.139469Z :NOTICE: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27624: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27624
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:27624. " } 2024-11-21T09:20:47.139500Z :INFO: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:47.139508Z :NOTICE: [/Root] [/Root] [feb7df30-b3732891-fb93f30d-36b95da0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } |96.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::Primitive [GOOD] >> TPQCompatTest::ReadWriteSessions [GOOD] >> ResultFormatter::Struct [GOOD] |96.0%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::AllEqual [GOOD] Test command err: 2024-11-21T09:19:34.395646Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:19:34.395672Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T09:19:34.633361Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:19:34.633385Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST === Server->StartServer(false); 2024-11-21T09:19:34.874355Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439659777227657968:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:34.874609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:34.876642Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439659777713001399:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:34.876882Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:34.894333Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ebe/r3tmp/tmpDwpHDu/pdisk_1.dat 2024-11-21T09:19:34.898457Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created TServer::EnableGrpc on GrpcPort 16466, node 3 2024-11-21T09:19:34.923225Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:34.926317Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003ebe/r3tmp/yandexAS4TRO.tmp 2024-11-21T09:19:34.926328Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003ebe/r3tmp/yandexAS4TRO.tmp 2024-11-21T09:19:34.926383Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/jptk/003ebe/r3tmp/yandexAS4TRO.tmp 2024-11-21T09:19:34.926422Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:34.931587Z INFO: TTestServer started on Port 18338 GrpcPort 16466 TClient is connected to server localhost:18338 PQClient connected to localhost:16466 === TenantModeEnabled() = 0 === Init PQ - start server on port 16466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:34.957730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:19:34.957775Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.957817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T09:19:34.957851Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:19:34.957860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.958512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:34.958539Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:19:34.958594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.958621Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:19:34.958623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T09:19:34.958626Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:19:34.959049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T09:19:34.959066Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959120Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959123Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:19:34.959470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959479Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.959482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:34.959486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T09:19:34.959992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:34.960345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T09:19:34.960379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:19:34.960863Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180775010, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:34.960898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439659777227658516 RawX2: 12884904242 } } Step: 1732180775010 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:19:34.960907Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:34.960954Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:19:34.960965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:34.960994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:19:34.961024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T09:19:34.961379Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:19:34.961389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:19:34.961431Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:19:34.961437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439659777227658537:2372], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T09:19:34.961443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:34.961448Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:19:34.961459Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:19:34.961461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T09:19:34.961465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T09:19:34.961467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T09:19:34.961470Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:19:34.961471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T09 ... 
topic=account/topic durationSec=0 2024-11-21T09:20:44.128619Z node 19 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:20:44.128955Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: describe result for acl check 2024-11-21T09:20:44.128983Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T09:20:44.128989Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:20:44.128990Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:20:44.128993Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:20:44.129353Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:20:44.131078Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) Update the table 2024-11-21T09:20:44.133772Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:20:44.133784Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T09:20:44.133786Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) Start idle 2024-11-21T09:20:44.133792Z node 19 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 5 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:20:44.133918Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037901, NodeId 19, Generation: 1 2024-11-21T09:20:44.133919Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:44.133923Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] server connected, pipe [19:7439660079712090410:2591], now have 1 active actors on pipe 2024-11-21T09:20:44.133927Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T09:20:44.133931Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T09:20:44.133947Z node 19 :PERSQUEUE INFO: new Cookie 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 generated for partition 0 topic 'rt3.dc1--account--topic' owner 123 2024-11-21T09:20:44.133966Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T09:20:44.133985Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:44.134013Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T09:20:44.134019Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T09:20:44.134031Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:44.134044Z node 19 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 0 MaxSeqNo: 2 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 2024-11-21T09:20:44.134242Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732180844134 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:44.134270Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|96516a97-9f6fd78a-decd15fc-546e9a32_0" topic: "account/topic" cluster: "dc1" 2024-11-21T09:20:44.134362Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write 1 messages with Id from 1 to 1 2024-11-21T09:20:44.134384Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: try to update token 2024-11-21T09:20:44.134390Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T09:20:44.134491Z :INFO: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T09:20:44.134578Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:20:44.134658Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T09:20:44.134689Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T09:20:44.134696Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T09:20:44.134714Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T09:20:44.134731Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:20:44.134761Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T09:20:44.134767Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T09:20:44.134778Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message topic: rt3.dc1--account--topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 370 offset: -1 2024-11-21T09:20:44.134795Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Send write quota request. Topic: "rt3.dc1--account--topic". Partition: 0. Amount: 374. Cookie: 3 2024-11-21T09:20:45.134470Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) Update the table 2024-11-21T09:20:45.155268Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2024-11-21T09:20:45.155281Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439660079712090384:2591] (SourceId=123, PreferedPartition=(NULL)) Start idle 2024-11-21T09:20:47.627459Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Got quota. Topic: "rt3.dc1--account--topic". Partition: 0: Cookie: 3 2024-11-21T09:20:47.627555Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2024-11-21T09:20:47.627591Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 441 count 1 nextOffset 3 batches 1 2024-11-21T09:20:47.627635Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 429 WTime 1732180847627 2024-11-21T09:20:47.627672Z node 19 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T09:20:47.628372Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 374 2024-11-21T09:20:47.628388Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T09:20:47.628395Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T09:20:47.628438Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T09:20:47.628460Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:20:47.628792Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { queued_in_partition_duration_ms: 3492 throttled_on_topic_duration_ms: 3492 } 2024-11-21T09:20:47.628804Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: acknoledged message 1 2024-11-21T09:20:47.636798Z :INFO: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session will now close 2024-11-21T09:20:47.636816Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: aborting 2024-11-21T09:20:47.637041Z :INFO: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:47.637052Z :DEBUG: [] MessageGroupId [123] SessionId [123|96516a97-9f6fd78a-decd15fc-546e9a32_0] Write session: destroy 2024-11-21T09:20:47.637194Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 grpc read done: success: 0 data: 2024-11-21T09:20:47.637213Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 grpc read failed 2024-11-21T09:20:47.637219Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 grpc closed 2024-11-21T09:20:47.637227Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|96516a97-9f6fd78a-decd15fc-546e9a32_0 is DEAD 2024-11-21T09:20:47.637564Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:47.637614Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:47.637631Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] server disconnected, pipe [19:7439660079712090410:2591] destroyed 2024-11-21T09:20:47.637650Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::DropOwner. |96.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] |96.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2024-11-21T09:19:21.268678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659722645251167:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:21.268836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:21.273947Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659722185020079:2228];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ee6/r3tmp/tmpziMtaY/pdisk_1.dat 2024-11-21T09:19:21.293025Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:21.293867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:21.294443Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:21.306539Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21837, node 1 2024-11-21T09:19:21.317098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003ee6/r3tmp/yandexxwsjPL.tmp 2024-11-21T09:19:21.317108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003ee6/r3tmp/yandexxwsjPL.tmp 2024-11-21T09:19:21.317160Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003ee6/r3tmp/yandexxwsjPL.tmp 2024-11-21T09:19:21.317192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:21.320386Z INFO: TTestServer started on Port 13972 GrpcPort 21837 TClient is connected to server localhost:13972 PQClient connected to localhost:21837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:21.368699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:21.368721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:21.370182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:21.389442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:21.389465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:21.389637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.390358Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:21.390572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:19:21.395976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:19:21.513094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659722185020245:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.513115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659722185020256:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.513122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.513956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T09:19:21.517274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659722185020260:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T09:19:21.541699Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659722645252214:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:21.541772Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmM5N2QyNzMtYWJmOGI5YTktNzk0NmNlMmYtNWJmYTkzZWY=, ActorId: [1:7439659722645252173:2300], ActorState: ExecuteState, TraceId: 01jd709pxkc5hgptmhqbxkehsp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:21.542084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.542188Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:21.589162Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659722185020301:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:21.589254Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmEyMjFmNjUtZGQ5ZGMxNjktNTkyM2Q3MGItM2QzZjU5NTY=, ActorId: [2:7439659722185020229:2280], ActorState: ExecuteState, TraceId: 01jd709px8asrk8gwyz2sedxbm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:21.589460Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:21.599204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.613179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:19:21.637728Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd709q0q91v8w0jgwwf4pxah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUwY2RiZDYtN2UyMjE5YjctMjk1MzQ2NjMtNDZjZWJhZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659722645252634:3031] 2024-11-21T09:19:26.268934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659722645251167:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.268963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:26.271519Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659722185020079:2228];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.271547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = /Root/PQ/rt3.dc1/demo, dc = unknown 2024-11-21T09:19:26.665977Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T09:19:26.671200Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439659744120089703:3374] connected; active server actors: 1 2024-11-21T09:19:26.671321Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--demo] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T09:19:26.671419Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--demo] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T09:19:26.671606Z node 1 :PERSQUEUE_REA ... PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_10711562138990619236_v1 grpc read failed 2024-11-21T09:20:48.014629Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_10711562138990619236_v1 grpc closed 2024-11-21T09:20:48.014639Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_10711562138990619236_v1 is DEAD 2024-11-21T09:20:48.014694Z node 27 :PQ_READ_PROXY DEBUG: new grpc connection 2024-11-21T09:20:48.014701Z node 27 :PQ_READ_PROXY DEBUG: new session created cookie 6 2024-11-21T09:20:48.014734Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:48.014744Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_27_5_10711562138990619236_v1 2024-11-21T09:20:48.014754Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [27:7439660095681897186:2560] destroyed 2024-11-21T09:20:48.014762Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_5_10711562138990619236_v1 2024-11-21T09:20:48.014806Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [27:7439660095681897183:2557] disconnected; active server actors: 1 2024-11-21T09:20:48.014811Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [27:7439660095681897183:2557] client user disconnected session shared/user_27_5_10711562138990619236_v1 2024-11-21T09:20:48.014828Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2024-11-21T09:20:48.014852Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 read init: from# ipv6:[::1]:49584, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2024-11-21T09:20:48.014896Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 auth for : user 2024-11-21T09:20:48.014905Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2024-11-21T09:20:48.014906Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2024-11-21T09:20:48.014910Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 
requests infly 2024-11-21T09:20:48.014979Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T09:20:48.014996Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2024-11-21T09:20:48.015009Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 Handle describe topics response 2024-11-21T09:20:48.015023Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 auth is DEAD 2024-11-21T09:20:48.015025Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 auth ok: topics# 1, initDone# 0 2024-11-21T09:20:48.015190Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: 2024-11-21T09:20:48.015250Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439660095681897191:2561] connected; active server actors: 1 2024-11-21T09:20:48.015258Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7439660095681897191:2561] session shared/user_27_6_6242612377402176962_v1 2024-11-21T09:20:48.015262Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 status: SUCCESS init_response { session_id: "shared/user_27_6_6242612377402176962_v1" } 2024-11-21T09:20:48.015266Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2024-11-21T09:20:48.015273Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_6242612377402176962_v1" (Sender=[27:7439660095681897188:2561], Pipe=[27:7439660095681897191:2561], Partitions=[], ActiveFamilyCount=0) 2024-11-21T09:20:48.015274Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2024-11-21T09:20:48.015278Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. 
Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T09:20:48.015281Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_6242612377402176962_v1" (Sender=[27:7439660095681897188:2561], Pipe=[27:7439660095681897191:2561], Partitions=[], ActiveFamilyCount=0) 2024-11-21T09:20:48.015289Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_6242612377402176962_v1" sender [27:7439660095681897188:2561] lock partition 0 for ReadingSession "shared/user_27_6_6242612377402176962_v1" (Sender=[27:7439660095681897188:2561], Pipe=[27:7439660095681897191:2561], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2024-11-21T09:20:48.015297Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T09:20:48.015303Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000022s 2024-11-21T09:20:48.015448Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_6242612377402176962_v1" ClientId: "user" PipeClient { RawX1: 7439660095681897191 RawX2: 4503715591490049 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2024-11-21T09:20:48.015464Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2024-11-21T09:20:48.015563Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:48.015574Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7439660095681897194:2564], now have 1 active actors on pipe 2024-11-21T09:20:48.015595Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1 2024-11-21T09:20:48.015644Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2024-11-21T09:20:48.015653Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2024-11-21T09:20:48.015656Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_6242612377402176962_v1 on pipe: [27:7439660095681897194:2564] 2024-11-21T09:20:48.015667Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_6242612377402176962_v1:1 with generation 1 2024-11-21T09:20:48.015687Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_6242612377402176962_v1 2024-11-21T09:20:48.015716Z node 28 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T09:20:48.016319Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2024-11-21T09:20:48.016323Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T09:20:48.016407Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1732180848006 CreateTimestampMS: 1732180848006 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2024-11-21T09:20:48.016417Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2024-11-21T09:20:48.016434Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2024-11-21T09:20:48.016839Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 grpc read done: success# 0, data# { } 2024-11-21T09:20:48.016853Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 grpc read failed 2024-11-21T09:20:48.016858Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 grpc closed 2024-11-21T09:20:48.016877Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6242612377402176962_v1 is DEAD 2024-11-21T09:20:48.016963Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:48.016972Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_6242612377402176962_v1 2024-11-21T09:20:48.016980Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7439660095681897194:2564] destroyed 2024-11-21T09:20:48.016987Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_6242612377402176962_v1 2024-11-21T09:20:48.017047Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439660095681897191:2561] disconnected; active server actors: 1 2024-11-21T09:20:48.017055Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439660095681897191:2561] client user disconnected session shared/user_27_6_6242612377402176962_v1 |96.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession |96.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> 
YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::RenameTablesAndSplit |96.0%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbTableSplit::MergeByNoLoadAfterSplit >> TNetClassifierTest::TestInitFromRemoteSource |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> BasicStatistics::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2024-11-21T09:18:41.347211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:41.347246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:41.347254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037e4/r3tmp/tmp6O9RxG/pdisk_1.dat 2024-11-21T09:18:41.411271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30547, node 1 2024-11-21T09:18:41.498449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:41.498463Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:41.498466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:41.498521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:41.502246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:41.576033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:41.576060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:41.587161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28418 2024-11-21T09:18:41.979303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:42.699344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:42.699364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:42.731032Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:42.731587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:42.772668Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:42.778139Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:42.778154Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:42.782334Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:42.782428Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:42.782452Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:42.782455Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:42.782459Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:42.782463Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:42.782466Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:42.782470Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:42.782527Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:42.952014Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:42.952034Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:42.952793Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:18:42.954063Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:18:42.954133Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:18:42.954573Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:18:42.957684Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:42.957694Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:42.957701Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:18:42.958855Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:42.958872Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:42.959794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:42.960986Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:42.961008Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:42.962698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:42.973774Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:42.994869Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:43.099165Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:43.252819Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:18:43.958306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:43.958328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:43.960609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:18:44.017638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2273:3055], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:44.017667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:44.017995Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2278:3059]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:44.018020Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:18:44.018027Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2280:3061] 2024-11-21T09:18:44.018034Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2280:3061] 2024-11-21T09:18:44.018134Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2281:2831] 2024-11-21T09:18:44.018175Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2280:3061], server id = [2:2281:2831], tablet id = 72075186224037897, status = OK 2024-11-21T09:18:44.018208Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2281:2831], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:18:44.018222Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:18:44.018266Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:18:44.018272Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2278:3059], StatRequests.size() = 1 2024-11-21T09:18:44.020166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2285:3065], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:44.020181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:44.020266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2290:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:44.021209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:18:44.189148Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:18:44.189165Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:18:44.280776Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2280:3061], schemeshard count = 1 2024-11-21T09:18:44.488766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2292:3072], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:18:44.585043Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2416:3155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:44.585093Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:18:44.585098Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2416:3155], StatRequests.size() = 1 2024-11-21T09:18:44.592451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd708j9g0wtsktwkcwec024h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyMDM5MGItOWM4ZDc0MTItMzUyZDdiNTgtOTE0ZjE3YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:18:44.606642Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2431:2850]], StatType[ 0 ], StatRequestsCount[ ... 4-11-21T09:20:41.048517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 115 ] 2024-11-21T09:20:41.048526Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 115, ReplyToActorId = [2:6246:4522], StatRequests.size() = 1 2024-11-21T09:20:41.970938Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:6279:4536]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:41.971044Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2024-11-21T09:20:41.971050Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6279:4536], StatRequests.size() = 1 2024-11-21T09:20:42.893331Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6312:4550]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:42.893444Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2024-11-21T09:20:42.893451Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6312:4550], StatRequests.size() = 1 2024-11-21T09:20:43.245131Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:20:43.794744Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6345:4564]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:43.794849Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T09:20:43.794857Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6345:4564], StatRequests.size() = 1 2024-11-21T09:20:44.420224Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T09:20:44.420247Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:20:44.420251Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T09:20:44.420253Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T09:20:44.945074Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6385:4581]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:44.945180Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T09:20:44.945187Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, 
ReplyToActorId = [2:6385:4581], StatRequests.size() = 1 2024-11-21T09:20:45.350535Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:20:45.350602Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:20:45.350758Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:20:45.424553Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T09:20:45.424573Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 218.000000s, at schemeshard: 72075186224037889 2024-11-21T09:20:45.424675Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2024-11-21T09:20:45.435962Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:20:45.974214Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6418:4597]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:45.974321Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T09:20:45.974326Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6418:4597], StatRequests.size() = 1 2024-11-21T09:20:46.337750Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:20:46.337778Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:20:46.337784Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T09:20:46.337788Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:20:46.337900Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T09:20:46.341217Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:20:46.342712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6439:4614], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:46.342731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6449:4619], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:46.342739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:46.344831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T09:20:46.355062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6453:4622], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T09:20:46.563877Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6572:4686]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:46.563932Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T09:20:46.563939Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6572:4686], StatRequests.size() = 1 2024-11-21T09:20:46.578938Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTEyOGRlMzMtMjBhYzRiMzgtZmQ2MmU2MmItMzJjMDdmNWI=, TxId: 2024-11-21T09:20:46.578959Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTEyOGRlMzMtMjBhYzRiMzgtZmQ2MmU2MmItMzJjMDdmNWI=, TxId: 2024-11-21T09:20:46.579048Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:20:46.590205Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:20:46.590227Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:20:47.028881Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6616:4708]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:47.028990Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T09:20:47.028995Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6616:4708], StatRequests.size() = 1 2024-11-21T09:20:47.998466Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6655:4728]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:47.998562Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T09:20:47.998571Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6655:4728], StatRequests.size() = 1 2024-11-21T09:20:48.350587Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:20:48.350688Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:20:48.350692Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:20:48.350698Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T09:20:48.350701Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:20:48.350780Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:20:48.351226Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:20:48.354387Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTcxZTk0YWUtNTdjYTNlNmYtNDNiN2MxZDMtYTUyMGFmMjc=, TxId: 2024-11-21T09:20:48.354404Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTcxZTk0YWUtNTdjYTNlNmYtNDNiN2MxZDMtYTUyMGFmMjc=, TxId: 2024-11-21T09:20:48.354537Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:20:48.365628Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:20:48.365644Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:20:48.979722Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6725:4770]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:48.979819Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T09:20:48.979825Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6725:4770], StatRequests.size() = 1 2024-11-21T09:20:49.977436Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6764:4790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:49.977510Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T09:20:49.977515Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6764:4790], StatRequests.size() = 1 2024-11-21T09:20:50.330687Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:20:50.330767Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:20:50.330772Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:20:50.330879Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:20:50.330973Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:20:50.924632Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6797:4806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:50.924763Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2024-11-21T09:20:50.924773Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6797:4806], StatRequests.size() = 1 >> HttpRequest::Probe [GOOD] >> TNetClassifierTest::TestInitFromFile >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 2024-11-21T09:18:42.511911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:42.511945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:42.511953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037cc/r3tmp/tmpq81wKo/pdisk_1.dat 2024-11-21T09:18:42.575686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2037, node 1 2024-11-21T09:18:42.663645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:42.663659Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:42.663662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:42.663720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:42.668576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:42.742424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:42.742445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:42.753342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64343 2024-11-21T09:18:43.144042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:43.849247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:43.849268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:43.880899Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:43.881550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:43.922727Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:43.928727Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:43.928743Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:43.933312Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:43.933407Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:43.933419Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:43.933422Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:43.933426Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:43.933430Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:43.933433Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:43.933437Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:43.933501Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:44.102805Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:44.102826Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:44.103677Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:18:44.105061Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:18:44.105134Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:18:44.105593Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:18:44.108469Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:44.108481Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:44.108489Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:18:44.109636Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:44.109654Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:44.110546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:44.111564Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:44.111583Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:44.113324Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:44.124496Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:44.145569Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:44.248959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:44.413004Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:18:45.110881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.110907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.114595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:18:45.146009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:45.146059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:45.146085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:45.146099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:45.146110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:45.146122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:45.146134Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:45.146146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:45.146161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:45.146175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:45.146203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:45.146221Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:45.150485Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:45.150505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:45.150533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:45.150551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:45.150565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:45.150578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cle ... :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:20:49.702073Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T09:20:49.702103Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T09:20:50.306713Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T09:20:50.306738Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=NZhe % 2024-11-21T09:20:50.306744Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T09:20:51.549296Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T09:20:51.549395Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:20:51.602072Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:20:51.602133Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T09:20:51.602139Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:20:51.602348Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T09:20:51.613925Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T09:20:51.614060Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T09:20:51.614078Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T09:20:51.614215Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T09:20:51.625612Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T09:20:51.625687Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T09:20:51.625895Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6445], server id = [2:8592:6450], tablet id = 72075186224037899, status = OK 2024-11-21T09:20:51.625931Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6445], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.625972Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8588:6446], server id = [2:8593:6451], tablet id = 72075186224037900, status = OK 2024-11-21T09:20:51.625979Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8588:6446], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626127Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8589:6447], server id = [2:8594:6452], tablet id = 72075186224037901, status = OK 2024-11-21T09:20:51.626137Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8589:6447], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626159Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T09:20:51.626285Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8590:6448], server id = [2:8595:6453], tablet id = 72075186224037902, status = OK 2024-11-21T09:20:51.626295Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8590:6448], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626331Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8591:6449], server id = [2:8596:6454], tablet id = 72075186224037903, status = OK 2024-11-21T09:20:51.626337Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8591:6449], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626401Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T09:20:51.626477Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T09:20:51.626510Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6445], server id = [2:8592:6450], tablet id = 72075186224037899 2024-11-21T09:20:51.626514Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.626552Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T09:20:51.626584Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T09:20:51.626625Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8588:6446], server id = [2:8593:6451], tablet id = 72075186224037900 2024-11-21T09:20:51.626628Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.626637Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8597:6455], server id = [2:8598:6456], tablet id = 72075186224037904, status = OK 2024-11-21T09:20:51.626645Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8597:6455], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626689Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8589:6447], server id = [2:8594:6452], tablet id = 72075186224037901 2024-11-21T09:20:51.626691Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.626754Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8590:6448], server id = [2:8595:6453], tablet id = 72075186224037902 2024-11-21T09:20:51.626759Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.626778Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8599:6457], server id = [2:8601:6459], tablet id = 72075186224037905, status = OK 2024-11-21T09:20:51.626788Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8599:6457], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626798Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8591:6449], server id = [2:8596:6454], tablet id = 72075186224037903 2024-11-21T09:20:51.626801Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.626824Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6458], server id = [2:8603:6461], tablet id = 72075186224037906, status = OK 2024-11-21T09:20:51.626830Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6458], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626894Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T09:20:51.626937Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8602:6460], server id = [2:8605:6463], tablet id = 72075186224037907, status = OK 2024-11-21T09:20:51.626945Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8602:6460], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626977Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6462], server id = [2:8606:6464], tablet id = 72075186224037908, status = OK 2024-11-21T09:20:51.626981Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6462], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T09:20:51.626989Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T09:20:51.627053Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T09:20:51.627084Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8597:6455], server id = [2:8598:6456], tablet id = 72075186224037904 2024-11-21T09:20:51.627086Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.627103Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T09:20:51.627126Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T09:20:51.627132Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T09:20:51.627155Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T09:20:51.627181Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T09:20:51.627242Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:20:51.627267Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8599:6457], server id = [2:8601:6459], tablet id = 72075186224037905 2024-11-21T09:20:51.627269Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.627758Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6458], server id = [2:8603:6461], tablet id = 72075186224037906 2024-11-21T09:20:51.627768Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.627851Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T09:20:51.627913Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8602:6460], server id = [2:8605:6463], tablet id = 72075186224037907 2024-11-21T09:20:51.627915Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.628006Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6462], server id = [2:8606:6464], tablet id = 72075186224037908 2024-11-21T09:20:51.628009Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T09:20:51.641377Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDExMTM0Y2QtZDNlMDA4MjAtZWE1ZWRkMy0xNmJlMTc2MA==, TxId: 2024-11-21T09:20:51.641407Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDExMTM0Y2QtZDNlMDA4MjAtZWE1ZWRkMy0xNmJlMTc2MA==, TxId: 2024-11-21T09:20:51.641587Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:20:51.653084Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:20:51.653109Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=NZhe %, ActorId=[1:3396:3253] 2024-11-21T09:20:51.653478Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:8624:5538]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T09:20:51.653541Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:20:51.653547Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T09:20:51.654055Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:20:51.654068Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T09:20:51.654076Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2024-11-21T09:20:51.656810Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T09:19:52.150858Z :WriteRAW INFO: Random seed for debugging is 1732180792150854 2024-11-21T09:19:52.220036Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659854072955740:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:52.220289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:52.222337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659853852951276:2218];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:52.240015Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00421f/r3tmp/tmp6hLsZe/pdisk_1.dat 2024-11-21T09:19:52.243839Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:52.244677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:52.265959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14520, node 1 2024-11-21T09:19:52.279226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00421f/r3tmp/yandex6rRoBA.tmp 2024-11-21T09:19:52.279236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00421f/r3tmp/yandex6rRoBA.tmp 2024-11-21T09:19:52.279282Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/00421f/r3tmp/yandex6rRoBA.tmp 2024-11-21T09:19:52.279329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:52.283934Z INFO: TTestServer started on Port 32574 GrpcPort 14520 TClient is 
connected to server localhost:32574 PQClient connected to localhost:14520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:52.303966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:19:52.320021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:52.320041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:52.321227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:19:52.342354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:52.342374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:52.343473Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:52.343703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:52.457103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659854072956622:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:52.457129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:52.457202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659854072956649:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:52.457763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T09:19:52.458223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659854072956680:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:52.458243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:52.461352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659854072956651:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T09:19:52.476751Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659853852951414:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:52.476845Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjljNjMxODYtNGQ3ZTE1Y2MtNWM3ZmQzYzEtMzMyZGMzMDc=, ActorId: [2:7439659853852951388:2277], ActorState: ExecuteState, TraceId: 01jd70an4k2dnj5ke689n1stq5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:52.477254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:52.477302Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:52.524230Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659854072956817:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:52.524297Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2VjNDg4YjUtMTk2YTA4N2YtM2QzNmNkNjctNDdkMjEzNWQ=, ActorId: [1:7439659854072956619:2299], ActorState: ExecuteState, TraceId: 01jd70an484kjtd102zkfyadv3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:52.524531Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:52.539730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:52.603574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:14520", true, true, 1000); 2024-11-21T09:19:52.637052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd70an99akyg6ggwj226yr3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiODExMDQtZjM4MmUxMDgtMTlkMDgzM2MtMzYxM2YxN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659854072957124:2936] 2024-11-21T09:19:57.220300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659854072955740:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:57.220339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:57.222333Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659853852951276:2218];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:57.222366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:19:57.659191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:14520 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T09:19:57.670473Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:14520 MetaRequest { CmdCreateTopic { ... O: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:37764 2024-11-21T09:20:52.547483Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37764 proto=v1 topic=test-topic durationSec=0 2024-11-21T09:20:52.547487Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:20:52.547834Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T09:20:52.547879Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T09:20:52.547888Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:20:52.547889Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:20:52.547895Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:20:52.548470Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:20:52.565191Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T09:20:52.565380Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439660112668702420:2482] connected; active server actors: 1 2024-11-21T09:20:52.565443Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T09:20:52.565456Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T09:20:52.565597Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439660112668702420:2482] disconnected; active server actors: 1 2024-11-21T09:20:52.565609Z node 16 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe 
[15:7439660112668702420:2482] disconnected no session 2024-11-21T09:20:52.580542Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:20:52.580560Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:20:52.580564Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439660112668702385:2482] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T09:20:52.580573Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:20:52.580758Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 15, Generation: 1 2024-11-21T09:20:52.580778Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:52.580786Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439660112668702435:2482], now have 1 active actors on pipe 2024-11-21T09:20:52.580795Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:52.580802Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:52.580839Z node 15 :PERSQUEUE INFO: new Cookie src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T09:20:52.580881Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T09:20:52.580927Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:52.580999Z node 15 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T09:20:52.581011Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T09:20:52.581038Z node 15 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:20:52.581070Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 2024-11-21T09:20:52.581453Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732180852581 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:20:52.581504Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T09:20:52.581626Z :INFO: [] MessageGroupId [src] SessionId [src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0] Write session: close. 
Timeout = 0 ms 2024-11-21T09:20:52.581637Z :INFO: [] MessageGroupId [src] SessionId [src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0] Write session will now close 2024-11-21T09:20:52.581644Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0] Write session: aborting 2024-11-21T09:20:52.581796Z :INFO: [] MessageGroupId [src] SessionId [src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0] Write session: gracefully shut down, all writes complete 2024-11-21T09:20:52.581802Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0] Write session: destroy 2024-11-21T09:20:52.581948Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 grpc read done: success: 0 data: 2024-11-21T09:20:52.581958Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 grpc read failed 2024-11-21T09:20:52.581963Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 grpc closed 2024-11-21T09:20:52.581969Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec226657-e0d6d3c9-ffc06a58-29f13ecc_0 is DEAD 2024-11-21T09:20:52.582313Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:20:52.582354Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:20:52.582371Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439660112668702435:2482] destroyed 2024-11-21T09:20:52.582380Z node 15 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:20:52.587386Z :INFO: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Starting read session 2024-11-21T09:20:52.587399Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Starting cluster discovery 2024-11-21T09:20:52.587445Z :INFO: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19916: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19916
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19916. " 2024-11-21T09:20:52.587450Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Restart cluster discovery in 0.008551s 2024-11-21T09:20:52.596348Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Starting cluster discovery 2024-11-21T09:20:52.596439Z :INFO: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19916: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19916
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19916. " 2024-11-21T09:20:52.596445Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Restart cluster discovery in 0.013110s 2024-11-21T09:20:52.610407Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Starting cluster discovery 2024-11-21T09:20:52.610480Z :INFO: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19916: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19916
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19916. " 2024-11-21T09:20:52.610485Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Restart cluster discovery in 0.039975s 2024-11-21T09:20:52.651357Z :DEBUG: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Starting cluster discovery 2024-11-21T09:20:52.651440Z :NOTICE: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19916: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19916
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19916. " } 2024-11-21T09:20:52.651538Z :NOTICE: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19916: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19916
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:19916. " } 2024-11-21T09:20:52.651588Z :INFO: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Closing read session. Close timeout: 0.000000s 2024-11-21T09:20:52.651598Z :NOTICE: [/Root] [/Root] [5e3991-84bd1162-c93c52b5-d51b27ad] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TPersQueueTest::DefaultMeteringMode [GOOD] >> TPersQueueTest::DisableWrongSettings >> TNetClassifierTest::TestInitFromFile [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2024-11-21T09:20:51.038971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660107314720559:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:51.039040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046bd/r3tmp/tmpDfsVKC/pdisk_1.dat 2024-11-21T09:20:51.122454Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:51.122594Z node 1 :HTTP ERROR: (#30,[::1]:16553) connection closed with error: Connection refused 2024-11-21T09:20:51.123430Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T09:20:51.135237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:51.135250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:51.135252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:51.135300Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:20:51.140475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:51.140517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:51.141608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2024-11-21T09:20:52.249034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660114369904676:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:52.249051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046b9/r3tmp/tmpkGNnDi/pdisk_1.dat 2024-11-21T09:20:52.306425Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:52.307849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/0046b9/r3tmp/yandex151sUE.tmp 2024-11-21T09:20:52.307859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/0046b9/r3tmp/yandex151sUE.tmp 2024-11-21T09:20:52.307923Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/0046b9/r3tmp/yandex151sUE.tmp 2024-11-21T09:20:52.307968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2024-11-21T09:20:52.350250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:52.350280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:52.351453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TNetClassifierTest::TestInitFromBadlyFormattedFile |96.0%| [TA] $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows >> ColumnBuildTest::ValidDefaultValue >> ColumnBuildTest::AlreadyExists |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2024-11-21T09:20:54.484944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660120926387529:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:54.485067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00469f/r3tmp/tmpmbqXzh/pdisk_1.dat 2024-11-21T09:20:54.536657Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:54.544882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/00469f/r3tmp/yandexShK7QP.tmp 2024-11-21T09:20:54.544905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/00469f/r3tmp/yandexShK7QP.tmp 2024-11-21T09:20:54.544950Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2024-11-21T09:20:54.544957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/jptk/00469f/r3tmp/yandexShK7QP.tmp 2024-11-21T09:20:54.544995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:20:54.584584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:54.584613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:54.585755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> test.py::test[blocks-date_greater_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_scalar--Plan] [GOOD] |96.0%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest |96.0%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:20:54.839751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:54.839775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.839779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:54.839783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:54.839793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:54.839801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:54.839808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.839885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:54.851576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:54.851599Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:54.854419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:54.855168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:54.855205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:54.857499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:54.857805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:54.859010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.859705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:54.863288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863657Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:54.863699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.863704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:54.863717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.865067Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:20:54.883170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:54.883609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.883683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:54.883731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:54.883739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:54.884521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:54.884531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:54.884535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:54.884930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:54.885346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:20:54.885363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.885369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.885973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:54.886425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:54.887304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:54.887512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:54.887625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:54.887661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:54.888179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.888255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:20:54.888354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:54.888372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:54.888376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T09:20:54.888380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:54.888385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.888392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:54.888396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:54.888407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:54.888412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:54.888416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:20:54.888748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:20:54.888771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:20:54.888775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:54.888788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:787:2666] TestWaitNotification: OK eventTxId 105 2024-11-21T09:20:56.322595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2024-11-21T09:20:56.322660Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 80us result status StatusSuccess 2024-11-21T09:20:56.322777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T09:20:56.323032Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2024-11-21T09:20:56.323728Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T09:20:56.323760Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1139:3014], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:20:56.323797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2024-11-21T09:20:56.323825Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2024-11-21T09:20:56.323832Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1139:3014], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:20:56.324329Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T09:20:56.324348Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:20:56.325009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" 
DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2024-11-21T09:20:56.325049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2024-11-21T09:20:56.325095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2024-11-21T09:20:56.325662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T09:20:56.325690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2024-11-21T09:20:56.325717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2024-11-21T09:20:56.325726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2024-11-21T09:20:56.325747Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2024-11-21T09:20:56.325763Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2024-11-21T09:20:56.325942Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1139:3014] 2024-11-21T09:20:56.326015Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, 
reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[blocks-date_greater_scalar--Plan] [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberTest::Sync >> TSubscriberTest::SyncPartial >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] [GOOD] >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync [GOOD] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:20:54.847308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:54.847335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.847340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:54.847345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:54.847357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:54.847368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:54.847377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.847466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:54.858574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:54.858594Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:54.861625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:54.862424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:54.862460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:54.864001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:54.864194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:54.864332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.864422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:54.865806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.866051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.866065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.866105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:54.866112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.866118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:54.866130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.867360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:20:54.884496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:54.884551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:54.884655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:54.884664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T09:20:54.885361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:54.885431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:54.885444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:54.885448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:54.885918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:54.886396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.886409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.886414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.886419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.887007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:54.887452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:54.887492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:54.887641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T09:20:54.887745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:54.887782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:54.888285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.888322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:20:54.888402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:54.888419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:54.888422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.888427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:54.888432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.888435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:54.888438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:54.888449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:54.888454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:54.888457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:20:54.888749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:20:54.888775Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:20:54.888779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T09:20:54.888793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2024-11-21T09:20:57.221426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T09:20:57.221431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2024-11-21T09:20:57.221435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2024-11-21T09:20:57.221453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2024-11-21T09:20:57.221895Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T09:20:57.221907Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T09:20:57.221973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2024-11-21T09:20:57.221998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2024-11-21T09:20:57.222024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T09:20:57.222028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2024-11-21T09:20:57.222036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T09:20:57.232819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1808:3675], Recipient [1:748:2640]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:20:57.232839Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:20:57.286085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2024-11-21T09:20:57.286131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 664 RawX2: 4294969875 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T09:20:57.286143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2024-11-21T09:20:57.286151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2024-11-21T09:20:57.286764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T09:20:57.286783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2024-11-21T09:20:57.286797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2024-11-21T09:20:57.286801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T09:20:57.286808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2024-11-21T09:20:57.286825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:568:2508] message: TxId: 281474976725761 2024-11-21T09:20:57.286832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T09:20:57.286837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2024-11-21T09:20:57.286842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2024-11-21T09:20:57.286872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T09:20:57.287483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2024-11-21T09:20:57.287498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2024-11-21T09:20:57.287511Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2024-11-21T09:20:57.287534Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: 
TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T09:20:57.287933Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T09:20:57.287952Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T09:20:57.287960Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T09:20:57.288321Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T09:20:57.288340Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T09:20:57.288344Z node 1 :BUILD_INDEX TRACE: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2024-11-21T09:20:57.288364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T09:20:57.288369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1157:3032] TestWaitNotification: OK eventTxId 106 2024-11-21T09:20:57.288673Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2024-11-21T09:20:57.288770Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2024-11-21T09:20:57.402628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:57.403037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:57.403062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:57.403070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T09:20:57.403079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T09:20:57.403148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T09:20:57.403161Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.403170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T09:20:57.403176Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.403232Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:57.403242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T09:20:57.403248Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.403265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:57.403272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T09:20:57.403276Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2024-11-21T09:20:57.411406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:57.411745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:57.411764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:57.411769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T09:20:57.411775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T09:20:57.411792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T09:20:57.411800Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.411806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T09:20:57.411811Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.411835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T09:20:57.411849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { 
Path: path }: sender# [1:33:2064], cookie# 1 2024-11-21T09:20:57.411854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 1 2024-11-21T09:20:57.411858Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 1 2024-11-21T09:20:57.411866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T09:20:57.411869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T09:20:57.411875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 1 2024-11-21T09:20:57.411879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T09:20:57.411883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T09:20:57.411886Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.411889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:34:2064], cookie# 1 2024-11-21T09:20:57.411893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2024-11-21T09:20:57.411896Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 1 2024-11-21T09:20:57.411899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2024-11-21T09:20:57.411907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 2 2024-11-21T09:20:57.411914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 2 2024-11-21T09:20:57.411916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T09:20:57.411920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 2 2024-11-21T09:20:57.411923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 2 2024-11-21T09:20:57.411929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 
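
The SyncPartial trace above (cookie# 1, and continued for cookie# 2 and cookie# 3 just below) counts replica responses against a majority threshold: with size# 3 subscribers, half# 1, a sync round is reported done as soon as either successes or failures exceed half, and it is flagged partial when successes never reached a majority. The following is a minimal, self-contained C++ sketch of that counting rule only; it is a reconstruction from the numbers printed in the trace, not the actual TSubscriber/scheme-board implementation, and the names (SyncQuorum, Done, Partial) are hypothetical.

#include <cstddef>
#include <iostream>

struct SyncQuorum {
    std::size_t size;           // number of replica subscribers (size# 3 in the trace)
    std::size_t successes = 0;  // TEvSyncVersionResponse with Partial: 0
    std::size_t failures  = 0;  // TEvSyncVersionResponse with Partial: 1

    std::size_t Half() const { return size / 2; }                       // half# 1 for size# 3
    bool Done() const { return successes > Half() || failures > Half(); } // "Sync is done"
    bool Partial() const { return successes <= Half(); }                  // partial# flag in the done line
};

int main() {
    SyncQuorum cookie1{3};
    cookie1.failures++;   // one replica answers Partial: 1
    cookie1.successes++;  // two good responses follow
    cookie1.successes++;
    std::cout << cookie1.Done() << " " << cookie1.Partial() << "\n"; // 1 0: done, not partial

    SyncQuorum cookie2{3};
    cookie2.failures += 2; // two replicas answer Partial: 1
    std::cout << cookie2.Done() << " " << cookie2.Partial() << "\n"; // 1 1: done, partial
}

Run against the trace values this reproduces "successes# 2, faulires# 1, partial# 0" for cookie# 1 and "successes# 0, faulires# 2, partial# 1" for cookie# 2.
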
2024-11-21T09:20:57.411932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 2 2024-11-21T09:20:57.411934Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T09:20:57.411938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T09:20:57.411941Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.411944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 2 2024-11-21T09:20:57.411947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 2 2024-11-21T09:20:57.411952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 3 2024-11-21T09:20:57.411959Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 3 2024-11-21T09:20:57.411962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T09:20:57.411964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 3 2024-11-21T09:20:57.411966Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T09:20:57.411969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 3 2024-11-21T09:20:57.411976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2064], cookie# 3 2024-11-21T09:20:57.411978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 3 2024-11-21T09:20:57.411981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T09:20:57.411984Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] 
Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:20:54.839601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:54.839634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.839640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:54.839645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:54.839657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:54.839673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:54.839681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:54.839780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:54.851553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:54.851575Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:20:54.854555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:54.855437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:54.855469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:54.857498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:54.857788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:54.859007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.859715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:54.861724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.863647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:54.863656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.863662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:54.863679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.865197Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 
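
The schemeshard boot banner above prints its background-queue knobs in one line. As a reading aid only, here is a hypothetical C++ struct that restates those values with explicit units (durations in seconds; Rate/MaxRate assumed to be operations per second; InflightLimit a cap on concurrent queue items). The field names mirror the log text, but the struct itself is an editorial sketch, not the YDB configuration type.

#include <chrono>

struct BackgroundCompactionQueueConfig {
    std::chrono::duration<double> Timeout{600.0};                   // Timeout# 600.000000s
    bool CompactSingleParted = false;                               // compact single parted# no
    double Rate = 1.0;                                              // Rate# 1 (assumed ops/s)
    std::chrono::duration<double> WakeupInterval{60.0};             // WakeupInterval# 60.000000s
    std::chrono::duration<double> RoundInterval{172800.0};          // RoundInterval# 172800.000000s (48h)
    unsigned InflightLimit = 1;                                     // InflightLimit# 1
    std::chrono::duration<double> MinCompactionRepeatDelay{600.0};  // MinCompactionRepeatDelaySeconds# 600.000000s
    double MaxRate = 1.0;                                           // MaxRate# 1
};

int main() {
    BackgroundCompactionQueueConfig cfg;  // defaults match the banner printed above
    (void)cfg;
}
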
2024-11-21T09:20:54.882731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:54.883613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.883687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:54.883732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:54.883740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:54.884524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.884532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:54.884535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:54.884538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:54.885003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:54.885603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.885621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.885627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.886192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:54.886615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:54.887310Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:54.887516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:54.887547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:54.887622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:54.887649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:54.887661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:54.888146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:54.888192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:20:54.888294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:54.888302Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:54.888313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:54.888317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.888322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:54.888327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:54.888331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:54.888335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:54.888347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:54.888353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:54.888357Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:20:54.888660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:20:54.888678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:20:54.888682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:20:54.888686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:54.888700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.626632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2047:3914], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.627212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2048:3915], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.627639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2049:3916], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.628188Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2050:3917], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.628724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2051:3918], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.629263Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2052:3919], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } 
} 2024-11-21T09:20:57.629813Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2053:3920], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.630470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3921], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.631083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2055:3922], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.631660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3923], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.632302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3924], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.632938Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3925], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.633508Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3926], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.634054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3927], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.634611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3928], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.635155Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3929], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n 
(let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.635721Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3930], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.636316Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3931], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.636901Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3932], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.637489Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3933], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.638041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3934], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.638583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3935], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T09:20:57.639183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3936], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is 
[1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.691848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.691865Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.693604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.693680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.693703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.695613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.695678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700708Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.716039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.716047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T09:20:39.717724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
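
The "Change state for txid 1:0" lines above walk the sub-domain operation through the numeric states 2 -> 3 -> 128 -> 240, and the surrounding entries name the sub-operation class active at each stage (TCreateParts, TConfigureParts, TPropose, TDone), with the 128 -> 240 step taken only after TEvOperationPlan arrives from the coordinator. The sketch below is illustrative only: the numeric values are taken verbatim from the trace, but the enum and helper are hypothetical and are not the schemeshard's own definitions.

#include <cstdio>

enum ETxState {
    CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create, do next state")
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128,  // NSubDomainState::TPropose, waits for TEvOperationPlan from the coordinator
    Done           = 240,  // TDone ProgressState, after which ready parts are counted as 1/1
};

int main() {
    const ETxState path[] = {CreateParts, ConfigureParts, Propose, Done};
    for (ETxState s : path) {
        std::printf("state %d\n", s);  // prints 2, 3, 128, 240 -- the codes seen in the trace
    }
}
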
2024-11-21T09:20:39.719689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... e 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:20:57.498506Z node 64 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:20:57.498511Z node 64 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:20:57.498515Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:20:57.498638Z node 64 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:20:57.498650Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:20:57.498654Z node 64 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:20:57.498658Z node 64 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:20:57.498662Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T09:20:57.498678Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 2/3, is published: true 2024-11-21T09:20:57.499332Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:20:57.499345Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:57.499411Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:20:57.499436Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 3/3 2024-11-21T09:20:57.499440Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:20:57.499446Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2024-11-21T09:20:57.499449Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:20:57.499455Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:20:57.499459Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:20:57.499494Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:20:57.499498Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:1 2024-11-21T09:20:57.499501Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:1 2024-11-21T09:20:57.499507Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:20:57.499510Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:2 2024-11-21T09:20:57.499513Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:2 2024-11-21T09:20:57.499521Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:20:57.499673Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:20:57.499681Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:20:57.499690Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:20:57.499695Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:20:57.499701Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:20:57.499829Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:20:57.499844Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:20:57.501688Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:20:57.502347Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 702 RawX2: 274877909544 } TabletId: 72075186233409549 State: 4 2024-11-21T09:20:57.502372Z node 64 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:20:57.502472Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 706 RawX2: 274877909546 } TabletId: 72075186233409550 State: 4 2024-11-21T09:20:57.502481Z node 64 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:20:57.506343Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 
72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:57.506513Z node 64 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2024-11-21T09:20:57.506981Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:20:57.507048Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:20:57.507194Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:20:57.507250Z node 64 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T09:20:57.507293Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T09:20:57.507319Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409550 2024-11-21T09:20:57.507655Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:20:57.507662Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:20:57.507675Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:57.508410Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:20:57.508424Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:20:57.508553Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:20:57.508561Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T09:20:57.508578Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2024-11-21T09:20:57.508654Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:20:57.508660Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T09:20:57.508677Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:20:57.508680Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:20:57.508745Z node 64 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:20:57.508764Z node 64 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:20:57.508771Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:20:57.508776Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [64:992:2867] 2024-11-21T09:20:57.508791Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:20:57.508794Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [64:992:2867] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2024-11-21T09:20:57.508865Z node 64 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:57.508915Z node 64 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 64us result status StatusPathDoesNotExist 2024-11-21T09:20:57.508948Z node 64 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2024-11-21T09:20:57.404725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:57.405093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T09:20:57.405134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T09:20:57.405143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T09:20:57.405153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T09:20:57.405159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T09:20:57.405173Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up 
state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.405207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T09:20:57.405214Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.405232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T09:20:57.405257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 2024-11-21T09:20:57.405265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T09:20:57.405271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T09:20:57.405279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T09:20:57.405284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T09:20:57.405288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T09:20:57.405296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:34:2065], cookie# 1 2024-11-21T09:20:57.405301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T09:20:57.405306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T09:20:57.405313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T09:20:57.405319Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T09:20:57.405323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 |96.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2024-11-21T09:20:58.931515Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:58.931972Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:58.932002Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:58.932024Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T09:20:58.932036Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T09:20:58.932048Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Set up state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.932062Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T09:20:58.932075Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T09:20:58.932081Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.932655Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T09:20:58.932676Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.932684Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T09:20:58.932689Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.932694Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T09:20:58.932698Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.942965Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:58.942996Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T09:20:58.943009Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.943043Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:44:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:58.943049Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T09:20:58.943059Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T09:20:58.943065Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.943076Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T09:20:58.943080Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.943219Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T09:20:58.943229Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:33:2064] 2024-11-21T09:20:58.943237Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Update to strong state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTable[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:41.329052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:41.329069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.329073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:41.329076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:41.329084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:41.329086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:41.329093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.329144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:41.335922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:41.335935Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.337341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:41.337413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:41.337430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:41.339329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:41.339395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:41.339471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.339628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.340317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.340529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.340537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.340546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:41.340550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.340555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:41.340578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:41.341440Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.351654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:41.351702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.351729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:41.351755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:41.351760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.352290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.352319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:41.352366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.352376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:41.352382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:41.352389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:41.352802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.352819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:41.352825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:41.353247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.353258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:20:41.353262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.353267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.353712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:41.353968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:41.354003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:41.354111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.354126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:41.354129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.354157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:41.354161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.354178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:41.354185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.354419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.354424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.354441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.354444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:41.354481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.354484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:41.354489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T09:20:41.354492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.354495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:41.354498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.354500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:41.354502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:41.354508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:41.354511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:41.354513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... on: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } 
PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:59.131289Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:20:59.131339Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 58us result status StatusSuccess 2024-11-21T09:20:59.131495Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 
KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T09:20:59.131555Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:20:59.131575Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 23us result status StatusSuccess 2024-11-21T09:20:59.131634Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardColumnTableTTL::AlterColumnTable |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: 
[1:106:2138] 2024-11-21T09:21:00.458873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:00.458915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.458921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:00.458932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:00.458936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:00.458940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:00.458947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.459024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:00.471165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:00.471184Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:00.473923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:00.474684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:00.474723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:00.476850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:00.477071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:00.481433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.482223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:00.484731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:00.486785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.486791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:00.486805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.488169Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:00.504605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.505458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:00.505494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:00.506291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:00.506302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:00.506307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:00.506789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:00.507196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.507220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.507761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:00.508163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:00.509114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:00.509328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:00.509429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.509462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:00.509938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.509994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:00.510059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:00.510075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:00.510079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:00.510089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:00.510096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:00.510107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:00.510111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:00.510115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T09:21:00.510426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:00.510451Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:00.510456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.510470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:00.511233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:00.511306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:00.511524Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:00.512881Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:00.513495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:00.514439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.514475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2024-11-21T09:21:00.514600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2024-11-21T09:21:00.514863Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:00.515673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:00.515705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T09:21:00.515846Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 
281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:00.458885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:00.458910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.458915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:00.458919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:00.458924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:00.458928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:00.458936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.459024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:00.471173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:00.471192Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:00.473906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:00.474545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:00.474580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:00.476778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:00.477013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:00.481430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.482391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:00.484377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:00.486788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.486794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:00.486807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.488157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:00.505908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:00.506122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:00.506129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:00.506736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:00.506748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:00.506753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:00.507325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:00.507771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.507790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.508263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:00.508659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:00.509121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:00.509320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:00.509433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.509459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:00.509963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.509998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:00.510056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510062Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:00.510071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:00.510074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:00.510083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T09:21:00.510087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:00.510090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:00.510100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:00.510105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:00.510108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:00.510387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:00.510411Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:00.510415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.510428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... atedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:00.599627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 178 } } 2024-11-21T09:21:00.599636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 178 } } 2024-11-21T09:21:00.600883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.600933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T09:21:00.600957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.600963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:00.600969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.600980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.600984Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:00.600989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:00.600995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T09:21:00.601162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.601170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:00.601196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.601200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:00.601206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:00.601213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.601216Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.601220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:00.601226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:21:00.603278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:00.603312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:00.603322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:00.604317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:00.604383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 
2024-11-21T09:21:00.604459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.604504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:00.604576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.604638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:00.604647Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T09:21:00.604660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T09:21:00.604665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T09:21:00.604671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T09:21:00.604742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.604748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:00.604754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T09:21:00.604757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:21:00.604762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T09:21:00.604776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T09:21:00.604781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:21:00.604787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:00.604791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:00.604855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:00.604860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T09:21:00.604864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T09:21:00.604869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:21:00.604872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T09:21:00.604876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T09:21:00.604885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:00.605641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:00.605653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:00.605778Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:00.605831Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 59us result status StatusSuccess 2024-11-21T09:21:00.605960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardTTLTestsWithReboots::MoveTable >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] |96.1%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2024-11-21T09:18:45.124364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:45.124697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:45.124711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00475a/r3tmp/tmp3M7kLh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25247, node 1 TClient is connected to server localhost:27992 2024-11-21T09:18:45.323211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:45.341866Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:45.343354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:45.343367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:45.343372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:45.343430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:45.384878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:45.384909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:45.395348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:45.499858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T09:18:45.521209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:45.521252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:45.521286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:45.521303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:45.521318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:45.521335Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:45.521350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:45.521370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:18:45.521385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:18:45.521400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:18:45.521415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:18:45.521430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:18:45.525212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:18:45.525230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:18:45.525241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:18:45.525246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:18:45.525262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:18:45.525268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:18:45.525280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:18:45.525286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:18:45.525295Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:18:45.525300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:18:45.525307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:18:45.525312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:18:45.525359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:18:45.525365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:18:45.525381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:18:45.525387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:18:45.525398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:18:45.525403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:18:45.525420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:18:45.525425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:18:45.525437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:18:45.525442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:18:45.528183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:18:45.528202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:18:45.528265Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:18:45.528283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:18:45.528301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:18:45.528316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:18:45.528331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:18:45.528346Z node 1 :TX_COLUMNSHA ... etableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"},"tiering2":{"tieringRuleId":"tiering2","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=0;WAITING=1 2024-11-21T09:19:44.917880Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjdlYzAwNDgtZGQ3ZmRlZWItMjU0NWNlODItZjNlNzhmOWY=, ActorId: [1:3518:4719], ActorState: 
ExecuteState, TraceId: 01jd70adp12renrscev8f78hq9, Create QueryResponse for error on request, msg: 2024-11-21T09:19:44.918218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715762. Ctx: { TraceId: 01jd70adp12renrscev8f78hq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdlYzAwNDgtZGQ3ZmRlZWItMjU0NWNlODItZjNlNzhmOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT tier2 (TYPE TIER);RESULT=
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=0;WAITING=1 2024-11-21T09:19:55.724229Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRkZWRhZDQtZGQ3MTMzNGEtYjlhZTE1ZjEtYzcxODEzNTQ=, ActorId: [1:3802:4926], ActorState: ExecuteState, TraceId: 01jd70ar7da5gz2wv7skm65gex, Create QueryResponse for error on request, msg: 2024-11-21T09:19:55.724495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715775. Ctx: { TraceId: 01jd70ar7da5gz2wv7skm65gex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkZWRhZDQtZGQ3MTMzNGEtYjlhZTE1ZjEtYzcxODEzNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT tiering2 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 2024-11-21T09:20:17.532103Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NhNjg2YS0zYmM0YWIxYy1hY2RlYTVjMy0zZWJiNzQ2Nw==, ActorId: [1:4418:5380], ActorState: ExecuteState, TraceId: 01jd70bdgy7vftg3qasxz0ms9q, Create QueryResponse for error on request, msg: 2024-11-21T09:20:17.532349Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715802. Ctx: { TraceId: 01jd70bdgy7vftg3qasxz0ms9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NhNjg2YS0zYmM0YWIxYy1hY2RlYTVjMy0zZWJiNzQ2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);RESULT=
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2024-11-21T09:20:28.330784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715812:0, at schemeshard: 72057594046644480 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);EXPECTATION=1;WAITING=0 incorrect snapshot SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} TieringsCount incorrect: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}};expectation=0 REQUEST=DROP OBJECT tiering1 (TYPE TIERING_RULE);RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: 
{"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc2"}}}} 2024-11-21T09:20:39.454822Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:39.454871Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:39.454897Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier2 (TYPE TIER);RESULT=;EXPECTATION=1 2024-11-21T09:20:50.382214Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:50.382251Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:50.382272Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=DROP OBJECT tier2 (TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1 (TYPE TIER);EXPECTATION=1;WAITING=0 incorrect snapshot SNAPSHOT: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}} TiersCount incorrect: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}}}};expectation=0 REQUEST=DROP OBJECT tier1 (TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} SNAPSHOT: {"rules":{},"tiers":{}} 2024-11-21T09:21:01.427192Z node 1 :TX_COLUMNSHARD ERROR: 
tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:21:01.427223Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:21:01.427236Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:00.458877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:00.458910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.458916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:00.458920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:00.458925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:00.458928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:00.458936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.459023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:00.471165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:00.471182Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:00.474001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:00.474762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:00.474800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:00.476815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:00.477030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:00.481449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.482222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:00.484272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486718Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:00.486781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.486787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:00.486801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.488159Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:00.504605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.505458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:00.505511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:00.506291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:00.506302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:00.506306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:00.506726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:00.507125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:21:00.507136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.507146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.507695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:00.508149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:00.509113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:00.509308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:00.509413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.509451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:00.509949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.509984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:00.510045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:00.510065Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:00.510068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:00.510077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:00.510084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:00.510095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:00.510100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:00.510103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:00.510376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:00.510404Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:00.510408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.510422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
perationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.200786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.200800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.200814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.200827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.200839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.201804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.203161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.203204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.203247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.203258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:21:02.203274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:21:02.203279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:02.203286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:21:02.203307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2599:3865] message: TxId: 103 2024-11-21T09:21:02.203315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:02.203330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:21:02.203334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:21:02.203578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 
2024-11-21T09:21:02.205368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:21:02.205384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:3915:5115] TestWaitNotification: OK eventTxId 103 2024-11-21T09:21:02.205531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:02.205591Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 69us result status StatusSuccess 2024-11-21T09:21:02.205746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 
72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2024-11-21T09:21:02.206384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:02.206420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.208003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2024-11-21T09:21:02.208710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:02.208743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T09:21:02.208824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T09:21:02.208832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 104 2024-11-21T09:21:02.208960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T09:21:02.208984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:21:02.208988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4348:5547] TestWaitNotification: OK eventTxId 104 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:02.406976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:02.406998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.407003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:02.407008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:02.407014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:02.407017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:02.407026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.407100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:02.417187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:02.417205Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:02.420051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:02.420642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:02.420678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:02.422203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:02.422417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:02.422499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.422590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:02.423594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2024-11-21T09:21:02.423803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.423810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.423838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:02.423843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.423848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:02.423860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.424879Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:02.438988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:02.439054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.439095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:02.439130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:02.439138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.439622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.439644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:02.439671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.439679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:02.439683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:02.439687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:02.440065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.440077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:02.440082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:02.440522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.440537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.440543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.440550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.441103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:02.441485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:02.441524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:02.441677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.441697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:02.441706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.441753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:02.441759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.441786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:02.441797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:02.442224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.442231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.442265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.442270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:02.442336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.442342Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T09:21:02.442353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:02.442358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.442364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:02.442369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.442373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:02.442377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:02.442387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:02.442393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:02.442397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:02.442654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:02.442668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:02.442672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:02.442677Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:02.442681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:02.442694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
t { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 5000004 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:02.543641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T09:21:02.543663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2024-11-21T09:21:02.543753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.543766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:02.543771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:21:02.543826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2024-11-21T09:21:02.543846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T09:21:02.544516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.544527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:02.544588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.544593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T09:21:02.544706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.544714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:02.544778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:21:02.544788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T09:21:02.544792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T09:21:02.544797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T09:21:02.544802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:02.544813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T09:21:02.545524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T09:21:02.566404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 161 } } 2024-11-21T09:21:02.566421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:02.566443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 161 } } 2024-11-21T09:21:02.566455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 161 } } FAKE_COORDINATOR: Erasing txId 104 2024-11-21T09:21:02.566636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:21:02.566642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:02.566655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:21:02.566663Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:02.566670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T09:21:02.566679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.566718Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.566722Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:02.566727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T09:21:02.567249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.567336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.567383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.567389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T09:21:02.567398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T09:21:02.567402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:21:02.567407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T09:21:02.567418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 104 2024-11-21T09:21:02.567424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T09:21:02.567428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T09:21:02.567432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T09:21:02.567450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:02.567759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T09:21:02.567767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:434:2408] TestWaitNotification: OK eventTxId 104 2024-11-21T09:21:02.567854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:02.567897Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 49us result status StatusSuccess 2024-11-21T09:21:02.567986Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { 
Disabled { } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamCreatedWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:42.393746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:42.393761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:42.393764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:42.393768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:42.393777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:42.393780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:42.393785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:42.393852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:42.400838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:42.400850Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:42.402231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:42.402288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:42.402304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:42.404766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:42.404820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:42.404918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.405084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:42.405636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:42.405814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:42.405821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:42.405828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:42.405832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:42.405836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:42.405861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:42.406760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:42.417033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:42.417074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:42.417132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T09:20:42.417137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:42.417575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:42.417582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:42.417585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:42.417852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:42.417861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:42.418095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.418100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.418104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:42.418109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:42.418472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:42.418779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:42.418806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:42.418906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:42.418922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T09:20:42.418926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:42.418959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:42.418963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:42.418983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:42.418993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:42.419262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:42.419268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:42.419286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:42.419289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:42.419328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:42.419332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:42.419338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:42.419340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:42.419343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:42.419346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:42.419349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:42.419351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:42.419358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:42.419361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:42.419363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807003Z node 80 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807134Z node 80 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807148Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807150Z node 80 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.807154Z node 80 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:21:02.807157Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:02.807355Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 343597385997 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T09:21:02.807362Z node 80 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:02.807368Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 343597385997 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T09:21:02.807378Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.807380Z node 80 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.807383Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:02.807386Z node 80 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T09:21:02.807457Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807462Z node 80 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.807467Z node 80 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 
2024-11-21T09:21:02.807471Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:21:02.807535Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807539Z node 80 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.807543Z node 80 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:21:02.807546Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T09:21:02.807553Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:21:02.807600Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.807604Z node 80 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.807607Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:21:02.808290Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.808317Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:21:02.808328Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808340Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.808353Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:21:02.808415Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.808422Z node 80 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:21:02.808431Z node 80 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T09:21:02.808435Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:21:02.808439Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T09:21:02.808443Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:21:02.808448Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:21:02.808451Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:21:02.808484Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:21:02.808489Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:21:02.808492Z node 80 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:21:02.808497Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:02.808500Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T09:21:02.808503Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T09:21:02.808507Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:02.808510Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T09:21:02.808513Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T09:21:02.808521Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:21:02.808585Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808605Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808609Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808622Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:02.808626Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:21:02.808636Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:02.808640Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:02.808646Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:21:02.809034Z node 80 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:21:02.809086Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:21:02.809092Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:21:02.809128Z node 80 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:21:02.809139Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:21:02.809141Z node 80 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [80:748:2652] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:21:02.809182Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:02.809202Z node 80 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 26us result status StatusPathDoesNotExist 2024-11-21T09:21:02.809228Z node 80 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTableCreatedWithInitialScan[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T09:20:39.680796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.689877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.689897Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.691463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.691528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.691547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700640Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.716001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2024-11-21T09:20:39.716007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T09:20:39.719087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
09:21:02.808581Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:21:02.808660Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808664Z node 82 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.808668Z node 82 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T09:21:02.808671Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T09:21:02.808713Z node 82 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808938Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.808942Z node 82 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.808944Z node 82 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:21:02.808947Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 6 2024-11-21T09:21:02.808992Z node 82 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.809014Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.809016Z node 82 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.809018Z node 82 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:21:02.809020Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:21:02.809049Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.809051Z node 82 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.809055Z node 82 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:21:02.809057Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:02.809062Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2024-11-21T09:21:02.809086Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.809088Z node 82 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:02.809090Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/5, is published: true 2024-11-21T09:21:02.809420Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.809438Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:21:02.809445Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:21:02.809927Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.809981Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.809985Z node 82 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:21:02.809992Z node 82 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 5/5 2024-11-21T09:21:02.809994Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2024-11-21T09:21:02.809997Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 5/5, is published: true 2024-11-21T09:21:02.810000Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 5/5 2024-11-21T09:21:02.810003Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:21:02.810005Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:21:02.810018Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:02.810021Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:21:02.810023Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:21:02.810026Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:02.810028Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T09:21:02.810030Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T09:21:02.810032Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:02.810035Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T09:21:02.810036Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T09:21:02.810039Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:21:02.810043Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:4 2024-11-21T09:21:02.810045Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:4 2024-11-21T09:21:02.810050Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T09:21:02.810109Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:02.810113Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T09:21:02.810119Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:21:02.810122Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:21:02.810125Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:21:02.810137Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810146Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810153Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810160Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810169Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810172Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:02.810479Z node 82 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:21:02.810550Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:21:02.810554Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:21:02.810590Z node 82 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:21:02.810601Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:21:02.810604Z node 82 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [82:827:2721] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:21:02.810643Z node 82 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:02.810665Z node 82 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 29us result status StatusPathDoesNotExist 2024-11-21T09:21:02.810689Z node 82 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TSchemeShardTTLTests::ShouldCheckQuotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.TestTenants.test_yql_operations_over_dynamic_nodes.enable_alter_database_create_hive_first--true/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.TestTenants.test_yql_operations_over_dynamic_nodes.enable_alter_database_create_hive_first--true/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.TestTenants.test_yql_operations_over_dynamic_nodes.enable_alter_database_create_hive_first--true/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.TestTenants.test_yql_operations_over_dynamic_nodes.enable_alter_database_create_hive_first--true/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.test_operation_with_locks.enable_alter_database_create_hive_first--false/cluster/slot_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.test_operation_with_locks.enable_alter_database_create_hive_first--false/cluster/slot_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.test_operation_with_locks.enable_alter_database_create_hive_first--false/cluster/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/stash.py:104: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/jptk/004644/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/chunk19/testing_out_stuff/test_tenants.py.test_operation_with_locks.enable_alter_database_create_hive_first--false/cluster/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:03.935598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:03.935622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:03.935627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:03.935636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:03.935642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:03.935645Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:03.935654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:03.935732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:03.946499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:03.946522Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:03.949738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:03.950535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:03.950590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:03.952798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:03.953431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:03.953537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.953645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:03.955066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.955375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:03.955387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.955436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:03.955445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:03.955451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:03.955469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.957029Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:03.973424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:03.973488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.973541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:03.973576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:03.973581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, 
at schemeshard: 72057594046678944 2024-11-21T09:21:03.974288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.974330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:03.974374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.974384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:03.974389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:03.974394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:03.974863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.974881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:03.974886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:03.975366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.975380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.975386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.975392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.975906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:03.976405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:03.976460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:03.976659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.976686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:03.976695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.976746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T09:21:03.976753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.976780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:03.976791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:03.977249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:03.977257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:03.977296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.977301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:03.977378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.977386Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:03.977397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:03.977400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.977406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:03.977412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.977416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:03.977419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:03.977431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:03.977436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:03.977439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:03.977705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:03.977719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:03.977724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:03.977729Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:03.977734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T09:21:03.977748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... HEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:04.074608Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:21:04.074611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:04.074619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:21:04.076201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:04.076254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:04.076858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 254 } } 2024-11-21T09:21:04.076870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:04.076898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 254 } } 2024-11-21T09:21:04.076910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 254 } } 2024-11-21T09:21:04.077029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.077037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:04.077049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.077055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:04.077075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.077089Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.077092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T09:21:04.077885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.077971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.099124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:04.099147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.099155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:04.099162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:04.099173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099177Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:21:04.099725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.099849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T09:21:04.099858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:21:04.099862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 
2024-11-21T09:21:04.099874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T09:21:04.099879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T09:21:04.100343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.100357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:04.100369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:04.100373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:04.100379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:21:04.100394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T09:21:04.100401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:04.100408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:04.100412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:04.100441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:21:04.100445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:04.100914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:04.100930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:429:2393] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:04.101039Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.101084Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 55us result status StatusSuccess 2024-11-21T09:21:04.101227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 
1 TableSchemaVersion: 1 IsBackup: true } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:03.958393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:03.958414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:03.958418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:03.958425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:03.958429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:03.958431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:03.958437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:03.958499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:03.968594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:03.968617Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:03.971641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:03.972432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:03.972486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:03.974157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:03.974594Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:03.974689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.974773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:03.976013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.976274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:03.976283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.976313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:03.976318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:03.976322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:03.976332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.977563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:03.991721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:03.991797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.991860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:03.991902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:03.991910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.992557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.992601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:03.992649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.992659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:03.992664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:03.992669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T09:21:03.993192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.993206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:03.993211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:03.993592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.993602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.993608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.993615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.994159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:03.994573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:03.994628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:03.994800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:03.994824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:03.994833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.994907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:03.994914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:03.994945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:03.994957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:03.995431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:03.995441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:03.995490Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:03.995495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:03.995574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:03.995581Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:03.995592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:03.995596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.995602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:03.995607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:03.995612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:03.995615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:03.995627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:03.995633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:03.995637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:03.995921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:03.995935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:03.995939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:03.995945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:03.995949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:03.995965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
G: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T09:21:04.173209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:04.173233Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:04.173244Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T09:21:04.173309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T09:21:04.173314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T09:21:04.173320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T09:21:04.173343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.173362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:04.173370Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T09:21:04.173374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T09:21:04.173774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.173786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T09:21:04.173797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T09:21:04.173801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready 
parts: 1/1 2024-11-21T09:21:04.173807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T09:21:04.173817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T09:21:04.173824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T09:21:04.173828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T09:21:04.173832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T09:21:04.173844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T09:21:04.174330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T09:21:04.174347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T09:21:04.174357Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T09:21:04.174370Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:04.174735Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:04.174752Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, 
read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:04.174759Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T09:21:04.175116Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:04.175136Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:04.175141Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T09:21:04.175165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:04.175171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:464:2428] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:04.175281Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.175338Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 65us result status StatusSuccess 2024-11-21T09:21:04.175450Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 
2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:04.047344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:04.047367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:04.047372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:04.047377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:04.047382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:04.047386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:04.047395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:04.047474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:04.057472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:04.057489Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:04.060291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:04.061033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:04.061079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T09:21:04.062251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:04.062412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:04.062502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.062583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:04.063447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:04.063696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:04.063706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:04.063744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:04.063751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:04.063757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:04.063769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.065080Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:04.081619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:04.081694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.081741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:04.081780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:04.081787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.082366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.082386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:04.082418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.082427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:04.082431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:04.082435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:04.082875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.082891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:04.082896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:04.083300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.083311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.083316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:04.083321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:04.083873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:04.084260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:04.084299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:04.084435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.084453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:04.084457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:04.084488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:04.084492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:04.084507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:04.084514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:04.084920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:04.084928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:04.084954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:04.084959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:04.085015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.085022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:04.085030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:04.085034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:04.085039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:04.085043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:04.085047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:04.085050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:04.085062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:04.085067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:04.085071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:04.085315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:04.085332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:04.085336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:04.085340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:04.085344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:04.085356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
5186233409546 2024-11-21T09:21:04.242090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.242130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 382 RawX2: 4294969647 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:04.242142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#103:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2024-11-21T09:21:04.242174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2024-11-21T09:21:04.242218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:21:04.242229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:04.243320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:04.243331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:04.243384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:21:04.243414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:04.243417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T09:21:04.243421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 4 2024-11-21T09:21:04.243749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.243765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:04.243978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:21:04.243989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:21:04.243994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:21:04.244000Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T09:21:04.244005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T09:21:04.244191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 
PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:21:04.244199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T09:21:04.244203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T09:21:04.244229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T09:21:04.244233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:21:04.244242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T09:21:04.244686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 242 } } 2024-11-21T09:21:04.244701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2024-11-21T09:21:04.244726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 242 } } 2024-11-21T09:21:04.244739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 242 } } 2024-11-21T09:21:04.244955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 546 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:21:04.244965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2024-11-21T09:21:04.244981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 546 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:21:04.244987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:04.244993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 546 RawX2: 4294969788 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T09:21:04.245006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left 
await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:04.245009Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.245013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:21:04.245019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T09:21:04.245964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:21:04.246022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T09:21:04.246046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.246063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.246107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.246113Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:21:04.246126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:21:04.246130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:04.246136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:21:04.246148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:410:2377] message: TxId: 103 2024-11-21T09:21:04.246154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:04.246159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:21:04.246163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:21:04.246183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:04.246612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:21:04.246622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:572:2511] TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2024-11-21T09:21:04.247225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:04.247285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2024-11-21T09:21:04.247295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable 
Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } } }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.247372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2024-11-21T09:21:04.247887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:04.247922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:01.792624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:01.792647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:01.792652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:01.792658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:01.792664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:01.792667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:01.792677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:01.792751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:01.804696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:01.804718Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:01.807600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:01.808449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:01.808503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:01.810116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T09:21:01.810341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:01.810449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:01.810541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:01.811770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:01.812072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:01.812084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:01.812129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:01.812138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:01.812145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:01.812162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.813562Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:01.827146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:01.827223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.827280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:01.827312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:01.827316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:01.828085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:01.828100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:01.828105Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:01.828507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:01.828945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.828967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:01.828975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:01.829508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:01.829936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:01.829990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:01.830175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:01.830196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:01.830206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:01.830250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:01.830254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:01.830278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:01.830286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:01.830701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:01.830708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:21:01.830757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:01.830761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:01.830846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:01.830852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:01.830863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:01.830867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:01.830887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:01.830892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:01.830897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:01.830900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:01.830913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:01.830918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:01.830922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:01.831157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:01.831169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:01.831173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:01.831178Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:01.831183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:01.831197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Size 619 rowCount 2 cpuUsage 0 2024-11-21T09:21:04.232955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T09:21:04.232987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.233032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.233078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:21:04.233795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 
2024-11-21T09:21:04.233802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:21:04.234238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.234251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:21:04.235675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.235691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T09:21:04.235889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.235901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:04.235945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.235981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.229500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.229500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 
72057594046678944:1, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.236687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.236691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.299827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2024-11-21T09:21:04.299883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2024-11-21T09:21:04.299902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2024-11-21T09:21:04.299914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2024-11-21T09:21:04.299963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2024-11-21T09:21:04.299970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2024-11-21T09:21:04.299975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2024-11-21T09:21:04.299988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T09:21:04.299994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2024-11-21T09:21:04.299998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:04.300008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T09:21:04.300013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2024-11-21T09:21:04.300017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2024-11-21T09:21:04.300026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 619 row count 2 2024-11-21T09:21:04.300031Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2024-11-21T09:21:04.300036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 619, with borrowed parts 2024-11-21T09:21:04.312553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.312578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T09:21:04.313201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:04.313248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:04.313258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.233500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:04.313271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:05.234399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:05.234421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.234424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:05.234427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:05.234431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:05.234433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T09:21:05.234439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.234518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:05.243570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:05.243591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:05.246541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:05.247297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:05.247344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:05.249115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:05.249352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:05.249449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.249526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:05.250709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.250991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.251004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.251044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:05.251052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.251059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:05.251072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.252590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:05.268335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:05.268400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.268445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:05.268483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:05.268490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269264Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:05.269327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:05.269339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:05.269344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:05.270057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:05.270476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.270499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.271045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:05.271444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:05.271486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:05.271649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.271674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.271684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.271733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:05.271740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.271765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.271776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:05.272319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.272355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:05.272421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:05.272443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:05.272446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.272451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:05.272455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.272459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:05.272462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:05.272473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:05.272478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:05.272481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:05.272722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.272734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.272738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:05.272742Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:05.272746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.272761Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:05.273584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:05.273666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:05.273922Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:05.275160Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:05.275678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:05.275755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.275773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2024-11-21T09:21:05.275869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2024-11-21T09:21:05.276122Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:05.276863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.276909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T09:21:05.277024Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: 
[1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:05.232958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:05.232986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.232991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:05.233001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:05.233005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:05.233008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:05.233014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.233088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:05.242918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:05.242940Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:05.247139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:05.248006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:05.248059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:05.250007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:05.250190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:05.250265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.250323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:05.251795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.252048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.252067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.252105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:05.252112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.252118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:05.252130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.253520Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:05.268113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:05.268198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.268314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:05.268355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:05.268360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:05.269307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:05.269323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:05.269329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:05.269818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:05.269836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:05.270341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.270360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.270367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.270880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:05.271314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:05.271369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:05.271527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.271553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.271562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.271639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:05.271646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.271677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.271689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:05.272229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.272289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:05.272373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.272381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:05.272392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:05.272397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.272403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:05.272408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.272412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:05.272416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:05.272431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:05.272440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:05.272443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:05.272716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.272732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.272736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:05.272742Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:05.272746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.272761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... UG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:21:05.373811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T09:21:05.373912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.373931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.373941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:21:05.373988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T09:21:05.374006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:21:05.374730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.374740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:05.374786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.374791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:21:05.374799Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.374805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:05.374950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:05.374960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:05.374964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:05.374968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:21:05.374972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:21:05.374983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:21:05.375818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:05.386611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 224 } } 2024-11-21T09:21:05.386629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:05.386656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 224 } } 2024-11-21T09:21:05.386669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 224 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:05.386828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:05.386837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:05.386852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 
2024-11-21T09:21:05.386859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:05.386865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:05.386877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.386882Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.386886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:05.386892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:21:05.387685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.387791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.387850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.387857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:05.387871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:05.387875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:05.387880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:21:05.387894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T09:21:05.387900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:05.387904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:05.387908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:05.387930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:05.388403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:05.388416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:451:2415] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:05.388516Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:05.388578Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" 
took 67us result status StatusSuccess 2024-11-21T09:21:05.388703Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:05.943873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:05.943900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.943908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:05.943914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:05.943919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:05.943923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:05.943933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.944028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:05.954328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:05.954350Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:05.957402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:05.958193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:05.958252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:05.960249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:05.960584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:05.960706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.960824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:05.962451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.962744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.962757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.962809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:05.962817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.962823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:05.962839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.964322Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:05.979961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T09:21:05.980044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.980101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:05.980141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:05.980147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.980987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.981018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:05.981063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.981073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:05.981078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:05.981083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:05.981615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.981628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:05.981633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:05.981996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.982005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.982011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.982016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.982573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:05.982992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:05.983043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:05.983231Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.983256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.983262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.983311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:05.983317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.983344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.983356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:05.983803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.983812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.983852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.983858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:05.983928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.983935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:05.983946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:05.983950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.983956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:05.983961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.983965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:05.983969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:05.983980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:05.983985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:05.983989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:05.984303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.984323Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.984328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:05.984333Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:05.984338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.984352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:05.985277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:05.985364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:05.985647Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:05.986977Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:05.987539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:05.987598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.987684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2024-11-21T09:21:05.987841Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:05.988964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.988997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T09:21:05.989142Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader 
for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.014634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.014665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.014670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.014675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.014682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.014685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.014711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.014803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.024935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.024954Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.027749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:06.028538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.028591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.030359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.030616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.030723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.030817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.032087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.032374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.032386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.032428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.032435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.032442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.032456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.033962Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 
2024-11-21T09:21:06.047771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.047838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.047887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:06.047919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:06.047923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.048485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.048508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:06.048540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.048548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:06.048551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:06.048555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:06.049034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.049046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:06.049049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:06.052448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.052464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.052471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.052477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.053063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:06.054089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:06.054166Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:06.054402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.054449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.054458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.054533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:06.054541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.054574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.054590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:06.055344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.055353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.055400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.055406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:06.055494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.055504Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:06.055517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:06.055522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.055528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:06.055533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.055541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:06.055545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:06.055558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.055564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:06.055568Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:06.055906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.055929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.055935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:06.055940Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:06.055945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.055962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... D DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2024-11-21T09:21:06.104790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.104812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.104821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2024-11-21T09:21:06.104848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2024-11-21T09:21:06.104871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.104882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T09:21:06.106262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.106272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.106308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:06.106347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.106352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 
2024-11-21T09:21:06.106358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:21:06.106373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.106379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:06.106696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.106717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.106722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:06.106727Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:21:06.106732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:06.106926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.106936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.106939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:06.106943Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:21:06.106947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:06.106961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:21:06.107006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 308 } } 2024-11-21T09:21:06.107011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:06.107025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 308 } } 
2024-11-21T09:21:06.107035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 308 } } 2024-11-21T09:21:06.107173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.107181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:06.107195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.107200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:06.107207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.107217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.107220Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.107225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:06.107230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:21:06.108082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.108130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.108157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.108505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.108566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.108572Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:06.108582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:21:06.108585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:06.108591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:21:06.108603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 101 2024-11-21T09:21:06.108608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:06.108612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:06.108616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:06.108633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:06.109014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.109024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:343:2318] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T09:21:06.109862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.109909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.109975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2024-11-21T09:21:06.110427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.110452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.112191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.112234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.112240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.112250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.112256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.112259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.112269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.112368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.122484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.122513Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.126049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:06.126845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.126901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.128567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.128855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.128981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.129069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.130834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.131099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.131115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.131154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.131162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.131169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.131185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.132860Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:06.149194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.149279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.149331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T09:21:06.149375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:06.149383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.152480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.152518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:06.152563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.152573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:06.152578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:06.152583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:06.153245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.153261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:06.153267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:06.153696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.153707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.153712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.153718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.154306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:06.154734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:06.154791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:06.154968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.154999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.155010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.155067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:06.155076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.155104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.155118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:06.155639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.155649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.155692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.155699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:06.155772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.155781Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:06.155791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:06.155796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.155802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:06.155807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.155810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:06.155814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:06.155825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.155831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:06.155835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:06.156136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.156156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.156161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2024-11-21T09:21:06.156165Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:06.156171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.156187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ndRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:06.240949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 200 } } 2024-11-21T09:21:06.240957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 200 } } 2024-11-21T09:21:06.241051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T09:21:06.241069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:06.241081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241095Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T09:21:06.241166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:06.241203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:06.241213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.241218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241221Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:06.241229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:21:06.241981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.242000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.242007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.242472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.242500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T09:21:06.242622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T09:21:06.242625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T09:21:06.242630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T09:21:06.242655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.242660Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:06.242664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T09:21:06.242666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:21:06.242669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T09:21:06.242679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T09:21:06.242683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T09:21:06.242688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:06.242692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:06.242748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:06.242752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T09:21:06.242755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T09:21:06.242759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:21:06.242762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T09:21:06.242764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T09:21:06.242770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:06.243990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.244006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:06.244125Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:06.244171Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 54us result status StatusSuccess 2024-11-21T09:21:06.244312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" 
KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.104237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.104259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.104265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.104270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.104276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.104281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.104290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.104365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.114525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.114544Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.117247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T09:21:06.118012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.118067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.120043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.120311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.120434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.120551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.121929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.122212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.122223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.122261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.122268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.122274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.122286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.123473Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:06.138988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.139069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.139127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:06.139166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:06.139174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.139812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.139843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:06.139890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.139901Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:06.139906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:06.139910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:06.140576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.140590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:06.140595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:06.140981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.140991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.140997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.141004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.141585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:06.141940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:06.141988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:06.142158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.142182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.142192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.142244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:06.142250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.142278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.142289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:06.142785Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.142794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.142837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.142843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:06.142914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.142921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:06.142934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:06.142938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.142943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:06.142948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.142953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:06.142956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:06.142968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.142974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:06.142978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:06.143260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.143281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.143286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:06.143291Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:06.143296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.143311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
end tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:21:06.237061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T09:21:06.237131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.237148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.237154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:21:06.237217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T09:21:06.237236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:21:06.238059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.238071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:06.238120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.238125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:21:06.238227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.238254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:06.238358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:06.238371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:06.238374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:06.238379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:21:06.238384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T09:21:06.238395Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:21:06.239248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:06.250000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 232 } } 2024-11-21T09:21:06.250016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:06.250043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 232 } } 2024-11-21T09:21:06.250057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 232 } } 2024-11-21T09:21:06.250208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:06.250229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:06.250243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:06.250249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:06.250256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:06.250267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.250271Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.250275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:06.250293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:21:06.250958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.251014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.251073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.251081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:06.251094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:06.251098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:06.251108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:21:06.251121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T09:21:06.251126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:06.251132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:06.251136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:06.251158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:06.251549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.251560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:498:2426] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:06.251672Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:06.251735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 67us result status StatusSuccess 2024-11-21T09:21:06.251872Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL
|96.1%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.061428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.061455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.061460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.061465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.061470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.061474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.061484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.061564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.071355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.071376Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.075311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:06.076122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.076192Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.077923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.078648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.078771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.078866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.080763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.081064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.081076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.081114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.081122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.081129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.081141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.082670Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:06.100195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.100336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.100399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:06.100440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:06.100448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.101804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.101832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:06.101872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.101881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T09:21:06.101885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:06.101890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:06.103311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.103329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:06.103335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:06.103876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.103889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.103895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.103902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.104541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:06.105897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:06.105956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:06.106117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.106166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.106174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.106238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:06.106246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.106270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.106281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:06.107903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T09:21:06.107916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.107949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.107955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:06.108009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.108016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:06.108027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:06.108031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.108036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:06.108042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.108046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:06.108050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:06.108062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.108068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:06.108072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:06.108405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.108420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.108425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:06.108429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:06.108434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.108446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
:BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:06.287686Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:06.287728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T09:21:06.287750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:06.287773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T09:21:06.287778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T09:21:06.287784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T09:21:06.287847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.287868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.287876Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T09:21:06.287881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T09:21:06.288299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.288313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T09:21:06.288325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T09:21:06.288329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710760 ready parts: 1/1 2024-11-21T09:21:06.288335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T09:21:06.288346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T09:21:06.288351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T09:21:06.288356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T09:21:06.288360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T09:21:06.288372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T09:21:06.288743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T09:21:06.288758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T09:21:06.288768Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T09:21:06.288781Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:06.289195Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:06.289213Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, 
upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:06.289224Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T09:21:06.290256Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T09:21:06.290291Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T09:21:06.290296Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T09:21:06.290316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.290322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:464:2428] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:06.290433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:06.290494Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 71us result status StatusSuccess 2024-11-21T09:21:06.290608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" 
SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD]
>> TSchemeShardTTLTestsWithReboots::CopyTable
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:02.268794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:02.268822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.268828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:02.268832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:02.268837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:02.268841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:02.268851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.268981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:02.279076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:02.279103Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:02.282247Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:02.283064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:02.283113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:02.284790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:02.285086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:02.285194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.285303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:02.286854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.287132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.287145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.287183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:02.287191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.287197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:02.287214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.288585Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:02.303902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:02.303979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.304036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:02.304073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:02.304080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.304865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.304907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:02.304954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T09:21:02.304964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:02.304969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:02.304974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:02.305573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.305587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:02.305592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:02.306138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.306150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.306156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.306162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.306769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:02.307251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:02.307309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:02.307535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.307566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:02.307576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.307628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:02.307635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.307662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:02.307675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T09:21:02.308263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.308274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.308313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.308319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:02.308389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.308397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:02.308408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:02.308412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.308418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:02.308423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.308427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:02.308431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:02.308443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:02.308450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:02.308454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:02.308771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:02.308792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:02.308797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:02.308802Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:02.308806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:02.308824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:06.725320Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.725333Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.725381Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:06.725426Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.725433Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:21:06.725438Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T09:21:06.725558Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.725567Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:06.725934Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.725950Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.725955Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:06.725960Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:21:06.725965Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.726082Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.726092Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:06.726095Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:06.726100Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:21:06.726103Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:06.726116Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 
2024-11-21T09:21:06.726333Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 289 } } 2024-11-21T09:21:06.726341Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:06.726355Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 289 } } 2024-11-21T09:21:06.726367Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 289 } } FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:06.726474Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.726479Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:06.726488Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.726491Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:06.726495Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:06.726503Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.726505Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.726508Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:06.726511Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:21:06.726968Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:06.727253Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2024-11-21T09:21:06.727271Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.727286Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.727299Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.727304Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:06.727316Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:21:06.727318Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:06.727322Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:21:06.727336Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:339:2314] message: TxId: 101 2024-11-21T09:21:06.727342Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:06.727347Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:06.727352Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:06.727371Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:06.727744Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.727753Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:340:2315] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:06.727840Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:06.727884Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 52us result status StatusSuccess 2024-11-21T09:21:06.727985Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: 
UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.973059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.973087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.973092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.973098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.973104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.973108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.973118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.973198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.983250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.983268Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.985697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:06.986491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.986534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.987952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.988108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.988218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.988303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.989243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.989480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.989491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.989526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.989530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.989535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.989549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.990798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:07.003416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:07.003512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.003580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:07.003622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:07.003629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.004487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.004514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:07.004562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.004572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:07.004576Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:07.004581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:07.004934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.004944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:07.004948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:07.005290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.005297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.005303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.005310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.005845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:07.006252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:07.006306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:07.006497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.006519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:07.006529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.006581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:07.006587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.006616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:07.006627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:07.007105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.007112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:07.007156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.007161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:07.007235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.007241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:07.007253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:07.007258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.007264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:07.007270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.007274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:07.007278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:07.007288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:07.007294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:07.007298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:07.007553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:07.007565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:07.007569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:07.007574Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:07.007577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:07.007590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
dSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:07.077241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:21:07.077277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T09:21:07.077553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.077574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:07.077582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:21:07.077666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T09:21:07.077694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:21:07.079102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:07.079167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:21:07.079245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:07.079421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:07.079433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:07.079438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:07.079443Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:21:07.079449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:07.079459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:21:07.079525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } 2024-11-21T09:21:07.079531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:07.079545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } 2024-11-21T09:21:07.079557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 250 } } 2024-11-21T09:21:07.079632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:07.079635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:07.079643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:07.079647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:07.079651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:07.079662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079665Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:07.079671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 102:0 129 -> 240 2024-11-21T09:21:07.080256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:07.080353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.080368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.080414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.080421Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:07.080433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:07.080437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:07.080443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:21:07.080456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T09:21:07.080463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:07.080468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:07.080472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:07.080490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:07.080862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:07.080872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:386:2360] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:07.080983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:07.081019Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 44us result status StatusSuccess 2024-11-21T09:21:07.081148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2024-11-21T09:19:21.129761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659720933768447:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:21.129906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:21.132112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659723529690507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:21.132315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:21.149134Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003eed/r3tmp/tmpK47nvH/pdisk_1.dat 2024-11-21T09:19:21.154860Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:21.172226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18733, node 1 2024-11-21T09:19:21.181466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003eed/r3tmp/yandexNp1vXF.tmp 2024-11-21T09:19:21.181475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003eed/r3tmp/yandexNp1vXF.tmp 2024-11-21T09:19:21.181523Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003eed/r3tmp/yandexNp1vXF.tmp 2024-11-21T09:19:21.181556Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration 2024-11-21T09:19:21.184760Z INFO: TTestServer started on Port 8556 GrpcPort 18733 TClient is connected to server localhost:8556 PQClient connected to localhost:18733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:21.229853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:21.229881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:21.231348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:21.251022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:21.251043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:21.251946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.252040Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:21.252274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:19:21.263438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:19:21.346887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659720933769414:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.346907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659720933769441:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.346913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.347403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T09:19:21.347849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659720933769472:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.347880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:21.349875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659720933769443:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T09:19:21.367224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.367294Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659723529690877:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:21.367366Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQ1MzBlNTgtYzIxMzhjOTktOWM1N2IyMGQtNjZiZGYwMzU=, ActorId: [2:7439659723529690838:2280], ActorState: ExecuteState, TraceId: 01jd709pr97syxk1tsrvys19x0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:21.367793Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:21.424099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.436699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:19:21.438756Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659720933769800:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:21.438827Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGRiYjBiNmEtMmQ5NTAwZmQtZTM0YTcyOWQtM2QyYjhhOWY=, ActorId: [1:7439659720933769411:2300], ActorState: ExecuteState, TraceId: 01jd709pr21p134zdfpr8erqx4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:21.439027Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:19:21.461643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd709pv767n8ntj4cbw3me9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFjMDE1ZmQtZTNhZDQwOTgtMzVlNDdjYTYtZWVlZjAxMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659720933769954:3044] 2024-11-21T09:19:26.130082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659720933768447:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.130100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:26.132645Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659723529690507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.132672Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:18733 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2024-11-21T09:19:26.566571Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2024-11-21T09:19:26.566584Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2024-11-21T09:19:26.566585Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2024-11-21T09:19:26.566587Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response ... 
ERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:06.862470Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:06.862488Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_566569203706207751_v1 2024-11-21T09:21:06.862496Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_566569203706207751_v1 2024-11-21T09:21:06.862498Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_566569203706207751_v1 2024-11-21T09:21:06.862501Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_566569203706207751_v1 2024-11-21T09:21:06.862503Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_566569203706207751_v1 2024-11-21T09:21:06.862514Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 33 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 35 count 6 size 272 endOffset 40 max time lag 0ms effective offset 35 2024-11-21T09:21:06.862516Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 33 added 0 blobs, size 0 count 0 last offset 35 2024-11-21T09:21:06.862522Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 33. All data is from uncompacted head. 2024-11-21T09:21:06.862530Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:06.862544Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 35 2024-11-21T09:21:06.862555Z node 25 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:06.862711Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 35 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 37 WriteTimestampMS: 1732180866820 CreateTimestampMS: 1732180866819 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 36 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 38 WriteTimestampMS: 1732180866829 CreateTimestampMS: 1732180866829 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 508 RealReadOffset: 36 WaitQuotaTimeMs: 0 } Cookie: 35 } 2024-11-21T09:21:06.862737Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 37 endOffset 40 2024-11-21T09:21:06.862750Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 37 ReadGuid dea59ef5-80964f82-ae63fbf3-56076948 has messages 1 2024-11-21T09:21:06.862772Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 37, endOffset# 40, WTime# 1732180866829, sizeLag# 508 2024-11-21T09:21:06.862782Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1TEvPartitionReady. 
Aval parts: 0 2024-11-21T09:21:06.862793Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 read done: guid# dea59ef5-80964f82-ae63fbf3-56076948, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 352 2024-11-21T09:21:06.862803Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 response to read: guid# dea59ef5-80964f82-ae63fbf3-56076948 2024-11-21T09:21:06.862884Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 Process answer. Aval parts: 1 Bytes readed: 352 Offset: 35 from session 1 Offset: 36 from session 1 2024-11-21T09:21:06.863178Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2024-11-21T09:21:06.863239Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 got read request: guid# 49fe5b6-916df728-f3f86957-921b5d67 2024-11-21T09:21:06.863267Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 performing read request: guid# 9420c05b-54e8dc2d-361a8dd5-128cab7e, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 320, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T09:21:06.863282Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 320 maxTimeLagMs 0 readTimestampMs 0 readOffset 37 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid 9420c05b-54e8dc2d-361a8dd5-128cab7e 2024-11-21T09:21:06.863442Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:06.863453Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:06.863461Z node 25 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:06.863476Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 37 count 3 size 320 endOffset 40 max time lag 0ms effective offset 37 2024-11-21T09:21:06.863479Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 37 2024-11-21T09:21:06.863494Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2024-11-21T09:21:06.863505Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:06.863531Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 37 2024-11-21T09:21:06.863670Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 37 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1732180866831 CreateTimestampMS: 1732180866831 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 38 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1732180866833 CreateTimestampMS: 1732180866833 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1732180866836 CreateTimestampMS: 1732180866835 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 60 RealReadOffset: 39 WaitQuotaTimeMs: 0 } Cookie: 37 } 2024-11-21T09:21:06.863701Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset40 2024-11-21T09:21:06.863706Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 40 ReadGuid 9420c05b-54e8dc2d-361a8dd5-128cab7e has messages 1 2024-11-21T09:21:06.863748Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 read done: guid# 9420c05b-54e8dc2d-361a8dd5-128cab7e, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 522 2024-11-21T09:21:06.863766Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 response to read: guid# 9420c05b-54e8dc2d-361a8dd5-128cab7e 2024-11-21T09:21:06.863840Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 Process answer. Aval parts: 0 Bytes readed: 522 Offset: 37 from session 1 Offset: 38 from session 1 Offset: 39 from session 1 2024-11-21T09:21:06.864072Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_9079203380924635637_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 39 } } } } 2024-11-21T09:21:06.864084Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_9079203380924635637_v1 closed with error: reason# can't commit when reading without a consumer 2024-11-21T09:21:06.864171Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_9079203380924635637_v1 is DEAD 2024-11-21T09:21:06.864517Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:06.864528Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864536Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660174074099380:2533] destroyed 2024-11-21T09:21:06.864539Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:06.864541Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864544Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660174074099381:2532] destroyed 2024-11-21T09:21:06.864546Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:06.864547Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864549Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660174074099378:2531] destroyed 2024-11-21T09:21:06.864551Z node 25 :PERSQUEUE DEBUG: 
[PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:06.864553Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864555Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660174074099385:2536] destroyed 2024-11-21T09:21:06.864556Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:06.864558Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864560Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660174074099383:2535] destroyed 2024-11-21T09:21:06.864569Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864571Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864573Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864575Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_9079203380924635637_v1 2024-11-21T09:21:06.864576Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_9079203380924635637_v1 >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPersQueueTest::DisableDeduplication [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.097257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.097288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2024-11-21T09:20:40.097297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.097308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.097312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.097320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.108465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.108485Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.110348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.110430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.110450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.112659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.112723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.112799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.113125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.114383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.115724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.115727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.115746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.116728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.129949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2024-11-21T09:20:40.130020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.130087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.130725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.130737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.130741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.131176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.131464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.131482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.131941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.132306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.132800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.132930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.132993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.132997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.133022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.133404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.133436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.133494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.133508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.133511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.133520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.133527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.133537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.133541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.133545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
PathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:21:06.934719Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:06.934730Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:21:06.934742Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:06.935041Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.935055Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.935062Z node 106 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:06.935066Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T09:21:06.935070Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:06.935083Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:06.935323Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:06.935331Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:06.935336Z node 106 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:06.935907Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T09:21:06.935938Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:06.935967Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T09:21:06.936010Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:21:06.936018Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:21:06.936052Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T09:21:06.936072Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 455266535530 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.936079Z node 106 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T09:21:06.936100Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.936108Z node 106 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T09:21:06.936112Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:06.936121Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:06.936129Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:06.936134Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T09:21:06.936141Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:06.936148Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T09:21:06.936152Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T09:21:06.936160Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:06.936165Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T09:21:06.936169Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:21:06.936173Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:21:06.936697Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:21:06.936728Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.937695Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.937710Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.937744Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:21:06.937769Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.937774Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T09:21:06.937780Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T09:21:06.937932Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.937946Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.937951Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:06.937956Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:21:06.937960Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:06.938085Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.938097Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.938104Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:06.938108Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:21:06.938112Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:06.938125Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T09:21:06.938129Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [106:120:2146] 2024-11-21T09:21:06.938186Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:06.938192Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:06.938201Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:06.938684Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T09:21:06.939095Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:06.939124Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T09:21:06.939135Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T09:21:06.939205Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:21:06.939626Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:06.939636Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:06.939698Z node 106 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:06.939716Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.939721Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [106:714:2673] TestWaitNotification: OK eventTxId 1003 >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:07.768880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:07.768921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:07.768927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:07.768936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:07.768942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:07.768945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:07.768956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:07.769043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:07.779410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:07.779431Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:07.782535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:07.783333Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:07.783386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:07.784918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:07.785159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:07.785265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.785345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:07.786633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.786899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.786913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.786948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:07.786955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:07.786962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:07.786975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.788412Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:07.805342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:07.805430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.805492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:07.805533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:07.805542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.806377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.806409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:07.806456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.806466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:07.806470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:07.806475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:07.807029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.807046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:07.807051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:07.807580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.807595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.807601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.807608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.808256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:07.808709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:07.808763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:07.808968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.808996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:07.809006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.809062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:07.809069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.809099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:07.809112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:07.809622Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.809633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:07.809673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.809678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:07.809747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.809755Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:07.809765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:07.809769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.809774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:07.809779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.809783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:07.809787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:07.809799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:07.809805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:07.809809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:07.810097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:07.810110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:07.810115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:07.810120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:07.810124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:07.810137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:07.811009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:07.811101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:07.811377Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:07.812608Z node 1 :TX_PROXY 
DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:07.813201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:07.813261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.813273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2024-11-21T09:21:07.813356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2024-11-21T09:21:07.813510Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:07.814354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:07.814385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T09:21:07.814530Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.1%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::CheckCounters >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:45.369575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:45.369591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:45.369594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:45.369596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:45.369603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:45.369606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:45.369611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:45.369654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:45.376543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:45.376557Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:45.378249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:45.378309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:45.378323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:45.380117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:45.380163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:45.380267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.380405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:45.381025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.381232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:45.381238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.381247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:45.381251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:45.381255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:45.381278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:45.382259Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:45.394473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:45.394523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.394560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:45.394595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:45.394599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:45.395146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:45.395154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:45.395157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2024-11-21T09:20:45.395469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:45.395787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.395804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.395808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.396245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:45.396592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:45.396626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:45.396742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.396762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:45.396766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.396807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:45.396811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.396828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:45.396837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:45.397168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:45.397176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:20:45.397197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.397200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:45.397250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.397257Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:45.397265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:45.397269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.397273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:45.397278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.397281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:45.397284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:45.397294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:45.397299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:45.397302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.638943Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.638949Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T09:21:07.638956Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:21:07.639020Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639051Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639055Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.639058Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T09:21:07.639062Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T09:21:07.639143Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639147Z node 81 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.639150Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:21:07.639154Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:07.639199Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639284Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639287Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.639291Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:21:07.639294Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:21:07.639330Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639334Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.639337Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:21:07.639340Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:07.639347Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:21:07.639369Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.639372Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:21:07.639375Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T09:21:07.640298Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.640334Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:21:07.640354Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:21:07.640392Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.640573Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.640623Z node 81 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.640630Z node 81 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:21:07.640645Z node 81 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T09:21:07.640649Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:21:07.640655Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T09:21:07.640660Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T09:21:07.640667Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:21:07.640672Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:21:07.640707Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:07.640713Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T09:21:07.640717Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T09:21:07.640722Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:07.640725Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T09:21:07.640728Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T09:21:07.640733Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T09:21:07.640737Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T09:21:07.640740Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T09:21:07.640749Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T09:21:07.640832Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.640933Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:07.640955Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T09:21:07.640971Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T09:21:07.640976Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T09:21:07.640981Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:21:07.641402Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.641427Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.641438Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.641444Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:21:07.642216Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:21:07.642288Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:21:07.642295Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:21:07.642361Z node 81 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:21:07.642396Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:21:07.642400Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [81:793:2699] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:21:07.642469Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:07.642512Z node 81 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 53us result status StatusPathDoesNotExist 2024-11-21T09:21:07.642548Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:08.076580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:08.076603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.076608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:08.076617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:08.076623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:08.076627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:08.076636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.076717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:08.086412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:08.086433Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:08.089513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:08.090316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:08.090366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:08.092095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:08.092321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:08.092407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.092489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:08.093318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.093557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.093566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.093596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:08.093601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.093606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:08.093617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.094963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:08.107156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:08.107227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.107285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:08.107316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:08.107321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.107929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.107951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:08.107985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.107993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:08.107998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:08.108002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:08.108363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.108374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:08.108379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:08.108653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.108660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.108666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.108672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.109122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:08.109405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:08.109447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:08.109582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.109600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:08.109636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.109678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:08.109682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.109704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.109712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:08.110030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.110035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.110063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.110066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:08.110115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.110120Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:08.110129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:08.110133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.110139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:08.110143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.110147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:08.110151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:08.110161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:08.110166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:08.110170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:08.110353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.110361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.110365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:08.110368Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:08.110370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.110380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:08.110898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:08.110964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:08.111212Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:08.112479Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:08.112995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:08.113058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.113072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } }, at schemeshard: 72057594046678944 2024-11-21T09:21:08.113169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1732180868 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2024-11-21T09:21:08.113326Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:08.113787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1732180868 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:08.113811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1732180868 seconds (20048 days, 54 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T09:21:08.113898Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:00.458881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:00.458906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.458912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:00.458917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:00.458922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:00.458926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:00.458934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:00.459023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:00.471165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:00.471183Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:00.473890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:00.474522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:00.474560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:00.476806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:00.477008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:00.481428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.482226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:00.484253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.486737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T09:21:00.486772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:00.486780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.486786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:00.486800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.488290Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:00.505596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.505698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:00.505730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:00.505736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:00.506382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.506390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:00.506394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:00.506398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:00.507077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:00.507598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.507614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.507619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.508177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:00.508645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:00.509121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:00.509308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:00.509411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:00.509439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.509451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:00.509932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:00.509976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:00.509981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:00.510044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:00.510051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:00.510061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:00.510065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510071Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:00.510076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:00.510080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:00.510084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:00.510094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:00.510100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:00.510104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:00.510372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:00.510389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:00.510394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:00.510397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:00.510409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
onId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.048415Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.048653Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.048863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049147Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049202Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049624Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.049745Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050050Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050147Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050165Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050398Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050422Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050490Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050505Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050526Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.050541Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.051430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.051476Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.051488Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:08.051510Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:21:08.051515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:08.051524Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:21:08.051567Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2579:3845] message: TxId: 101 2024-11-21T09:21:08.051577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:08.051597Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:08.051602Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:08.051856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T09:21:08.052105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.053261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:08.053276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2580:3846] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:08.053443Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:08.053532Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 106us result status StatusSuccess 2024-11-21T09:21:08.053659Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTestsWithReboots::AlterTable >> ColumnShardTiers::DSConfigsStub [GOOD] >> 
TSchemeShardColumnTableTTL::AlterColumnTable_Negative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2024-11-21T09:19:02.866365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:19:02.866681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:02.866695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004745/r3tmp/tmp5WYmIr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21360, node 1 TClient is connected to server localhost:25243 2024-11-21T09:19:02.969029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:19:02.983881Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:02.984360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:02.984369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:02.984372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:02.984414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:03.025473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:03.025495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:03.035817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:03.139012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T09:19:03.159064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:03.159104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:03.159136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:03.159148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:03.159159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:03.159173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:03.159185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:03.159198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:19:03.159210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:19:03.159221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:19:03.159232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:19:03.159243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:19:03.162084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:19:03.162105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:19:03.162119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:19:03.162126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:19:03.162145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:19:03.162150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:19:03.162160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:19:03.162167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:19:03.162176Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:19:03.162182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:19:03.162189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:19:03.162196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:19:03.162247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:19:03.162252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:19:03.162263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:19:03.162267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:19:03.162275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:19:03.162278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:19:03.162290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:19:03.162294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:19:03.162301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:19:03.162304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:19:03.164420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:19:03.164439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:19:03.164471Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:19:03.164483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:19:03.164495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:19:03.164505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:19:03.164518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:712:2593];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:19:03.164533Z node 1 :TX_COLUMNSHA ... ecretKey"}},"Endpoint":"fake"},"Name":"abc"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} TieringsCount incorrect: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}};expectation=1 2024-11-21T09:19:40.100055Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:19:40.100690Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:19:40.101529Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=CREATE OBJECT tier2 (TYPE TIER) WITH tierConfig = `Name : "abc" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 SNAPSHOT: 
{"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT tier1 (TYPE TIER) SET tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT tier1 (TYPE TIER) SET tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{"tiering1":{"tieringRuleId":"tiering1","description":{"rules":[{"durationForEvict":"864000.000000s","tierName":"tier1"},{"durationForEvict":"1728000.000000s","tierName":"tier2"}]},"defaultColumn":"timestamp"}},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} FINISHED_REQUEST=ALTER OBJECT tier1 (TYPE TIER) SET tierConfig = `Name : "abc1" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { Value: { Data: "secretAccessKey" } } SecretableSecretKey: { Value: { Data: "secretSecretKey" } } } `;EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);RESULT=
: Error: Execution, code: 1060
:1:24: Error: Executing DROP OBJECT TIER
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);RESULT=
: Error: Execution, code: 1060
:1:27: Error: Executing DROP OBJECT TIERING_RULE
: Error: preparation problem: tiering in using by table ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2024-11-21T09:20:34.482438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715796:0, at schemeshard: 72057594046644480 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 SNAPSHOT: REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);RESULT=;EXPECTATION=1 {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} TiersCount incorrect: {"rules":{},"tiers":{"tier1":{"tierName":"tier1","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc1"}},"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}};expectation=0 2024-11-21T09:20:45.526418Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:45.526460Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:45.526485Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=DROP OBJECT tiering1(TYPE TIERING_RULE);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier1(TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{},"tiers":{"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}} TiersCount incorrect: {"rules":{},"tiers":{"tier2":{"tierName":"tier2","tierConfig":{"ObjectStorage":{"SecretableAccessKey":{"Value":{"Data":"secretAccessKey"}},"Bucket":"fake","SecretableSecretKey":{"Value":{"Data":"secretSecretKey"}},"Endpoint":"fake"},"Name":"abc"}}}};expectation=0 2024-11-21T09:20:56.594004Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:56.594038Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:20:56.594057Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=DROP OBJECT tier1(TYPE TIER);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT tier2(TYPE TIER);EXPECTATION=1;WAITING=1 
REQUEST=DROP OBJECT tier2(TYPE TIER);RESULT=;EXPECTATION=1 SNAPSHOT: {"rules":{},"tiers":{}} 2024-11-21T09:21:07.736959Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:21:07.736989Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037889;self_id=[1:712:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; 2024-11-21T09:21:07.737001Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037890;self_id=[1:719:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=manager.cpp:215;path_id=3;tiering_name=tiering1;event=not_found; FINISHED_REQUEST=DROP OBJECT tier2(TYPE TIER);EXPECTATION=1;WAITING=1 |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> BasicStatistics::TwoTables [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] [GOOD] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2024-11-21T09:18:39.813085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:39.813120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:39.813131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037fa/r3tmp/tmp3lFpih/pdisk_1.dat 2024-11-21T09:18:39.895540Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2240, node 1 2024-11-21T09:18:40.003957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:40.003975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:40.003978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:40.004038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:40.008891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:40.085048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:40.085077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:40.096103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29931 2024-11-21T09:18:40.495248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:41.219117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:41.219137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:41.251345Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:41.251951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:41.294193Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:41.299940Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:41.299954Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:41.304559Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:41.304657Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:41.304675Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:41.304680Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:41.304686Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:41.304691Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:41.304696Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:41.304701Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:41.304761Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:41.475331Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:41.475350Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:41.476061Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:18:41.477385Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:18:41.477447Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:18:41.477959Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T09:18:41.480921Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:41.480931Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:41.480937Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T09:18:41.482128Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:41.482145Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:41.483069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:41.484170Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:41.484190Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:41.486137Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:41.497704Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:41.518785Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:41.622683Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:41.776252Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:18:42.550447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.550488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.560156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T09:18:42.625271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2273:3055], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.625303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.625645Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2278:3059]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:42.625673Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:18:42.625681Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2280:3061] 2024-11-21T09:18:42.625688Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2280:3061] 2024-11-21T09:18:42.625790Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2281:2831] 2024-11-21T09:18:42.625833Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2280:3061], server id = [2:2281:2831], tablet id = 72075186224037897, status = OK 2024-11-21T09:18:42.625868Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2281:2831], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:18:42.626483Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:18:42.626545Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:18:42.626553Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2278:3059], StatRequests.size() = 1 2024-11-21T09:18:42.630367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2285:3065], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.630384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.630446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2290:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:42.631930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:18:42.778395Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:18:42.778417Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:18:42.890823Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2280:3061], schemeshard count = 1 2024-11-21T09:18:43.098296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2292:3072], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:18:43.194817Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2416:3155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:43.194864Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:18:43.194869Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2416:3155], StatRequests.size() = 1 2024-11-21T09:18:43.220425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd708gxw9qt8nb04rgajte0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJiODFiODMtOTVjNjZlZDMtYzdhMzA2YjktOWQxMjAyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:18:43.238810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... endBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T09:21:00.704845Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6574:4681]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:00.704959Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T09:21:00.704970Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6574:4681], StatRequests.size() = 1 2024-11-21T09:21:01.304962Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:21:01.305175Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:21:01.305278Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:01.358664Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037889 2024-11-21T09:21:01.358693Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 190.000000s, at schemeshard: 72075186224037889 2024-11-21T09:21:01.358804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 73 2024-11-21T09:21:01.380619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:21:02.154748Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6609:4699]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:02.154869Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T09:21:02.154881Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6609:4699], StatRequests.size() = 1 2024-11-21T09:21:02.688303Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:02.688337Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:02.688349Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T09:21:02.688354Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:21:02.688518Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:21:02.691667Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:02.692721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6630:4716], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:02.692750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6639:4721], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:02.692858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:02.695543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T09:21:02.710869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6644:4724], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T09:21:02.972992Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6761:4786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:02.973072Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T09:21:02.973080Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6761:4786], StatRequests.size() = 1 2024-11-21T09:21:02.988925Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTI0NGI1OTAtYjFhYjBiZjQtODZkZDE2YmItYTRkNTY3NGY=, TxId: 2024-11-21T09:21:02.988950Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTI0NGI1OTAtYjFhYjBiZjQtODZkZDE2YmItYTRkNTY3NGY=, TxId: 2024-11-21T09:21:02.989074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:03.000564Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T09:21:03.000589Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:03.631255Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6806:4806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:03.631340Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T09:21:03.631348Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6806:4806], StatRequests.size() = 1 2024-11-21T09:21:05.031959Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6847:4828]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:05.032056Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T09:21:05.032065Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6847:4828], StatRequests.size() = 1 2024-11-21T09:21:05.576352Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:21:05.576404Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:05.576410Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:05.576420Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T09:21:05.576425Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T09:21:05.576509Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:21:05.577150Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:05.581349Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTgxYzEwMmEtMTZkYTg3ZjUtNjYzMzc5NTAtNDI5NzU4YzM=, TxId: 2024-11-21T09:21:05.581370Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTgxYzEwMmEtMTZkYTg3ZjUtNjYzMzc5NTAtNDI5NzU4YzM=, TxId: 2024-11-21T09:21:05.581506Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:05.592984Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T09:21:05.593008Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:06.459026Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6919:4872]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:06.459130Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T09:21:06.459141Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6919:4872], StatRequests.size() = 1 2024-11-21T09:21:07.866689Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6958:4892]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:07.866795Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T09:21:07.866807Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6958:4892], StatRequests.size() = 1 2024-11-21T09:21:08.389108Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T09:21:08.389191Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:08.389246Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:08.389251Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:08.389260Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T09:21:08.389264Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:08.389345Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T09:21:08.390060Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:08.390147Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:21:08.393558Z node 2 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 2 2024-11-21T09:21:08.393678Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmU1NjBjOGQtZGRiNDI2NWYtMWQ0YzIyNjgtM2M4N2IxNDk=, TxId: 2024-11-21T09:21:08.393686Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmU1NjBjOGQtZGRiNDI2NWYtMWQ0YzIyNjgtM2M4N2IxNDk=, TxId: 2024-11-21T09:21:08.393813Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:08.408984Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:08.409012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:09.235388Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7021:4929]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:09.235495Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2024-11-21T09:21:09.235506Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7021:4929], StatRequests.size() = 1 2024-11-21T09:21:09.235646Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7023:4931]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:09.236432Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2024-11-21T09:21:09.236445Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7023:4931], StatRequests.size() = 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:09.711557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:09.711580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.711585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:09.711590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:09.711595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:09.711599Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:09.711607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.711677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:09.720775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:09.720789Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:09.723241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:09.723893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:09.723935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:09.725360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:09.725631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:09.725730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.725813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:09.726995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.727219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.727230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.727267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:09.727274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.727281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:09.727293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.728512Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:09.743615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.743674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.743711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:09.743738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:09.743742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.744296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.744320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:09.744351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.744360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:09.744364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:09.744368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:09.744744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.744755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:09.744759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:09.745129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.745140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.745146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.745152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.745687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:09.746060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:09.746101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:09.746241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.746261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.746285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.746328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 128 -> 240 2024-11-21T09:21:09.746334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.746356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.746366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:09.746749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.746756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.746780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.746784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:09.746830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.746836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:09.746847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:09.746851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.746856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:09.746861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.746867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:09.746870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:09.746881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:09.746886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:09.746890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:09.747142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.747156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.747160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:09.747164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:09.747168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.747182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:09.747906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:09.747995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:09.748315Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T09:21:09.749566Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T09:21:09.750076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.750146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.750226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2024-11-21T09:21:09.750389Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:09.750948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.750975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T09:21:09.751086Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T09:21:09.751591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.751629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.751660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2024-11-21T09:21:09.752070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: 
StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.752090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateDropRecreate[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.689835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.689851Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.691453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.691541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.691563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T09:20:39.694260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700594Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.715989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T09:20:39.716777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, 
pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ration: 2 2024-11-21T09:21:09.705102Z node 70 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:09.705109Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1005:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 300647713037 } Origin: 72075186233409546 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T09:21:09.705119Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1005:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.705122Z node 70 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.705128Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:09.705133Z node 70 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:1 129 -> 240 2024-11-21T09:21:09.705150Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:09.705231Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [70:201:2204], Recipient [70:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 2 } 2024-11-21T09:21:09.705235Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:21:09.705242Z node 70 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:21:09.705250Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:21:09.705253Z node 70 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:21:09.705257Z node 70 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T09:21:09.705260Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T09:21:09.705268Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 2/3, is published: true 2024-11-21T09:21:09.705272Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:09.705801Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.705848Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.705863Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.706518Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 
2024-11-21T09:21:09.706528Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.706551Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.706555Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.706569Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:21:09.706572Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.706584Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.706587Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.706591Z node 70 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1005:1 2024-11-21T09:21:09.706620Z node 70 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [70:329:2317] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1005 at schemeshard: 72057594046678944 2024-11-21T09:21:09.706685Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [70:121:2147], Recipient [70:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:21:09.706692Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:21:09.706697Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.706701Z node 70 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:1 ProgressState 2024-11-21T09:21:09.706711Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:09.706715Z node 70 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:1 progress is 3/3 2024-11-21T09:21:09.706718Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:21:09.706723Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2024-11-21T09:21:09.706732Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [70:401:2376] message: TxId: 1005 2024-11-21T09:21:09.706737Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:21:09.706743Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:21:09.706747Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:21:09.706756Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:21:09.706760Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:1 2024-11-21T09:21:09.706763Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:1 2024-11-21T09:21:09.706777Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:09.706781Z node 70 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:2 2024-11-21T09:21:09.706783Z node 70 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:2 2024-11-21T09:21:09.706792Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T09:21:09.706845Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:21:09.706849Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.707235Z node 70 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:09.707251Z node 70 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [70:401:2376] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1005 at schemeshard: 72057594046678944 2024-11-21T09:21:09.707292Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:21:09.707297Z node 70 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [70:959:2808] 2024-11-21T09:21:09.707324Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [70:989:2836], Recipient [70:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:09.707329Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:09.707332Z node 70 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:21:09.707346Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [70:961:2810], Recipient [70:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:09.707350Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:09.707355Z node 70 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T09:21:09.707432Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [70:1001:2848], Recipient [70:121:2147]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2024-11-21T09:21:09.707437Z node 70 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:21:09.707448Z node 70 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:09.707486Z node 70 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 32us result status StatusSuccess 2024-11-21T09:21:09.707603Z node 70 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 
2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409550 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:09.023405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:09.023434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.023440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:09.023445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:09.023451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:09.023455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:09.023463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.023544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:09.033669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:09.033694Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:09.036428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:09.037162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:09.037218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:09.038690Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:09.038898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:09.038992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.039084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:09.040083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.040433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.040446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.040488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:09.040496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.040502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:09.040517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.041871Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:09.056997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.057083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.057149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:09.057189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:09.057196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.057994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:09.058074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:09.058090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T09:21:09.058096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:09.058562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:09.058949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.058964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.058971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.059490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:09.059885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:09.059936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:09.060118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.060141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.060151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.060223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:09.060230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.060260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.060271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:09.060711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.060718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.060762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.060767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:09.060842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.060848Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:09.060860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:09.060864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.060870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:09.060876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.060879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:09.060883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:09.060909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:09.060915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:09.060919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:09.061180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.061196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.061201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:09.061206Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:09.061210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.061225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
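The schemeshard records just above trace a publish/ack cycle: TTxPublishToSchemeBoard sends path descriptions to the populator, each incoming TEvUpdateAck is matched against the in-flight publications for that transaction, and once the count drops to zero the publication is complete and waiting subscribers are notified. The following is a minimal Python sketch of that counting pattern; it is an illustrative model only, not the actual YDB implementation, and every name in it is invented for the example.

from collections import defaultdict

class PublicationTracker:
    # Toy model: track outstanding path publications per transaction and
    # notify waiters once every publication has been acknowledged.
    def __init__(self):
        self.in_flight = defaultdict(set)     # tx_id -> {(path_id, version), ...}
        self.subscribers = defaultdict(list)  # tx_id -> [callback, ...]

    def publish(self, tx_id, path_id, version):
        # Mirrors the "TTxPublishToSchemeBoard Send, to populator" records.
        self.in_flight[tx_id].add((path_id, version))

    def ack(self, tx_id, path_id, version):
        # Mirrors the "Handle TEvUpdateAck" / "AckPublish" records.
        self.in_flight[tx_id].discard((path_id, version))
        if not self.in_flight[tx_id]:
            # Mirrors "Publication complete, notify & remove".
            for notify in self.subscribers.pop(tx_id, []):
                notify(tx_id)
            del self.in_flight[tx_id]

    def subscribe(self, tx_id, callback):
        if self.in_flight.get(tx_id):
            self.subscribers[tx_id].append(callback)
        else:
            callback(tx_id)  # nothing pending, already published

tracker = PublicationTracker()
tracker.publish(tx_id=1, path_id=1, version=3)
tracker.subscribe(1, lambda tx: print(f"tx {tx} fully published"))
tracker.ack(tx_id=1, path_id=1, version=3)  # prints: tx 1 fully published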
2024-11-21T09:21:10.408231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.408676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.409976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.410118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:10.410136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:21:10.410144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:10.410151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:21:10.410170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2599:3865] message: TxId: 101 
2024-11-21T09:21:10.410176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:10.410186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:10.410189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:10.410445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T09:21:10.410745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.411802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:10.411816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2600:3866] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:10.411974Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:10.412037Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 75us result status StatusSuccess 2024-11-21T09:21:10.412232Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "key" NextColumnId: 4 
Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T09:21:10.413046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:10.413089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:10.413153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2024-11-21T09:21:10.413777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:10.413806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQCachingProxyTest::TestDeregister >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> TPQCachingProxyTest::OutdatedSession >> TPQCachingProxyTest::MultipleSessions |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> IncrementalBackup::BackupRestore >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] >> TPQCachingProxyTest::MultipleSessions [GOOD] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2024-11-21T09:21:11.893772Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:21:11.893798Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:21:11.897716Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:21:11.897740Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T09:21:11.897748Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2024-11-21T09:21:11.897764Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2024-11-21T09:21:11.893133Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:21:11.893171Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:21:11.896950Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:21:11.896973Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T09:21:11.896985Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T09:21:11.896991Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T09:21:11.897002Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 
2024-11-21T09:21:11.893134Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:21:11.893175Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T09:21:11.897068Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:21:11.897107Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T09:21:11.897121Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T09:21:11.897127Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2024-11-21T09:21:11.897132Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T09:21:11.897139Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1 2024-11-21T09:21:11.897144Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2024-11-21T09:21:11.897149Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2024-11-21T09:21:11.897152Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2 >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TReplicationTests::CreateSequential ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2024-11-21T09:20:50.162540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660101833208189:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.162775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00379e/r3tmp/tmpx9lVJX/pdisk_1.dat 2024-11-21T09:20:50.227637Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22111, node 1 2024-11-21T09:20:50.262605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.262626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.264015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.275929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.275945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.275947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.275979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10467 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:50.339421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.340272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.340852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.340860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
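The records around this point show each operation part stepping through numbered schemeshard states as progress events and the coordinator plan arrive: TCreateParts at 2, TConfigureParts at 3, TPropose at 128, and the done state at 240 (e.g. "Change state for txid ...:0 2 -> 3"). Below is a simplified, hypothetical model of that progression built only from the transitions visible in this log; it is not YDB's real state machine, and the event names are invented for illustration.

# Transitions as they appear in the trace: 2 -> 3 -> 128 -> 240.
TRANSITIONS = {
    ("CreateParts", "progress"): "ConfigureParts",  # 2 -> 3
    ("ConfigureParts", "progress"): "Propose",      # 3 -> 128
    ("Propose", "plan_step"): "Done",               # 128 -> 240
}

class OperationPart:
    def __init__(self, op_id):
        self.op_id = op_id
        self.state = "CreateParts"

    def handle(self, event):
        nxt = TRANSITIONS.get((self.state, event))
        if nxt is None:
            return False  # event not expected in the current state; ignored
        print(f"Change state for {self.op_id}: {self.state} -> {nxt}")
        self.state = nxt
        return True

part = OperationPart("281474976710657:0")
for event in ("progress", "progress", "plan_step"):
    part.handle(event)
# Once every part of an operation reaches Done, the schemeshard reports
# "ready parts: N/N" and sends TEvNotifyTxCompletionResult to the waiter,
# as the surrounding records show.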
2024-11-21T09:20:50.341189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.341196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:20:50.341285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.341608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.342377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850386, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.342399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:20:50.342444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:20:50.342826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.342853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.342864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:20:50.342871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:20:50.342880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:20:50.342886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:20:50.343190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:20:50.343207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:20:50.343210Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.343218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:10467 2024-11-21T09:20:50.429810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660101833209107:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.429839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.529551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.529656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:20:50.529831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.529842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.530429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:50.530546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:50.530659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:50.530669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:50.530729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:50.530740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:50.530741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:50.537292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:50.537316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:20:50.537711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:50.591402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:50.591415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:50.591440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T09:20:50.591943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.592856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180850638 2024-11-21T09:20:50.592916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:20:50.593350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.593415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.593430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.593692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:20:50.593703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:20:50.593707Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057 ... : { TraceId: 01jd70d1ega3y8tdgdxc4d7v4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVmNmQ4NjItYjgwMWVkZDgtYWYwMzU3YjEtYTIyZWMyY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.609564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771548. Ctx: { TraceId: 01jd70d1eg81kxjp7kw5ynqhvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkxYTJlMzAtNWYxZjBkZTEtNGNkYThmZDYtNWQ2NTQ1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.609979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771550. Ctx: { TraceId: 01jd70d1ehbjqe8126752152rr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NjOWM0NzUtZjE5MGY5YTYtMzY5MTVmMDctMjAzZDM0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.610017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771551. Ctx: { TraceId: 01jd70d1eh8p9s3212sy11671j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNDY1ZjItZGMzMDYyOWItNzQ5YmY0MGMtNTg4NDZkNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.610028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771552. Ctx: { TraceId: 01jd70d1eh8f7sncq3gkmn1qyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ3MmIxNTktZjM0MDM2ZTgtYWEwOTJkMmEtNGQ5NGQyZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.610371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771553. Ctx: { TraceId: 01jd70d1eh3d9hmewm8xqjkntw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJkOGRiOWYtNTc4ZjI5NzEtZWZjN2FlMTYtZWYyN2Y1ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:10.610446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771554. Ctx: { TraceId: 01jd70d1eh7hbsd14wv6wjf85x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U0MTY0YTYtM2YzYjY4NjUtMzYyNTY2NGItMzViZDVjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771556. Ctx: { TraceId: 01jd70d1ek30veywt6wg4cr33y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVmNmQ4NjItYjgwMWVkZDgtYWYwMzU3YjEtYTIyZWMyY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771557. Ctx: { TraceId: 01jd70d1ekbaper10trezaxr8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NjOWM0NzUtZjE5MGY5YTYtMzY5MTVmMDctMjAzZDM0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771558. Ctx: { TraceId: 01jd70d1ekca3vpxstgx9xk0fn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNDY1ZjItZGMzMDYyOWItNzQ5YmY0MGMtNTg4NDZkNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771555. Ctx: { TraceId: 01jd70d1ekacrq8jmqydb1q2gd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ1Yjg1NjgtODhjOTM5M2YtZmJhODZlMDItNjUxMTZmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771560. Ctx: { TraceId: 01jd70d1em78zwtvyzcxrymxv8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDI3MzczMjQtYWQyNjQ1NWUtMjNmYjZkY2MtZjZkMjEyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771559. Ctx: { TraceId: 01jd70d1ek1wng6r9a5bxyfqac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkxYTJlMzAtNWYxZjBkZTEtNGNkYThmZDYtNWQ2NTQ1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.612968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771561. Ctx: { TraceId: 01jd70d1ek0ak4ze67gz5qa94z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmZmNTFiOGQtMjQzYmZmYmUtZDlmMzE5YWQtN2FhNDY3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.613316Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771562. Ctx: { TraceId: 01jd70d1emfej7j14ys1qyv4f3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ3MmIxNTktZjM0MDM2ZTgtYWEwOTJkMmEtNGQ5NGQyZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.613361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771563. Ctx: { TraceId: 01jd70d1em5edsbegg1n76sjkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJkOGRiOWYtNTc4ZjI5NzEtZWZjN2FlMTYtZWYyN2Y1ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.613395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771564. 
Ctx: { TraceId: 01jd70d1em9tda3wadsp583vdx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U0MTY0YTYtM2YzYjY4NjUtMzYyNTY2NGItMzViZDVjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T09:21:10.617237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771566. Ctx: { TraceId: 01jd70d1er63x9xqgvq1hv96yr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVmNmQ4NjItYjgwMWVkZDgtYWYwMzU3YjEtYTIyZWMyY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.617238Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771565. Ctx: { TraceId: 01jd70d1er2hf4tzgnx104m5g9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NjOWM0NzUtZjE5MGY5YTYtMzY5MTVmMDctMjAzZDM0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.620229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771568. Ctx: { TraceId: 01jd70d1evcsm09scayxtbj2v1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDI3MzczMjQtYWQyNjQ1NWUtMjNmYjZkY2MtZjZkMjEyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.620242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771567. Ctx: { TraceId: 01jd70d1ev7t5se1y8s8vddq73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjNDY1ZjItZGMzMDYyOWItNzQ5YmY0MGMtNTg4NDZkNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.620687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771569. Ctx: { TraceId: 01jd70d1ev59f6w71ncnjyqjmd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ3MmIxNTktZjM0MDM2ZTgtYWEwOTJkMmEtNGQ5NGQyZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.620724Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771570. Ctx: { TraceId: 01jd70d1evasvr4t2edzb7w584, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmZmNTFiOGQtMjQzYmZmYmUtZDlmMzE5YWQtN2FhNDY3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.621114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771571. Ctx: { TraceId: 01jd70d1ev68wnpqp4154zeb06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkxYTJlMzAtNWYxZjBkZTEtNGNkYThmZDYtNWQ2NTQ1NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:10.621123Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976771572. 
Ctx: { TraceId: 01jd70d1ev5b9w51tx49qsfgtt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ1Yjg1NjgtODhjOTM5M2YtZmJhODZlMDItNjUxMTZmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T09:21:10.661047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T09:21:10.661121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:10.661588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T09:21:10.830140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 8328 rowCount 103 cpuUsage 0 2024-11-21T09:21:10.830168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 28584 rowCount 355 cpuUsage 0 2024-11-21T09:21:10.930388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T09:21:10.930455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 103, DataSize 8328 2024-11-21T09:21:10.930514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 355, DataSize 28584 2024-11-21T09:21:10.930866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2024-11-21T09:20:50.163047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660102753891729:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.163208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00374d/r3tmp/tmpTTvrj6/pdisk_1.dat 
2024-11-21T09:20:50.228794Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24952, node 1 2024-11-21T09:20:50.263513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.263537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.265031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.275903Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.275911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.275913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.275944Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:50.335919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.337064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.337863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.337873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:20:50.338312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:20:50.338378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850386, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:20:50.340433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:20:50.340797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340847Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:20:50.340866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:20:50.340882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:20:50.340900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:20:50.341252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:20:50.341270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:20:50.341274Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.341293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:26109 2024-11-21T09:20:50.440141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660102753892650:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.440167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.529560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.529676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:20:50.529829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.529842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.530437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:50.530546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:50.530695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530704Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:50.537422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:50.537457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:20:50.537927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:50.591172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:50.591188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:50.591212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:20:50.591677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.592482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180850638 2024-11-21T09:20:50.592519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:20:50.592857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.593087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.593098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.593100Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057 ... oot, SessionId: ydb://session/3?node_id=1&id=NTc4MGIxNGEtNmIzZTRlM2MtZWI1NDE4NzAtMjQwODk0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.146071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763646. Ctx: { TraceId: 01jd70d1z8cc8nmqr39qsq5c43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3NDk3Ni1hYThmMjk3LWI0NzI3MDMwLTlmNzA1MGM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.146211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763645. Ctx: { TraceId: 01jd70d1z8fv97phs8xdgdwrzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAzMmQ4MTItN2U0M2FjOWEtNTQ1MjYwODUtNWIyZjBlMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.147292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763649. Ctx: { TraceId: 01jd70d1za62vadp2sh9rmfd2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0ZDJlYzUtZjhiZjQwMmItZmViMThmY2QtMTJlOTg0MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.147343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763650. Ctx: { TraceId: 01jd70d1za51er37xsww4ybqkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4MmIwNTgtZTgyNmY1ZWItYjdiNDI0ZDAtZjNjOTI3OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.147406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763648. Ctx: { TraceId: 01jd70d1zabgwwygrd6m7qq5kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZmZmI1NzMtMTlmOTJkMGYtZjBjNjQ0MDAtZGU3MDQ2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.147441Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763647. 
Ctx: { TraceId: 01jd70d1z9508fkyxt21x5s8c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMzZGY1ZjAtYWJkNGM0ZmItYTkwZmFjYjAtZjk2NGM0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.147534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763651. Ctx: { TraceId: 01jd70d1za2ebdaqb64wb2xatg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc4MGJmNzAtNWU5YTY4MDEtZWRiMWY4LTI0ZTg2ZTQy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.148748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763652. Ctx: { TraceId: 01jd70d1zb8db7v7skdaghae7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcyZDI4MGItMzNmMTM5NTUtODZjNGZkNmMtMTdjODU4YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.148908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763653. Ctx: { TraceId: 01jd70d1zbdy8kaf13ybkqb643, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3NDk3Ni1hYThmMjk3LWI0NzI3MDMwLTlmNzA1MGM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.148948Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763655. Ctx: { TraceId: 01jd70d1zb172mj818ka4jk3q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhYjM2OGEtM2Y4N2U5MDMtYzE0ZjViNzctNjk1ZTc3NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.148983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763656. Ctx: { TraceId: 01jd70d1zb8b408m0w7j3ktbyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAzMmQ4MTItN2U0M2FjOWEtNTQ1MjYwODUtNWIyZjBlMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.149365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763654. Ctx: { TraceId: 01jd70d1zb62xmvz0c5247jdx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc4MGIxNGEtNmIzZTRlM2MtZWI1NDE4NzAtMjQwODk0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.149611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763657. Ctx: { TraceId: 01jd70d1zccq21dp98nt47tqe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMzZGY1ZjAtYWJkNGM0ZmItYTkwZmFjYjAtZjk2NGM0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.150380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763659. Ctx: { TraceId: 01jd70d1zd1vgh8q3re9316zf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZmZmI1NzMtMTlmOTJkMGYtZjBjNjQ0MDAtZGU3MDQ2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.150807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763658. Ctx: { TraceId: 01jd70d1zd2ts9w9tsg8247dbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0ZDJlYzUtZjhiZjQwMmItZmViMThmY2QtMTJlOTg0MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.151652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763660. Ctx: { TraceId: 01jd70d1zddd9nq476m2fng3h3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc4MGJmNzAtNWU5YTY4MDEtZWRiMWY4LTI0ZTg2ZTQy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:11.152255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763661. Ctx: { TraceId: 01jd70d1zd6djptxqqw3rcs5wz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4MmIwNTgtZTgyNmY1ZWItYjdiNDI0ZDAtZjNjOTI3OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.152981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763662. Ctx: { TraceId: 01jd70d1zebwcw6ycwc0mnj272, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcyZDI4MGItMzNmMTM5NTUtODZjNGZkNmMtMTdjODU4YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.153209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763663. Ctx: { TraceId: 01jd70d1zf8rykm58cdb3bxsh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3NDk3Ni1hYThmMjk3LWI0NzI3MDMwLTlmNzA1MGM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T09:21:11.153934Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763668. Ctx: { TraceId: 01jd70d1zh4cpjz43se9p0ckb9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0ZDJlYzUtZjhiZjQwMmItZmViMThmY2QtMTJlOTg0MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.153945Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763665. Ctx: { TraceId: 01jd70d1zfa1kc02zpksgcdrs5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAzMmQ4MTItN2U0M2FjOWEtNTQ1MjYwODUtNWIyZjBlMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.153995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763664. Ctx: { TraceId: 01jd70d1zf59tj37qbs9kd486m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhYjM2OGEtM2Y4N2U5MDMtYzE0ZjViNzctNjk1ZTc3NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.154037Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763666. Ctx: { TraceId: 01jd70d1zg877yatrp6701c4c9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc4MGIxNGEtNmIzZTRlM2MtZWI1NDE4NzAtMjQwODk0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.154134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763667. Ctx: { TraceId: 01jd70d1zg8hh21hfcmh646dfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMzZGY1ZjAtYWJkNGM0ZmItYTkwZmFjYjAtZjk2NGM0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.154925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763670. Ctx: { TraceId: 01jd70d1zhfd15k6h0xm1gwwy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZmZmI1NzMtMTlmOTJkMGYtZjBjNjQ0MDAtZGU3MDQ2YmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.154981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763669. Ctx: { TraceId: 01jd70d1zhbsbgvcz7wjbh01k9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc4MGJmNzAtNWU5YTY4MDEtZWRiMWY4LTI0ZTg2ZTQy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.155033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763671. 
Ctx: { TraceId: 01jd70d1zj6y2pegfgagjvgvkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4MmIwNTgtZTgyNmY1ZWItYjdiNDI0ZDAtZjNjOTI3OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:11.155101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763672. Ctx: { TraceId: 01jd70d1zjdjq5a5xjfdm7r05s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcyZDI4MGItMzNmMTM5NTUtODZjNGZkNmMtMTdjODU4YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T09:21:11.157195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976763673. Ctx: { TraceId: 01jd70d1zk4wvmvdt0n027cdf3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3NDk3Ni1hYThmMjk3LWI0NzI3MDMwLTlmNzA1MGM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED)
Table has 2 shards
>> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD]
>> TReplicationTests::CreateInParallel [GOOD]
>> TReplicationTests::CreateWithoutCredentials
>> TReplicationTests::CreateWithoutCredentials [GOOD]
>> TReplicationTests::Describe
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD]
Test command err:
2024-11-21T09:21:13.212122Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2024-11-21T09:21:13.212150Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2024-11-21T09:21:13.215773Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created
2024-11-21T09:21:13.215798Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2
2024-11-21T09:21:13.215815Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1
2024-11-21T09:21:13.215821Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1
2024-11-21T09:21:13.215850Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored
2024-11-21T09:21:13.215857Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored
2024-11-21T09:21:13.215863Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1
2024-11-21T09:21:13.215877Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1
>> TReplicationTests::Describe [GOOD]
>> TReplicationTests::CreateReplicatedTable
>> AutoConfig::GetServicePoolsWith3CPUs [GOOD]
|96.2%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|96.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> AutoConfig::GetASPoolsWith2CPUs [GOOD]
>> TSequenceReboots::CreateSequencesWithIndexedTable
>> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD]
>> TSequenceReboots::CreateSequence
>> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD]
>> AutoConfig::GetServicePoolsWith2CPUs [GOOD]
>> AutoConfig::GetServicePoolsWith1CPU [GOOD]
>> AutoConfig::GetASPoolsith1CPU [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD]
>> TReplicationTests::CreateReplicatedTable [GOOD]
>> TReplicationTests::DropReplicationWithInvalidCredentials
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD]
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD]
>> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD]
>> TReplicationTests::DropReplicationWithUnknownSecret
|96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest
>> TReplicationTests::DropReplicationWithUnknownSecret [GOOD]
>> AutoConfig::GetASPoolsWith3CPUs [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138]
2024-11-21T09:21:12.890782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2024-11-21T09:21:12.890811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2024-11-21T09:21:12.890816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2024-11-21T09:21:12.890821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2024-11-21T09:21:12.890849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2024-11-21T09:21:12.890853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2024-11-21T09:21:12.890862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2024-11-21T09:21:12.890965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2024-11-21T09:21:12.904338Z node 1
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:12.904366Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:12.907556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:12.908385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:12.908424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:12.910806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:12.911002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:12.912670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:12.913409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:12.915480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:12.917549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:12.917569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:12.917617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:12.917626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:12.917633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:12.917651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.918973Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:12.933070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:12.933875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.933939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:12.933983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:12.933990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.934773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:12.934797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:12.934832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.934840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:12.934842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:12.934846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:12.935212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.935239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:12.935244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:12.935555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.935564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.935569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:12.935574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.935934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:12.936291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:12.936948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:12.937151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:12.937178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:12.937185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:12.937244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:12.937250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:12.937275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:12.937287Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:12.937725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:12.937733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:12.937774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:12.937779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:12.937848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:12.937854Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:12.937864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:12.937868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.937874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:12.937879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.937884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:12.937887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:12.937897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:12.937902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:12.937906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:12.938168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:12.938182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:12.938186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:12.938191Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:12.938197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:12.938208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ediator stepId#5000003 2024-11-21T09:21:14.822455Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T09:21:14.822482Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:21:14.822486Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:14.822500Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:14.822503Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:14.822522Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:14.822539Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.822541Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T09:21:14.822544Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:21:14.822608Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.822615Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:14.822625Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:14.822629Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:14.822633Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:14.822638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:21:14.822643Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:14.822647Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:14.822649Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:14.822665Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:21:14.822668Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:21:14.822670Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:21:14.822673Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:21:14.822774Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2024-11-21T09:21:14.822780Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing 
event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:21:14.822790Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:14.822798Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:14.822802Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:14.822806Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:21:14.822810Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:14.822819Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:14.822967Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2024-11-21T09:21:14.822973Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:21:14.822981Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:14.822988Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:14.822991Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:14.822995Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:21:14.822998Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:21:14.823008Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:21:14.823012Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:14.823041Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T09:21:14.823045Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T09:21:14.823049Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:14.823054Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 
2], at schemeshard: 72057594046678944 2024-11-21T09:21:14.823062Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:14.823427Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:14.823553Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:14.823558Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:14.823717Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:14.823722Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:14.823735Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:21:14.823772Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:21:14.823777Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:21:14.823816Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:14.823823Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:14.823826Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:21:14.823845Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [8:355:2336], Recipient [8:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2024-11-21T09:21:14.823849Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:21:14.823857Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:21:14.823871Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:14.823875Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:437:2394] 2024-11-21T09:21:14.823891Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:14.823895Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:14.823899Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:14.823941Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [8:440:2397], Recipient [8:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:21:14.823945Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 
2024-11-21T09:21:14.823953Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:14.823973Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 18us result status StatusPathDoesNotExist 2024-11-21T09:21:14.824005Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots] [GOOD] |96.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |96.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:08.474947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:08.474969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.474975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:08.474980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:08.474985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:08.474989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:08.474998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.475074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:08.485017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:08.485037Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:08.487684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:08.488469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:08.488511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:08.490123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:08.490332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:08.490429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.490517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:08.491780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.492041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.492051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.492088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:08.492095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:21:08.492102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:08.492114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.493474Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:08.507116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:08.507184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:08.507260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:08.507267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:08.507826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:08.507838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:08.507841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:08.508238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.508249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:08.508254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:08.508560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.508568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.508572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.508576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.509093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:08.509448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:08.509487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:08.509648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.509670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:08.509678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.509723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:08.509730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.509753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.509763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:08.510164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.510171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.510203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.510209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:08.510269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.510275Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:08.510286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:08.510290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.510297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:08.510302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.510307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T09:21:08.510311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:08.510321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:08.510327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:08.510331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:08.510593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.510605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.510610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:08.510614Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:08.510619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.510633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Id: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T09:21:15.325662Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:15.325674Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:15.325715Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:15.325753Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:15.325759Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T09:21:15.325765Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T09:21:15.325894Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.325903Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:15.326300Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:15.326316Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 101 2024-11-21T09:21:15.326320Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:15.326325Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T09:21:15.326330Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:15.326442Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:15.326453Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T09:21:15.326456Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T09:21:15.326460Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T09:21:15.326463Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:15.326471Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T09:21:15.326595Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2024-11-21T09:21:15.326600Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:15.326611Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2024-11-21T09:21:15.326622Z node 28 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2024-11-21T09:21:15.326706Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:15.326712Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:15.326722Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 
120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:15.326726Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:15.326730Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T09:21:15.326736Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.326739Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.326741Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:15.326745Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T09:21:15.327154Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:15.327491Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T09:21:15.327511Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.327525Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.327572Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.327579Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T09:21:15.327590Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T09:21:15.327594Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:15.327599Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T09:21:15.327610Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:342:2317] message: TxId: 101 2024-11-21T09:21:15.327619Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T09:21:15.327623Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T09:21:15.327627Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T09:21:15.327646Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:15.327996Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T09:21:15.328005Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:343:2318] TestWaitNotification: OK eventTxId 101 2024-11-21T09:21:15.328096Z 
node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.328149Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 45us result status StatusSuccess 2024-11-21T09:21:15.328285Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:06.619460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:06.619484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.619489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:06.619495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:06.619500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:06.619504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:06.619514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:06.619594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:06.629261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:06.629277Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:06.633357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:06.634121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:06.634170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:06.638295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:06.638636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:06.638769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.638872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:06.640393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.640689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.640703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.640743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:06.640752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.640758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:06.640787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.642181Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:06.660418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:06.660509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.660576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:06.660623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:06.660633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.661503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.661530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:06.661577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.661586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:06.661591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:06.661596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:06.662095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.662110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:06.662115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:06.662688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.662703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.662709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.662716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.663348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:06.663944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:06.664006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:06.664221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:06.664259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:06.664271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.664332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:06.664341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:06.664369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.664382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:06.664917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:06.664925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:06.664957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:06.664962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:06.665016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.665022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:06.665033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:06.665036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.665042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:06.665047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:06.665051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:06.665055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:06.665065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:06.665071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:06.665075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:06.665382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.665395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:06.665398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:06.665402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:06.665404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:06.665417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... eshard: 72057594046678944 2024-11-21T09:21:15.631908Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:15.631913Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:15.631918Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:15.631933Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.632955Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:123:2149] sender: [37:235:2058] recipient: [37:15:2062] 2024-11-21T09:21:15.634649Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:15.634699Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.634733Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:15.634773Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:15.634778Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635194Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635214Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:15.635239Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635246Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T09:21:15.635248Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:15.635251Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:15.635595Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635604Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635607Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:15.635928Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635937Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.635941Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:15.635944Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:15.635964Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:15.636305Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:15.636336Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:15.636466Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.636486Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 158913792106 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:15.636496Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:15.636546Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:15.636552Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:15.636574Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:15.636585Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:15.637072Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T09:21:15.637083Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:15.637113Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:15.637116Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:202:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:15.637169Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.637174Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:15.637183Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:15.637185Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:15.637189Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:15.637198Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:15.637201Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:15.637203Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:15.637212Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:15.637216Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:15.637218Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:15.637290Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:15.637297Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:15.637300Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:15.637302Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:15.637305Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:15.637315Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T09:21:15.637912Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T09:21:15.637990Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T09:21:15.638132Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Bootstrap 2024-11-21T09:21:15.639301Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Become 
StateWork (SchemeCache [37:270:2262]) 2024-11-21T09:21:15.639881Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:15.639932Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:15.639948Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.640024Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T09:21:15.640171Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:21:15.640664Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:15.640689Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T09:21:15.640939Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithAwsRegion[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 
2024-11-21T09:20:39.680661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.690026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.690043Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.691410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.691476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.691496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.695998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.697123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for 
TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700709Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.715984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true 
} CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719679Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2024-11-21T09:20:39.719717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.658598Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.658604Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.658610Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:21:15.658615Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:21:15.658715Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.658724Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.658727Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.658730Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:21:15.658734Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:21:15.658830Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 198 } } 2024-11-21T09:21:15.658836Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:15.658853Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 198 } } 2024-11-21T09:21:15.658864Z node 144 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 
DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 198 } } 2024-11-21T09:21:15.659049Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:15.659059Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:15.659073Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:15.659079Z node 144 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:15.659086Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:15.659099Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.659102Z node 144 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.659106Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:15.659111Z node 144 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T09:21:15.659208Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.659217Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.659221Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.659225Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:21:15.659229Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:15.659240Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:21:15.660480Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.660498Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.660724Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.660753Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.660822Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.660845Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.660850Z node 144 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T09:21:15.660871Z node 144 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T09:21:15.660874Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:15.660877Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:21:15.660900Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [144:400:2375] message: TxId: 1003 2024-11-21T09:21:15.660906Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:15.660912Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:15.660917Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:15.660929Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:15.660934Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:21:15.660937Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:21:15.660948Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:15.660951Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:21:15.660953Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:21:15.660958Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:15.661557Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:15.661568Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:598:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:15.661681Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.661736Z node 144 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 64us result status StatusSuccess 2024-11-21T09:21:15.661809Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream 
CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeNewAndOldImages PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatDynamoDBStreamsJson VirtualTimestamps: false AwsRegion: "ru-central1" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.691615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.691637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.691640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.691644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.691658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.691661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.691667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.691719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.698527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.698539Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.699900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.699961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.699981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.701968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.702042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.702150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.702385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.703025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.703244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.703254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.703263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.703269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.703274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.703307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.704348Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.714443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
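The trace above walks the schemeshard operation through its internal states ("Change state for txid 1:0 2 -> 3", then "3 -> 128", and later "128 -> 240"). When a reboot test like this fails, it can help to pull those transitions out of the captured stderr to see where an operation stalled. The following is a minimal sketch under stated assumptions: the stderr is assumed to be saved as a plain-text file (the name "ya_test_stderr.log" is a placeholder), and the regex only covers the "Change state for txid" lines visible in this trace — it is not part of the test harness.

    import re
    from collections import defaultdict

    # Matches schemeshard state transitions such as:
    #   ... :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
    STATE_RE = re.compile(r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)")

    def collect_transitions(text):
        """Group 'Change state for txid' transitions by operation id (txid:part)."""
        transitions = defaultdict(list)
        for m in STATE_RE.finditer(text):
            transitions[m.group("op")].append((int(m.group("src")), int(m.group("dst"))))
        return transitions

    if __name__ == "__main__":
        # File name is a placeholder for wherever the captured stderr was saved.
        with open("ya_test_stderr.log", encoding="utf-8", errors="replace") as fh:
            for op, steps in sorted(collect_transitions(fh.read()).items()):
                chain = [str(steps[0][0])] + [str(dst) for _, dst in steps]
                print(f"{op}: {' -> '.join(chain)}")

Run against a dump like this one, it would print chains such as "1:0: 2 -> 3 -> 128 -> 240", making it easy to compare a failing run against a passing one.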
2024-11-21T09:20:39.715967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.716003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.716009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.717085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.718456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
HARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780191Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780195Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.780201Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:21:15.780223Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:21:15.780343Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780353Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780357Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.780360Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:21:15.780364Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:21:15.780444Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T09:21:15.780449Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:15.780465Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T09:21:15.780476Z node 144 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T09:21:15.780621Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 
2024-11-21T09:21:15.780626Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:15.780638Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:15.780646Z node 144 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:15.780653Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:15.780661Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.780665Z node 144 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.780669Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:15.780674Z node 144 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T09:21:15.780747Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780755Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.780758Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:15.780762Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:21:15.780765Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:15.780774Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:21:15.782089Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.782106Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:15.782306Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.782333Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.782404Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
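The DescribeScheme results in this dump also carry per-call timings ('describe path "/MyRoot/Table/Stream" took 64us result status StatusSuccess'). A similar hedged sketch, again assuming the stderr was saved to a plain-text file, summarizes those timings; the file name and helper are illustrative only, not part of the test output or tooling.

    import re

    # Matches schemeshard describe timing lines such as:
    #   Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 64us result status StatusSuccess
    DESCRIBE_RE = re.compile(
        r'Tablet (?P<tablet>\d+) describe path "(?P<path>[^"]+)" took (?P<us>\d+)us '
        r'result status (?P<status>\w+)'
    )

    def describe_timings(text):
        """Yield (path, microseconds, status) for every DescribeScheme result in the log."""
        for m in DESCRIBE_RE.finditer(text):
            yield m.group("path"), int(m.group("us")), m.group("status")

    if __name__ == "__main__":
        with open("ya_test_stderr.log", encoding="utf-8", errors="replace") as fh:
            rows = list(describe_timings(fh.read()))
        for path, us, status in rows:
            print(f"{us:>6}us  {status:<14} {path}")
        if rows:
            print(f"max: {max(us for _, us, _ in rows)}us over {len(rows)} describes")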
2024-11-21T09:21:15.782429Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:15.782433Z node 144 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T09:21:15.782441Z node 144 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T09:21:15.782443Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:15.782446Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:21:15.782458Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [144:400:2375] message: TxId: 1003 2024-11-21T09:21:15.782462Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:15.782466Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:15.782468Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:15.782475Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:15.782478Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:21:15.782480Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:21:15.782490Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:15.782493Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:21:15.782495Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:21:15.782501Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:15.783003Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:15.783015Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:598:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:15.783114Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.783146Z node 144 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 36us result status StatusSuccess 2024-11-21T09:21:15.783210Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPopulatorTest::Boot >> TPopulatorTestWithResets::UpdateAck >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] [GOOD] >> TPopulatorTest::MakeDir |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |96.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::Boot [GOOD] >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:41.485881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:41.485896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.485900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:41.485902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:41.485911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:41.485913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:41.485919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.485965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:41.492493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:41.492505Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.493820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:41.493895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:41.493911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:41.495574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:41.495620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:41.495699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.495821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.496276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.496473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.496481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.496488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:41.496493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.496498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:41.496529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:41.497480Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.507463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:41.507521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.507558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:41.507587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:41.507591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.507999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:41.508046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:41.508061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:41.508065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:41.508377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:41.508644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.508654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.508658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.509042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:41.509320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:41.509359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:41.509473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.509488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:41.509492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.509525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:41.509529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.509550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:41.509558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.509827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.509834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.509858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.509863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:41.509913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.509917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:41.509924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:41.509926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.509930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:41.509933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.509936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:41.509938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:41.509945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:41.509948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:41.509951Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:15.894897Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.894933Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 42us result status StatusSuccess 2024-11-21T09:21:15.895032Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 
17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:15.895076Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:15.895094Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 20us result status StatusSuccess 2024-11-21T09:21:15.895144Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2024-11-21T09:21:16.670676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:16.670698Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T09:21:16.698807Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T09:21:16.698838Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T09:21:16.698992Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699002Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699124Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T09:21:16.699128Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:21:16.700376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T09:21:16.700390Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] 
Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T09:21:16.700430Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T09:21:16.700433Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T09:21:16.721572Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T09:21:16.721596Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Successful handshake: replica# [1:15:2062] 2024-11-21T09:21:16.721604Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:16.721615Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2024-11-21T09:21:16.721618Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Successful handshake: replica# [1:18:2065] 2024-11-21T09:21:16.721622Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:16.721631Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T09:21:16.721637Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Successful handshake: replica# [1:12:2059] 2024-11-21T09:21:16.721641Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:16.721660Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T09:21:16.721672Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { 
PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:94:2121] 2024-11-21T09:21:16.721687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T09:21:16.721697Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721713Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:94:2121] 2024-11-21T09:21:16.721724Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T09:21:16.721730Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721736Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T09:21:16.721745Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:95:2122] 2024-11-21T09:21:16.721754Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2024-11-21T09:21:16.721761Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721769Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:95:2122] 2024-11-21T09:21:16.721775Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2024-11-21T09:21:16.721780Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721790Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T09:21:16.721796Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T09:21:16.721804Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2024-11-21T09:21:16.721813Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:93:2120] 2024-11-21T09:21:16.721817Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721822Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2024-11-21T09:21:16.721831Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:93:2120] 2024-11-21T09:21:16.721836Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2024-11-21T09:21:16.721843Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T09:21:16.721847Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T09:21:16.721854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T09:21:16.721864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T09:21:16.721869Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2024-11-21T09:21:16.721876Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T09:21:16.721882Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T09:21:16.721889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 0 2024-11-21T09:21:16.721894Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T09:21:16.721898Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2024-11-21T09:21:16.721903Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck 
{ Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.721910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 0 2024-11-21T09:21:16.721914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T09:21:16.721919Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.721924Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T09:21:16.721930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T09:21:16.721936Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 0 2024-11-21T09:21:16.721939Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T09:21:16.721944Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T09:21:16.721949Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.721954Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T09:21:16.721959Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2024-11-21T09:21:16.722073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 0 2024-11-21T09:21:16.722077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T09:21:16.722082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.722085Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T09:21:16.722089Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 
2024-11-21T09:21:16.722169Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 0 2024-11-21T09:21:16.722172Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T09:21:16.722175Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.722178Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.722226Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 0 2024-11-21T09:21:16.722228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T09:21:16.722258Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.722260Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 TestWaitNotification: OK eventTxId 100 |96.2%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2024-11-21T09:21:16.670676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:16.670698Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2024-11-21T09:21:16.672573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:16.672593Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T09:21:16.699464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T09:21:16.699491Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T09:21:16.699704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699721Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699846Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T09:21:16.699855Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2024-11-21T09:21:16.699874Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:16.699882Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:16.699889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:16.699914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 
2024-11-21T09:21:16.699920Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.699951Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.699955Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T09:21:16.699960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:16.699966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:16.699972Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:16.699985Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.700041Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.700125Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.700132Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T09:21:16.700172Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.700177Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:21:16.700579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: 
StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T09:21:16.700596Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T09:21:16.700613Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700620Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700626Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700705Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T09:21:16.700710Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2024-11-21T09:21:16.700723Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:16.700729Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:16.700735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:16.700742Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700746Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700766Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.700771Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:16.700779Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.700783Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2024-11-21T09:21:16.700788Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:16.700793Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:16.700797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:16.700806Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.700847Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:16.700871Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:16.700874Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T09:21:16.700903Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:16.700908Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> IncrementalBackup::BackupRestore [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStream[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.627206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.627227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.627232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.627236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.627247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.627251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.627260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.627322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.638174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.638191Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.640086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.640173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T09:20:40.640197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.642547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.642614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.642722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.642845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.643436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.643663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.643673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.643684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.643690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.643696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.643726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.644862Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.661131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.661197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.661246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.661287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.661293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.661813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.661837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T09:20:40.661869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.661878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.661883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.661887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.662219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.662229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.662234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.662527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.662536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.662542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.662549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.663103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.663460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.663504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.663674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.663699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.663707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.663756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.663762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.663789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.663800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.664143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.664151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.664183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.664188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.664277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.664284Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.664294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.664299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.664304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.664309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.664313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.664317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.664328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.664333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.664337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
HARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485278Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485284Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:16.485287Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T09:21:16.485291Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:21:16.485383Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485391Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485394Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:16.485397Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:21:16.485401Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:21:16.485488Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 158 } } 2024-11-21T09:21:16.485493Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:16.485508Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 158 } } 2024-11-21T09:21:16.485519Z node 144 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 158 } } 2024-11-21T09:21:16.485681Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 
2024-11-21T09:21:16.485687Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T09:21:16.485698Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:16.485703Z node 144 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:16.485709Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 618475292938 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:16.485721Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:16.485725Z node 144 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:16.485729Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:16.485733Z node 144 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T09:21:16.485814Z node 144 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485824Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.485827Z node 144 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:16.485831Z node 144 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:21:16.485836Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:16.485849Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T09:21:16.486969Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.486984Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:16.487156Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:16.487179Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:16.487241Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
2024-11-21T09:21:16.487264Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:21:16.487300Z node 144 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T09:21:16.487311Z node 144 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T09:21:16.487315Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:16.487320Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T09:21:16.487333Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [144:400:2375] message: TxId: 1003 2024-11-21T09:21:16.487337Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T09:21:16.487341Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:16.487345Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:16.487354Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:16.487357Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:21:16.487359Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:21:16.487369Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:16.487372Z node 144 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:21:16.487375Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:21:16.487384Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:16.487862Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:16.487873Z node 144 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [144:598:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:16.487969Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:16.488007Z node 144 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 42us result status StatusSuccess 2024-11-21T09:21:16.488093Z node 144 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::BackupRestore [GOOD] Test command err: 2024-11-21T09:21:12.441473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:21:12.442110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:21:12.442151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003f9d/r3tmp/tmp53rTbl/pdisk_1.dat 2024-11-21T09:21:12.607879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T09:21:12.608690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.608759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T09:21:12.608789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:21:12.608804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:21:12.609080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:21:12.609094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:12.609098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:12.609153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:12.609216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.609223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T09:21:12.609227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.609712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:12.609798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:12.609829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:12.609998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:12.610002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T09:21:12.610006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:12.625016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T09:21:12.625043Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:12.668700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:12.668736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:12.679306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:12.781827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:12.781878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:21:12.781896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T09:21:12.782775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:12.782801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T09:21:12.782841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:21:12.782852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T09:21:12.783120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:21:12.783129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:21:12.783164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:21:12.783169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, 
path id: 1 2024-11-21T09:21:12.783231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.783238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T09:21:12.783247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:12.783251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.783256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:12.783261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:12.783265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:12.783269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:12.783278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:21:12.783283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T09:21:12.783287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T09:21:12.783727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T09:21:12.783745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T09:21:12.783749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T09:21:12.783754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:21:12.783759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:21:12.783773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T09:21:12.783778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T09:21:12.783945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T09:21:12.787856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:21:12.787911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:12.787921Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:12.787996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T09:21:12.788008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T09:21:12.788014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T09:21:12.788032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] wa ... opId# 281474976715667:2 ProgressState 2024-11-21T09:21:15.157076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:2 progress is 5/5 2024-11-21T09:21:15.157080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2024-11-21T09:21:15.157085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715667, ready parts: 5/5, is published: true 2024-11-21T09:21:15.157095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715667 2024-11-21T09:21:15.157100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 5/5 2024-11-21T09:21:15.157104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2024-11-21T09:21:15.157108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2024-11-21T09:21:15.157118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 5 2024-11-21T09:21:15.157122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:1 2024-11-21T09:21:15.157125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:1 2024-11-21T09:21:15.157129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2024-11-21T09:21:15.157132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:2 2024-11-21T09:21:15.157135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:2 2024-11-21T09:21:15.157150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T09:21:15.157154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:3 2024-11-21T09:21:15.157157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:3 2024-11-21T09:21:15.157161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046644480, LocalPathId: 3] was 3 2024-11-21T09:21:15.157164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:4 2024-11-21T09:21:15.157167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:4 2024-11-21T09:21:15.157172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T09:21:15.157221Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037892 state Ready 2024-11-21T09:21:15.157228Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2024-11-21T09:21:15.607808Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:15.607851Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 1 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T09:21:15.607890Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:15.607967Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:1003:2791]][0][1:1153:2888] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T09:21:15.607998Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T09:21:15.608005Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T09:21:15.608034Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T09:21:15.608039Z node 1 :PERSQUEUE DEBUG: waiting read cookie 2 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T09:21:15.608053Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:15.965066Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:15.965104Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 2 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T09:21:15.965137Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:15.965213Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:1003:2791]][0][1:1153:2888] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T09:21:15.965244Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T09:21:15.965253Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T09:21:15.965283Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:15.965294Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 3 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ 
offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T09:21:15.965299Z node 1 :PERSQUEUE DEBUG: waiting read cookie 3 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T09:21:16.149233Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T09:21:16.301710Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:16.301747Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 3 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T09:21:16.301778Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:16.301857Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:1003:2791]][0][1:1153:2888] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T09:21:16.301901Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T09:21:16.301908Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T09:21:16.301927Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:16.301935Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 4 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T09:21:16.301938Z node 1 :PERSQUEUE DEBUG: waiting read cookie 4 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T09:21:16.628296Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:16.628335Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 4 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T09:21:16.628369Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:16.628443Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:1003:2791]][0][1:1153:2888] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T09:21:16.628466Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T09:21:16.628473Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T09:21:16.628493Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:16.628500Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 5 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T09:21:16.628517Z node 1 :PERSQUEUE DEBUG: waiting read cookie 5 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T09:21:16.965378Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:16.965432Z node 1 
:PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] waiting read cookie 5 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T09:21:16.965468Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:16.965544Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:1003:2791]][0][1:1153:2888] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T09:21:16.965574Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T09:21:16.965582Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T09:21:16.965606Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:16.965615Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037890, Partition: 0, State: StateIdle] read cookie 6 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T09:21:16.965621Z node 1 :PERSQUEUE DEBUG: waiting read cookie 6 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T09:21:17.008322Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd70d7nn1gcm179chrza6e5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJiODY5NmYtNDZjOTMzMWYtYWJiOTY5OTAtM2NmYzI0Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 5 } items { uint32_value: 200 } } 2024-11-21T09:21:17.026735Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70d7pqf3sxv4k562q499z5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJlNzRhOTItNGIyMWU1MmQtYWY0YzFlZDAtZTYzYzFiMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 5 } items { uint32_value: 200 } } |96.2%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql >> TPopulatorTest::RemoveDir ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2024-11-21T09:20:50.162747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660105770763932:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.162781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037a8/r3tmp/tmp2WznJW/pdisk_1.dat 2024-11-21T09:20:50.228438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24254, node 1 2024-11-21T09:20:50.263976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.264007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.265349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.275906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.275920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.275922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.275966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:50.335916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.337098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.337936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.337948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:20:50.338413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:20:50.338463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850386, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:20:50.340558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:20:50.340951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:20:50.341005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:20:50.341011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:20:50.341019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:20:50.341401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:20:50.341414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:20:50.341418Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.341439Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:9680 2024-11-21T09:20:50.410777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660105770764717:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.410795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.529546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.529656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:20:50.529822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.529832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.530463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:50.530648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530697Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:50.530736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:50.537292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:50.537316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:20:50.537684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:50.591632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:50.591646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:50.591667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:20:50.592096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.592847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180850638 2024-11-21T09:20:50.592885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:20:50.593283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.593354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.593373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.593621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.593632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.593635Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759 ... default}. Database not set, use /Root 2024-11-21T09:21:15.614324Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821146. Ctx: { TraceId: 01jd70d6ax2xw1yh98h23ygfb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmMTU3ZDgtMzVkYzMxZmUtOTNkMmU0YmItOWRiNTY3ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.614328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821147. Ctx: { TraceId: 01jd70d6ax5hkywy1r4pk512hv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJjY2Q5YjQtYzY3MDJhZWUtOGMxOTI2YzItZGE1NjMxNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.614423Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821148. Ctx: { TraceId: 01jd70d6ax08d5zxsze4x0vnq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhYzljZGYtZmY2ZmY3OS0xZTg3MDFkNS0yNmQyNGFhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.614646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821149. Ctx: { TraceId: 01jd70d6ax3hmx73229qzarcza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNlMjg4MjktODJjYjk0N2EtODIyZDI0YWUtMjgyZTQyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821150. Ctx: { TraceId: 01jd70d6aydkz7k8bzbp7f01zg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRmNDc0MDAtMTA2YmMxMWQtYzI1NzBlZmItYWFlMmU5YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821151. 
Ctx: { TraceId: 01jd70d6aycgpewjtx2sptzf79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M4OGFhZS04MzM2Zjg5Zi03NjEyMGNjZC03ZDBhZWQ4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821153. Ctx: { TraceId: 01jd70d6ay85q4rd680fzjag2y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZlMjU5ZjEtMTQ4OWViYzItZmRiMTIzZmUtMmZjZDkzY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615341Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821152. Ctx: { TraceId: 01jd70d6ay8a74f3f6ssredz89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ0NTYxMjMtMTNlYjljNDQtYjM2NjI2NzMtZWE4ZmQ3MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821154. Ctx: { TraceId: 01jd70d6aybcvn145vdrh4c11d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFkNGQyODUtMTAwMTUxMWEtOGJkZTdlODgtNDg3Mzk2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615879Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821156. Ctx: { TraceId: 01jd70d6az0cbqe1grbxa5pmxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJjY2Q5YjQtYzY3MDJhZWUtOGMxOTI2YzItZGE1NjMxNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.615965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821155. Ctx: { TraceId: 01jd70d6ay777z7qf61a733cge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIyMjM2OGYtNWEwMmQwM2YtMTVjNzA1M2EtZDNlYTc1NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.616036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821157. Ctx: { TraceId: 01jd70d6az2atbeqembtqqstqt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhYzljZGYtZmY2ZmY3OS0xZTg3MDFkNS0yNmQyNGFhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.616540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821158. Ctx: { TraceId: 01jd70d6azdpgwyj1ep0564v3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmMTU3ZDgtMzVkYzMxZmUtOTNkMmU0YmItOWRiNTY3ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.616549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821159. Ctx: { TraceId: 01jd70d6az2rw15yxj52rsvznd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNlMjg4MjktODJjYjk0N2EtODIyZDI0YWUtMjgyZTQyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T09:21:15.616970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821160. Ctx: { TraceId: 01jd70d6b03jnvz00837zxpeg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ0NTYxMjMtMTNlYjljNDQtYjM2NjI2NzMtZWE4ZmQ3MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.617043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821161. 
Ctx: { TraceId: 01jd70d6az314knyfm0bydphdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZlMjU5ZjEtMTQ4OWViYzItZmRiMTIzZmUtMmZjZDkzY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.617188Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821163. Ctx: { TraceId: 01jd70d6b0atgr51grbjazrshx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M4OGFhZS04MzM2Zjg5Zi03NjEyMGNjZC03ZDBhZWQ4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.617406Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821162. Ctx: { TraceId: 01jd70d6b0frv2f4zym8dsbaqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRmNDc0MDAtMTA2YmMxMWQtYzI1NzBlZmItYWFlMmU5YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.617545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821164. Ctx: { TraceId: 01jd70d6b0fwtetvecgz6bkzya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJjY2Q5YjQtYzY3MDJhZWUtOGMxOTI2YzItZGE1NjMxNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2024-11-21T09:21:15.617705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821165. Ctx: { TraceId: 01jd70d6b0carbbyk73b0412sp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFkNGQyODUtMTAwMTUxMWEtOGJkZTdlODgtNDg3Mzk2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.617737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821166. Ctx: { TraceId: 01jd70d6b1823w9hrny4fyc4fk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIyMjM2OGYtNWEwMmQwM2YtMTVjNzA1M2EtZDNlYTc1NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T09:21:15.618442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821168. Ctx: { TraceId: 01jd70d6b19yfgh2eces81zj9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFhYzljZGYtZmY2ZmY3OS0xZTg3MDFkNS0yNmQyNGFhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.618499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821167. Ctx: { TraceId: 01jd70d6b1ch0pzzybshmr2x84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNlMjg4MjktODJjYjk0N2EtODIyZDI0YWUtMjgyZTQyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.618832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821169. 
Ctx: { TraceId: 01jd70d6b1f7pnky39dcraymsj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmMTU3ZDgtMzVkYzMxZmUtOTNkMmU0YmItOWRiNTY3ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.619018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821170. Ctx: { TraceId: 01jd70d6b11hnxqhf5dfv43x0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZlMjU5ZjEtMTQ4OWViYzItZmRiMTIzZmUtMmZjZDkzY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:15.619124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976821171. Ctx: { TraceId: 01jd70d6b1cq9962y76n7esxqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQ0NTYxMjMtMTNlYjljNDQtYjM2NjI2NzMtZWE4ZmQ3MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T09:21:15.656098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:15.661145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:15.756277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T09:21:15.756360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:15.756425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:15.756809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T09:19:16.488095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659701179871333:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:16.488112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/003f4b/r3tmp/tmpyvNkGK/pdisk_1.dat 2024-11-21T09:19:16.522810Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:16.550356Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13695, node 1 2024-11-21T09:19:16.556316Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:19:16.556449Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T09:19:16.589866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:16.589891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:16.590070Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003f4b/r3tmp/yandex2CAOUx.tmp 2024-11-21T09:19:16.590085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003f4b/r3tmp/yandex2CAOUx.tmp 2024-11-21T09:19:16.590134Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003f4b/r3tmp/yandex2CAOUx.tmp 2024-11-21T09:19:16.590177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:16.590963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:16.630697Z INFO: TTestServer started on Port 2675 GrpcPort 13695 TClient is connected to server localhost:2675 PQClient connected to localhost:13695 === TenantModeEnabled() = 0 === Init PQ - start server on port 13695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:16.668349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:19:16.668394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.668447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T09:19:16.668514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:19:16.668527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.668705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:16.668741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:19:16.668849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.668871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:19:16.668881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T09:19:16.668887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T09:19:16.669017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:19:16.669075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:16.669090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 
2024-11-21T09:19:16.669493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:16.669528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T09:19:16.669539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T09:19:16.669595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:19:16.669884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180756719, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439659701179871590 RawX2: 4294969514 } } Step: 1732180756719 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:19:16.669908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:16.669939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:19:16.669945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:19:16.669962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:19:16.669982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T09:19:16.670043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:19:16.670049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:19:16.670069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:19:16.670086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439659701179871647:2243], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T09:19:16.670090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:16.670096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:19:16.670102Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:19:16.670104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T09:19:16.670115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T09:19:16.670121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T09:19:16.670122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:19:16.670124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T09:19:16.670129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:19:16.670136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:19:16.670138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T09:19:16.670428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:19:16.670444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:19:16.670447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:19:16.670450Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759404 ... :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2024-11-21T09:21:07.681296Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:07.681312Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:07.681466Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732180867575 CreateTimestampMS: 1732180867574 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732180867575 CreateTimestampMS: 1732180867574 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1732180867575 CreateTimestampMS: 1732180867574 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 7 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T09:21:07.681500Z node 25 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T09:21:07.681507Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:07.681512Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:07.681507Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T09:21:07.681533Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user debug offset 0 count 3 size 530 endOffset 3 max time lag 0ms effective offset 0 2024-11-21T09:21:07.681514Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732180867571 CreateTimestampMS: 1732180867570 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732180867571 CreateTimestampMS: 1732180867570 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1732180867571 CreateTimestampMS: 1732180867570 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 7 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T09:21:07.681543Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T09:21:07.681552Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2024-11-21T09:21:07.681521Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 6ab48963-9c6038fb-783d126f-7215d7c3 has messages 1 2024-11-21T09:21:07.681555Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T09:21:07.681542Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset3 2024-11-21T09:21:07.681547Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 ReadOffset 3 ReadGuid cecafcc6-8fdb44aa-3689c87f-142b152d has messages 1 2024-11-21T09:21:07.681556Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 read done: guid# 6ab48963-9c6038fb-783d126f-7215d7c3, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1), size# 276 2024-11-21T09:21:07.681563Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 response to read: guid# 6ab48963-9c6038fb-783d126f-7215d7c3 2024-11-21T09:21:07.681588Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:07.681647Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 Process answer. Aval parts: 0 2024-11-21T09:21:07.681668Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 read done: guid# cecafcc6-8fdb44aa-3689c87f-142b152d, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 276 2024-11-21T09:21:07.681671Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 response to read: guid# cecafcc6-8fdb44aa-3689c87f-142b152d 2024-11-21T09:21:07.681701Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 Process answer. Aval parts: 0 2024-11-21T09:21:07.681721Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732180867567 CreateTimestampMS: 1732180867566 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732180867569 CreateTimestampMS: 1732180867566 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1732180867569 CreateTimestampMS: 1732180867566 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T09:21:07.681750Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) wait data in partition inited, cookie 1 from offset3 2024-11-21T09:21:07.681759Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) EndOffset 3 ReadOffset 3 ReadGuid 55c9b129-73681160-e9bcb3da-15a649f5 has messages 1 2024-11-21T09:21:07.681780Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 read done: guid# 55c9b129-73681160-e9bcb3da-15a649f5, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), size# 386 2024-11-21T09:21:07.681787Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 response to read: guid# 55c9b129-73681160-e9bcb3da-15a649f5 2024-11-21T09:21:07.681825Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 Process answer. Aval parts: 0 Got data event with total 3 messages, current total messages: 3 Got data event with total 3 messages, current total messages: 6 2024-11-21T09:21:07.682063Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 grpc read done: success# 1, data# { read_request { bytes_size: 276 } } 2024-11-21T09:21:07.682125Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 got read request: guid# 9ce8c727-7d977ae0-9b64c00e-b5bc824 2024-11-21T09:21:07.682634Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 grpc read done: success# 1, data# { read_request { bytes_size: 276 } } 2024-11-21T09:21:07.682674Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 got read request: guid# 7b092b10-1627dbb0-e542baf9-fb9d4c8e Got data event with total 3 messages, current total messages: 9 2024-11-21T09:21:07.685425Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 grpc read done: success# 0, data# { } 2024-11-21T09:21:07.685439Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 grpc read failed 2024-11-21T09:21:07.685445Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 grpc closed 2024-11-21T09:21:07.685463Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_16598115829040863896_v1 is DEAD 2024-11-21T09:21:07.685778Z node 24 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [24:7439660177766515171:2481] disconnected; active server actors: 1 2024-11-21T09:21:07.685789Z node 24 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe 
[24:7439660177766515171:2481] client debug disconnected session shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685881Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:07.685895Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685907Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660177766515178:2484] destroyed 2024-11-21T09:21:07.685911Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:07.685914Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685916Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660177766515177:2486] destroyed 2024-11-21T09:21:07.685918Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:07.685920Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685922Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439660177766515176:2485] destroyed 2024-11-21T09:21:07.685941Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685945Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_16598115829040863896_v1 2024-11-21T09:21:07.685947Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_16598115829040863896_v1 >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> TPopulatorTest::RemoveDir [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore >> KqpJoin::JoinDupColumnRight >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore >> KqpFlipJoin::RightSemi_1 >> KqpIndexLookupJoin::RightSemi >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpJoin::ExclusionJoin >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> KqpFlipJoin::Inner_1 >> KqpFlipJoin::Right_1 >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore Test command err: 2024-11-21T09:21:17.928715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:17.928734Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T09:21:17.947552Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T09:21:17.947582Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T09:21:17.947752Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947767Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T09:21:17.947883Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2024-11-21T09:21:17.947896Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:17.947902Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:17.947907Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:17.947926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:17.947930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947936Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.947948Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:17.947951Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T09:21:17.947954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2024-11-21T09:21:17.947957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2024-11-21T09:21:17.947961Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2024-11-21T09:21:17.947970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:17.948006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2024-11-21T09:21:17.948077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2024-11-21T09:21:17.948081Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T09:21:17.948102Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2024-11-21T09:21:17.948105Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 
100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T09:21:17.948432Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T09:21:17.948439Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T09:21:17.948451Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.948455Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.948459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T09:21:17.948515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# 
[1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T09:21:17.948519Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2024-11-21T09:21:17.948526Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [Owner ... oard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2024-11-21T09:21:17.949193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2024-11-21T09:21:17.949199Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2024-11-21T09:21:17.949204Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:93:2120], cookie# 101 2024-11-21T09:21:17.949207Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949210Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949215Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949232Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:94:2121], cookie# 101 2024-11-21T09:21:17.949237Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2024-11-21T09:21:17.949245Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2024-11-21T09:21:17.949250Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2024-11-21T09:21:17.949255Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2024-11-21T09:21:17.949287Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:95:2122], cookie# 101 2024-11-21T09:21:17.949296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 101 2024-11-21T09:21:17.949337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 101 2024-11-21T09:21:17.949342Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T09:21:17.949373Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 101 2024-11-21T09:21:17.949378Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T09:21:17.949590Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 219, preserialized size# 2 2024-11-21T09:21:17.949596Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2024-11-21T09:21:17.949608Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949614Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949620Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949651Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: 
path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 381, preserialized size# 0 2024-11-21T09:21:17.949656Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2024-11-21T09:21:17.949665Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2024-11-21T09:21:17.949671Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2024-11-21T09:21:17.949676Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:17.949690Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:93:2120], cookie# 101 2024-11-21T09:21:17.949695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949700Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T09:21:17.949731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:94:2121], cookie# 101 2024-11-21T09:21:17.949734Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2024-11-21T09:21:17.949737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2024-11-21T09:21:17.949740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 
}: sender# [1:15:2062], cookie# 101 2024-11-21T09:21:17.949743Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2024-11-21T09:21:17.949765Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:95:2122], cookie# 101 2024-11-21T09:21:17.949782Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:93:2120], cookie# 101 2024-11-21T09:21:17.949809Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:94:2121], cookie# 101 2024-11-21T09:21:17.949814Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2024-11-21T09:21:17.949858Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:95:2122], cookie# 101 2024-11-21T09:21:17.949862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpJoin::IdxLookupPartialWithTempTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:02.481403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:02.481424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.481430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:02.481435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:02.481441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:02.481444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:02.481453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:02.481532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:02.491225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:02.491243Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:02.493353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:02.493430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:02.493463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:02.495502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:02.495570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:02.495656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.495808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:02.496410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.496649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.496658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.496670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:02.496676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.496682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:02.496717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:02.497792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:02.513556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
} } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:02.513635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.513696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:02.513738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:02.513746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.514501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.514532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:02.514582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.514593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:02.514598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:02.514603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:02.515113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.515127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:02.515133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:02.515513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.515526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.515532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.515539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.516070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:02.516484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:02.516544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:02.516727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:02.516753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:02.516760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.516815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:02.516822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:02.516861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:02.516875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:02.517306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:02.517319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:02.517368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:02.517374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:02.517464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:02.517472Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:02.517483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:02.517488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.517493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:02.517499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:02.517503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:02.517507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:02.517519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:02.517524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:02.517528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
e: TxMoveTable, at tablet72057594046678944 2024-11-21T09:21:18.034153Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:21:18.034156Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T09:21:18.034163Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T09:21:18.034207Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:21:18.034227Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2024-11-21T09:21:18.034238Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:18.034243Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:21:18.034654Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.034677Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.035117Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035140Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035168Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035173Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:18.035197Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:21:18.035218Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035222Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:21:18.035227Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:21:18.035256Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035262Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:18.035274Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035277Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:18.035280Z node 62 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T09:21:18.035416Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.035424Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.035426Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:18.035429Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:21:18.035431Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:18.035714Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.035730Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.035734Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:18.035741Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:21:18.035745Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:21:18.035756Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:21:18.036058Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.036066Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:18.036070Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:18.036077Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:21:18.036080Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:18.036084Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:21:18.036088Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:18.036091Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:18.036093Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:18.036106Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:18.036108Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:21:18.036164Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:18.036170Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:21:18.036178Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:18.036254Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.036772Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:18.036899Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:21:18.036946Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:18.036952Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:18.037008Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:18.037025Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:18.037029Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:463:2438] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:18.037096Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:18.037127Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 40us result status StatusSuccess 2024-11-21T09:21:18.037196Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" 
KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] |96.2%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> KqpJoinOrder::Chain65Nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] Test command err: 2024-11-21T09:19:19.974868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659712335580197:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:19.975122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:19.976992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659710929229516:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:19.977147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ef4/r3tmp/tmpSVcxJp/pdisk_1.dat 2024-11-21T09:19:19.998986Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:20.000790Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:20.014926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22303, node 1 2024-11-21T09:19:20.026005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/jptk/003ef4/r3tmp/yandexMCZ9Iu.tmp 2024-11-21T09:19:20.026018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003ef4/r3tmp/yandexMCZ9Iu.tmp 2024-11-21T09:19:20.026069Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003ef4/r3tmp/yandexMCZ9Iu.tmp 2024-11-21T09:19:20.026110Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:20.029213Z INFO: TTestServer started on Port 26252 GrpcPort 22303 TClient is connected to server localhost:26252 PQClient connected to localhost:22303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:20.075240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:20.075280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:20.076831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:20.095437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:20.095469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:20.096097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:20.096405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:20.096736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:19:20.108681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:19:20.204569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659715224197170:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:20.204593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659715224197145:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:20.204602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:20.205588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T09:19:20.209105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659715224197174:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T09:19:20.232684Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659716630548561:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:20.232756Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWJkMTIzYS1kNTZlMzI4Ny03MmM0MDZmMy00NjFhOWYxNA==, ActorId: [1:7439659716630548520:2300], ActorState: ExecuteState, TraceId: 01jd709nmq8pz3kdsbnc46ykx1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:20.233193Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:20.233426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:20.270281Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659715224197215:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:20.270366Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Zjg4MmE5NWEtMWM5YzZiZTMtZTQzZDIzNGUtZGFkZWQxYg==, ActorId: [2:7439659715224197143:2280], ActorState: ExecuteState, TraceId: 01jd709nmc9gg725kg650vdrnf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:20.270578Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:20.291080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:20.354255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:19:20.378017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd709nsc2ywh9ehaq27xv564, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I0NDlkMDMtYzIwNzFlNjUtOGJjMjIwMzYtM2Y3MzFiMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659716630548982:3043] 2024-11-21T09:19:24.975434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659712335580197:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:24.975465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:24.977477Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659710929229516:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:24.977505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:22303 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2024-11-21T09:19:26.383065Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22303 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPer ... Partition=0, SeqNo=(NULL) 2024-11-21T09:21:18.388628Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437415:2467] (SourceId=12345678, PreferedPartition=(NULL)) Start idle 2024-11-21T09:21:18.388636Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:21:18.388806Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:18.388822Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [23:7439660222765437461:2467], now have 1 active actors on pipe 2024-11-21T09:21:18.388851Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2024-11-21T09:21:18.388904Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:18.388914Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:18.388940Z node 24 :PERSQUEUE INFO: new Cookie 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 generated for partition 0 topic 'rt3.dc1--topic1' owner 12345678 2024-11-21T09:21:18.388986Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T09:21:18.389011Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:18.389162Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:18.389177Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:18.389207Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:18.389338Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 2024-11-21T09:21:18.389834Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 grpc read done: success: 0 data: 2024-11-21T09:21:18.389841Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 grpc read failed 2024-11-21T09:21:18.389889Z node 23 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 2024-11-21T09:21:18.389901Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c3c6dbe3-efb40c3b-e10ea090-ce7d03ee_0 is DEAD Finish: 0 2024-11-21T09:21:18.389995Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Random seed for debugging is 1732180878389987 2024-11-21T09:21:18.389985Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:21:18.390125Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:18.390138Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7439660222765437461:2467] destroyed 2024-11-21T09:21:18.390149Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T09:21:18.394212Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for "init_response" 2024-11-21T09:21:18.394227Z node 23 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T09:21:18.394239Z node 23 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T09:21:18.394351Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "test-message-group-id" } 2024-11-21T09:21:18.394374Z node 23 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "test-message-group-id" from ipv6:[::1]:38256 2024-11-21T09:21:18.394383Z node 23 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38256 proto=v1 topic=topic1 durationSec=0 2024-11-21T09:21:18.394386Z node 23 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:21:18.394764Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T09:21:18.394805Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T09:21:18.394811Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:21:18.394813Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T09:21:18.394818Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T09:21:18.395275Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T09:21:18.396834Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T09:21:18.396937Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7439660222765437479:2477] connected; active server actors: 1 2024-11-21T09:21:18.396954Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=test-message-group-id 2024-11-21T09:21:18.396957Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Update the table 2024-11-21T09:21:18.397031Z node 23 :PERSQUEUE_READ_BALANCER INFO: 
[72075186224037893][rt3.dc1--topic1] pipe [23:7439660222765437479:2477] disconnected; active server actors: 1 2024-11-21T09:21:18.397040Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7439660222765437479:2477] disconnected no session 2024-11-21T09:21:18.399726Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T09:21:18.399738Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T09:21:18.399741Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439660222765437464:2477] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Start idle 2024-11-21T09:21:18.399749Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:21:18.399906Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:18.399922Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [23:7439660222765437489:2477], now have 1 active actors on pipe 2024-11-21T09:21:18.399953Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2024-11-21T09:21:18.400005Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:18.400019Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:18.400036Z node 24 :PERSQUEUE INFO: new Cookie test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0 generated for partition 0 topic 'rt3.dc1--topic1' owner test-message-group-id 2024-11-21T09:21:18.400062Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T09:21:18.400077Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:18.400186Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T09:21:18.400193Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T09:21:18.400217Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T09:21:18.400279Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0 Init response: status: SUCCESS init_response { session_id: "test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP } 2024-11-21T09:21:18.400637Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Session ID is "test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0" 2024-11-21T09:21:18.400839Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for session to die 2024-11-21T09:21:18.401545Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:21:18.401564Z node 23 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: bad write request - 'blocks_headers' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2) sessionId: test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0 2024-11-21T09:21:18.401670Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group-id|755604ba-c780bd8e-2d434a3b-4a052d65_0 is DEAD status: BAD_REQUEST issues { message: "bad write request - \'blocks_headers\' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2)" issue_code: 500003 severity: 1 } 2024-11-21T09:21:18.401762Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:21:18.401852Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:18.401865Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7439660222765437489:2477] destroyed 2024-11-21T09:21:18.401875Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> KqpJoin::ExclusionJoin [GOOD] >> KqpJoin::FullOuterJoin >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> KqpJoin::JoinDupColumnRight [GOOD] >> KqpJoin::JoinDupColumnRightPure >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightSemi_2 >> KqpIndexLookupJoin::RightSemi [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::IdxLookupSelf >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] Test command err: 2024-11-21T09:20:24.965244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659993436698538:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:24.965272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003855/r3tmp/tmpIxchZH/pdisk_1.dat 2024-11-21T09:20:24.995347Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:20:25.009506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10066, node 1 2024-11-21T09:20:25.023512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003855/r3tmp/yandexlApDQl.tmp 2024-11-21T09:20:25.023529Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003855/r3tmp/yandexlApDQl.tmp 2024-11-21T09:20:25.023582Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003855/r3tmp/yandexlApDQl.tmp 2024-11-21T09:20:25.023618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:20:25.028659Z INFO: TTestServer started on Port 19043 GrpcPort 10066 TClient is connected to server localhost:19043 PQClient connected to localhost:10066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:25.065545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:25.065574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:25.066599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:25.092114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:20:25.103232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:20:25.220097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659997731666582:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:25.220123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659997731666593:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:25.220128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:25.220702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439659997731666625:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:25.220718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:25.220792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T09:20:25.222344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439659997731666596:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T09:20:25.252315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:20:25.258831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:25.294830Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659997731666791:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:20:25.294929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDkxYTFlOTQtYjUxNzM5ZDQtNzdiZDcxZjAtNmVhZTQ3ZDU=, ActorId: [1:7439659997731666579:2304], ActorState: ExecuteState, TraceId: 01jd70bn41akbetm5s7r2a61y7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:20:25.295435Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:20:25.318552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439659997731666939:2596] 2024-11-21T09:20:29.965766Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659993436698538:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:29.965820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T09:20:30.429908Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T09:20:30.433700Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T09:20:30.434186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439660019206503714:2759], Recipient [1:7439659997731666209:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:30.434196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:20:30.434198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T09:20:30.434205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439660019206503710:2756], Recipient [1:7439659997731666209:2181]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T09:20:30.434208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T09:20:30.439904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:20:30.440024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:20:30.440104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T09:20:30.440127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T09:20:30.440137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T09:20:30.440147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T09:20:30.440159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [Owner ... eAvailableSize 2024-11-21T09:21:19.008200Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:21:19.008222Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439660213393021817:2741], Partition 4, Sender [0:0:0], Recipient [5:7439660217687989223:2756], Cookie: 0 2024-11-21T09:21:19.008223Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:21:19.008234Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439660217687989223:2756]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:21:19.008238Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:21:19.008239Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:21:19.008241Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:21:19.008244Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:21:19.008249Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:21:19.008263Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:21:19.008264Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:21:19.008268Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:21:19.010150Z :INFO: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session: close. Timeout 1.000000s 2024-11-21T09:21:19.010166Z :INFO: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session will now close 2024-11-21T09:21:19.010179Z :DEBUG: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session: aborting 2024-11-21T09:21:19.010387Z :INFO: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session: gracefully shut down, all writes complete >>>>> Session-0 Release() >>>>> Session-0 Closing reading session 2024-11-21T09:21:19.010412Z :INFO: [/Root] [/Root] [adf9edc5-90d0182f-d48d702c-6554c98b] Closing read session. 
Close timeout: 5.000000s 2024-11-21T09:21:19.010427Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:2:1:0:1 2024-11-21T09:21:19.010436Z :INFO: [/Root] [/Root] [adf9edc5-90d0182f-d48d702c-6554c98b] Counters: { Errors: 3 CurrentSessionLifetimeMs: 1006 BytesRead: 11 MessagesRead: 1 BytesReadCompressed: 31 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:21:19.010515Z :DEBUG: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T09:21:19.010538Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T09:21:19.010542Z :DEBUG: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session is aborting and will not restart 2024-11-21T09:21:19.010607Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0 grpc read done: success: 0 data: 2024-11-21T09:21:19.010619Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0 grpc read failed 2024-11-21T09:21:19.010626Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0 grpc closed 2024-11-21T09:21:19.010629Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0 is DEAD 2024-11-21T09:21:19.010656Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_11414876051923722340_v1 grpc read done: success# 0, data# { } 2024-11-21T09:21:19.010658Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_11414876051923722340_v1 grpc read failed >>>>> 2024-11-21T09:21:19.010663Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_11414876051923722340_v1 grpc closed 2024-11-21T09:21:19.010676Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_11414876051923722340_v1 is DEAD Session-0 Received TSessionClosedEvent message 2024-11-21T09:21:19.010671Z :INFO: [/Root] [/Root] [adf9edc5-90d0182f-d48d702c-6554c98b] Closing read session. Close timeout: 0.000000s SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2024-11-21T09:21:19.010715Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:2:1:0:1 2024-11-21T09:21:19.010725Z :INFO: [/Root] [/Root] [adf9edc5-90d0182f-d48d702c-6554c98b] Counters: { Errors: 3 CurrentSessionLifetimeMs: 1006 BytesRead: 11 MessagesRead: 1 BytesReadCompressed: 31 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T09:21:19.010757Z :NOTICE: [/Root] [/Root] [adf9edc5-90d0182f-d48d702c-6554c98b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T09:21:19.010849Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7439660221982956819:2835], Recipient [5:7439660221982956821:2835]: NActors::TEvents::TEvPoison 2024-11-21T09:21:19.010867Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037898 (partition=4) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:21:19.010929Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439660221982956850:3634], Recipient [5:7439660213393021817:2741]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010929Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439660221982956731:2811] disconnected; active server actors: 1 2024-11-21T09:21:19.010932Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439660221982956731:2811] client test-consumer disconnected session test-consumer_5_1_11414876051923722340_v1 2024-11-21T09:21:19.010938Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010940Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010945Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [5:7439660221982956849:2835] destroyed 2024-11-21T09:21:19.010948Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer family 2 status Active partitions [2] destroyed. 2024-11-21T09:21:19.010952Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439660221982956735:3590], Recipient [5:7439660204803086774:2652]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010952Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010953Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:19.010956Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_11414876051923722340_v1 2024-11-21T09:21:19.010960Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7439660221982956734:2814] destroyed 2024-11-21T09:21:19.010969Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7439660213393021817:2741], Partition 4, Sender [5:7439660213393021817:2741], Recipient [5:7439660217687989223:2756], Cookie: 0 2024-11-21T09:21:19.010972Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_11414876051923722340_v1 2024-11-21T09:21:19.010974Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7439660213393021817:2741], Recipient [5:7439660217687989223:2756]: NKikimr::TEvPQ::TEvPipeDisconnected 2024-11-21T09:21:19.010994Z :DEBUG: [/Root] SessionId [producer-1|a85e285f-1abf0864-376f7aa2-144efdf6_0] PartitionId [4] Generation [1] Write session: destroy 2024-11-21T09:21:19.010977Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2024-11-21T09:21:19.010982Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::DropOwner. 2024-11-21T09:21:19.010986Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 
2024-11-21T09:21:19.010989Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:21:19.010997Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:21:19.010999Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:21:19.011002Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T09:21:19.024091Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439660200508118620:2435], Partition 0, Sender [0:0:0], Recipient [5:7439660200508118678:2438], Cookie: 0 2024-11-21T09:21:19.024124Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439660200508118678:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:21:19.024128Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T09:21:19.024144Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T09:21:19.024167Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T09:21:19.024170Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T09:21:19.024175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T09:21:19.043563Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7439660179033281020:2141]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:21:19.043581Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T09:21:19.043592Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7439660179033281020:2141], Recipient [5:7439660179033281020:2141]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T09:21:19.043594Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.131397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.131420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.131425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.131431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.131444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.131448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.131466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.131553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.142214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.142230Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] 
sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.144196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.144299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.144325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.146628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.146689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.146767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.146947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.147538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.147787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.147796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.147807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.147813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.147819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.147852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.149068Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.164762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.164826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.164877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.164931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.164940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.165473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.165508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.165541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.165550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.165554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.165558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.165946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.165959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.165964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.166254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.166261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.166265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.166280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.166658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.166930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.166959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.167070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.167088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.167092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.167131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.167136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.167151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.167157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.167517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.167528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.167563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.167567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.167608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.167612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.167619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.167622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.167625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.167628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.167631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.167633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.167641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.167645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.167647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:21:18.538560Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.538567Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.538570Z node 155 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:18.538573Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T09:21:18.538575Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:18.538582Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:18.538613Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:18.538616Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:21:18.538622Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:18.538762Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:18.538766Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:18.538769Z node 155 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:18.538912Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T09:21:18.538928Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T09:21:18.539008Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539020Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 665719933034 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539024Z node 155 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 
281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539037Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539041Z node 155 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T09:21:18.539044Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:18.539049Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:18.539054Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:18.539057Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T09:21:18.539061Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:18.539063Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T09:21:18.539065Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T09:21:18.539070Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:18.539073Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T09:21:18.539075Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:21:18.539077Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:21:18.539357Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539375Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:21:18.539381Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:21:18.539464Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539507Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539659Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539668Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:18.539686Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:21:18.539700Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539702Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [155:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T09:21:18.539705Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [155:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T09:21:18.539787Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539793Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539797Z node 155 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:18.539799Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:21:18.539802Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:18.539856Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539861Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.539863Z node 155 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:18.539865Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:21:18.539867Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:18.539872Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T09:21:18.539875Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [155:121:2147] 2024-11-21T09:21:18.539903Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:18.539906Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:18.539911Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:18.540231Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T09:21:18.540454Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:18.540478Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T09:21:18.540487Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T09:21:18.540544Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:21:18.540865Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:18.540873Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:18.540942Z node 155 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:18.540960Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:18.540964Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [155:714:2675] TestWaitNotification: OK eventTxId 1003 >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> KqpJoin::FullOuterJoin [GOOD] >> KqpJoin::CrossJoinCount >> KqpFlipJoin::Right_2 [GOOD] >> KqpFlipJoin::Right_3 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpFlipJoin::Inner_2 [GOOD] >> KqpFlipJoin::Inner_3 >> KqpJoin::JoinDupColumnRightPure [GOOD] >> KqpJoin::JoinLeftPureFull >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpIndexLookupJoin::Left+StreamLookup >> KqpJoin::IdxLookupSelf [GOOD] >> KqpJoin::JoinAggregateSingleRow >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpJoin::CrossJoinCount [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup 
[GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpJoin::JoinLeftPureExclusion >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpFlipJoin::RightOnly_1 >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore >> KqpJoin::JoinAggregateSingleRow [GOOD] >> KqpJoin::JoinAggregate >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup >> KqpFlipJoin::Inner_3 [GOOD] >> KqpFlipJoin::LeftSemi_1 >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:05.927267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:05.927296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.927301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:05.927306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:05.927312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:05.927315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:05.927325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:05.927413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:05.938536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:05.938559Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:05.941472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:05.942269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:05.942348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:05.944297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:05.944540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:05.944648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.944750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:05.945894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.946185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.946195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.946237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:05.946244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.946251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:05.946265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.947798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:05.964829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:05.964930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.964997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:05.965040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:05.965048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.965875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.965911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:05.965961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.965971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:05.965976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:05.965981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:05.966620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T09:21:05.966636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:05.966642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:05.967127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.967138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.967145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.967151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.967747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:05.968192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:05.968266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:05.968480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:05.968507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:05.968516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.968569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:05.968575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:05.968604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.968616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:05.969069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:05.969077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:05.969168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:05.969174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:05.969254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:05.969262Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:05.969274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:05.969278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.969284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:05.969289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:05.969294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:05.969298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:05.969310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:05.969316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:05.969320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:05.969597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.969610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:05.969614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:05.969619Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:05.969624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:05.969636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.093747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T09:21:06.093754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T09:21:06.093766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T09:21:06.093774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:06.093780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T09:21:06.093792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 103 2024-11-21T09:21:06.093798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T09:21:06.093803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T09:21:06.093808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T09:21:06.093827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:06.094361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T09:21:06.094370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:496:2460] TestWaitNotification: OK eventTxId 103 2024-11-21T09:21:09.961828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:21:09.961855Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:11.265716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0081 2024-11-21T09:21:11.265744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0125 2024-11-21T09:21:11.296200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T09:21:11.296283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T09:21:11.296323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T09:21:11.296330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:11.296362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T09:21:11.296366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T09:21:11.296368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:11.306561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:13.748954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0036 2024-11-21T09:21:13.749003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0024 2024-11-21T09:21:13.789570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T09:21:13.789618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T09:21:13.789636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T09:21:13.789644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:13.789677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T09:21:13.789683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T09:21:13.789685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:13.799844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:16.197500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0024 2024-11-21T09:21:16.197521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0036 2024-11-21T09:21:16.227877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T09:21:16.227934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T09:21:16.227954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 
2024-11-21T09:21:16.227962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:16.227996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T09:21:16.228002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T09:21:16.228006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:16.238155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:18.645676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0012 2024-11-21T09:21:18.645719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2024-11-21T09:21:18.678480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T09:21:18.678547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T09:21:18.678570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T09:21:18.678579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:18.678622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T09:21:18.678628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T09:21:18.678631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:18.688812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:21.100843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T09:21:21.100907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:21.100986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:21.101044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 
WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:21.101275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:21.101503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:21.101516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:21.102610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:21.102646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:21.102652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2024-11-21T09:21:21.102660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup >> KqpJoin::JoinLeftPureExclusion [GOOD] >> KqpJoin::JoinLeftPureCross >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:08.918978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:08.918994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.918998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:08.919002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:08.919006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:08.919009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:08.919016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.919073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:08.927963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:08.927983Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:08.929826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:08.929922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:08.929968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:08.932317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:08.932395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:08.932481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.932661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:08.933337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.933625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.933636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.933649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:08.933656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.933662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:08.933707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:08.935046Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:08.948163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:08.948273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.948338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:08.948379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:08.948385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.948851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.948871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:08.948914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.948921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:08.948924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:08.948927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:08.949294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.949309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:08.949315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:08.949679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.949689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.949695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.949702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.950090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:08.950831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:08.950887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:08.951062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.951087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:08.951094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.951145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:08.951151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.951201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.951211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:08.951571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.951580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.951622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.951627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:08.951695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.951700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:08.951709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:08.951711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.951715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:08.951719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.951721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:08.951724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:08.951732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:08.951737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T09:21:08.951740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... terRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1003 at step: 5000004 2024-11-21T09:21:21.619881Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:21.619899Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 219043334250 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:21.619906Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:21:21.619958Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2024-11-21T09:21:21.619977Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T09:21:21.621120Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621130Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:21:21.621181Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621186Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:21:21.621318Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621326Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:21.621397Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T09:21:21.621402Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:21.621415Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T09:21:21.621426Z node 51 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME 
Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 226 } } 2024-11-21T09:21:21.621475Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:21.621488Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:21.621492Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:21.621496Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:21:21.621501Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:21.621511Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:21:21.621584Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:21.621590Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:21.621601Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:21.621605Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:21.621611Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:21.621621Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621624Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621628Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:21.621633Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T09:21:21.622352Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:21.622377Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 1003 2024-11-21T09:21:21.622388Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:21.622403Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:21.622408Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:21:21.622420Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:21:21.622425Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:21.622433Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:21:21.622438Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:21.622442Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:21.622446Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:21.622466Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:21:21.622943Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:21.622951Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:21.623003Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:21.623020Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:21.623024Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:444:2419] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:21.623085Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:21.623116Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 42us result status StatusSuccess 2024-11-21T09:21:21.623204Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 
TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoin::JoinAggregate [GOOD] >> KqpJoin::JoinConvert >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> KqpFlipJoin::LeftSemi_1 [GOOD] >> KqpFlipJoin::LeftSemi_2 >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> KqpJoin::ComplexJoin >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup >> KqpJoin::JoinLeftPureCross [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpJoin::JoinConvert [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpFlipJoin::RightOnly_3 >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore >> KqpJoin::ComplexJoin [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> 
KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 5657, MsgBus: 12105 2024-11-21T09:21:18.380406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660225368732127:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.380440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00443c/r3tmp/tmp7cepu9/pdisk_1.dat 2024-11-21T09:21:18.451988Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5657, node 1 2024-11-21T09:21:18.480901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.480936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.482034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:18.491280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.491294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.491303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.491336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12105 TClient is connected to server localhost:12105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.586789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.589267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:18.590617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:18.613708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.633666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:18.642440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.707770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225368733460:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.707793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.826263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.835054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.856148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225368733974:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225368733979:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.876636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660225368733981:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.071515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.076922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.094724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.100804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.108055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29711, MsgBus: 22217 2024-11-21T09:21:19.445954Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660226922540083:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.445970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00443c/r3tmp/tmpviLli4/pdisk_1.dat 2024-11-21T09:21:19.456234Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29711, node 2 2024-11-21T09:21:19.465894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.465911Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.465914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.465955Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22217 TClient is connected to server localhost:22217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.546337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.546375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.547448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.548580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.549747Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:19.560752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... 24-11-21T09:21:21.592919Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.599890Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.608712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.624842Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660236212946743:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.624865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.624867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660236212946748:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.625420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.627312Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660236212946750:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:21.803769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.810327Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.817331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.831638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.838072Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.845166Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30111, MsgBus: 9117 2024-11-21T09:21:22.178831Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660240086734996:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.178867Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00443c/r3tmp/tmpPmYlBf/pdisk_1.dat 2024-11-21T09:21:22.190049Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30111, node 5 2024-11-21T09:21:22.197725Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.197738Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.197740Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.197776Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9117 TClient is connected to server localhost:9117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.278909Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.278937Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.279966Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.281652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.287703Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.298061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.318151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.329297Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.480783Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240086736528:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.480821Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.482933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.489561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.495938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.503155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.509546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.517163Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.529260Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240086737039:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.529306Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240086737044:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.529306Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.529940Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.538072Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660240086737046:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.740905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.748850Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.764662Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.776944Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.783215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.790241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 29993, MsgBus: 16098 2024-11-21T09:21:18.380275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660223991734768:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.380478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00444c/r3tmp/tmp8WChyh/pdisk_1.dat 2024-11-21T09:21:18.454348Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29993, node 1 2024-11-21T09:21:18.482270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.482293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.483436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:18.491994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.492003Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.492004Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.492027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16098 TClient is connected to server localhost:16098 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.578569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.589972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.656336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.675228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.685412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.711610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223991736304:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.711631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.824451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.842156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223991736807:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223991736812:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660223991736814:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.065767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.072069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.079687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1227, MsgBus: 18466 2024-11-21T09:21:19.438025Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660226206671056:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.438268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00444c/r3tmp/tmpCFaY5f/pdisk_1.dat 2024-11-21T09:21:19.446948Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1227, node 2 2024-11-21T09:21:19.456187Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.456198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.456200Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.456243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18466 TClient is connected to server localhost:18466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.538103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.538133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.539203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.540346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:19.550394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.559062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.574934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.586553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.715938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660226206672584:2374], DatabaseId: /Root, PoolId: ... X_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.352772Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.357519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.381469Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.447874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:21.458977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.564964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237065145398:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.564997Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.571082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.577411Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.586323Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.593217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.599936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.607448Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.616895Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237065145896:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.616931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.616957Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237065145901:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.617536Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.620066Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660237065145903:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 8761, MsgBus: 27191 2024-11-21T09:21:22.162850Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660238982536278:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.162876Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00444c/r3tmp/tmp5TVYMH/pdisk_1.dat 2024-11-21T09:21:22.176059Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8761, node 5 2024-11-21T09:21:22.188154Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.188167Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.188170Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.188241Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27191 TClient is connected to server localhost:27191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:22.264450Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.264493Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.265764Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.266187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.267728Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.278136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.287427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:22.310153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.320401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.489148Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660238982537832:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.489168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.494907Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.501289Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.514922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.525031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.541002Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.552697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.568102Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660238982538332:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.568136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.568142Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660238982538337:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.568757Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.572526Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660238982538339:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 22933, MsgBus: 25763 2024-11-21T09:21:18.585733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660223164719391:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.585748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004426/r3tmp/tmpkPoNqR/pdisk_1.dat 2024-11-21T09:21:18.648324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22933, node 1 2024-11-21T09:21:18.657244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.657254Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.657256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.657284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25763 TClient is connected to server localhost:25763 2024-11-21T09:21:18.691627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.691665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:18.693567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.709625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.715515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:18.724166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:18.785473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.801106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.809278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.853607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223164720941:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.853630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.885519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.891050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.897498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.904758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.911914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.918728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.927253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223164721434:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.927277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.927281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223164721439:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.927764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.932276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660223164721441:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.120105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.125489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.135911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31919, MsgBus: 14604 2024-11-21T09:21:19.460839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660227493918232:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.460923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004426/r3tmp/tmpIL1HTU/pdisk_1.dat 2024-11-21T09:21:19.470009Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31919, node 2 2024-11-21T09:21:19.487167Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.487177Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.487179Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.487215Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14604 TClient is connected to server localhost:14604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.562040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.562063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.562933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.563518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:19.564693Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:19.572807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.580576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.601233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.611346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propo ... don't have access permissions } 2024-11-21T09:21:21.596399Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.601408Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.607194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.613908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.621490Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.627830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.635354Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.643750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238537870705:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.643759Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238537870710:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.643771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.644396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.648817Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660238537870712:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:21.812012Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.818080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.831168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22789, MsgBus: 63031 2024-11-21T09:21:22.174612Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660242824034306:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.174930Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004426/r3tmp/tmp2HQN1R/pdisk_1.dat 2024-11-21T09:21:22.188370Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22789, node 5 2024-11-21T09:21:22.196617Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.196628Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.196629Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.196669Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63031 TClient is connected to server localhost:63031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.274986Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.275022Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:21:22.277858Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.278244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.279345Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:22.287973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:22.345050Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.364190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.374985Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.477823Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242824035829:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.477848Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.482871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.488140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.496438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.502742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.510163Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.517942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.532760Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242824036330:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.532791Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.532795Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242824036335:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.533396Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.538359Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660242824036337:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.697603Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.704353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.713614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.768505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 15996, MsgBus: 13555 2024-11-21T09:21:18.460898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660221969430669:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.461073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004442/r3tmp/tmperoA4W/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15996, node 1 2024-11-21T09:21:18.530917Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:18.536915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.536925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.536927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.536952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13555 2024-11-21T09:21:18.561483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.561506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.564051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13555 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.606425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.614841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.633522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.657007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.671278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.747467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221969432205:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.747486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.856191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221969432718:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221969432723:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660221969432725:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.074077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.080819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31089, MsgBus: 3690 2024-11-21T09:21:19.335361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660228726163742:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.335636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004442/r3tmp/tmptip6Ln/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31089, node 2 2024-11-21T09:21:19.349520Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:19.350288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.350299Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.350301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.350339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3690 TClient is connected to server localhost:3690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.435815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.435845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.436967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.437583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:19.446915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.454177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.470707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.480804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.653922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660228726165283:2374], DatabaseId: /Root, PoolId: d ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.455375Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237999295576:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.455398Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.461434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.468630Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.481207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.535568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.544520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.550845Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.559735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237999296070:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.559758Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.559765Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660237999296075:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.560295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.565305Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660237999296077:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 8894, MsgBus: 22709 2024-11-21T09:21:22.431091Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660240423129569:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.431286Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004442/r3tmp/tmpVDEk2z/pdisk_1.dat 2024-11-21T09:21:22.441500Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8894, node 5 2024-11-21T09:21:22.447082Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.447092Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.447094Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.447123Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22709 TClient is connected to server localhost:22709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.531404Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.531432Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.532481Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.533702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.536586Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:22.545679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.553721Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:22.571011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.581437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.722390Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240423131114:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.722424Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.727888Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.736001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.748775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.762681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.776870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.790472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.799416Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240423131618:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.799443Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.799554Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240423131623:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.800288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.803100Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660240423131625:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.965568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.971356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.979119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.986438Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.994066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::LeftSemi_3 [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] |96.3%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 23475, MsgBus: 4939 2024-11-21T09:21:18.444800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660225231261560:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.444941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004438/r3tmp/tmpGkqP8w/pdisk_1.dat 2024-11-21T09:21:18.499897Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23475, node 1 2024-11-21T09:21:18.516388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.516406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.516408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.516447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4939 2024-11-21T09:21:18.544068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.544092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.545218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4939 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.582701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:18.591292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.607808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.669903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.684147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.754771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225231262953:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.754796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.842165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225231263457:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225231263462:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660225231263464:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.073722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.080187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.101013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.107496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30553, MsgBus: 30102 2024-11-21T09:21:19.531115Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660229302601015:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.531134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004438/r3tmp/tmpsRkZUB/pdisk_1.dat 2024-11-21T09:21:19.545266Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30553, node 2 2024-11-21T09:21:19.550316Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.550329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.550332Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.550373Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30102 TClient is connected to server localhost:30102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.631580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.631605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting...2024-11-21T09:21:19.632693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.633862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.636537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:19.645097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.657558Z node 2 :FLAT_T ... 11-21T09:21:21.908582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.915217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.930953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.945140Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238017965604:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.945157Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238017965609:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.945161Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.946021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.949254Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660238017965611:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.123607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.133717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.146397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.153198Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.160159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.167275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14176, MsgBus: 14251 2024-11-21T09:21:22.498942Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660240220845055:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004438/r3tmp/tmpvac514/pdisk_1.dat 2024-11-21T09:21:22.504678Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 14176, node 5 2024-11-21T09:21:22.517710Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:22.517905Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.517916Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.517918Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.517951Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14251 TClient is connected to server localhost:14251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.598614Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.598645Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.599731Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.600872Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.603764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.616128Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.634142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.644459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.828598Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240220846459:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.828628Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.834393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.840697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.854230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.867140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.873988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.881400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.889781Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240220846960:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.889809Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.889827Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240220846965:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.890504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.894413Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660240220846967:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.088521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.094623Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.105317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.112107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.118876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.126007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3125, MsgBus: 2866 2024-11-21T09:21:18.438990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660224990902954:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.439158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004443/r3tmp/tmpcbrpt4/pdisk_1.dat 2024-11-21T09:21:18.509994Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3125, node 1 2024-11-21T09:21:18.532234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.532248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.532249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.532283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.540355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.540380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.541413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2866 TClient is connected to server localhost:2866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.587254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.592593Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:18.605477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.623848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.642329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.653724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.745466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224990904496:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.745508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.824651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.895717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.904855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.912850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224990905011:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.912873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.912873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224990905016:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.913324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.918414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660224990905018:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.097836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.103350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.115132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.121678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.129037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.135823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26390, MsgBus: 20328 2024-11-21T09:21:19.515750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660228156427206:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.515994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004443/r3tmp/tmp2kCh10/pdisk_1.dat 2024-11-21T09:21:19.525046Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26390, node 2 2024-11-21T09:21:19.534455Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.534473Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.534474Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.534518Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20328 TClient is connected to server localhost:20328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.616063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.616091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.617170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.617808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.620436Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:19.629526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... uboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.669066Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238752064436:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.669092Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.673498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.679025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.691156Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.705198Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.712599Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.726807Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.741565Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238752064936:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.741571Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660238752064941:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.741584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.742165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.746693Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660238752064943:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:21.930011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.935676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.943009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.950207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.956964Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.964156Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13719, MsgBus: 7237 2024-11-21T09:21:22.274853Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660243085826465:2211];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.274892Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004443/r3tmp/tmp3lVaR5/pdisk_1.dat 2024-11-21T09:21:22.287438Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13719, node 5 2024-11-21T09:21:22.299865Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.299879Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.299882Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.299924Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7237 TClient is connected to server localhost:7237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.374704Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.374737Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.375788Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:22.377552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.387965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:22.396690Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.417362Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.430068Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.575208Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660243085827832:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.575236Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.580750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.587118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.601249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.608257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.615369Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.622274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.638490Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660243085828344:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.638531Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.638551Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660243085828349:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.639426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.642200Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660243085828351:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore >> KqpJoin::TwoJoinsWithQueryService >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> KqpJoin::TwoJoinsWithQueryService [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore [GOOD] >> BasicStatistics::TwoServerlessDbs [GOOD] >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore |96.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:09.231070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:09.231086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.231090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:09.231095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:09.231098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:09.231101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:09.231107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.231166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:09.239625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:09.239643Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:09.241903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:09.242662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:09.242707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:09.243940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:09.244119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:09.244226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.244307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:09.245332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.245585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.245594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.245631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:09.245638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.245644Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:09.245657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.246669Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:21:09.258279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.258348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.258403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:09.258436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:09.258441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.258909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.258930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:09.258960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.258967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:09.258969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:09.258972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:09.259293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.259302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:09.259305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:09.259647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.259675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.259680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.259685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.260282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:09.260700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:09.260744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:09.260871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.260903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.260910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.260958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:09.260962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.260979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.260987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:09.261346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.261353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.261402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.261408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:09.261478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.261485Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:09.261496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:09.261501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.261507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:09.261512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.261517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:09.261521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2024-11-21T09:21:09.261530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:09.261534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:09.261537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:09.261789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.261802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:09.261807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:09.261811Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:09.261815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.261829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... nResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60030000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 46 } } 2024-11-21T09:21:24.327558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T09:21:24.327588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.327594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2024-11-21T09:21:24.328091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T09:21:24.328191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:24.328616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 
2024-11-21T09:21:24.328650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T09:21:24.328801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:21:24.328938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T09:21:24.328968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T09:21:24.330318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:24.330339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:21:24.330402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:24.330409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T09:21:24.330549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.330560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:24.330668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:24.330680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:21:24.330684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:21:24.330689Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T09:21:24.330694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:21:24.330708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T09:21:24.331021Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 254 } } 2024-11-21T09:21:24.331033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:24.331051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 254 } } 2024-11-21T09:21:24.331065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 254 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:24.331469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:24.331478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:24.331488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:24.331492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:24.331496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T09:21:24.331503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:24.331505Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.331508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:24.331511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T09:21:24.332117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:21:24.332141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.332166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.332203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T09:21:24.332222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T09:21:24.332231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:21:24.332234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:24.332239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T09:21:24.332251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T09:21:24.332259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:21:24.332264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:21:24.332266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:21:24.332279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T09:21:24.332654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:21:24.332664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:607:2564] TestWaitNotification: OK eventTxId 102 2024-11-21T09:21:24.332739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:24.332748Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2024-11-21T09:21:24.333108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:24.333124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:24.333132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.039500Z, at schemeshard: 72057594046678944 2024-11-21T09:21:24.333139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 20774, MsgBus: 7441 2024-11-21T09:21:18.390213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660223994235856:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.390436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004434/r3tmp/tmpv20dWg/pdisk_1.dat 2024-11-21T09:21:18.452497Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20774, node 1 2024-11-21T09:21:18.491714Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.491728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.491730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.491756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.491802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.491816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.492951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7441 TClient is connected to server localhost:7441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.580669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.584516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:18.596841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.613536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.633273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.645130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.718187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223994237402:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.718207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223994237915:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223994237920:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660223994237922:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.073695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.079479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17971, MsgBus: 30686 2024-11-21T09:21:19.474550Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660228305260153:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.474750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004434/r3tmp/tmpS4ItOr/pdisk_1.dat 2024-11-21T09:21:19.496303Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17971, node 2 2024-11-21T09:21:19.502642Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.502656Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.502659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.502695Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30686 TClient is connected to server localhost:30686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:19.574703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.574728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.575839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.577465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.588878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.598223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.616759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19 ... :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.546776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.553643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.566028Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.573211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.580349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.587025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.595754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240793105687:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.595776Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660240793105692:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.595781Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.596290Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.600462Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660240793105694:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.806860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.814294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.827694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.840315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23744, MsgBus: 23010 2024-11-21T09:21:23.115003Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660244561153544:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:23.115190Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004434/r3tmp/tmpvHJBmu/pdisk_1.dat 2024-11-21T09:21:23.124301Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23744, node 6 2024-11-21T09:21:23.134309Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.134322Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.134324Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.134356Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23010 TClient is connected to server localhost:23010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:23.215538Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:23.215572Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:23.216669Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:23.217322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.221456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.232166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.251804Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.262042Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.433141Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660244561155091:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.433163Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.438797Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.445648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.455563Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.469765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.476137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.482985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.491358Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660244561155591:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.491393Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.491402Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660244561155596:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.492292Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.496473Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660244561155598:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.678167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.685209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.693719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.707685Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:45.923976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:45.923995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:45.924000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:45.924005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:45.924017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:45.924021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:45.924030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:45.924089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:45.934636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:45.934653Z node 1 :IMPORT WARN: Table profiles were not 
loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:45.936683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:45.936772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:45.936795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:45.938910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:45.938955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:45.939044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.939161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:45.939674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.939864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:45.939872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.939882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:45.939888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:45.939893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:45.939927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:45.941093Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:45.957070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:45.957131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.957170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:45.957208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:45.957216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.957759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.957779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:45.957810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.957821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:45.957825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:45.957829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:45.958184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.958193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:45.958197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:45.958488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.958497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.958502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.958507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.959050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:45.959430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:45.959471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:45.959623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:45.959644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:45.959650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.959697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:45.959703Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:45.959724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:45.959733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:45.960018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:45.960024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:45.960045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:45.960049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:45.960090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:45.960094Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:45.960101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:45.960103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.960106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:45.960109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:45.960111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:45.960113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:45.960120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:45.960123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:45.960125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ablet: 72075186233409546, partId: 2 2024-11-21T09:21:24.965238Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:24.965242Z node 148 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:24.965246Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:24.965254Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:24.965257Z node 148 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:21:24.965260Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:24.965265Z node 148 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T09:21:24.965331Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965338Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965340Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:24.965342Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:21:24.965345Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:21:24.965395Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965401Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965403Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:24.965405Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T09:21:24.965408Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T09:21:24.965457Z node 148 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965463Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.965465Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:24.965467Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T09:21:24.965469Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T09:21:24.965986Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.966002Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.966005Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:21:24.966008Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T09:21:24.966011Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T09:21:24.966023Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T09:21:24.966572Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:21:24.967123Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.967146Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:21:24.967204Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T09:21:24.967210Z node 148 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T09:21:24.967223Z node 148 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T09:21:24.967227Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T09:21:24.967233Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T09:21:24.967247Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [148:465:2430] message: TxId: 1003 2024-11-21T09:21:24.967253Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T09:21:24.967260Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
1003:0 2024-11-21T09:21:24.967264Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:24.967276Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:24.967279Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:21:24.967282Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:21:24.967284Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:21:24.967286Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:21:24.967288Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:21:24.967300Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:21:24.967303Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:21:24.967305Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:21:24.967311Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T09:21:24.967374Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.967399Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.967429Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.967751Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:24.968426Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:24.968440Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [148:665:2587] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:24.968547Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:21:24.968617Z node 148 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 78us result status StatusSuccess 2024-11-21T09:21:24.968730Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: 
"streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 15664, MsgBus: 2502 2024-11-21T09:21:19.115422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660226619980630:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.115554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004423/r3tmp/tmpndOR0R/pdisk_1.dat 2024-11-21T09:21:19.179122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15664, node 1 2024-11-21T09:21:19.189146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.189156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.189158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.189184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2502 2024-11-21T09:21:19.217104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.217141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.218241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.235034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.245840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.259302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.275984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.285308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.410804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226619982172:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.410830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.442202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.448222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.458505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.466208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.479170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.485932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.494819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226619982686:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.494841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226619982691:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.494847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.495400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:19.499615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660226619982693:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.702534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.711451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21817, MsgBus: 18517 2024-11-21T09:21:19.991798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660227512399510:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.991947Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004423/r3tmp/tmprGP2au/pdisk_1.dat 2024-11-21T09:21:20.002692Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21817, node 2 2024-11-21T09:21:20.013246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:20.013261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:20.013264Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:20.013302Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18517 TClient is connected to server localhost:18517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:20.092065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:20.092089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:20.093170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:20.094982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.100867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:20.110675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.126823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.138713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.267379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660231807368336:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.267401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Se ... f is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.852760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.863070Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.020635Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660244196967619:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.020665Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.026630Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.033404Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.042427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.048919Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.056399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.063596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.080280Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660244196968123:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.080306Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.080312Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660244196968128:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.081013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.083522Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660244196968130:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.259821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.274901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25611, MsgBus: 25503 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004423/r3tmp/tmpc0ICWM/pdisk_1.dat 2024-11-21T09:21:23.637422Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:21:23.637924Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25611, node 6 2024-11-21T09:21:23.648163Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.648178Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.648180Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.648237Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25503 TClient is connected to server localhost:25503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:23.726265Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:23.726299Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:23.727414Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:23.729200Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.732861Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:23.749290Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:23.758487Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.782444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.794213Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.930206Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660246828577157:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.930235Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.936454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.944277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.953194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.966770Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.981427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.994509Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.003309Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660251123544964:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.003351Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.003363Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660251123544969:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.004021Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:24.007482Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660251123544971:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:24.218031Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.244647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6604, MsgBus: 32595 2024-11-21T09:21:18.432756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660224454814542:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.433002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00442d/r3tmp/tmpEeyDh5/pdisk_1.dat 2024-11-21T09:21:18.511662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6604, node 1 2024-11-21T09:21:18.534293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.534310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.534311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.534349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.534570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.534589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.536655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32595 TClient is connected to server localhost:32595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.589899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:18.603720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.668563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.687487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.698614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.744459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224454816071:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.744505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.842134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.863653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.878324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224454816575:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.878346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224454816580:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.878348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.878846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.883084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660224454816582:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.073604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.079811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.101034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.108562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20152, MsgBus: 22128 2024-11-21T09:21:19.516433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660228804695906:2058];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00442d/r3tmp/tmp0AGE6c/pdisk_1.dat 2024-11-21T09:21:19.519297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:19.529888Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20152, node 2 2024-11-21T09:21:19.534811Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.534823Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.534824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.534847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22128 TClient is connected to server localhost:22128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.616614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.616643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.617685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.619328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.621760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.633995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at scheme ... rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.401397Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.536318Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242601190002:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.536348Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.542499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.549461Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.559097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.566628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.573226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.580516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.588719Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242601190509:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.588741Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242601190514:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.588744Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.589354Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.593402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660242601190516:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.766879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.778120Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16570, MsgBus: 15779 2024-11-21T09:21:23.141811Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660245095813173:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:23.141953Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00442d/r3tmp/tmpw6TNjH/pdisk_1.dat 2024-11-21T09:21:23.151456Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16570, node 6 2024-11-21T09:21:23.162308Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.162340Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.162344Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.162394Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15779 TClient is connected to server localhost:15779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:23.244221Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:23.244255Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:23.244946Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.245363Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2024-11-21T09:21:23.247090Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:23.249142Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.260999Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:23.284900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.294933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.488856Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660245095814734:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.488917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.493987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.500433Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.511323Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.518076Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.525572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.540594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.556648Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660245095815235:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.556670Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.556720Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660245095815240:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.557371Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.565761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660245095815242:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.766440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.773044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13190, MsgBus: 22574 2024-11-21T09:21:20.317694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660230718888169:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:20.317710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00440f/r3tmp/tmpUaJrm3/pdisk_1.dat 2024-11-21T09:21:20.362082Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13190, node 1 2024-11-21T09:21:20.375329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:20.375340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:20.375343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:20.375369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22574 TClient is connected to server localhost:22574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:20.418706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:20.418746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:20.419848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:20.441596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:20.449445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.510816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.533763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.543762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.629695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660230718889720:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.629773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.635793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.641476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.647982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.654786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.661993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.669031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.680694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660230718890213:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.680715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.680810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660230718890218:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.681464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:20.689937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660230718890220:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:20.889308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.896353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.907572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.921160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.927980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.934615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9885, MsgBus: 20803 2024-11-21T09:21:21.208151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660236723357322:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:21.208547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00440f/r3tmp/tmpJFsFPG/pdisk_1.dat 2024-11-21T09:21:21.219169Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9885, node 2 2024-11-21T09:21:21.240369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:21.240383Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:21.240385Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:21.240433Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20803 TClient is connected to server localhost:20803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:21.308796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:21.308825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:21.309815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:21.310433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.314830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.323748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at scheme ... 11-21T09:21:23.336281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.342826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.350259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.358956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660245513586179:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.358974Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660245513586184:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.358982Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.359582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.363213Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660245513586186:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.544581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.550627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.560004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.567272Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.574335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.580909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61751, MsgBus: 17525 2024-11-21T09:21:23.920621Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660243638355071:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:23.920783Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00440f/r3tmp/tmpDXUbjz/pdisk_1.dat 2024-11-21T09:21:23.934424Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61751, node 5 2024-11-21T09:21:23.948429Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.948446Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.948448Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.948504Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17525 TClient is connected to server localhost:17525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:24.025341Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:24.025373Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:24.025644Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.026787Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:24.036515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.046114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.065637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.081441Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.232820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660247933323917:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.232848Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.238910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.246065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.260878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.274692Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.288699Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.302831Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.318158Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660247933324421:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.318185Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.318186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660247933324426:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:24.318772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:24.322485Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660247933324428:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:24.528095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.533778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.539830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.547052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.554446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.560926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 25795, MsgBus: 23940 2024-11-21T09:21:18.409319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660222683342464:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.409527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004449/r3tmp/tmpQRkhu6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25795, node 1 2024-11-21T09:21:18.473950Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:18.491106Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.491118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.491120Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.491145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.510278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.510310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.511370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23940 TClient is connected to server localhost:23940 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.591642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.602183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.617410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:18.638459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.653514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.717675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660222683344002:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.717703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660222683344515:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660222683344520:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660222683344522:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.070237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.076597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Key","Value"],"E-Rows":"4","Table":"FJ_Table_1","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1732180879172,"Host":"ghrun-qcxhsi27zq","ResultRows":2,"ResultBytes":26,"OutputRows":2,"StartTimeMs":1732180879172,"IngressRows":2,"ComputeTimeUs":27,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":0,"Bytes":26}],"WaitInputTimeUs":330,"TaskId":1,"OutputBytes":26}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":3246}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":26,"Max":26,"Min":26}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":342,"Max":342,"Min":342},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732180879169,"WaitInputTimeUs":{"Count":1,"Sum":330,"Max":330,"Min":330},"OutputBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"CpuTimeUs":{"Count":1,"Sum":2262,"Max":2262,"Min":2262},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMes
sageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":318,"Max":318,"Min":318},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":26,"FinishTimeMs":1732180879178,"Host":"ghrun-qcxhsi27zq","OutputRows":2,"StartTimeMs":1732180879172,"InputRows":2,"ComputeTimeUs":19,"InputChannels":[{"WaitTimeUs":3178,"ChannelId":1,"Rows":2,"SrcStageId":0,"Bytes":26}],"NodeId":1,"OutputChannels":[{"ChannelId":2,"Rows":2,"DstStageId":2,"Bytes":42}],"WaitInputTimeUs":4311,"TaskId":2,"OutputBytes":42}],"PeakMemoryUsageBytes":196608,"DurationUs":6000,"CpuTimeUs":482}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42}},"Name":"6 ... e pool default not found or you don't have access permissions } 2024-11-21T09:21:22.551976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.559047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.565864Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.573231Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.581048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.595036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.603117Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242677297555:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.603141Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660242677297560:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.603143Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.603790Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:22.607186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660242677297562:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:22.815508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.824015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.832510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.839196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32249, MsgBus: 9464 2024-11-21T09:21:23.129771Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660247032428671:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:23.129808Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004449/r3tmp/tmpR7F3U8/pdisk_1.dat 2024-11-21T09:21:23.139093Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32249, node 6 2024-11-21T09:21:23.150063Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.150077Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.150078Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.150113Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9464 TClient is connected to server localhost:9464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:23.229891Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:23.229919Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:23.231032Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:23.232247Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.233858Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:23.241139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.257426Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.281100Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.292878Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.467467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660247032430212:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.467497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.473063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.479607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.490251Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.497359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.503993Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.511331Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.520582Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660247032430713:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.520608Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.521599Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660247032430718:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.522236Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.524035Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660247032430720:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.730714Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.741531Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.750050Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.763334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2024-11-21T09:18:39.811882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:39.811926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:39.811935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003804/r3tmp/tmp4Zes1N/pdisk_1.dat 2024-11-21T09:18:39.895537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24439, node 1 2024-11-21T09:18:40.003959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:40.003975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:40.003978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:40.004038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:40.009037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:40.085134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:40.085155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:40.096104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10678 2024-11-21T09:18:40.495248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:41.210124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:41.210149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:41.241991Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:41.242549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:41.283090Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:41.288382Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:41.288399Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:41.292400Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:41.292479Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:41.292500Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:41.292504Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:41.292507Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:41.292510Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:41.292513Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:41.292517Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:41.292569Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:41.462543Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:41.462567Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:41.463357Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T09:18:41.464666Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T09:18:41.465721Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T09:18:41.466226Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T09:18:41.469048Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:41.469058Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:41.469065Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T09:18:41.470142Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:41.470159Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:41.471783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:41.473427Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:41.473448Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:41.475228Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:41.486474Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:41.507616Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:41.617259Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:41.781297Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:18:42.483622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:18:42.988765Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:43.079822Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T09:18:43.079841Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:18:43.079850Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T09:18:43.079978Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2903] 2024-11-21T09:18:43.080005Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2903], schemeshard id = 72075186224037899 2024-11-21T09:18:43.767746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:18:44.153059Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:44.316729Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T09:18:44.316747Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T09:18:44.316757Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2969:3102], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T09:18:44.316919Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2971:3104] 2024-11-21T09:18:44.316982Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2971:3104], schemeshard id = 72075186224037905 2024-11-21T09:18:45.103417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3100:3358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.103453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.106015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T09:18:45.156441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3252:3394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.156503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:45.156899Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3257:3398]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:18:45.156954Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:18:45.157000Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T09:18:45.157005Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3260:3401] 2024-11-21T09:18:45.157012Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3260:3401] 2024-11-21T09:18:45.157145Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3261:3237] 2024-11-21T09:18:45.157193Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3260:3401], server id = [2:3261:3237], tablet id = 72075186224037897, status = OK 2024-11-21T09:18:45.157235Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3261:3237], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:18:45.157249Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T09:18:45.157306Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:18:45.157312Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3257:3398], StatRequests.size() = 1 2024-11-21T09:18:45.159130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3265:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... -11-21T09:21:18.348491Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:8840:6336], StatRequests.size() = 1 2024-11-21T09:21:18.713578Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:18.713610Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:18.713620Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T09:21:18.713626Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:18.713760Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T09:21:18.716591Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:18.717743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8865:6356], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.717762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8875:6361], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.717773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.720913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T09:21:18.734662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8879:6364], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T09:21:19.020367Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9004:6432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:19.020436Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T09:21:19.020485Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T09:21:19.020492Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:21:19.020533Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:19.020548Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9004:6432], StatRequests.size() = 1 2024-11-21T09:21:19.035437Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjgyYzQzYWYtMWM0Yzc3MTMtYjMwYzAwMDYtOWYxYjkwOTk=, TxId: 2024-11-21T09:21:19.035462Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjgyYzQzYWYtMWM0Yzc3MTMtYjMwYzAwMDYtOWYxYjkwOTk=, TxId: 2024-11-21T09:21:19.035584Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:19.047084Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:19.047108Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:19.120633Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:21:19.120663Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:21:19.172689Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3605:3351], schemeshard count = 1 2024-11-21T09:21:19.510903Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T09:21:19.510930Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 193.000000s, at schemeshard: 72075186224037899 2024-11-21T09:21:19.511012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T09:21:19.522968Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:21:19.718808Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9054:6460]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:19.718941Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T09:21:19.718949Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9054:6460], StatRequests.size() = 1 2024-11-21T09:21:21.089930Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9101:6490]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:21.090040Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T09:21:21.090050Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9101:6490], StatRequests.size() = 1 2024-11-21T09:21:21.546868Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:21:21.568247Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2024-11-21T09:21:21.568278Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:21.568289Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T09:21:21.568295Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:21:21.568392Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T09:21:21.569005Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:21.574177Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTVkYTMwZWUtZGJlMjMyNGMtMzYzYWIzZjAtZjllMzQzZGY=, TxId: 2024-11-21T09:21:21.574198Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTVkYTMwZWUtZGJlMjMyNGMtMzYzYWIzZjAtZjllMzQzZGY=, TxId: 2024-11-21T09:21:21.574304Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:21.586497Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:21:21.586524Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:21.716688Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2024-11-21T09:21:21.716721Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 207.000000s, at schemeshard: 72075186224037905 2024-11-21T09:21:21.717100Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2024-11-21T09:21:21.729404Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:21:22.624423Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9183:6544]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:22.624554Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T09:21:22.624563Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9183:6544], StatRequests.size() = 1 2024-11-21T09:21:24.228540Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9237:6576]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:24.228634Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T09:21:24.228643Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9237:6576], StatRequests.size() = 1 2024-11-21T09:21:24.665556Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2024-11-21T09:21:24.665760Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:21:24.665882Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:24.687106Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:24.687137Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:21:24.687148Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2024-11-21T09:21:24.687153Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T09:21:24.687267Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T09:21:24.687983Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:24.693000Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWEwMTM2MWEtY2Y4NmNmYmYtYmFmY2U3Yy0xN2Q3OWQwOA==, TxId: 2024-11-21T09:21:24.693032Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWEwMTM2MWEtY2Y4NmNmYmYtYmFmY2U3Yy0xN2Q3OWQwOA==, TxId: 2024-11-21T09:21:24.693208Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:24.705333Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T09:21:24.705359Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:25.671711Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9312:6624]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:25.671834Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T09:21:25.671843Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9312:6624], StatRequests.size() = 1 2024-11-21T09:21:25.671998Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9314:6626]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:25.673029Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T09:21:25.673048Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9314:6626], StatRequests.size() = 1 >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24181, MsgBus: 18621 2024-11-21T09:21:20.972992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660233836076994:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:20.973175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004405/r3tmp/tmpVJpS3D/pdisk_1.dat 2024-11-21T09:21:21.031488Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 24181, node 1 2024-11-21T09:21:21.042048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:21.042060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:21.042061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:21.042092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18621 TClient is connected to server localhost:18621 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:21.074026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:21.074068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:21.075055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:21.083730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.095116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.160919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.176665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.188035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.277484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660238131045831:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.277515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.309914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.322550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.334423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.341651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.348352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.364030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.384849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660238131046345:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.384874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.385021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660238131046350:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.385736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.389475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660238131046352:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:21.589891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.595570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.607203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.614068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.620938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.627845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8395, MsgBus: 24729 2024-11-21T09:21:21.852497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660236906764156:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:21.852718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004405/r3tmp/tmpWPJLcg/pdisk_1.dat 2024-11-21T09:21:21.863066Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8395, node 2 2024-11-21T09:21:21.871452Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:21.871465Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:21.871468Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:21.871504Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24729 TClient is connected to server localhost:24729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:21.953061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:21.953088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:21.954183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:21.954876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.956122Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:21.960660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.972468Z node 2 :FLAT ... opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.002480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.009339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.025187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660252457973756:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.025227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.025369Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660252457973761:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.025994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.029834Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660252457973763:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:25.212451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.218489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.226168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.232951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.240306Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.246514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24509, MsgBus: 24578 2024-11-21T09:21:25.501522Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660254845882629:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:25.501695Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004405/r3tmp/tmpumxLBZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24509, node 6 2024-11-21T09:21:25.516766Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:25.519045Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:25.519058Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:25.519060Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:25.519095Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24578 TClient is connected to server localhost:24578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:25.601600Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:25.601631Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:25.602713Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:25.604452Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.606148Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:25.616559Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.626248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.643895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.654065Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.820843Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660254845884171:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.820874Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.827034Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.833924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.842160Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.848974Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.856132Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.870376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.880700Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660254845884671:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.880723Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.880788Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660254845884676:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.881418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.883506Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660254845884678:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:26.112178Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.118753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.134257Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.193820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.204063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.216536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> KqpJoin::IdxLookupLeftPredicate >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::MultiJoins >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore >> KqpJoin::RightSemiJoin_SimpleKey >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore >> TSequenceReboots::CreateSequence [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for 
TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:14.626441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:14.626485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:14.626495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:14.626502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:14.626533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:14.626538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:14.626547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:14.626665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:14.672757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:14.672779Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:14.675249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:14.675363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:14.675401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:14.690164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:14.690261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:14.699188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.711148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:14.717902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:14.734589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:14.734596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:14.734672Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:14.736770Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:14.768438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:14.777646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.777769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:14.777840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:14.777852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:14.778772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:14.778793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:14.778798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:14.779458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:14.779910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.779938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2024-11-21T09:21:14.780530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:14.781009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:14.785597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:14.785934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.785981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:14.785992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.786080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:14.786089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.786127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:14.786142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:14.786825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:14.786886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:14.786977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:14.786997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:14.787002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:14.787007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2024-11-21T09:21:14.787012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:14.787017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:14.787021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:14.787037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:14.787043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:14.787048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 301545 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:27.526312Z node 52 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#1002:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2024-11-21T09:21:27.526363Z node 52 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T09:21:27.526395Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:27.526411Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:27.526424Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:21:27.526430Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 2024-11-21T09:21:27.526855Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:27.526866Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000003 first txId#1002 countTxs#1 2024-11-21T09:21:27.526873Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000003 2024-11-21T09:21:27.526882Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1002:0 2024-11-21T09:21:27.526918Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [52:121:2147], Recipient [52:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:21:27.526923Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:21:27.526953Z node 52 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:27.526958Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:27.527004Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:21:27.527027Z node 52 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:27.527032Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [52:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T09:21:27.527049Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [52:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 
3 2024-11-21T09:21:27.527131Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.527138Z node 52 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:21:27.527148Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:27.527153Z node 52 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:21:27.527157Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:21:27.527164Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T09:21:27.527169Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:21:27.527175Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:21:27.527180Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:21:27.527204Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:21:27.527210Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T09:21:27.527214Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:21:27.527217Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:21:27.527344Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [52:203:2206], Recipient [52:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2024-11-21T09:21:27.527351Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:21:27.527366Z node 52 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.527377Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.527384Z node 52 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:21:27.527389Z node 52 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:21:27.527393Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:27.527406Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:27.527487Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [52:203:2206], Recipient [52:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2024-11-21T09:21:27.527493Z node 
52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:21:27.527500Z node 52 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.527508Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.527512Z node 52 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:21:27.527515Z node 52 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:21:27.527519Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:21:27.527527Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T09:21:27.527532Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [52:356:2337] 2024-11-21T09:21:27.527538Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:21:27.528270Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:27.528585Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.528595Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:27.528666Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.528671Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:21:27.528689Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [52:356:2337] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1002 at schemeshard: 72057594046678944 2024-11-21T09:21:27.528703Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:21:27.528708Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [52:357:2338] 2024-11-21T09:21:27.528748Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [52:359:2340], Recipient [52:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:27.528758Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:27.528762Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2024-11-21T09:21:27.528831Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [52:381:2361], Recipient [52:121:2147]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 
2024-11-21T09:21:27.528837Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:21:27.528850Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:27.528902Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/seq" took 47us result status StatusSuccess 2024-11-21T09:21:27.528979Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq" PathDescription { Self { Name: "seq" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "seq" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore >> KqpJoin::JoinWithDuplicates >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:09.627227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:09.627255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.627260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:09.627264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:09.627271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:09.627275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:09.627283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:09.627362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:09.637607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:09.637626Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:09.640025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:09.640096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:09.640126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:09.642414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:09.642487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:09.642575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.642732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:09.643334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.643591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.643600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.643612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:09.643619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.643624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:09.643662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 
2024-11-21T09:21:09.644857Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:09.658160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:09.658233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.658280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:09.658311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:09.658316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.658789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.658816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:09.658854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.658863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:09.658866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:09.658870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:09.659179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.659187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:09.659190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:09.659491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.659501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.659507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.659514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.659943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:09.660254Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:09.660296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:09.660413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:09.660429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:09.660434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.660467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:09.660471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:09.660495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:09.660502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:09.660767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:09.660773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:09.660801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:09.660805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:09.660861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:09.660865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:09.660873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:09.660875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.660879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:09.660882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:09.660894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:09.660898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:09.660906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:09.660909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:09.660912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... hard: 72057594046678944 2024-11-21T09:21:27.642324Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:27.642364Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:21:27.642402Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:27.642407Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T09:21:27.642413Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T09:21:27.642521Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.642529Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T09:21:27.642762Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.642777Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.642781Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:21:27.642787Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:21:27.642792Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:21:27.643018Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.643029Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.643032Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:21:27.643036Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:21:27.643043Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 
2024-11-21T09:21:27.643073Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T09:21:27.643605Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 237 } } 2024-11-21T09:21:27.643616Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:27.643635Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 237 } } 2024-11-21T09:21:27.643647Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 237 } } 2024-11-21T09:21:27.644086Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T09:21:27.644100Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:27.644134Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T09:21:27.644140Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:27.644147Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T09:21:27.644157Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:27.644160Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.644164Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:27.644169Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T09:21:27.644486Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 
2024-11-21T09:21:27.644506Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:21:27.648587Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.648647Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.648675Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:21:27.648684Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:21:27.648702Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:21:27.648707Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:21:27.648714Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T09:21:27.648720Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:21:27.648726Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:21:27.648730Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:21:27.648761Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T09:21:27.649637Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:21:27.649649Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T09:21:27.649713Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:21:27.649734Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:21:27.649738Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:398:2373] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:21:27.649821Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:27.649872Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 80us result status StatusSuccess 2024-11-21T09:21:27.649976Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore >> KqpJoin::IdxLookupLeftPredicate [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinWithNull-StreamLookupJoin >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpJoin::RightSemiJoin_SecondaryIndex >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::MultiJoins [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore >> KqpJoin::JoinWithDuplicates [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] >> KqpJoin::FullOuterJoin2 >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> OlapEstimationRowsCorrectness::TPCH10 >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore [GOOD] >> 
KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14859, MsgBus: 3642 2024-11-21T09:21:18.380824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660225849461435:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.380991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00442c/r3tmp/tmpHrrNFd/pdisk_1.dat 2024-11-21T09:21:18.455989Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14859, node 1 2024-11-21T09:21:18.482429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.482468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.483411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:18.492758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.492771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.492772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.492798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3642 TClient is connected to server localhost:3642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:18.590636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.598995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.661695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.679133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.688969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.710063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225849462970:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.710084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.826702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225849463473:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225849463478:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.876099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660225849463480:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.070882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.076755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.101179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.118564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.124714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.136142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.143325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.149929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.156953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.164233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.171182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.227251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:19.234023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.240930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.248074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.254635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.261887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.268846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.275559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.282891 ... 0263999220801:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.890707Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:27.900706Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660263999220803:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.049615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.056254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.068425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.082746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.096796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.113989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.121549Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.176670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.187883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.201555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.208445Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.215124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.222098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.334733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:28.342507Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.399095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.411337Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.418329Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.425614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.432378Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.439535Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.497051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.510283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.523724Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.533262Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.591443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.603030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.615510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.629496Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.684755Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.698963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.714511Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.769986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.825775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.840017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.853543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.867773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.900853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:28.908652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.922914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.937418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.992973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.006775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.021124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.034521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.054679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.070960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:29.077953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.090821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.097951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] >> KqpJoin::RightTableKeyPredicate >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpJoin::JoinLeftPureInner >> TTopicYqlTest::BadRequests [GOOD] >> KqpJoin::FullOuterJoin2 [GOOD] >> KqpJoin::FullOuterJoinSizeCheck >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2024-11-21T09:19:26.614558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659744654349978:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.614583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:26.617748Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659741126585805:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:26.617990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:26.634893Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003ed1/r3tmp/tmpS9fl4s/pdisk_1.dat 2024-11-21T09:19:26.640181Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:26.657305Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22621, node 1 2024-11-21T09:19:26.670479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003ed1/r3tmp/yandexBIh2a8.tmp 2024-11-21T09:19:26.670490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003ed1/r3tmp/yandexBIh2a8.tmp 2024-11-21T09:19:26.670536Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003ed1/r3tmp/yandexBIh2a8.tmp 2024-11-21T09:19:26.670567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:26.675339Z INFO: TTestServer started on Port 25463 
GrpcPort 22621 TClient is connected to server localhost:25463 PQClient connected to localhost:22621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:19:26.714690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:26.714724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:26.716232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:26.736849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:26.736871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:26.737445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:26.737821Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:26.738007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T09:19:26.744272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T09:19:26.877847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659741126586160:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:26.877871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439659741126586142:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:26.877929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:26.878845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T09:19:26.881534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439659741126586166:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T09:19:26.906534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T09:19:26.906703Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439659744654351030:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:26.906773Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMxMmZlZDktZGYzODc2NmQtOGRjMmU3MWItZjBlNzFkMGM=, ActorId: [1:7439659744654350989:2300], ActorState: ExecuteState, TraceId: 01jd709w536bnw1ssqbbnceg0h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:26.907211Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:26.938797Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439659741126586207:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:19:26.938851Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWI1ZDRjZGEtOGE1NjZiM2QtN2M1OTVhMTctNjdhZjkzOGI=, ActorId: [2:7439659741126586135:2280], ActorState: ExecuteState, TraceId: 01jd709w4xdx49nxtt6c645e7d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:19:26.939025Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:19:26.964281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T09:19:27.027786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:19:27.051969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd709w9y6pfzjgtf2nb6g2tp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZmZjliMmMtZjJkNTYyYWEtOTlhNjNlNTAtZTZlZWM4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439659748949318747:3034] 2024-11-21T09:19:31.614898Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439659744654349978:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:31.614929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:19:31.617962Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439659741126585805:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:31.617991Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:22621 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2024-11-21T09:19:33.049287Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22621 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } 202 ... path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T09:21:24.582080Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T09:21:24.622168Z node 25 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd70df3rbarxdghqr11f42r2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=OGM5MzU4OGUtOWU4OGQ0NGEtODZiNGQ2MTYtZTZiODVlMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [25:7439660248232577217:3030] 2024-11-21T09:21:29.167047Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7439660248232575848:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:29.167093Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:21:29.168267Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7439660249378304486:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:29.168305Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T09:21:29.784385Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T09:21:29.784451Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T09:21:29.784974Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T09:21:29.785032Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T09:21:29.785042Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T09:21:29.785044Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T09:21:29.785048Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T09:21:29.785053Z node 25 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:21:29.785076Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T09:21:29.785089Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T09:21:29.785128Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:29.785134Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [25:7439660269707414164:3291], now have 1 active actors on pipe 2024-11-21T09:21:29.785259Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T09:21:29.785416Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [25:7439660248232576059:2198] txId 281474976720679 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T09:21:29.787517Z node 26 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [25:7439660269707414166:3293] connected; active server actors: 1 2024-11-21T09:21:29.787767Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T09:21:29.787797Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T09:21:29.787833Z node 25 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T09:21:29.787935Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. 
Step TInitConfigStep 2024-11-21T09:21:29.787982Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7439660248232576059:2198] txId 281474976720679 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T09:21:29.788032Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T09:21:29.788073Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T09:21:29.788142Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [25:7439660269707414225:2440] 2024-11-21T09:21:29.788433Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:29.788445Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [26:7439660270853141706:2379], now have 1 active actors on pipe 2024-11-21T09:21:29.788478Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72075186224037892, NodeId 25, Generation 1 2024-11-21T09:21:29.788898Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Completed. 
2024-11-21T09:21:29.788909Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 0 generation 1 [25:7439660269707414225:2440] 2024-11-21T09:21:29.788914Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T09:21:29.789173Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] BALANCER INIT DONE for rt3.dc1--legacy--topic1: (0, 72075186224037892) 2024-11-21T09:21:29.789367Z node 26 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72057594046644480, NodeId 25, Generation 2 2024-11-21T09:21:29.789431Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:21:29.789440Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T09:21:29.789508Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore >> KqpJoin::RightTableKeyPredicate [GOOD] >> KqpJoin::RightTableIndexPredicate >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore >> KqpJoin::JoinLeftPureInner [GOOD] >> KqpJoin::JoinLeftPureInnerConverted >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin-ColumnStore >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> YdbProxy::ReadNonExistentTopic [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> 
KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> KqpJoin::JoinMismatchDictKeyTypes >> KqpJoin::RightTableIndexPredicate [GOOD] >> KqpJoin::RightTableValuePredicate >> KqpJoinOrder::TPCDS9_SMALL+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:21:08.475530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:08.475552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.475557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:08.475561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:08.475565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:08.475569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:08.475576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:08.475648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:08.483497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:08.483518Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:08.486257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:08.486278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:08.486314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:08.487876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:08.487921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:08.488014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.488082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:08.488650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.488929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.488941Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.488956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:08.488962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.488966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:08.489003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.490424Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:08.504101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:08.504172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.504244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:08.504276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:08.504282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.504926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.504951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:08.504988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.504997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:08.505001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:08.505006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:08.505347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.505356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:08.505359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:08.505646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.505654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T09:21:08.505660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.505667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.506157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:08.506491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:08.506539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:08.506717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:08.506736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:08.506743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.506779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:08.506784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:08.506803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.506812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:08.507231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:08.507273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:21:08.507342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:08.507348Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:08.507360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:08.507364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T09:21:08.507370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:08.507375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:08.507380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:08.507383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:08.507394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:08.507399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:08.507403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:21:08.507639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.507653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:21:08.507657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:21:08.507662Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:21:08.507666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:08.507680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
at schemeshard: 72057594046678944 2024-11-21T09:21:31.430768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.430815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.430918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.430926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:31.430933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 107:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:31.430946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T09:21:31.430950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T09:21:31.430960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T09:21:31.430975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:352:2327] message: TxId: 107 2024-11-21T09:21:31.430981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T09:21:31.430986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T09:21:31.430990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T09:21:31.431025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:21:31.431029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:21:31.431096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:31.431102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:21:31.431114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:31.431672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T09:21:31.431686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1399:3305] 2024-11-21T09:21:31.431969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 2024-11-21T09:21:31.502019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:31.502085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 
pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:31.502152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T09:21:31.502176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.502185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:31.502289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T09:21:31.502297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.502300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:31.572968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:31.573051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:31.573093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732194158677459 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732194158677459 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:31.573458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T09:21:31.573700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T09:21:31.573709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T09:21:31.575202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:31.575240Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T09:21:31.575251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:31.575259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2024-11-21T14:02:38.677459Z, at schemeshard: 72057594046678944 2024-11-21T09:21:31.575267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:31.575276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T09:21:31.575279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2024-11-21T14:02:38.677459Z, at schemeshard: 72057594046678944 2024-11-21T09:21:31.575283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T09:21:31.596052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:31.638096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:31.638142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:31.638161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T09:21:31.638185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.638193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:31.638261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T09:21:31.638265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.638268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:31.661168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T09:21:31.716239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:31.716286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 
2024-11-21T09:21:31.716304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T09:21:31.716321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.716335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T09:21:31.716402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T09:21:31.716407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T09:21:31.716409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2024-11-21T09:20:27.859645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660004733598738:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:27.859665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004336/r3tmp/tmpiVdjbF/pdisk_1.dat 2024-11-21T09:20:27.931670Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4338 TServer::EnableGrpc on GrpcPort 15040, node 1 2024-11-21T09:20:27.960680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:27.960698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:27.961761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:27.998418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:27.998439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:27.998441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:27.998471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4338 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:28.082597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:20:28.861455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T09:20:28.922538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660009028566870:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:28.922540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660009028566882:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:28.922557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:28.923124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T09:20:28.924157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660009028566884:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:20:29.303643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.357337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.471977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.524323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:20:29.576756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:20:32.859899Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439660004733598738:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:32.859932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:20:42.931947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:20:42.932011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:31.292872Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660281501502915:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004336/r3tmp/tmpLeFY6p/pdisk_1.dat 2024-11-21T09:21:31.295630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:31.309432Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20639 TServer::EnableGrpc on GrpcPort 10592, node 2 2024-11-21T09:21:31.347847Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:31.347863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:31.347864Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:31.347897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:31.396542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:31.396594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:31.397039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.397606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:31.405820Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:07.280710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:07.280736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:07.280741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:07.280747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:07.280752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:07.280756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:07.280766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:07.280846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:07.291162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:07.291183Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:07.293524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:07.293639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:07.293691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:07.296716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:07.296809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:07.296915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.297254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:07.298119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.298429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.298456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.298471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:07.298479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:07.298485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:07.298539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] 
recipient: [1:207:2209] 2024-11-21T09:21:07.300266Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:07.317912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:07.318004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.318074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:07.318120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:07.318129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.319099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.319132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:07.319207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.319219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:07.319224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:07.319230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:07.319734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.319745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:07.319751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:07.322215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.322236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.322245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.322254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.322935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:07.323462Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:07.323520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:07.323736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:07.323766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:07.323775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.323854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:07.323861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:07.323901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:07.323915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:07.324437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:07.324453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:07.324503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:07.324509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:07.324599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:07.324607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:07.324619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:07.324623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.324630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:07.324636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:07.324641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:07.324645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:07.324659Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:07.324665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:07.324670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:21:31.850425Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.850439Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:21:31.850453Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.850458Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:31.850465Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.850475Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:31.850478Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T09:21:31.850935Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:31.850960Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:21:31.851351Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.851431Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 250 } } 2024-11-21T09:21:31.851436Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:31.851447Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 250 } } 2024-11-21T09:21:31.851455Z node 97 
:FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 250 } } 2024-11-21T09:21:31.851585Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.851595Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T09:21:31.851607Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.851610Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:21:31.851614Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:21:31.851620Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:31.851622Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.851625Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:21:31.851627Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T09:21:31.851630Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T09:21:31.852027Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.852354Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.852426Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.852436Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2024-11-21T09:21:31.852443Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T09:21:31.852447Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T09:21:31.852457Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: 
CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T09:21:31.852462Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T09:21:31.853047Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.853060Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:21:31.853073Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:21:31.853077Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:31.853082Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:21:31.853091Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:21:31.853097Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:21:31.853101Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:21:31.853131Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:31.853135Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:21:31.853778Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:31.853786Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:31.853838Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:31.853855Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:31.853860Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:522:2486] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:21:31.853927Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:21:31.853982Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 68us result status StatusSuccess 2024-11-21T09:21:31.854106Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false 
IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.3%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 18526, MsgBus: 9789 2024-11-21T09:21:27.536786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660263255771843:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.536878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e2/r3tmp/tmpy4rfT8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18526, node 1 2024-11-21T09:21:27.595482Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:27.608680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.608697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.608700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.608738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9789 TClient is connected to server localhost:9789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:27.666635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.666656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.667868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:27.671590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.673967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.682669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.704047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.726735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.736075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.862704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660263255773254:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.862729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.887267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.893610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.907414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.915501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.928616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.945205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.954937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660263255773768:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.954963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.954994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660263255773773:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.955675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:27.962586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660263255773775:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.134752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.142042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.152942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10567, MsgBus: 62259 2024-11-21T09:21:28.433116Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660268756438322:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:28.433252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e2/r3tmp/tmpwjVz0c/pdisk_1.dat 2024-11-21T09:21:28.442731Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10567, node 2 2024-11-21T09:21:28.453366Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.453381Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.453384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.453423Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62259 TClient is connected to server localhost:62259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.533899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.533932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.535179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:28.535522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:28.537002Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:28.541879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.555730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.573818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.583111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... pe: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:30.462870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.653879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660275475996344:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.653911Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.665278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.673103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.679670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.694254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.708598Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.722287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.738513Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660275475996847:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.738536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.738562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660275475996852:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.739208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:30.742061Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660275475996854:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:30.952913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.960461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.019269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24588, MsgBus: 29336 2024-11-21T09:21:31.418033Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660278230144752:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e2/r3tmp/tmpmVYtIe/pdisk_1.dat 2024-11-21T09:21:31.422823Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:31.429714Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24588, node 5 2024-11-21T09:21:31.440733Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:31.440747Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:31.440749Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:31.440791Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29336 TClient is connected to server localhost:29336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:31.517969Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:31.518002Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:31.519520Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:31.520232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:31.532573Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.542300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.609443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.628660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.728936Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278230146153:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.728963Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.736114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.744146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.757836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.771677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.785968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.799649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.811299Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278230146654:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.811338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.811356Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278230146659:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.812043Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.820952Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660278230146661:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.031064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.036854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 25701, MsgBus: 22884 2024-11-21T09:21:27.514143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660262736392706:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.514243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e7/r3tmp/tmp4BoF1B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25701, node 1 2024-11-21T09:21:27.574477Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:27.574626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.574628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.574630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.574661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22884 TClient is connected to server localhost:22884 2024-11-21T09:21:27.616426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.616459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.617460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:27.640321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.644814Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.669110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.737542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:27.756491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.768501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.838653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660262736394099:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.838678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.867632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.873075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.879466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.886453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.892857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.900435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.910812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660262736394591:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.910839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.910935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660262736394596:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.911617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:27.913414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660262736394598:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.115685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.181750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.193887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15437, MsgBus: 62373 2024-11-21T09:21:28.582542Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660266075960979:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:28.582565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e7/r3tmp/tmpcinEja/pdisk_1.dat 2024-11-21T09:21:28.591001Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15437, node 2 2024-11-21T09:21:28.603940Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.603949Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.603951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.603977Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62373 TClient is connected to server localhost:62373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.682773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.682802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.683910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:28.685197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:28.692432Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:28.704783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.720708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.739676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.754396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propo ... D_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660276137344286:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.945210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:30.950915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.959291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.966319Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.973072Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.980183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.987573Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.012467Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660280432312084:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.012533Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.012564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660280432312089:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.013241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.014939Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660280432312091:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:31.204420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.211664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.225604Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3699, MsgBus: 6642 2024-11-21T09:21:31.534395Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660279525290960:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:31.534410Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043e7/r3tmp/tmpB9vcEP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3699, node 5 2024-11-21T09:21:31.550563Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:31.552941Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:31.552950Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:31.552951Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:31.552986Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6642 TClient is connected to server localhost:6642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:31.634923Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:31.634951Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:31.636157Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:31.636824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:31.640738Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:31.650822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.660764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.691458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:31.722110Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.894739Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660279525292500:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.894771Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.900327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.907518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.919395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.932528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.947953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.003196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.021154Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660283820260311:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.021192Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660283820260316:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.021192Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.022112Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.030895Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660283820260318:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.252707Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.259797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.268003Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 18810, MsgBus: 1719 2024-11-21T09:21:27.619045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660261634518384:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.619131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ce/r3tmp/tmpXnGUsx/pdisk_1.dat 2024-11-21T09:21:27.694569Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18810, node 1 2024-11-21T09:21:27.713722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.713736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.713738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.713779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:27.720142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.720168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.721364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1719 TClient is connected to server localhost:1719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:27.762801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.764826Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.770835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.799872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.858029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:27.867984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.918096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660261634519786:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.918125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.946766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.956931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.962988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.970827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.977155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.984354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.992614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660261634520278:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.992641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660261634520283:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.992642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.993217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:27.997842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660261634520285:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:28.174113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.182980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.194899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.208459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.223608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.236837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2727, MsgBus: 27035 2024-11-21T09:21:28.722795Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660267974991631:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:28.722816Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ce/r3tmp/tmph2EATE/pdisk_1.dat 2024-11-21T09:21:28.731998Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2727, node 2 2024-11-21T09:21:28.743125Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.743136Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.743137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.743165Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27035 TClient is connected to server localhost:27035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.823058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.823089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:21:28.824397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:28.825365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.828414Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.841204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:30.944167Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.086033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278447379560:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.086100Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.091572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.098360Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.106297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.114130Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.131486Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.144698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.158962Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278447380072:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.158987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.159044Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278447380077:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.160010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.169682Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660278447380079:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:31.385760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.392806Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21557, MsgBus: 30676 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ce/r3tmp/tmpLJZd4W/pdisk_1.dat 2024-11-21T09:21:31.719740Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660281751654845:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:31.721660Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:31.732467Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21557, node 5 2024-11-21T09:21:31.744023Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:31.744040Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:31.744042Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:31.744088Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30676 TClient is connected to server localhost:30676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:31.820255Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:31.820284Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:31.820634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.821539Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:31.824593Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:31.830629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.840237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.861734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.874292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.055280Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660286046623558:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.055311Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.061217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.068158Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.079530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.094121Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.107525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.114656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.132903Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660286046624061:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.132938Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.133095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660286046624066:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.133997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.137836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660286046624068:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.316551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.323307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> KqpJoin::RightTableValuePredicate [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore [GOOD] >> OlapEstimationRowsCorrectness::TPCH2 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 64290, MsgBus: 10128 2024-11-21T09:21:27.621935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660263162571492:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.622044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043cc/r3tmp/tmpIetjnx/pdisk_1.dat 2024-11-21T09:21:27.667557Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64290, node 1 2024-11-21T09:21:27.680290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.680300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.680312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.680338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10128 TClient is connected to server localhost:10128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:27.720947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.720971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.722227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:27.746450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.749946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.757234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.773405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:27.788091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.799299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.954743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660263162572888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.954772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.985632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.991200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.998135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.005141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.012383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.019224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.031637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660267457540693:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.031671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.031751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660267457540698:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.032579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.039529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660267457540700:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.260320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.270587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.278498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.292386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.300830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14564, MsgBus: 3738 2024-11-21T09:21:28.738508Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660268450155288:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043cc/r3tmp/tmphj9nsH/pdisk_1.dat 2024-11-21T09:21:28.746818Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:28.753191Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14564, node 2 2024-11-21T09:21:28.764001Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.764014Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.764015Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.764051Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3738 TClient is connected to server localhost:3738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:28.835545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.835571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.836645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:28.839886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.852894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.868063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.883740Z no ... propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.168736Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.198085Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.213106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.345574Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278546764323:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.345598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.352426Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.359331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.374234Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.386235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.393671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.400495Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.416450Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278546764835:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.416478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.416482Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660278546764840:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.417153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.420771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660278546764842:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:31.641808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7437, MsgBus: 21644 2024-11-21T09:21:32.124277Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660282405909990:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:32.124305Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043cc/r3tmp/tmpbIgfiP/pdisk_1.dat 2024-11-21T09:21:32.139227Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7437, node 5 2024-11-21T09:21:32.151685Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.151700Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.151703Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.151766Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21644 TClient is connected to server localhost:21644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.224508Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:32.224542Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.225588Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:32.226936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.231584Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:32.233559Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:32.246826Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.268920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.327927Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.481641Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660282405911542:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.481666Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.488499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.496591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.506400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.513387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.528384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.541772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.560827Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660282405912046:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.560862Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.560965Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660282405912051:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.561734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.569242Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660282405912053:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.761730Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 19304, MsgBus: 9379 2024-11-21T09:21:28.301856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660267096316967:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:28.302009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043bc/r3tmp/tmpqTWIn8/pdisk_1.dat 2024-11-21T09:21:28.360125Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19304, node 1 2024-11-21T09:21:28.365440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.365452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.365454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.365482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9379 TClient is connected to server localhost:9379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:28.403049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.403068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.404220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.438332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:28.442731Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:28.453886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.515479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.531983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.544993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.644409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660267096318512:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.644459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.649999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.655218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.663592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.670087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.684816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.699255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.717961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660267096319016:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.717998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.718074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660267096319021:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.718860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.725960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660267096319023:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.919608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.926084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25159, MsgBus: 62727 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043bc/r3tmp/tmpD31xJm/pdisk_1.dat 2024-11-21T09:21:29.183219Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660269489677178:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:29.184697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:29.198415Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25159, node 2 2024-11-21T09:21:29.217747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:29.217757Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:29.217759Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:29.217790Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62727 TClient is connected to server localhost:62727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:29.282271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:29.282300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:29.283303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:29.284552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:29.285963Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:29.293578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:29.301858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.324530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.382652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.497887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadSe ... 658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.342141Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.407095Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.417465Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.498212Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660280786119681:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.498239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.502649Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.509317Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.519987Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.534337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.551835Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.607679Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.618642Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660280786120200:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.618664Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.618698Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660280786120205:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.619276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.623853Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660280786120207:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:31.816108Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.822714Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.834198Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22235, MsgBus: 14735 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043bc/r3tmp/tmppuJyMo/pdisk_1.dat 2024-11-21T09:21:32.148349Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 22235, node 5 2024-11-21T09:21:32.164997Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:32.166796Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.166805Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.166808Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.166841Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14735 2024-11-21T09:21:32.221975Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:32.222018Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.222729Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.229353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:32.234870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.259609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.280601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.296946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.464828Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660285266400963:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.464856Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.469742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.481123Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.492134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.499222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.506871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.521557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.536162Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660285266401463:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.536186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660285266401468:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.536187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.536957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.540650Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660285266401470:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.747847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.097257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.097279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.097288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.097336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.097340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.097355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.106542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.106618Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.108233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.108310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.108330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.110955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.111018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.112368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.113118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.114469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.115718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.115722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.115745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.116815Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.130135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.130247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.130809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.130820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.130824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.131154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.131447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.131463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.131984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.132345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.132809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.132937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.133004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.133027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.133376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133385Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.133421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.133487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.133498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.133500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.133507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.133511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.133519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.133522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.133525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
2057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T09:21:31.518458Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:31.518528Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T09:21:31.518534Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: false 2024-11-21T09:21:31.518539Z node 173 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T09:21:31.518634Z node 173 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.518647Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.518653Z node 173 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:21:31.518658Z node 173 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T09:21:31.518664Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:21:31.518763Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:31.518785Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 743029344360 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:31.518797Z node 173 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T09:21:31.518820Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T09:21:31.518830Z node 173 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T09:21:31.518834Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:21:31.518845Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:31.518854Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:21:31.518859Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: 
false 2024-11-21T09:21:31.518866Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:21:31.518870Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T09:21:31.518874Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T09:21:31.518884Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:21:31.518891Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 3, subscribers: 1 2024-11-21T09:21:31.518895Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T09:21:31.518898Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 11 2024-11-21T09:21:31.518901Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:21:31.519008Z node 173 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.519019Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.519023Z node 173 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:21:31.519027Z node 173 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T09:21:31.519030Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:21:31.519063Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:21:31.519959Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.520022Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T09:21:31.520045Z node 173 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:31.520050Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:31.520092Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:21:31.520118Z node 173 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:31.520123Z node 173 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [173:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T09:21:31.520128Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [173:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2024-11-21T09:21:31.520297Z node 173 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.520326Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.520331Z node 173 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:21:31.520336Z node 173 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T09:21:31.520340Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:21:31.520477Z node 173 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.520487Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.520491Z node 173 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:21:31.520495Z node 173 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:21:31.520500Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:21:31.520512Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T09:21:31.520517Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [173:122:2148] 2024-11-21T09:21:31.520613Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:31.520619Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:21:31.520633Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:31.521177Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.521503Z node 
173 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:21:31.521529Z node 173 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T09:21:31.521540Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T09:21:31.521621Z node 173 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T09:21:31.522086Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:21:31.522097Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:21:31.522179Z node 173 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:21:31.522195Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:21:31.522200Z node 173 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [173:959:2898] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23377, MsgBus: 10615 2024-11-21T09:21:18.380464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660223127958551:2226];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.380498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004453/r3tmp/tmp7ESxIe/pdisk_1.dat 2024-11-21T09:21:18.452274Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23377, node 1 2024-11-21T09:21:18.481336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.481360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.482056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:18.493541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.493563Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.493565Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.493593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10615 TClient is connected to server localhost:10615 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.583068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.588065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:18.591650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:18.658563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.673304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.681532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.708089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223127959912:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.708120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.880019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.890813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.897948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.904548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.912990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223127960427:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.913010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660223127960432:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.913012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.913473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.918514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660223127960434:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.108783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.113952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.121617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.129015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.136016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.153791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.159701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.171588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.189424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.199915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.206184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.213303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.219862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.268612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:19.274912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.283080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.290319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.297085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.303891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.310659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.317885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:32.005274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:32.005285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:32.005299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:32.005303Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:32.005312Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:32.005321Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:32.005334Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:32.005342Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:32.005351Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:32.005358Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:32.005404Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:32.005408Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:32.005415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:32.005419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:32.005432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:32.005435Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:32.005441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:32.005445Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:32.005451Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:32.005454Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:32.005459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:32.005462Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:32.005484Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:32.005487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:32.005498Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:32.005507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:32.005515Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:32.005523Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:32.005537Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:32.005545Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:32.005554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:32.005561Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:32.008818Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:32.008835Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:32.008845Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:32.008849Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:32.008865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:32.008873Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:32.008892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:32.008897Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:32.008906Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:32.008918Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:32.008924Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:32.008929Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:32.008961Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:32.008966Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:32.008982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:32.008986Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:32.008996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:32.009000Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:32.009016Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:32.009019Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:32.009029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:32.009033Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2295, MsgBus: 21707 2024-11-21T09:21:27.647337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660261208021035:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.647405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043c7/r3tmp/tmpQHjDug/pdisk_1.dat 2024-11-21T09:21:27.704596Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2295, node 1 2024-11-21T09:21:27.718152Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.718167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.718170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.718222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21707 2024-11-21T09:21:27.746709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.746742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.747929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:27.773485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.777415Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.784082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:27.850888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.870606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.881038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.972371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660261208022425:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.972423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.978459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.983927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.991102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.997914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.052080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.061312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.069684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660265502990236:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.069706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.069749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660265502990241:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.070345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.074454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660265502990243:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:28.226720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.232115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.243016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.250298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.264553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.271070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16941, MsgBus: 24040 2024-11-21T09:21:28.517371Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660267174029734:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:28.518555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043c7/r3tmp/tmpTOpC7d/pdisk_1.dat 2024-11-21T09:21:28.529970Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16941, node 2 2024-11-21T09:21:28.535231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:28.535246Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:28.535249Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:28.535281Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24040 TClient is connected to server localhost:24040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.616695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.616718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.617752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:28.618021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.620971Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:28.631746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.697516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.877801Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278129354718:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.877826Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.883875Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.890613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.898442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.911374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.918358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.925229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.933728Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278129355219:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.933754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660278129355224:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.933776Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.934318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.938390Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660278129355226:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.165024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.186081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27293, MsgBus: 6531 2024-11-21T09:21:32.436626Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660282971504378:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:32.436818Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043c7/r3tmp/tmp9cgN9p/pdisk_1.dat 2024-11-21T09:21:32.457140Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27293, node 6 2024-11-21T09:21:32.463265Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.463276Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.463278Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.463306Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6531 TClient is connected to server localhost:6531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.539594Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:32.539629Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.539939Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.540646Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:21:32.541579Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:32.551430Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.565710Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.588719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.600049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.772685Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660282971505939:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.772709Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.779110Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.786246Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.793521Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.801717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.814688Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.828411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.845283Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660282971506440:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.845305Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.845400Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660282971506445:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.846165Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.856009Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660282971506447:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:33.022898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.039958Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_FullScan >> KqpJoinOrder::TPCDS9-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS9_SMALL+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore |96.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 14060, MsgBus: 6971 2024-11-21T09:21:18.434500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660225575332959:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.434553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004440/r3tmp/tmpklhUVD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14060, node 1 2024-11-21T09:21:18.527525Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:18.531788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.531805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.531806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.531838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.534836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.534868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.535953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6971 TClient is connected to server localhost:6971 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.591973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.603781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.619396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.641031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.652775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.741781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225575334347:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.741810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.826125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.842223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225575334862:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660225575334867:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660225575334869:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.072249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.078086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1809, MsgBus: 3091 2024-11-21T09:21:19.299834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660227115161091:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.299849Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004440/r3tmp/tmpYzn3VP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1809, node 2 2024-11-21T09:21:19.316275Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:19.316454Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.316464Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.316465Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.316499Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3091 TClient is connected to server localhost:3091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:19.400078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.400105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.401246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.402385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.410028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.418188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.432622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.444959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... OpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.752305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.764476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.919287Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660280253493953:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.919313Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.925819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.931649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.939995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.946412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.954445Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.969317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.983550Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660280253494458:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.983580Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.983639Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660280253494463:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.984328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.987351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660280253494465:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.176569Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.188928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31076, MsgBus: 21441 2024-11-21T09:21:32.509386Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660284771456285:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:32.509422Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004440/r3tmp/tmpnanInM/pdisk_1.dat 2024-11-21T09:21:32.532954Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31076, node 6 2024-11-21T09:21:32.544761Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.544775Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.544778Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.544814Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21441 TClient is connected to server localhost:21441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.611309Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:32.611347Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.611629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.612396Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:32.622639Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:32.633322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.655287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.667990Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.836735Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660284771457842:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.836755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.841594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.851060Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.865148Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.878070Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.892287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.907186Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.924704Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660284771458343:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.924744Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.924786Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660284771458348:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.925492Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.932789Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660284771458350:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:33.139367Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.157590Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2024-11-21T09:20:50.162888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660105704899845:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.162930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037b5/r3tmp/tmpkYXTkx/pdisk_1.dat 2024-11-21T09:20:50.230495Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25403, node 1 2024-11-21T09:20:50.263678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.263702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.265082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.275913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.275926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.275927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.275960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:50.335916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.336802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.336817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.337799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.337807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:20:50.338288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:20:50.338783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850386, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:20:50.340553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:20:50.340927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340973Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:20:50.340981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:20:50.340992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:20:50.341002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:20:50.341405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:20:50.341425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:20:50.341428Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.341446Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:3080 2024-11-21T09:20:50.410730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660105704900626:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.410752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.529601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.529729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:20:50.529891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.529901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.530539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:50.530652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530792Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:50.530824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.530831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.530833Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:50.537401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:50.537431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:20:50.537881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:50.591291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:50.591302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:50.591319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:20:50.591676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.592346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180850638 2024-11-21T09:20:50.592388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:20:50.592706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.592804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.592935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.592948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.592951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759 ... ydb://session/3?node_id=1&id=NmU2ODFiNjQtMzI4M2NmYzUtMmQ1Y2VjOTAtZjdhZTk0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.053857Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882247. Ctx: { TraceId: 01jd70dndcarqsp6fnhzvendnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4NmU2OTAtYjUwOTNmMWYtNGRkYWNhOTYtZGI4Yjk1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.053961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882250. Ctx: { TraceId: 01jd70dndd9mbecvf86bq6p6dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRlZTRiMzUtMjg0ZTM2N2EtYmM1N2UwNTItNmQ5MGQwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.053965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882251. Ctx: { TraceId: 01jd70dndd268w1rxkrd1494pg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJmYjgzNjEtY2UxNTJlNDYtMmY3ZWY0ZTAtODMxODkyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.054100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882252. Ctx: { TraceId: 01jd70dnddehr6taggxxqh89sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJkOTRkZjMtNTgyODRiYy1lM2YxNDFiOS01ZDUzMjYz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.054386Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882249. Ctx: { TraceId: 01jd70dndcbte41vs9143bhxgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IxYmNhZmQtNzY0NWUwOTUtYzM5YTgyZWItODdiYmYzMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.055017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882255. 
Ctx: { TraceId: 01jd70dndd4h9keexm0pb8sbk4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkNjZkNTgtY2YyYjI0NmEtYjFjYzZlY2EtMTNkM2MyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.055067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882253. Ctx: { TraceId: 01jd70dnddfc5k7y1v1smf3srg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZkMDk2NTctNmRmOWQ2OWQtZTlkNjhhZTItZmE2M2JjZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.055130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882254. Ctx: { TraceId: 01jd70dnddb9wpgmn4t6sra3se, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMyNTRmYzYtMWEzYzVmZWUtNmQ2NjhjOTYtZTBjOGUwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.056663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882257. Ctx: { TraceId: 01jd70dndf6a7ag1b72mfpchgx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE3MmMzY2MtOTM4ZjZkNjMtZGYxOTMwMjMtZDAwZjUxNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.056676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882256. Ctx: { TraceId: 01jd70dndf43jkqn8znysk3bv8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4NmU2OTAtYjUwOTNmMWYtNGRkYWNhOTYtZGI4Yjk1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.057538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882259. Ctx: { TraceId: 01jd70dndh2a09kmrqw18ejzb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJkOTRkZjMtNTgyODRiYy1lM2YxNDFiOS01ZDUzMjYz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.057544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882260. Ctx: { TraceId: 01jd70dndhbdv62pf9j48sh818, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IxYmNhZmQtNzY0NWUwOTUtYzM5YTgyZWItODdiYmYzMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.057655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882262. Ctx: { TraceId: 01jd70dndh79wbb2wnsw24hxsj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU2ODFiNjQtMzI4M2NmYzUtMmQ1Y2VjOTAtZjdhZTk0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.057663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882261. Ctx: { TraceId: 01jd70dndheatxqxq0e733a4k0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJmYjgzNjEtY2UxNTJlNDYtMmY3ZWY0ZTAtODMxODkyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.057721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882258. Ctx: { TraceId: 01jd70dndh91m852nsw4xz1ccq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRlZTRiMzUtMjg0ZTM2N2EtYmM1N2UwNTItNmQ5MGQwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.058521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882263. 
Ctx: { TraceId: 01jd70dndh9q23cw7qk064ntq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMyNTRmYzYtMWEzYzVmZWUtNmQ2NjhjOTYtZTBjOGUwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.058721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882264. Ctx: { TraceId: 01jd70dndhb0qe18gnfqv3cm70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZkMDk2NTctNmRmOWQ2OWQtZTlkNjhhZTItZmE2M2JjZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.058816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882265. Ctx: { TraceId: 01jd70dndh03czwtz1vwtqyrmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE3MmMzY2MtOTM4ZjZkNjMtZGYxOTMwMjMtZDAwZjUxNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.059482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882266. Ctx: { TraceId: 01jd70dndjavw3wr0wg0p4502v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkNjZkNTgtY2YyYjI0NmEtYjFjYzZlY2EtMTNkM2MyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.059731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882268. Ctx: { TraceId: 01jd70dndj14mvpqy1v1tv3hny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJmYjgzNjEtY2UxNTJlNDYtMmY3ZWY0ZTAtODMxODkyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.059836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882269. Ctx: { TraceId: 01jd70dndjes2zqztt2jdkfncp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJkOTRkZjMtNTgyODRiYy1lM2YxNDFiOS01ZDUzMjYz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.060007Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882267. Ctx: { TraceId: 01jd70dndj8yj75ep0b5qvn048, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M4NmU2OTAtYjUwOTNmMWYtNGRkYWNhOTYtZGI4Yjk1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T09:21:31.060977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882270. Ctx: { TraceId: 01jd70dndkabwx13wm4d2c4vkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IxYmNhZmQtNzY0NWUwOTUtYzM5YTgyZWItODdiYmYzMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.060991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882273. Ctx: { TraceId: 01jd70dndk7f0a9q4djzdakv1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMyNTRmYzYtMWEzYzVmZWUtNmQ2NjhjOTYtZTBjOGUwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.061090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882271. Ctx: { TraceId: 01jd70dndkd8mw4jb82khmqz62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU2ODFiNjQtMzI4M2NmYzUtMmQ1Y2VjOTAtZjdhZTk0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.061096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882272. 
Ctx: { TraceId: 01jd70dndk39a8g7cgedj4n5km, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRlZTRiMzUtMjg0ZTM2N2EtYmM1N2UwNTItNmQ5MGQwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.061561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882274. Ctx: { TraceId: 01jd70dndm9850ydve903k3v1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRkNjZkNTgtY2YyYjI0NmEtYjFjYzZlY2EtMTNkM2MyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.061565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882275. Ctx: { TraceId: 01jd70dndk5bfy7snd03z7cyes, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE3MmMzY2MtOTM4ZjZkNjMtZGYxOTMwMjMtZDAwZjUxNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:31.061702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976882276. Ctx: { TraceId: 01jd70dndkadt2m70wy77bmbrn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZkMDk2NTctNmRmOWQ2OWQtZTlkNjhhZTItZmE2M2JjZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) Table has 4 shards >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22909, MsgBus: 7000 2024-11-21T09:21:19.649017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660227122802027:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.649040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004411/r3tmp/tmpuwbR2m/pdisk_1.dat 2024-11-21T09:21:19.702140Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22909, node 1 2024-11-21T09:21:19.710816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.710830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.710832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.710871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7000 TClient is connected to server localhost:7000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:19.751090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.751119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.752298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:19.752864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.764308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:19.826269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.842181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.852004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.963800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660227122803570:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.963826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.995795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.001693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.055808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.067205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.121966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.130118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.141276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660231417771383:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.141299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.141304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660231417771388:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.141936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:20.150495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660231417771390:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:20.302501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.308272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.319053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.326281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.333117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.350581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.356748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.368583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.423451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.431223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.438174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.444872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.452536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.507364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:20.516180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.530309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.544606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.557396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.565608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.578248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.585701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.592531 ... 72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:33.221716Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:33.221719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.221732Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.221735Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.221745Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.221752Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.221764Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.221766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.221774Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.221777Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:33.221808Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:33.221811Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:33.221817Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:33.221821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:33.221832Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:33.221835Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:33.221842Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:33.221845Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:33.221852Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:33.221855Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:33.221860Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:33.221863Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:33.221884Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:33.221887Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.221899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.221902Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.221910Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.221914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.221928Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.221931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.221940Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.221943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:33.224468Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:33.224481Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:33.224487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:33.224490Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:33.224499Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:33.224502Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:33.224506Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:33.224510Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:33.224515Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:33.224523Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:33.224526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:33.224529Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:33.224545Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:33.224554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.224562Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.224565Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.224572Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.224575Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.224584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.224587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.224594Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.224596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] >> OlapEstimationRowsCorrectness::TPCH11 >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore [GOOD] >> 
KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8818, MsgBus: 13091 2024-11-21T09:21:18.458268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660224468291027:2145];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.458445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00442f/r3tmp/tmpeO3heU/pdisk_1.dat 2024-11-21T09:21:18.521127Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8818, node 1 2024-11-21T09:21:18.551847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.551858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.551860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.551890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.558842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.558867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.560018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13091 TClient is connected to server localhost:13091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.597847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.609887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:18.626426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.648101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.703744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.787177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224468292480:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.787199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224468292981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224468292986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660224468292988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.071267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.077237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.100796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.116966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.122035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.128867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.135925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.142837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.150352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.156645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.164093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.214248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:19.221429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.234851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.241128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.248122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.255486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.261808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.268741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.27618 ... 24-11-21T09:21:33.121078Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.121091Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.121095Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.121104Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.121107Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.121120Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.121124Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.121133Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.121136Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:33.121268Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:33.121273Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:33.121285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:33.121290Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:33.121309Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:33.121324Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:33.121335Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:33.121341Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:33.121352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:33.121357Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:33.121363Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:33.121369Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:33.121405Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:33.121409Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.121422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.121426Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.121435Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.121439Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.121453Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.121457Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.121466Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.121469Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:33.124039Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:33.124059Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:33.124069Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:33.124073Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:33.124088Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:33.124105Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:33.124115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:33.124122Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:33.124129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:33.124137Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:33.124143Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:33.124153Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:33.124181Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:33.124192Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:33.124222Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:33.124232Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:33.124242Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:33.124251Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:33.124265Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:33.124274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:33.124284Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:33.124292Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:33.862365Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439660268424837111:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:33.862394Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore >> SystemView::PgTablesOneSchemeShardDataQuery >> SystemView::Nodes [GOOD] >> SystemView::PDisksFields >> DbCounters::TabletsSimple >> SystemView::TabletsFields >> SystemView::QueryStatsRetries >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD] >> 
SystemView::PartitionStatsOneSchemeShard >> SystemView::VSlotsFields >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore >> SystemView::TopPartitionsFields ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23084, MsgBus: 29353 2024-11-21T09:21:24.706348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660250520199970:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:24.706584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043f5/r3tmp/tmp7VFPEZ/pdisk_1.dat 2024-11-21T09:21:24.761842Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23084, node 1 2024-11-21T09:21:24.775012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:24.775025Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:24.775028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:24.775064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29353 2024-11-21T09:21:24.807580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:24.807605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:24.808693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:24.838207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:24.840358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:24.843847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.860906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.879512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.891129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.019617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660254815168806:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.019647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.046863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.053407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.068083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.122591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.135877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.149813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.165370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660254815169322:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.165398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.165405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660254815169327:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.166098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.169480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660254815169329:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:25.353258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.359126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.366371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.373205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.380113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.398115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.403888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.415340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.422204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.429253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.436367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.443501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.451411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.505955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:25.512401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.520469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.526906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.533812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.540752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.547741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.554869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 0292481699205:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.916367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:34.920408Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660292481699207:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:35.150609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.157518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.166401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.180555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.186720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.206218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.212522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.222352Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.228943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.236046Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.243532Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.249721Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.264681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.325664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:35.333672Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.341183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.348291Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.354768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.362357Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.369237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.375749Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.383106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.390782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.397100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.404147Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.412074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.425580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.432053Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.439271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.446298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.503011Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.508976Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.519006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.530406Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.537615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.544663Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.558848Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.586976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:35.593802Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.600086Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.608503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.663111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.670671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.684804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.691453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.697970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.714467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:35.720920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.734101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.747520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore [GOOD] >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsFollowers >> KqpJoinOrder::TPCDS9-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 18128, MsgBus: 23155 2024-11-21T09:21:31.316454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660280630016414:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:31.316482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ae/r3tmp/tmpfPUcw7/pdisk_1.dat 2024-11-21T09:21:31.379444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18128, node 1 2024-11-21T09:21:31.389158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:31.389174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:31.389177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:31.389211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23155 2024-11-21T09:21:31.416132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:31.416158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:31.417978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23155 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:31.450859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.462782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.486069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.504889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.514633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:31.622135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660280630017736:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.622159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.653683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.662740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.673467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.687858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.701477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.716522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.731473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660280630018251:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.731489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.731589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660280630018256:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:31.732167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:31.738423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660280630018258:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:31.920337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:31.928903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24449, MsgBus: 30869 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ae/r3tmp/tmpMy7xIM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24449, node 2 2024-11-21T09:21:32.232045Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:32.234525Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.234537Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.234539Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.234569Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30869 TClient is connected to server localhost:30869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.318289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:32.318316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.318536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.324258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:32.324959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.333053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:32.352969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.365174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.530901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660282014458421:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.530995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.533144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.538977Z ... part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:35.292993Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:35.444320Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660294857046430:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.444345Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.449451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.455685Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.467887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.481096Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.488138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.495707Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.503733Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660294857046943:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.503776Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.503862Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660294857046948:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.504435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:35.508419Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660294857046950:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:35.674335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.694413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9130, MsgBus: 25801 2024-11-21T09:21:36.074631Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660300961892071:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.074677Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ae/r3tmp/tmpOodl9E/pdisk_1.dat 2024-11-21T09:21:36.087137Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9130, node 6 2024-11-21T09:21:36.097168Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.097186Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.097190Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.097236Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25801 TClient is connected to server localhost:25801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:36.174728Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.174757Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.176248Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:36.177867Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:36.179726Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:36.191928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.213469Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.269787Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.288527Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.450729Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660300961893621:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.450761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.456036Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.462495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.521030Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.531939Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.538589Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.552875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.568863Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660300961894132:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.568896Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.568989Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660300961894137:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.569539Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.572332Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660300961894139:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:36.744600Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.760505Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22819, MsgBus: 13330 2024-11-21T09:21:18.397095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660224061725965:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:18.397236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004429/r3tmp/tmph1jb07/pdisk_1.dat 2024-11-21T09:21:18.452585Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22819, node 1 2024-11-21T09:21:18.492355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.492370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.492371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.492404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:18.496681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.496697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.497830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13330 TClient is connected to server localhost:13330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:18.584545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.591702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.660170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.677391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:18.689829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.711905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224061727361:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.711936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.819965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.825390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.835080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.841672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.855911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224061727864:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660224061727869:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660224061727871:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:19.095288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.100795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.108184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.115666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.122392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.139946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.146274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.157246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.163843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.171475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.178072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.185060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.192319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.239650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:19.245584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.255039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.262037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.268541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.276144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.282900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.289665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.296 ... 24-11-21T09:21:35.094581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.094596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.094599Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.094619Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.094628Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.094643Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.094651Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.094660Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.094668Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038688;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:35.094710Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:35.094718Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:35.094725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:35.094733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:35.094746Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:35.094755Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:35.094762Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:35.094765Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:35.094772Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:35.094775Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:35.094782Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:35.094785Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:35.094812Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:35.094821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.094835Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.094838Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.094847Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.094850Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.094864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.094873Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.094882Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.094889Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:35.095377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:35.095390Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:35.095399Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:35.095403Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:35.095418Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:35.095428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:35.095436Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:35.095441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:35.095449Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:35.095453Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:35.095459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:35.095468Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:35.095497Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:35.095507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.095523Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.095531Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.095541Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.095550Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.095565Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.095573Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.095583Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.095591Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:35.971140Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439660274426499693:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:35.971188Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> SystemView::QueryStatsRetries [GOOD] >> SystemView::StoragePoolsFields >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::QueryStats >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> 
KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore >> ColumnShardTiers::TieringUsage [GOOD] >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11979, MsgBus: 2577 2024-11-21T09:21:25.392192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660252692860181:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:25.392226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ea/r3tmp/tmpgf9Uty/pdisk_1.dat 2024-11-21T09:21:25.448850Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11979, node 1 2024-11-21T09:21:25.462471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:25.462488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:25.462490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:25.462528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2577 2024-11-21T09:21:25.492423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:25.492449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:25.493546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:25.506130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:25.508185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:25.518218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.534783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.551780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.563641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.688155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252692861722:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.688192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.721057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.726698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.737398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.743735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.750803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.758444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.766538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252692862235:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.766560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252692862240:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.766565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.767119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.771309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660252692862242:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:25.963099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.968907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.975407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.982175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.989179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.006301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.012604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.025151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.039517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.053098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.059869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.074830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.088500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.195470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:26.206201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.217529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.229439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.285609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.298607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.313334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.325657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... 0297949412161:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:35.960006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:35.963168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660297949412163:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:36.155624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.217061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.272821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.286735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.301149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.330331Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.338059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.351112Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.367800Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.377708Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.392430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.407617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.421858Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.499486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:36.508719Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.518313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.532057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.547193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.559803Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.575809Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.588808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.601918Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.616419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.630335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.644546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.659481Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.715023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.771485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.784066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.839705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.853821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.908905Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.917087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.930912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.944475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.953065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.960490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.994364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:37.001441Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.014795Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.028660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.043359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.056568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.063363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.070064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.077463Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.095285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:37.103742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.112945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.127190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2024-11-21T09:19:03.340443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:19:03.340760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:19:03.340776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004744/r3tmp/tmpiiib3c/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32436, node 1 TClient is connected to server localhost:26376 2024-11-21T09:19:03.441834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:19:03.456819Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:19:03.457405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:19:03.457414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:19:03.457417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:19:03.457461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:03.498662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:03.498688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:03.508997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 2024-11-21T09:19:13.629661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:649:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:13.629686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:13.763434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2024-11-21T09:19:13.994669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:789:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:13.994693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:13.994741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:794:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:19:13.995283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:19:14.149796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:796:2637], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:19:14.286114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:19:14.344692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2024-11-21T09:19:14.656313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T09:19:14.972507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:19:15.052426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:19:15.478162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:19:15.753740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 2024-11-21T09:19:37.411565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:1, at schemeshard: 72057594046644480 2024-11-21T09:19:37.795394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.296176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480 2024-11-21T09:19:38.398381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: 
"root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 2024-11-21T09:20:00.055850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715766:0, at schemeshard: 72057594046644480 2024-11-21T09:20:00.604154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2024-11-21T09:20:01.216013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715792:0, at schemeshard: 72057594046644480 2024-11-21T09:20:01.315790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715795:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tiering2 (TYPE TIERING_RU ... 
;self_id=[1:5692:6286];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T09:21:37.591435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037898;self_id=[1:5692:6286];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:515;problem=Background activities cannot be started: no index at tablet; 2024-11-21T09:21:37.707411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2024-11-21T09:21:37.707442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=with_appended.cpp:80;portions=14,;task_id=23deee4-a7ea11ef-81f45043-a1284c7e; 2024-11-21T09:21:37.707514Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=13;before_size=168336;after_size=166456;before_rows=3039;after_rows=3038; 2024-11-21T09:21:37.707532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707560Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T09:21:37.707566Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=11;before_size=166456;after_size=164936;before_rows=3038;after_rows=3037; 2024-11-21T09:21:37.707570Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T09:21:37.707584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707589Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=12;before_size=164936;after_size=163056;before_rows=3037;after_rows=3036; 2024-11-21T09:21:37.707592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T09:21:37.707603Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=19;portion=14;before_size=163056;after_size=164576;before_rows=3036;after_rows=3037; 2024-11-21T09:21:37.707607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T09:21:37.707657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::23deee4-a7ea11ef-81f45043-a1284c7e; 2024-11-21T09:21:37.707670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:19;path_id:19;size:164576;portions_count:6;); 2024-11-21T09:21:37.707675Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=columnshard_impl.cpp:509;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T09:21:37.707686Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:21:37.707695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T09:21:37.707708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=3;drop=0;skip=0; 2024-11-21T09:21:37.707715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=manager.cpp:9;event=lock;process_id=CS::CLEANUP::PORTIONS::27362ea-a7ea11ef-86fa632b-7114680b; 2024-11-21T09:21:37.707725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=columnshard_impl.cpp:1006;background=cleanup;skip_reason=no_changes; 2024-11-21T09:21:37.707733Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T09:21:37.707746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;task_id=23deee4-a7ea11ef-81f45043-a1284c7e;tablet_id=72075186224037897;fline=columnshard_impl.cpp:928;background=ttl;skip_reason=no_changes; 2024-11-21T09:21:37.707801Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037897 Save Batch GenStep: 1:12 Blob count: 1 2024-11-21T09:21:37.707810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:21:37.707827Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=9;external_task_id=23deee4-a7ea11ef-81f45043-a1284c7e;mem=8605;cpu=0; 2024-11-21T09:21:37.707863Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037897 Save Batch GenStep: 1:13 Blob count: 1 2024-11-21T09:21:37.707889Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037897;local_tx_no=31;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;local_tx_no=31;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=19;portion=15;before_size=164576;after_size=166456;before_rows=3037;after_rows=3038; 2024-11-21T09:21:37.707900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;local_tx_no=31;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;commit_tx_id=140737488355338;commit_lock_id=140737488355338;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T09:21:37.707924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;local_tx_no=31;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037897;tx_state=complete;fline=storage.cpp:66;event=granule_locked;path_id=19; 2024-11-21T09:21:37.708149Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:5723:6305];tablet_id=72075186224037897;parent=[1:5687:6282];fline=manager.h:99;event=ask_data;request=request_id=55;19={portions_count=3};; 2024-11-21T09:21:37.708187Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:5723:6305];tablet_id=72075186224037897;parent=[1:5687:6282];fline=columnshard_impl.cpp:954;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 3 portions(portion_id:13;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1880;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1723542278500;tx_id=18446744073709551615;);)(portion_id:12;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1880;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1723542278500;tx_id=18446744073709551615;);)(portion_id:11;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1723542278500;tx_id=18446744073709551615;);));; 2024-11-21T09:21:37.708261Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037897 2024-11-21T09:21:37.708288Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[32] (CS::CLEANUP::PORTIONS) apply at tablet 72075186224037897 2024-11-21T09:21:37.708396Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=2;blobs=2;rows=48;bytes=56144;raw_bytes=54452; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=2;rows=2990;bytes=110312;raw_bytes=3593660; inactive portions=3;blobs=3;rows=3;bytes=5280;raw_bytes=3259; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 72075186224037897 2024-11-21T09:21:37.708402Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037897;self_id=[1:5687:6282];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037897;external_task_id=27362ea-a7ea11ef-86fa632b-7114680b;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037897; 2024-11-21T09:21:37.708431Z node 1 :TX_COLUMNSHARD INFO: 
self_id=[1:5723:6305];tablet_id=72075186224037897;parent=[1:5687:6282];fline=manager.h:99;event=ask_data;request=request_id=56;19={portions_count=1};; Cleaning waiting... Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037896 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037896 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037899 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037899 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037898 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037898 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5773, MsgBus: 12893 2024-11-21T09:21:19.585251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660229805726386:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.585274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004416/r3tmp/tmp5CSf2z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5773, node 1 2024-11-21T09:21:19.636720Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:19.644598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.644610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.644612Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.644645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12893 2024-11-21T09:21:19.686439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.686461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.687547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12893 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.716571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.720704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.735046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.753547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.762378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.875517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660229805727930:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.875542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.902505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.909332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.920846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.934622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.990109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.997631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.005655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660234100695742:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.005689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660234100695747:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.005694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:20.006297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:20.010523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660234100695749:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:20.212914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.218892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.228229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.234729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.242092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.259049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.265540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.277050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.284286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.291353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.297817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.305208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.312276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.362915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:20.370714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.383604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.396653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.410265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.417265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.472331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.480625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.49442 ... 24-11-21T09:21:35.600513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.600522Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.600527Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.600533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.600535Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.600548Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.600556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.600564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.600571Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:35.600726Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:35.600735Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:35.600741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:35.600743Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:35.600751Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:35.600753Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:35.600757Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:35.600760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:35.600764Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:35.600765Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:35.600769Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:35.600771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:35.600788Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:35.600790Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.600799Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.600806Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.600811Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.600814Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.600821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.600827Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.600833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.600834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:35.601614Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:35.601627Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:35.601635Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:35.601640Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:35.601654Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:35.601662Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:35.601670Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:35.601679Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:35.601686Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:35.601695Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:35.601700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:35.601708Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:35.601737Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:35.601750Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:35.601766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:35.601776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:35.601786Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:35.601796Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:35.601813Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:35.601823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:35.601834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:35.601837Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:36.660284Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439660278099423617:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.660346Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin+ColumnStore [GOOD] >> TLocalTests::TestAlterTenant >> 
TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> KqpJoinOrder::Chain65Nodes [GOOD] >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.113058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.113075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.113079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.113082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.113091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.113093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.113107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.113168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.121088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.121114Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.122628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.122692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.122712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.124640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.124709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.124792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2024-11-21T09:20:40.124976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.125624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.125891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.125903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.125915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.125921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.125927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.125965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.127141Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.144007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.144065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.144108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.144144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.144151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.144703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.144730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.144759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.144767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.144771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2024-11-21T09:20:40.144775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.145115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.145123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.145128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.145417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.145424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.145429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.145434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.146002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.146343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.146375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.146524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.146547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.146553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.146600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.146606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.146629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.146639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.146988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.146995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.147019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.147023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.147081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.147086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.147094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.147098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.147103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.147107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.147111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.147115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.147124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.147129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.147133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 2024-11-21T09:21:37.739483Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.739496Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.739500Z node 163 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:37.739505Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T09:21:37.739510Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:37.739881Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.739897Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.739902Z node 163 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:37.739906Z node 163 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T09:21:37.739910Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:37.739923Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:37.740388Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740398Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:37.740407Z node 163 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740440Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T09:21:37.740460Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T09:21:37.740541Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740559Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 700079671400 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740566Z node 163 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740586Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T09:21:37.740595Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T09:21:37.740599Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:37.740608Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:37.740616Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:37.740621Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T09:21:37.740627Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:37.740631Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T09:21:37.740635Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 
2024-11-21T09:21:37.740643Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:37.740649Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T09:21:37.740652Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:21:37.740656Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:21:37.740772Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.740787Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.741158Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:37.741169Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:37.741200Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:21:37.741220Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:37.741225Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T09:21:37.741229Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T09:21:37.741357Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.741367Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.741372Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:37.741376Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:21:37.741380Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:37.741458Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.741467Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.741470Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:37.741474Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:21:37.741477Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:37.741487Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T09:21:37.741491Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [163:122:2148] 2024-11-21T09:21:37.741520Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:37.741525Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:37.741533Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:37.741942Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.742246Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:37.742270Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:21:37.742279Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T09:21:37.742290Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 TestWaitNotification wait txId: 1003 2024-11-21T09:21:37.742757Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:37.742768Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:37.742842Z node 163 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:37.742859Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:37.742864Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [163:725:2687] TestWaitNotification: OK eventTxId 1003 >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying 
to start YDB, gRPC: 5857, MsgBus: 16715 2024-11-21T09:21:23.355469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660244914708389:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:23.355518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043fc/r3tmp/tmpbrfjS8/pdisk_1.dat 2024-11-21T09:21:23.414758Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5857, node 1 2024-11-21T09:21:23.424395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:23.424407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:23.424409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:23.424447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16715 TClient is connected to server localhost:16715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:23.491200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:23.491227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:23.492313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:23.492500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.500669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.514835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.532833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:23.542481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:23.661298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660244914709792:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.661332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.698865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.704989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.714151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.721166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.775478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.786948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.800375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660244914710307:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.800409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.800465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660244914710312:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.801184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.804478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660244914710314:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:24.009479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.016400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.029457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.043396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.050247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.067671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.073687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.090356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.099051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.106467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.113425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.119926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.127157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.182708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:24.194448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.204922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.211083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.219684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.232685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.239634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.254003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:24.26777 ... 72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.537220Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.537231Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.537246Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.537255Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.537265Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.537274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.537289Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.537297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.537308Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.537316Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:37.537533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:37.537543Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:37.537551Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:37.537555Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:37.537570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:37.537580Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:37.537588Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:37.537597Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:37.537604Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:37.537612Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:37.537618Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:37.537627Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.537655Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.537665Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.537679Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.537688Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.537699Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.537708Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.537722Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.537730Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.537739Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.537742Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:37.538866Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:37.538885Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:37.538894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:37.538899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:37.538914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:37.538923Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:37.538932Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:37.538942Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:37.538950Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:37.538959Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:37.538966Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:37.538975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.539005Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.539015Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.539032Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.539041Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.539052Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.539057Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.539071Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.539075Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.539085Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.539089Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038692;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> SystemView::QueryStats [GOOD] >> SystemView::QueryStatsFields >> TLocalTests::TestAlterTenant [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3377, MsgBus: 2959 2024-11-21T09:21:18.468296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660221955222454:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004436/r3tmp/tmp1xrKdP/pdisk_1.dat 2024-11-21T09:21:18.501571Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:18.522110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3377, node 1 2024-11-21T09:21:18.537946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:18.537963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:18.537965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:18.537997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2959 2024-11-21T09:21:18.564984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:18.565013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:18.566082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:18.598095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.608941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.626145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.645861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:18.659841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:18.748255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221955223824:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.748283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.820034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.826069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.834889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.842030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.849565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.856153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:18.866582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221955224337:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.866619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660221955224342:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:18.867509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:18.869220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660221955224344:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.074205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.080123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.087415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.093680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.101123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.117460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.123215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.128800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.136037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.143236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.150273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.156676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.164417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.219292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:19.226759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.234373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.240973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.248124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.255194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.261615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.268876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.276111Z ... 24-11-21T09:21:36.075872Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:36.075892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:36.075901Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:36.075916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:36.075925Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:36.075944Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:36.075953Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:36.075968Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:36.075976Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:36.076008Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:36.076019Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:36.076033Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:36.076041Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:36.076050Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:36.076056Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:36.076061Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:36.076067Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:36.076072Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:36.076078Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:36.076081Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:36.076090Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:36.076107Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:36.076115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:36.076127Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:36.076130Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:36.076136Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:36.076139Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:36.076147Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:36.076154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:36.076160Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:36.076166Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:36.076263Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:36.076274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:36.076282Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:36.076285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:36.076296Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:36.076299Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:36.076306Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:36.076310Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:36.076317Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:36.076321Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:36.076326Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:36.076330Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:36.076353Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:36.076362Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:36.076375Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:36.076383Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:36.076393Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:36.076401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:36.076414Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:36.076422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:36.076430Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:36.076437Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:37.089455Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439660283170967331:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:37.089482Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2024-11-21T09:21:36.130777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660301886238900:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.130816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003578/r3tmp/tmpjn9Jdp/pdisk_1.dat 
2024-11-21T09:21:36.252522Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:36.260544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.268290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.271636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25682, node 1 2024-11-21T09:21:36.335864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.335884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.335886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.335927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:36.413606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.420002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.501894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660301886239364:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.501899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660301886239375:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.501917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.503522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.505074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660301886239378:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:36.729888Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660301886239458:2299] TxId: 281474976715662. Ctx: { TraceId: 01jd70dtpm7d45xeew13ee4fpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MWEyNGItNTQ5YTIwMy0yZjhjM2JjLWZiZmY3NDM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:36.730963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70dtpm7d45xeew13ee4fpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MWEyNGItNTQ5YTIwMy0yZjhjM2JjLWZiZmY3NDM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.753029Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660301886239465:2312], owner: [1:7439660301886239461:2310], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:36.753198Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660301886239465:2312], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:36.753405Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660301886239465:2312], row count: 1, finished: 1 2024-11-21T09:21:36.753417Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660301886239465:2312], owner: [1:7439660301886239461:2310], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:36.757774Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180896728, txId: 281474976715661] shutting down 2024-11-21T09:21:36.979509Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660302748956031:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.979712Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003578/r3tmp/tmptXO6Rj/pdisk_1.dat 2024-11-21T09:21:36.993349Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20349, node 2 2024-11-21T09:21:37.006489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:37.006506Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:37.006509Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:37.006558Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:37.079657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.079686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:37.080815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.082026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:37.285433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.293028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660307043924045:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.293028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660307043924036:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.293048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.293654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:37.301448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660307043924050:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:37.399537Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70dvgc32vgcheg5n5jwy1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWNiOTcwYWYtMjhhZDI1OWUtMTI1ZjcyOGItMzNkM2VhZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:37.400092Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7439660307043924135:2318], owner: [2:7439660307043924131:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:37.404391Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7439660307043924135:2318], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:37.405585Z node 2 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [2:7439660307043924135:2318], row count: 4, finished: 1 2024-11-21T09:21:37.405607Z node 2 :SYSTEM_VIEWS INFO: Scan finished, actor: [2:7439660307043924135:2318], owner: [2:7439660307043924131:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:37.406361Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180897398, t ... t; our snapshot: [step: 1732180898196, txId: 281474976715671] shutting down 2024-11-21T09:21:38.213504Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70dwcqdnejszrx7eppk7fx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmQyMzNmNTUtYzllNGQyMDEtYzg0NzFjM2UtYWUwYzM5ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:38.213948Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439660308468099220:2387], owner: [3:7439660308468099216:2385], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.214163Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439660308468099220:2387], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.218774Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439660308468099220:2387], row count: 3, finished: 1 2024-11-21T09:21:38.218813Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439660308468099220:2387], owner: [3:7439660308468099216:2385], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.219387Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898212, txId: 281474976715673] shutting down 2024-11-21T09:21:38.233390Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd70dwdb2s3tnbt52rgsdvbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzNiM2U4ZmUtZTI0ZGUxODItNmI1NDcxYjEtY2JkMWM0OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:38.233911Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439660308468099252:2396], owner: [3:7439660308468099248:2394], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.234063Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439660308468099252:2396], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.234169Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439660308468099252:2396], row count: 3, finished: 1 2024-11-21T09:21:38.234180Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439660308468099252:2396], owner: [3:7439660308468099248:2394], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.234708Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898233, txId: 281474976715675] shutting down 2024-11-21T09:21:38.247103Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd70dwdv7b2rp9yxhj3rfefk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmJiY2EzY2UtNjQ2Zjk1MmMtNjgzMTM3MWUtNTcxYzI3ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:38.247585Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439660308468099283:2405], owner: [3:7439660308468099280:2403], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.248307Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439660308468099283:2405], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.248467Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439660308468099283:2405], row count: 4, finished: 1 2024-11-21T09:21:38.248480Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439660308468099283:2405], owner: [3:7439660308468099280:2403], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.248982Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898246, txId: 281474976715677] shutting down 2024-11-21T09:21:38.261834Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd70dwe9bpyd4d3kad33ezv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTljNWY5NTctMzAyNGJhMGEtNzQ2MDhlYTUtZWU3ZmJkYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:38.262506Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439660308468099316:2414], owner: [3:7439660308468099312:2412], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.263340Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439660308468099316:2414], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.263608Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439660308468099316:2414], row count: 4, finished: 1 2024-11-21T09:21:38.263617Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439660308468099316:2414], owner: [3:7439660308468099312:2412], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:38.264179Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898258, txId: 281474976715679] shutting down 2024-11-21T09:21:38.620754Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660308706821576:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:38.620975Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003578/r3tmp/tmpCZr0GG/pdisk_1.dat 2024-11-21T09:21:38.635739Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3204, node 4 2024-11-21T09:21:38.651115Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:38.651129Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:38.651132Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:38.651171Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:38.726376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:38.726408Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:38.726857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:38.727337Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:38.928497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.940618Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660308706822428:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.940625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660308706822439:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.940645Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.941157Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:38.945087Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660308706822442:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:39.158007Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70dx3v8vx4av4m41efemqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTQzMWNiNjktNGYzYTUzZGYtMjcxZGFiZDUtZGFmMGQ5Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:39.159022Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439660313001789832:2332], owner: [4:7439660313001789830:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:39.159311Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439660313001789832:2332], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:39.159524Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439660313001789832:2332], row count: 4, finished: 1 2024-11-21T09:21:39.159563Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439660313001789832:2332], owner: [4:7439660313001789830:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:39.159597Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439660313001789838:2335], owner: [4:7439660313001789830:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:39.160151Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439660313001789838:2335], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:39.160314Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439660313001789838:2335], row count: 4, finished: 1 2024-11-21T09:21:39.160322Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439660313001789838:2335], owner: [4:7439660313001789830:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:39.161398Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180899156, txId: 281474976715661] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4932, MsgBus: 17904 2024-11-21T09:21:22.667878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660239099296623:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:22.667890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004404/r3tmp/tmpxbMXJB/pdisk_1.dat 2024-11-21T09:21:22.719266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4932, node 1 2024-11-21T09:21:22.732142Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:22.732156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:22.732158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:22.732203Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17904 
TClient is connected to server localhost:17904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:22.768978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:22.769000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:22.770154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:22.799004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.801932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:22.808956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.828739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:22.848330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:22.858491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:22.972749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660239099298161:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:22.972777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.003502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.009410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.021673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.028036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.034970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.042017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.050604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660243394265970:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.050630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.050651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660243394265975:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:23.051196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:23.055488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660243394265977:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:23.273214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.280151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.287882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.301361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.308963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.333277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.339416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.350479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.364548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.372067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.385609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.394825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.406211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.459834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:23.465540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.476517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.483366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.490743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.497367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.504428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:23.510849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, b ... 72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.927407Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.927411Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.927425Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.927429Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.927438Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.927442Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.927455Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.927459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.927468Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.927470Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:37.927893Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:37.927902Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:37.927913Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:37.927918Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:37.927936Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:37.927940Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:37.927949Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:37.927955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:37.927963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:37.927969Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:37.927974Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:37.927978Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.928017Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.928024Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.928041Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.928046Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.928058Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.928062Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.928079Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.928083Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.928093Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.928097Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:37.928170Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:37.928174Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:37.928182Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:37.928187Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:37.928220Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:37.928224Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:37.928232Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:37.928236Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:37.928245Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:37.928250Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:37.928256Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:37.928259Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:37.928291Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:37.928296Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:37.928311Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:37.928316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:37.928326Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:37.928329Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:37.928344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:37.928348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:37.928357Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:37.928361Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore >> TNodeBrokerTest::ExtendLeaseRestartRace >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PartitionStatsTtlFields |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] Test command err: 2024-11-21T09:21:39.198783Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T09:21:39.236049Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork 
(SchemeCache [1:103:2137]) 2024-11-21T09:21:39.257558Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:39.259599Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:39.260950Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:39.261103Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:39.261490Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:39.261500Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:39.261554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:39.264711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:39.264776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:39.264789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:39.264811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:39.264825Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:39.264839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:39.290515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:39.290565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:39.301393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:39.301441Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:39.301458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:39.301469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:39.301494Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:39.301502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:39.301508Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:39.301515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:39.312444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:39.312506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:39.312767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:39.312777Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:39.314469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:39.315195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:39.315471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00498b/r3tmp/tmpAUOfQR/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T09:21:39.316388Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/00498b/r3tmp/tmpAUOfQR/pdisk_1.dat 2024-11-21T09:21:39.317132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:39.317166Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:39.317180Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:39.317223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:39.317282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:21:39.317761Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:39.317836Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 
PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:39.329049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:39.329274Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:39.329337Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:39.329346Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:39.329377Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:39.329395Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:39.329496Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:39.329505Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:39.329511Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:39.330147Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T09:21:39.331284Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:21:39.331299Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T09:21:39.331455Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T09:21:39.331477Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:39.331484Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:39.331487Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:39.350383Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:39.350407Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:39.357028Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:39.357081Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 5 Network: 1) 2024-11-21T09:21:39.357260Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:39.357268Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:39.357285Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:369:2316] 2024-11-21T09:21:39.357618Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:369:2316] 2024-11-21T09:21:39.357707Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:39.357715Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:39.357719Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:39.359937Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-1 2024-11-21T09:21:39.359966Z node 1 :LOCAL DEBUG: Updated resoure limit: CPU: 10 Memory: 10 Network: 10 2024-11-21T09:21:39.359969Z node 1 
:LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:39.359991Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-2 2024-11-21T09:21:39.359999Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-unknown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 27311, MsgBus: 10960 2024-11-21T09:21:33.803234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660290432717650:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:33.803481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004393/r3tmp/tmpCYjVfv/pdisk_1.dat 2024-11-21T09:21:33.857716Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27311, node 1 2024-11-21T09:21:33.875369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:33.875384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:33.875387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:33.875422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10960 2024-11-21T09:21:33.904896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:33.904923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:33.905655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:33.935728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.945190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:33.963468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.984115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:34.000748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.152429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660294727686483:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.152585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.159775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.215368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.228406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.284191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.291542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.355498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.414577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660294727687031:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.414602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660294727687036:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.414604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.415259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:34.418180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660294727687038:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:34.579202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.585872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.592292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.598646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.606478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8545, MsgBus: 22072 2024-11-21T09:21:35.106304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660296902776300:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:35.106318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004393/r3tmp/tmp8Sy8Iv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8545, node 2 2024-11-21T09:21:35.122687Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:35.123565Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:35.123567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:35.123568Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:35.123594Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22072 TClient is connected to server localhost:22072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:35.206568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:35.206602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:35.207721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:35.209534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:35.213407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.223765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:35.245048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0 ... opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.742214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.749282Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.756246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.763712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.779332Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660307409306363:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.779355Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660307409306368:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.779363Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.780066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:37.783324Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439660307409306370:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:37.971725Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.978751Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.987439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.001630Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.008420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14690, MsgBus: 28162 2024-11-21T09:21:38.496432Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660309321162421:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:38.496449Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004393/r3tmp/tmpqOAUdl/pdisk_1.dat 2024-11-21T09:21:38.512982Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14690, node 5 2024-11-21T09:21:38.519137Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:38.519149Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:38.519151Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:38.519192Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28162 TClient is connected to server localhost:28162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:38.597207Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:38.597236Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:38.598160Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:38.599563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.601003Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:38.602230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:38.616978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:38.643050Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:38.660251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:38.807403Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660309321163960:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.807442Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.816822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.822615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.834746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.848400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.855557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.862712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.878156Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660309321164467:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.878178Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.878204Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660309321164472:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:38.878896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:38.882683Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660309321164474:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:39.087855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.094412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.107134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.114054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.120972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS9+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS9-StreamLookupJoin+ColumnStore >> TSlotIndexesPoolTest::Init [GOOD] >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11582, MsgBus: 8570 2024-11-21T09:21:24.851201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660247868521388:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:24.851278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043f1/r3tmp/tmpnCgFrS/pdisk_1.dat 2024-11-21T09:21:24.911470Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11582, node 1 2024-11-21T09:21:24.920488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:24.920503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:24.920505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:24.920561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8570 2024-11-21T09:21:24.952504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:24.952533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:24.953639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8570 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:24.966128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.975497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.990769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.007757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.017706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.158450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252163490083:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.158475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.185893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.192440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.205023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.212239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.219069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.273078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.284020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252163490601:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.284049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.284092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252163490606:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.284705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.288111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660252163490608:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:25.486091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.540773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.547795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.554994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.569337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.586777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.593015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.604161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.610758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.618446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.625251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.632086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.639106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.691573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:25.697993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.709439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.715788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.723012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.730147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.736687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.751575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.758183 ... =RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:38.960872Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:38.960876Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:38.960889Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:38.960899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:38.960899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:38.960901Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:38.960906Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:38.960910Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:38.960912Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:38.960917Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:38.960919Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:38.961331Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:38.961341Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:38.961346Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:38.961349Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:38.961358Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:38.961360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:38.961365Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:38.961372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:38.961376Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:38.961382Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:38.961387Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:38.961394Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:38.961419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:38.961428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:38.961439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:38.961448Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:38.961454Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:38.961456Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:38.961465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:38.961467Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:38.961473Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:38.961476Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:38.961520Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:38.961526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:38.961531Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:38.961536Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:38.961544Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:38.961550Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:38.961554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:38.961560Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:38.961564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:38.961570Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:38.961574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:38.961580Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:38.961593Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:38.961599Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:38.961607Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:38.961613Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:38.961619Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:38.961624Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:38.961646Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:38.961652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:38.961657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:38.961663Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TTenantPoolTests::TestStateStatic |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality 
|96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 30836, MsgBus: 2572 2024-11-21T09:21:20.863470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660231600235390:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:20.863568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00440a/r3tmp/tmpeOdpF1/pdisk_1.dat 2024-11-21T09:21:20.915813Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30836, node 1 2024-11-21T09:21:20.926752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:20.926764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:20.926766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:20.926796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2572 TClient is connected to server localhost:2572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:20.964733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:20.964763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:20.965887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:20.996865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:20.999999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:21.001392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:21.017093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.034453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.043713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:21.171851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660235895204225:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.171892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.197676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.203728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.215352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.222881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.236501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.243602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.259440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660235895204738:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.259463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.259464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660235895204743:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:21.260148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:21.264093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660235895204745:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:21.464586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.475094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.488737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.495246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.502147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.526936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.532916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.543828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.550823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.558133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.564976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.571654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.579204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.635553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:21.642219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.648769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.655924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.663081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.670648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.676912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:21.684123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.899923Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:37.906077Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660304528816219:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:38.195049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.202119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.211998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.225982Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.240678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.268399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.276870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.289247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.303354Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.316409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.372426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.379255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.393953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.512106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:38.521155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.534080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.591123Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.646257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.660236Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.673808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.688052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.694975Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.750823Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.757893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.771819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.778590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.785750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.792524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.851883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.862829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.876456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.883209Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.890134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.897494Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.904682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.911408Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.918470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.945410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:38.953451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.961115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.974595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:38.989127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.002275Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.057335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.065728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.079473Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.094100Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:39.100697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.114804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:39.128568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> TTenantPoolTests::TestStateStatic [GOOD] >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> TNodeBrokerTest::TestRandomActions |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] Test command err: 2024-11-21T09:21:40.840563Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T09:21:40.877751Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T09:21:40.891443Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:40.893282Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:40.893682Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:40.893781Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:40.894094Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:40.894102Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:40.894133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:40.896059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:40.896109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:40.896122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:40.896143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:40.896156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:40.896169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
2024-11-21T09:21:40.919576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:40.919629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:40.930569Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:40.930633Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:40.930649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:40.930660Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:40.930685Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:40.930694Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:40.930700Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:40.930709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:40.941651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:40.941719Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:40.941930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:40.941940Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:40.943469Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:40.943674Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:40.943914Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/004965/r3tmp/tmpbRCv5V/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T09:21:40.943988Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/004965/r3tmp/tmpbRCv5V/pdisk_1.dat 
2024-11-21T09:21:40.944169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:40.944194Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:40.944226Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:40.944266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:40.944300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:21:40.944813Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:40.944981Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:40.959080Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:40.976165Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:40.976262Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:40.976300Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:40.976330Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:40.976357Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:40.976519Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:40.976526Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:40.980423Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:40.980469Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:40.980603Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:40.980609Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:40.980634Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:365:2314] 2024-11-21T09:21:40.980712Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) 
started tenant /dc-1/users/tenant-1 2024-11-21T09:21:40.980718Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:95:2130] 2024-11-21T09:21:40.981201Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:365:2314] 2024-11-21T09:21:40.981225Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:40.981230Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:40.981232Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] >> TEnumerationTest::TestPublish [GOOD] >> TNodeBrokerTest::NodeNameExpiration >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2024-11-21T09:21:40.825817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:40.825845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:40.825850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:40.825856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:40.825869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:40.825872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:40.825881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:40.825973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:40.828818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:40.828835Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:40.830650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:40.830891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:40.830923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T09:21:40.832031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:40.832138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:40.832238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T09:21:40.832321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T09:21:40.832942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T09:21:40.833212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T09:21:40.833223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046578944 2024-11-21T09:21:40.833238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:40.833246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T09:21:40.833252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:40.833267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.871009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T09:21:40.872948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.873022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T09:21:40.873062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T09:21:40.873071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.873826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T09:21:40.873846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T09:21:40.873937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.873946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T09:21:40.873949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:40.873952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:40.874331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.874341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T09:21:40.874344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:40.874716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.874725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:40.874729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T09:21:40.874734Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:40.875211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:40.875566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:40.878014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:40.878240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T09:21:40.878247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T09:21:40.878253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T09:21:41.043557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T09:21:41.043618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T09:21:41.043629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T09:21:41.043701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:41.043709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T09:21:41.043740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T09:21:41.043752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T09:21:41.044391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T09:21:41.044405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T09:21:41.044448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T09:21:41.044453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T09:21:41.044533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T09:21:41.044542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:41.044554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:41.044559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2024-11-21T09:21:41.044584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:41.044590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:41.044594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:41.044599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:41.044609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T09:21:41.044616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T09:21:41.044621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T09:21:41.045029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T09:21:41.045044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T09:21:41.045049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T09:21:41.045054Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T09:21:41.045059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T09:21:41.045074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T09:21:41.045079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T0 ... 
dified from very-static to static 2024-11-21T09:21:41.226743Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:380:2338] 2024-11-21T09:21:41.226772Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.226777Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.226804Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.226808Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.238117Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 4 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } } } } 2024-11-21T09:21:41.238142Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T09:21:41.238174Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.238189Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.238196Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.238208Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.238712Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } 2024-11-21T09:21:41.238723Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) static slot label modified from static to static-again 2024-11-21T09:21:41.238727Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:380:2338] 2024-11-21T09:21:41.238756Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.238761Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.238786Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: 
NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.238789Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.250145Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 5 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } } } } 2024-11-21T09:21:41.250167Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T09:21:41.250198Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.250213Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.250219Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.250229Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.251048Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } 2024-11-21T09:21:41.251074Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.251080Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.251093Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.251100Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.262482Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 6 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true 
StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } } } } 2024-11-21T09:21:41.262503Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T09:21:41.262535Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.262546Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.262551Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.262556Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.263174Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2024-11-21T09:21:41.263192Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.263195Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.263210Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.263214Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.274672Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 7 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } Items { Kind: 10 Id: 7 Generation: 1 } } } } 2024-11-21T09:21:41.274696Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T09:21:41.274731Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.274748Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: 
"static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.274754Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T09:21:41.274763Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T09:21:41.275591Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2024-11-21T09:21:41.275623Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.275630Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T09:21:41.275647Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T09:21:41.275650Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart >> TNodeBrokerTest::BasicFunctionality >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10386, MsgBus: 24070 2024-11-21T09:21:29.324499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660272939117785:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:29.324507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ba/r3tmp/tmprXo7Z7/pdisk_1.dat 2024-11-21T09:21:29.401928Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10386, node 1 2024-11-21T09:21:29.424396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:29.424409Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:29.424411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:29.424444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24070 2024-11-21T09:21:29.463374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:29.463396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:29.465320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24070 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:29.484623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.493827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.513698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.531828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:29.540993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.648521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272939119333:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.648566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.684543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.693185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.707513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.720649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.735820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.749718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.765142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272939119851:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.765164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.765179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272939119856:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.765884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:29.768937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660272939119858:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:29.979013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.986278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.993388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.000242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.009929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.035310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.042141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.049810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.063860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.077911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.091725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.098762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.113053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.183554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:30.191869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.204016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.218502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.231348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.238106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.245977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.252034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.261 ... 0317885639654:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:40.061756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:40.065337Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660317885639656:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:40.240140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.245920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.254827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.262480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.269306Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.287288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.293865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.304398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.311492Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.318212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.324941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.332376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.338963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.399023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:40.405410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.416965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.430257Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.437465Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.443777Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.450997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.458067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.465417Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.472889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.479252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.486571Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.541581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.549791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.555884Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.563004Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.570426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.577201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.584105Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.591475Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.605565Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.612413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.619212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.626234Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.649674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:40.655968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.668495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.675259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.682898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.696807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.703160Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.757945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.765946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.780963Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:40.788197Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.802840Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.816084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsTables >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore >> TNodeBrokerTest::TestListNodesEpochDeltas |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2024-11-21T09:21:41.784798Z node 2 :TX_PROXY DEBUG: actor# [2:190:2086] Bootstrap 2024-11-21T09:21:41.806333Z node 2 :TX_PROXY DEBUG: actor# [2:190:2086] Become StateWork (SchemeCache [2:200:2089]) 2024-11-21T09:21:41.806468Z node 1 :TX_PROXY DEBUG: actor# [1:189:2134] Bootstrap 2024-11-21T09:21:41.807574Z node 1 :TX_PROXY DEBUG: actor# [1:189:2134] Become StateWork (SchemeCache [1:202:2139]) 2024-11-21T09:21:41.807627Z node 3 :TX_PROXY DEBUG: actor# [3:191:2086] Bootstrap 2024-11-21T09:21:41.808671Z node 3 :TX_PROXY DEBUG: actor# [3:191:2086] Become StateWork (SchemeCache [3:204:2089]) 2024-11-21T09:21:41.818579Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:41.820560Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:41.820929Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:41.821322Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:41.821669Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:41.821678Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:41.821702Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:41.824415Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:41.824485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:41.824500Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:41.824559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:41.824573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:41.824790Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:41.867651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:41.867701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:41.878630Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:41.878690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:41.878706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:41.878718Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:41.878740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:41.878748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:41.878754Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:41.878764Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:41.889715Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:41.889785Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:41.889945Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:41.889951Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:41.891471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:41.891814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} 
Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:41.891896Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 3 DeclarativePDiskManagement: true } 2024-11-21T09:21:41.891917Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T09:21:41.892117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } } } } 2024-11-21T09:21:41.892225Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat 2024-11-21T09:21:41.892232Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat 2024-11-21T09:21:41.892238Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat 2024-11-21T09:21:41.892458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:41.892482Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:41.892498Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:41.892528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:41.892558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:21:41.893030Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:41.893117Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:41.904065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:41.904129Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 3 Devices# [] 2024-11-21T09:21:41.904135Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T09:21:41.906122Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T09:21:41.906281Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T09:21:41.906410Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 46032982631581332 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T09:21:41.908291Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T09:21:41.908432Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T09:21:41.908461Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/00494e/r3tmp/tmpsiuA5s/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10911818859277064256 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T09:21:41.908650Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:41.908709Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:41.908714Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:41.908745Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:41.908750Z node 3 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:41.908780Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:41.908788Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:41.908814Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:41.908833Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:41.908838Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:41.908844Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-2 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:41.908853Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:41.908858Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:41.908864Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:41.908874Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:41.908987Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:41.908997Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:41.909001Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:41.909024Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:439:2285] 2024-11-21T09:21:41.910847Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:21:41.910862Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:425:2282] 2024-11-21T09:21:41.911059Z node 1 :LOCAL 
DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:439:2285] 2024-11-21T09:21:41.911070Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:41.911078Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:41.911084Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:41.911209Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:41.911214Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:41.911250Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:41.911253Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T09:21:41.916286Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:41.916332Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:41.916457Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:41.916462Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:41.916478Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:449:2097] 2024-11-21T09:21:41.916545Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2024-11-21T09:21:41.916549Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:426:2094] 2024-11-21T09:21:41.916652Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T09:21:41.916664Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:41.916715Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:41.916718Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:41.916724Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[3:457:2097] 2024-11-21T09:21:41.916743Z node 3 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-2 2024-11-21T09:21:41.916746Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [3:427:2094] 2024-11-21T09:21:41.917170Z node 3 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[3:457:2097] 2024-11-21T09:21:41.917183Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:449:2097] 2024-11-21T09:21:41.917228Z node 3 :LOCAL DEBUG: 
TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:41.917235Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:41.917237Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:41.917253Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:41.917256Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:41.917258Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2024-11-21T09:21:39.899866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:39.899893Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:39.908385Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:39.908826Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:39.908913Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:39.908920Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:39.908928Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:39.909070Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:39.909736Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:39.909746Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:39.909751Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:39.909754Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:39.909803Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:39.909846Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:39.909865Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:39.909871Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:39.942125Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:39.942176Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T09:21:39.942181Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:39.942192Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:39.952579Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:580:2206], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:39.953014Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:39.953029Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:39.953060Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:39.953174Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:582:2208], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:39.953219Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:533:2179], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:39.953225Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:39.953235Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:39.954043Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:39.954070Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:39.954091Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:583:2185], Recipient [1:544:2185]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:39.954096Z node 1 :NODE_BROKER TRACE: StateWork, 
processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:39.954101Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:39.954105Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:39.954122Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:39.954127Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:39.954219Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:39.954257Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:39.965142Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:39.965172Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:39.965178Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:39.965181Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:39.965240Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T09:21:39.965246Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:39.966093Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:596:2215], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:39.966123Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272040960, Sender [1:533:2179], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvCompactTables 2024-11-21T09:21:39.966128Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvCompactTables 2024-11-21T09:21:39.969648Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:537:2181], Recipient [1:544:2185]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T09:21:39.970004Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:537:2181], Recipient [1:544:2185]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T09:21:39.970381Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:537:2181], Recipient [1:544:2185]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T09:21:40.416466Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:678:2238], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.416612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:40.416621Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.416637Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417054Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:683:2239], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417114Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:684:2240], 
Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417146Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:685:2241], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417157Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:686:2242], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417220Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:683:2239] 2024-11-21T09:21:40.417224Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417232Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417277Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:684:2240] 2024-11-21T09:21:40.417281Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417286Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417306Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:687:2243], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417331Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:688:2244], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417368Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:689:2245], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.417374Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:685:2241] 2024-11-21T09:21:40.417377Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417382Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417400Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:686:2242] 2024-11-21T09:21:40.417403Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417407Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417413Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:687:2243] 2024-11-21T09:21:40.417416Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417421Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417460Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:688:2244] 2024-11-21T09:21:40.417466Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.417471Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.417480Z node 1 
:NODE_BROKER TRACE: StateWork, received event# 2 ... 9:21:40.613025Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:40.624068Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:40.624101Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624126Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.624133Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T09:21:40.624166Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624176Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624184Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624191Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624198Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624255Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624262Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624270Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624279Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:40.624632Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:708:2253], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.624678Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:40.624685Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.624693Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.624744Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:710:2255], Recipient [1:544:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.624759Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:544:2185]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:40.624762Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.624767Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z ... 
rebooting node broker 2024-11-21T09:21:40.624895Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:537:2181], Recipient [1:544:2185]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:40.624934Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T09:21:40.624938Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T09:21:40.627350Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:40.628436Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:40.628556Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete ... OnActivateExecutor tabletId# 72057594037936129 2024-11-21T09:21:40.629541Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:40.629551Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:40.629563Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:40.629633Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:40.629638Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:40.629642Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:40.629646Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:40.629711Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute ... captured cache request ... captured cache request ... sending extend lease request ... waiting for response 2024-11-21T09:21:40.682718Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:40.682823Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:40.682838Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.682864Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:40.682877Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T09:21:40.682907Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:40.682925Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:40.682929Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T09:21:40.682950Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:40.683002Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:757:2290], Recipient [1:718:2258]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.683048Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:533:2179], Recipient [1:718:2258]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T09:21:40.683065Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:40.683069Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T09:21:40.683073Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:40.683075Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T09:21:40.683078Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T09:21:40.683080Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T09:21:40.683086Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 
2024-11-21T09:21:40.683091Z node 1 :NODE_BROKER DEBUG: Update node #1024 host1:1001 lease in database lease=2 expire=1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.694083Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:40.694154Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800024000 Epoch { Id: 2 Version: 3 Start: 3600024000 End: 7200024000 NextEnd: 10800024000 } } 2024-11-21T09:21:40.694179Z node 1 :NODE_BROKER DEBUG: Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T09:21:40.694185Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T09:21:40.694190Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T09:21:40.694194Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T09:21:40.694199Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active ... waiting for epoch update 2024-11-21T09:21:40.694334Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:776:2304], Recipient [1:718:2258]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.694361Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:718:2258]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:40.694368Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.694382Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.838547Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:718:2258], Recipient [1:718:2258]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:40.838572Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:40.838579Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:40.838585Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:40.838602Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:40.838611Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T09:21:40.889664Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:797:2306], Recipient [1:718:2258]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.889767Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:718:2258]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T09:21:40.889777Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.889784Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:40.901269Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:40.901297Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T09:21:40.901317Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:40.901324Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=0 2024-11-21T09:21:40.901362Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T09:21:40.901372Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:40.901519Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:809:2311], Recipient [1:718:2258]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.901544Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:718:2258]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:40.901550Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.901558Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T09:21:40.901608Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:811:2313], Recipient [1:718:2258]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:40.901622Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:533:2179], Recipient [1:718:2258]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:40.901626Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:40.901645Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2024-11-21T09:21:42.146325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:42.146354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:42.152129Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:42.152781Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:42.152950Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.152960Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.152971Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:42.153181Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:42.154307Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:42.154322Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.154327Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.154331Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.154372Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:42.154437Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:42.154456Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.154463Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.186090Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:42.186148Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:42.186154Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:42.186165Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.196471Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:245:2199], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.196872Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.196902Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.196917Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.202562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:42.208128Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:275:2227], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.208228Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" } 2024-11-21T09:21:42.208241Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.208257Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" 2024-11-21T09:21:42.209039Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/SharedDB TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.209091Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: 
"1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:42.209125Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:276:2178], Recipient [1:215:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.209132Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.209141Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.209145Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.209156Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.209161Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: /dc-1/SharedDB 2024-11-21T09:21:42.209268Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:42.209301Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:42.220100Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.220121Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:42.220139Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:42.220144Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:42.220194Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T09:21:42.220201Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.220526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:42.224389Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:312:2257], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.224445Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" } 2024-11-21T09:21:42.224451Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.224465Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" 2024-11-21T09:21:42.224736Z node 1 :NODE_BROKER TRACE: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/ServerlessDB TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } 2024-11-21T09:21:42.224764Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:3 2024-11-21T09:21:42.224776Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:313:2178], Recipient [1:215:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.224780Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.224786Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.224789Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.224798Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.224802Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: /dc-1/ServerlessDB 2024-11-21T09:21:42.224830Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:42.224854Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:42.235786Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.235812Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T09:21:42.235821Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:42.235826Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T09:21:42.235882Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T09:21:42.235891Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::MinDynamicNodeIdShifted >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2024-11-21T09:21:41.128367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T09:21:41.128395Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:41.133068Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:41.134273Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:41.134396Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.134401Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.134410Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:41.134571Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:41.135418Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:41.135445Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.135450Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.135454Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.135615Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:41.135656Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:41.135672Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:41.135677Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:41.170742Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:41.170806Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:41.170813Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:41.170824Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.181378Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:571:2205], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.181796Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039946, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { EpochDuration: 10000000 } } 2024-11-21T09:21:41.181808Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2024-11-21T09:21:41.181816Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.181820Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.181844Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Execute Config { EpochDuration: 10000000 } 2024-11-21T09:21:41.181889Z node 1 :NODE_BROKER DEBUG: Update config in database config=EpochDuration: 10000000 2024-11-21T09:21:41.194387Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Complete 2024-11-21T09:21:41.194425Z node 1 :NODE_BROKER ERROR: Configured lease duration (10.000000s) is too small. Using min. 
value: 300.000000s 2024-11-21T09:21:41.194468Z node 1 :NODE_BROKER TRACE: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2024-11-21T09:21:41.194475Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.194604Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2209], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.194648Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:41.194667Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.194677Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:41.195498Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.195531Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:41.195556Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:576:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.195561Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.195566Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.195570Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.195590Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.195594Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:41.195694Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:41.195737Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:41.206514Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.206539Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:41.206546Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:41.206552Z node 1 
:NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:41.206609Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T09:21:41.206617Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.206750Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:589:2214], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.206766Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:587:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:41.206770Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:41.206783Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } 2024-11-21T09:21:41.206829Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:589:2214], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:41.206863Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:593:2215], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.206873Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:591:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T09:21:41.206876Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:41.206884Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:41.206904Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:593:2215], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:41.206933Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:595:2217], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.206943Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.206946Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.206953Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:41.302521Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:620:2218], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.302581Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:41.302590Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.302608Z node 1 :NODE_BROKER 
TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:41.303113Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:621:2219], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.303158Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:622:2220], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.303221Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:623:2221], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.303261Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:624:2222], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.303302Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:625:2223], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.303334Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:621:2219] 2024-11-21T09:21:41.303340Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.303350Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:41.303369Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:622:2220] 2024-11-21T09:21:41.303373Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.303380Z ... RACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.399265Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:41.399277Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:635:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.399286Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.399291Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.399295Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.399311Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.399314Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:41.399341Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:05:00 UTC 
servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:41.399382Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T09:21:41.410332Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.410358Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T09:21:41.410365Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T09:21:41.410370Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T09:21:41.410421Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7500025000 Name: "slot-1" } 2024-11-21T09:21:41.410427Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.410515Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2024-11-21T09:21:41.410520Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.410529Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z 2024-11-21T09:21:41.410579Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2024-11-21T09:21:41.410581Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.410585Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z 2024-11-21T09:21:41.410679Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:641:2237], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.410686Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.410688Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.410691Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z 2024-11-21T09:21:41.534756Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:535:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:41.534781Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:41.534787Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.534792Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.534812Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:41.534822Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.565619Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T09:21:41.565660Z node 1 :NODE_BROKER 
TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565668Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565801Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:621:2219] 2024-11-21T09:21:41.565806Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565819Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565825Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:623:2221] 2024-11-21T09:21:41.565828Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565832Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565837Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:624:2222] 2024-11-21T09:21:41.565841Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565844Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565850Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:625:2223] 2024-11-21T09:21:41.565853Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565856Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565862Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:622:2220] 2024-11-21T09:21:41.565865Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565869Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565873Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:626:2224] 2024-11-21T09:21:41.565877Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565880Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.565885Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:627:2225] 2024-11-21T09:21:41.565888Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.565891Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:41.576773Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:41.576797Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T09:21:41.576807Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576819Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:05:00.025000Z 2024-11-21T09:21:41.576823Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=1 2024-11-21T09:21:41.576851Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576856Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576862Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for 
epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576867Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576871Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576908Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576917Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576924Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.576931Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.577228Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:658:2243], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.577278Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.577287Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.577296Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.577391Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2024-11-21T09:21:41.577399Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.577405Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.025000Z - 1970-01-01T02:05:00.025000Z - 1970-01-01T02:10:00.025000Z 2024-11-21T09:21:41.577512Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:662:2244], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.577532Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:660:2072], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1057 } 2024-11-21T09:21:41.577537Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:41.577554Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:41.577583Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:662:2244], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5152, MsgBus: 5923 2024-11-21T09:21:27.586783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660263933099223:2190];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T09:21:27.586884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043d9/r3tmp/tmpG4Zn1t/pdisk_1.dat 2024-11-21T09:21:27.649730Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5152, node 1 2024-11-21T09:21:27.676171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.676183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.676185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.676234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:27.686693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.686720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.687825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5923 TClient is connected to server localhost:5923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:27.726035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.728652Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:27.734249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.750079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.767063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:27.777778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.909512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660263933100617:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.909547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.943605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.950373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.957135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.963340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.970572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.984935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.000933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660268228068428:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.000962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.001022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660268228068433:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.001540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.004404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660268228068435:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:28.195871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.202554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.215472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.231295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.243904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.268816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.275908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.286034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.299231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.313121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.321457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.334705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.348726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.405866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:28.413362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.425478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.432658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.439534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.446647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.453431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.459729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... 72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:41.980828Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:41.980834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:41.980842Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:41.980848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:41.980853Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:41.980855Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:41.980862Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:41.980868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:41.980873Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:41.980875Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:41.980913Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:41.980919Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:41.980924Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:41.980926Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:41.980933Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:41.980939Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:41.980943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:41.980945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:41.980949Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:41.980951Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:41.980955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:41.980961Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:41.980974Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:41.980980Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:41.980982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:41.980987Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:41.980988Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:41.980990Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:41.980996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:41.980999Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:41.980999Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:41.981002Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:41.981012Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:41.981015Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:41.981018Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:41.981022Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:41.981025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:41.981027Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:41.981029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:41.981033Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:41.981039Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:41.981042Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:41.981048Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:41.981055Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:41.981084Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:41.981093Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:41.981107Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:41.981115Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:41.981125Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:41.981133Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:41.981146Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:41.981154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:41.981163Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:41.981172Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TLocalTests::TestAddTenant |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore >> TLocalTests::TestAddTenant [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2024-11-21T09:20:50.446232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660104880197308:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.446367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0036b3/r3tmp/tmpThee5U/pdisk_1.dat 2024-11-21T09:20:50.489968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1143, node 1 2024-11-21T09:20:50.501171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.501186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.501188Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.501227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:20:50.546664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.546697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.548112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.568945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.570034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.570057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.570479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.570531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.570538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:20:50.570896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.570910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:20:50.570963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.571295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.572239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850617, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.572264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:20:50.572320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:20:50.572708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.572749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.572761Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:20:50.572772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:20:50.572782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:20:50.572792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:20:50.573291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:20:50.573311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:20:50.573316Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.573331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Triggering split by load TClient is connected to server localhost:63340 2024-11-21T09:20:50.688071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660104880198231:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.688094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.716117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.716286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:20:50.716481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.716495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.717150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.717208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.717278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.717303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:20:50.717384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:20:50.717505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.717522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.717525Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:20:50.717574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.717582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.717583Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:20:50.719472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:20:50.719501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:20:50.719913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:20:50.771765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:20:50.771776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:20:50.771803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:20:50.772231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.772963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850820, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.772974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180850820 2024-11-21T09:20:50.772993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:20:50.773351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.773430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.773455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:20:50.773692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:20:50.773715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:20:50.773724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublis ... ode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850820 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850820 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) 2024-11-21T09:21:40.837838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.8193 2024-11-21T09:21:40.837872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.839 2024-11-21T09:21:40.938109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T09:21:40.938213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:40.938313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.938464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:21:40.938844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T09:21:40.938871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:40.939829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxSplitTablePartition, at tablet72057594046644480 2024-11-21T09:21:40.942291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:21:40.942338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:21:40.942964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:40.944119Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7439660319629738093:8540] 2024-11-21T09:21:40.946289Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2024-11-21T09:21:40.946328Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2024-11-21T09:21:40.946368Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T09:21:40.947312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976710658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710658 TabletId: 72075186224037891 2024-11-21T09:21:40.947337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 131 2024-11-21T09:21:40.947763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:21:40.953186Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2024-11-21T09:21:40.953232Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T09:21:40.953253Z node 1 
:TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T09:21:40.953270Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2024-11-21T09:21:40.953389Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T09:21:40.953548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T09:21:40.954380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976710658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710658 TabletId: 72075186224037890 2024-11-21T09:21:40.954471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976710658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710658 TabletId: 72075186224037889 2024-11-21T09:21:40.954614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 131 -> 132 2024-11-21T09:21:40.955204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:21:40.955282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:21:40.955305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:21:40.955514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:21:40.955529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:21:40.955534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2024-11-21T09:21:40.956815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2024-11-21T09:21:40.956834Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2024-11-21T09:21:40.956834Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T09:21:40.956897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2024-11-21T09:21:40.956917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T09:21:40.956932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T09:21:40.957320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976710658:0 2024-11-21T09:21:40.957451Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:21:40.957481Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:21:40.957564Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:21:40.957635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:21:40.961125Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2024-11-21T09:21:40.961144Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2024-11-21T09:21:40.961290Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2024-11-21T09:21:40.961302Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T09:21:40.961313Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T09:21:40.961450Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T09:21:40.961739Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2024-11-21T09:21:40.961754Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2024-11-21T09:21:41.053823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T09:21:41.053938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:21:41.054316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732180850820 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2024-11-21T09:21:41.948685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:41.948703Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:41.957619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T09:21:41.961827Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:41.962460Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:41.962518Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.962523Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.962530Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:41.962709Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:41.963502Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:41.963513Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.963518Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.963521Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.963697Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:41.963726Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:41.963742Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T09:21:41.963749Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T09:21:41.996024Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:41.996053Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.029000Z 2024-11-21T09:21:41.996057Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:41.996064Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.996121Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:588:2228], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.996386Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.996393Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.996404Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T09:21:41.996476Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:601:2233], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.996513Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:41.996519Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.996529Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:41.997234Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.997265Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:41.997288Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:602:2187], Recipient [1:539:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.997294Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.997298Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.997302Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.997316Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.997321Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:41.997406Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:41.997444Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:42.008161Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.008180Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T09:21:42.008188Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:42.008192Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T09:21:42.008263Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-0" } 2024-11-21T09:21:42.008270Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.008366Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:614:2239], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.008389Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.008393Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.008399Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.008462Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.008470Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:42.008478Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:615:2187], Recipient [1:539:2187]: 
NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.008481Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.008483Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.008486Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.008495Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.008497Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.008515Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:42.008539Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:42.019297Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.019319Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T09:21:42.019326Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:42.019331Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T09:21:42.019384Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-1" } 2024-11-21T09:21:42.019393Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.019491Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:620:2244], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.019514Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.019517Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.019523Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.019595Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.019603Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 
2024-11-21T09:21:42.019612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:621:2187], Recipient [1:539:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019615Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019617Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.019619Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.019628Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.019630Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.019648Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.019656Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-0" } 2024-11-21T09:21:42.019658Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.019694Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:624:2247], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.019705Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.019707Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.019712Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.019738Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.019748Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:42.019756Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:625:2187], Recipient [1:539:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019760Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019763Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.019766Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 
2024-11-21T09:21:42.019770Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.019772Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.019778Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.019785Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-1" } 2024-11-21T09:21:42.019787Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.019818Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:628:2250], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.019828Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.019830Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.019834Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.019856Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.019861Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:42.019868Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:629:2187], Recipient [1:539:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019870Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.019872Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.019873Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.019875Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.019877Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.019882Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.019888Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } 
Expire: 7200029000 Name: "slot-1" } 2024-11-21T09:21:42.019890Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.019927Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:632:2253], Recipient [1:539:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.019943Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:585:2226], Recipient [1:539:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.019947Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.019953Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.019981Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.019989Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:42.019998Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:633:2187], Recipient [1:539:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.020002Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.020005Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.020008Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.020012Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.020014Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.020020Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.020027Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-0" } 2024-11-21T09:21:42.020029Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 >> 
TNodeBrokerTest::NodeNameExpiration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2024-11-21T09:21:43.529303Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T09:21:43.551683Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T09:21:43.562730Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:43.564151Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:43.564505Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:43.564596Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:43.564847Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:43.564853Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:43.564872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:43.566324Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:43.566356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:43.566365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:43.566377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:43.566384Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:43.566409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:43.589304Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:43.589355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:43.600224Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:43.600275Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:43.600292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:43.600303Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:43.600328Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:43.600336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:43.600343Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:43.600351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:43.611298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:43.611357Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:43.611565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:43.611574Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:43.613249Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:43.613470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:43.613781Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/004934/r3tmp/tmpo1uSRe/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T09:21:43.613873Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/004934/r3tmp/tmpo1uSRe/pdisk_1.dat 2024-11-21T09:21:43.614040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:43.614060Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:43.614071Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:43.614097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:43.614124Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 
2024-11-21T09:21:43.614432Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:43.614462Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:43.625367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:43.625542Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:43.625606Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:43.625615Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:43.625645Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:43.625663Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:43.625756Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:43.625764Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:43.625768Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:43.625793Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T09:21:43.626167Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:21:43.626176Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T09:21:43.626322Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T09:21:43.626342Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:43.626350Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:43.626353Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:43.640357Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:43.640383Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:43.645746Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:43.645805Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 1 Network: 1) 2024-11-21T09:21:43.645939Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:43.645945Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:43.645962Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:369:2316] 2024-11-21T09:21:43.646307Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:369:2316] 2024-11-21T09:21:43.646414Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:43.646424Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing 
- CONNECTED 2024-11-21T09:21:43.646427Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:43.648518Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T09:21:43.648607Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T09:21:43.648622Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:43.648680Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:43.648685Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:43.648695Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:402:2336] 2024-11-21T09:21:43.648823Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:402:2336] 2024-11-21T09:21:43.648846Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:43.648852Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:43.648855Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:43.648944Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-unknown to schemeshard 72057594046578944 2024-11-21T09:21:43.648964Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusPathDoesNotExist Path: "/dc-1/users/tenant-unknown" 2024-11-21T09:21:43.648973Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2024-11-21T09:21:43.648986Z node 1 :LOCAL ERROR: Unknown domain dc-3 |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2024-11-21T09:21:41.839209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:41.839233Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:41.847439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T09:21:41.851431Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:41.851925Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:41.851979Z node 1 :NODE_BROKER TRACE: 
TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.851986Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.851993Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:41.852385Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:41.853873Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:41.853887Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.853892Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.853896Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.853917Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:41.853942Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:41.853959Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:41.853965Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:41.886557Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:41.886594Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.028000Z 2024-11-21T09:21:41.886600Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:41.886610Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.886677Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:584:2228], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.887013Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.887023Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.887034Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:41.887110Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:597:2233], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.887147Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:41.887151Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.887160Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:41.887753Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 
72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.887778Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:41.887798Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:598:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.887802Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.887808Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.887811Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.887824Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.887828Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:41.887916Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:41.887954Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:41.898847Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.898874Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T09:21:41.898885Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:41.898890Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T09:21:41.898969Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T09:21:41.898978Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.899129Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:610:2239], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.899164Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:41.899170Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.899178Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:41.899268Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.899281Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:41.899293Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:611:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.899297Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.899302Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.899305Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.899317Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.899321Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:41.899349Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:41.899377Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:41.910040Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.910058Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T09:21:41.910063Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:41.910066Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T09:21:41.910116Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T09:21:41.910125Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.910205Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2244], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.910225Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:41.910230Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.910236Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:41.910327Z node 1 :NODE_BROKER TRACE: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: ( ... tx 2024-11-21T09:21:42.328621Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.328623Z node 1 :NODE_BROKER DEBUG: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.328642Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:19001 to database resolvehost=host4 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=3 authorizedbycertificate=false 2024-11-21T09:21:42.328674Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=7 2024-11-21T09:21:42.339560Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.339581Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:19001 2024-11-21T09:21:42.339586Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 6 to 7 2024-11-21T09:21:42.339589Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:19001 to epoch cache 2024-11-21T09:21:42.339652Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 14400028000 Name: "slot-3" } 2024-11-21T09:21:42.339660Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.339810Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:702:2293], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.339827Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.339832Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.339843Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.028000Z - 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z 2024-11-21T09:21:42.463073Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:535:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.463103Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.463111Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.463116Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.463134Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:42.463142Z node 1 :NODE_BROKER DEBUG: Removing node #1025 from database 2024-11-21T09:21:42.463168Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.493796Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2024-11-21T09:21:42.493818Z node 1 :NODE_BROKER TRACE: 
StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493824Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493914Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:648:2251] 2024-11-21T09:21:42.493919Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493923Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493934Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:649:2252] 2024-11-21T09:21:42.493951Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493954Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493961Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:650:2253] 2024-11-21T09:21:42.493964Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493968Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493974Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:651:2254] 2024-11-21T09:21:42.493977Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493981Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493986Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:653:2256] 2024-11-21T09:21:42.493990Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.493993Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.493998Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:654:2257] 2024-11-21T09:21:42.494001Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.494005Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.494011Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:652:2255] 2024-11-21T09:21:42.494014Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.494018Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:42.504768Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:42.504789Z node 1 :NODE_BROKER DEBUG: Remove node #1025 host2:19001 2024-11-21T09:21:42.504799Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504829Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.028000Z 2024-11-21T09:21:42.504834Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=3 expired=0 2024-11-21T09:21:42.504860Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504869Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504889Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 
1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504895Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504901Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504910Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504916Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504936Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.504942Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.505217Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:716:2298], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.505241Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.505247Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.505253Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.028000Z - 1970-01-01T04:00:00.028000Z - 1970-01-01T05:00:00.028000Z 2024-11-21T09:21:42.505302Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:718:2300], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.505331Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2226], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:42.505335Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.505345Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:42.505412Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.505440Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 
72057594046678944:2 2024-11-21T09:21:42.505453Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:719:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.505457Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.505461Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.505464Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.505472Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.505476Z node 1 :NODE_BROKER DEBUG: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:42.505498Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host5:19001 to database resolvehost=host5 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:42.505529Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T09:21:42.516095Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.516113Z node 1 :NODE_BROKER DEBUG: Added node #1025 host5:19001 2024-11-21T09:21:42.516120Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T09:21:42.516124Z node 1 :NODE_BROKER DEBUG: Add node #1025 host5:19001 to epoch cache 2024-11-21T09:21:42.516172Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000028000 Name: "slot-1" } 2024-11-21T09:21:42.516178Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2024-11-21T09:21:42.386633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:42.386657Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:42.391956Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:42.392507Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:42.392606Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.392614Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts 
new tx 2024-11-21T09:21:42.392625Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:42.392804Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:42.393927Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:42.393938Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.393942Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.393946Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.393963Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:42.394008Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:42.394026Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.394032Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.425875Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:42.425915Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:42.425922Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:42.425945Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.436289Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2205], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.436649Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.436663Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.436678Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.529317Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:598:2206], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.529456Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:42.529464Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.529480Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.529841Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:601:2207], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.529899Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:602:2208], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.529997Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:603:2209], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.530011Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:604:2210], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.530046Z node 1 :NODE_BROKER 
TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:601:2207] 2024-11-21T09:21:42.530050Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530057Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530065Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:605:2211], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.530094Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:602:2208] 2024-11-21T09:21:42.530097Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530103Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530123Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:606:2212], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.530152Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:607:2213], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.530178Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:603:2209] 2024-11-21T09:21:42.530181Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530186Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530207Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:604:2210] 2024-11-21T09:21:42.530210Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530215Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530228Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2211] 2024-11-21T09:21:42.530231Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530236Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530244Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2212] 2024-11-21T09:21:42.530248Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530253Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.530268Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:607:2213] 2024-11-21T09:21:42.530271Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.530276Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.584579Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, 
Sender [1:539:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.584618Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.584625Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.584630Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.584647Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:42.584656Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:42.615308Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T09:21:42.615330Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615334Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615407Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:601:2207] 2024-11-21T09:21:42.615411Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615415Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615422Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:602:2208] 2024-11-21T09:21:42.615425Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615428Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615438Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:604:2210] 2024-11-21T09:21:42.615441Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615444Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615449Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2211] 2024-11-21T09:21:42.615452Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615455Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615460Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2212] 2024-11-21T09:21:42.615463Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615466Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615471Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:607:2213] 2024-11-21T09:21:42.615474Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615477Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.615482Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:603:2209] 2024-11-21T09:21:42.615486Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.615489Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:42.626291Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 
2024-11-21T09:21:42.626318Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:42.626334Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:42.626340Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=0 expired=0 2024-11-21T09:21:42.626354Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:42.626363Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:42.626371Z ... 8944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.831771Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:42.831781Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:727:2270], Recipient [1:688:2270]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.831784Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.831791Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.831793Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.831806Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.831809Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:42.831831Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:1001 to database resolvehost=host4.yandex.net address=1.2.3.7 dc=1 location=DC=1/M=2/R=3/U=7/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=3 authorizedbycertificate=false 2024-11-21T09:21:42.831861Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=7 2024-11-21T09:21:42.842690Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.842715Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:1001 2024-11-21T09:21:42.842725Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 6 to 7 2024-11-21T09:21:42.842730Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:1001 to epoch cache 2024-11-21T09:21:42.842799Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } Expire: 14400025000 Name: "slot-3" } 2024-11-21T09:21:42.842807Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.842947Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:732:2306], Recipient [1:688:2270]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.842972Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.842978Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.842989Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:42.843059Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:734:2308], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.843077Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2024-11-21T09:21:42.843081Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.843087Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:42.843135Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:736:2310], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.843149Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.843153Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.843158Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:42.843206Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:738:2312], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.843221Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2024-11-21T09:21:42.843225Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.843233Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:42.843293Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:740:2314], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.843308Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.843312Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.843318Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:42.986788Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:688:2270], Recipient [1:688:2270]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.986818Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:42.986823Z node 1 :NODE_BROKER 
TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.986826Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.986839Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:42.986847Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.037855Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:758:2315], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.037923Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2024-11-21T09:21:43.037933Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.037939Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:43.049355Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:43.049377Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049393Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2024-11-21T09:21:43.049396Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=4 expired=0 2024-11-21T09:21:43.049428Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049436Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.049605Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:770:2320], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049634Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.049638Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049643Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049701Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:772:2322], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049716Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.049719Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049722Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049756Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:774:2324], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049761Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.049763Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049766Z node 1 :NODE_BROKER 
TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049796Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:776:2326], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049810Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2024-11-21T09:21:43.049813Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049816Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049847Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:778:2328], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049855Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.049857Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049860Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T09:21:43.049896Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:780:2330], Recipient [1:688:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.049905Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:688:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2024-11-21T09:21:43.049907Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.049910Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2024-11-21T09:21:42.050129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:42.050153Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:42.054721Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:42.055115Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:42.055195Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.055201Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.055210Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:42.055351Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:42.056175Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:42.056185Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.056188Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.056190Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.056251Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:42.056288Z node 1 
:NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:42.056301Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:42.056305Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:42.087885Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:42.087916Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T09:21:42.087921Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:42.087929Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.098211Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2205], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.098619Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.098633Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.098646Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:42.098716Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:577:2207], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.098748Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:42.098754Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.098764Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:42.099466Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.099487Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:42.099505Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:578:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.099508Z node 
1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.099512Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.099514Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.099526Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.099529Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:42.099597Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:42.099619Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:42.110402Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.110429Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:42.110436Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:42.110441Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:42.110505Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T09:21:42.110513Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.110644Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2213], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.110666Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:42.110671Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:42.110705Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } 2024-11-21T09:21:42.110751Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:592:2215], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.110765Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.110769Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.110779Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:42.110834Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:594:2217], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.110852Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: 
"1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:42.110856Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:42.110865Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:42.110949Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:42.110962Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:42.110976Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:595:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.110978Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:42.110981Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:42.110983Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:42.110991Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:42.110994Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:42.111013Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:42.111035Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:42.121964Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:42.121991Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T09:21:42.121999Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:42.122004Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T09:21:42.122068Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200024000 Name: "slot-1" } 2024-11-21T09:21:42.122078Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.122227Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:600:2222], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.122250Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender 
[1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListN ... ateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.933077Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #5 2024-11-21T09:21:42.933083Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:771:2310] 2024-11-21T09:21:42.933086Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.933089Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #5 2024-11-21T09:21:42.943968Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:42.943995Z node 1 :NODE_BROKER DEBUG: Node #1026 host3:1001 has expired 2024-11-21T09:21:42.944004Z node 1 :NODE_BROKER DEBUG: Node #1027 host1:1001 has expired 2024-11-21T09:21:42.944012Z node 1 :NODE_BROKER DEBUG: Move to new epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944028Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T05:00:00.024000Z 2024-11-21T09:21:42.944034Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #5 nodes=2 expired=2 2024-11-21T09:21:42.944076Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944084Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944093Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944100Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944107Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944114Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944122Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944129Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944136Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:42.944556Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:810:2337], Recipient [1:651:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.944588Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.944594Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.944600Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944673Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:812:2339], Recipient [1:651:2244]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.944687Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.944690Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.944695Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:42.944748Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:814:2341], Recipient [1:651:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:42.944760Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:42.944764Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:42.944768Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:43.068714Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:651:2244], Recipient [1:651:2244]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:43.068744Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:43.068752Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.068757Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.068778Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:43.068787Z node 1 :NODE_BROKER DEBUG: Removing node #1026 from database 2024-11-21T09:21:43.068810Z node 1 :NODE_BROKER DEBUG: Removing node #1027 from database 2024-11-21T09:21:43.068820Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.099613Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 6 } 2024-11-21T09:21:43.099658Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099665Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099778Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:768:2307] 2024-11-21T09:21:43.099783Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099788Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099802Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:769:2308] 2024-11-21T09:21:43.099806Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099811Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099817Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:770:2309] 2024-11-21T09:21:43.099820Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099823Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 
2024-11-21T09:21:43.099829Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:773:2312] 2024-11-21T09:21:43.099832Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099836Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099841Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:774:2313] 2024-11-21T09:21:43.099845Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099848Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099854Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:771:2310] 2024-11-21T09:21:43.099858Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099861Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.099866Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:772:2311] 2024-11-21T09:21:43.099869Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.099872Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T09:21:43.110878Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:43.110906Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T09:21:43.110915Z node 1 :NODE_BROKER DEBUG: Node #1025 host4:1001 has expired 2024-11-21T09:21:43.110921Z node 1 :NODE_BROKER DEBUG: Remove node #1026 host3:1001 2024-11-21T09:21:43.110927Z node 1 :NODE_BROKER DEBUG: Remove node #1027 host1:1001 2024-11-21T09:21:43.110934Z node 1 :NODE_BROKER DEBUG: Move to new epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.110953Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T06:00:00.024000Z 2024-11-21T09:21:43.110958Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #6 nodes=0 expired=2 2024-11-21T09:21:43.110992Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111001Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111012Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111022Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111030Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111036Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111044Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111053Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 
1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111060Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.111409Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:828:2346], Recipient [1:651:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.111448Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.111455Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.111463Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:43.111525Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:830:2348], Recipient [1:651:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.111537Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:651:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.111541Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.111546Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z >> TNodeBrokerTest::FixedNodeId >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 2287, MsgBus: 3209 2024-11-21T09:21:27.901147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660264226255888:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.901163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043c1/r3tmp/tmpBVrJNP/pdisk_1.dat 2024-11-21T09:21:27.957580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2287, node 1 2024-11-21T09:21:27.973094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.973106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.973108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.973141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3209 2024-11-21T09:21:28.001422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:28.001444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:28.002559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3209 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:28.020471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.030579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.094477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.117187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.127256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:28.205084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660268521224715:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.205129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.237719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.248807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.257318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.311429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.321605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.335037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.353333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660268521225232:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.353361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.353378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660268521225237:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.354157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.361499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660268521225239:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:28.564066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.570770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.579250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.593464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.600405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.667011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.673933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.684656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.691201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.698896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.705630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.712269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.719550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.795379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:28.801607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.856502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.866755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.873088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.880002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.887166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.894014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.949402Z ... 0330056562653:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:43.461016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:43.467373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660330056562655:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:43.612600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.618527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.629420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.635815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.690959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.708640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.715283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.727548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.734292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.741064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.748174Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.755504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.810882Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.867292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:43.874398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.881495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.895425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.902191Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.909591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.923490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.930411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.937259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.945147Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.952366Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.965717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.972577Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.979453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.986358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:43.993484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.000242Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.007405Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.014440Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.021198Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.028278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.035605Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.049499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.056351Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.081186Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:44.088092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.098599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.112656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.119529Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.133563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.140336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.195338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.203363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.264581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:44.271231Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.280671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:44.287364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2024-11-21T09:21:43.241149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:43.241169Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:43.245609Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:43.246053Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:43.246136Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.246145Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.246154Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:43.246313Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:43.247052Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:43.247060Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.247063Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.247066Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.247077Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:43.247109Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:43.247122Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.247127Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.278953Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:43.278993Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:43.278999Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:43.279010Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.289353Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2205], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.289738Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.289748Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.289763Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.289835Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2207], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.289872Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:43.289877Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:43.289889Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:43.290567Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:43.290592Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:43.290612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:576:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:43.290617Z node 1 :NODE_BROKER TRACE: StateWork, 
processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:43.290623Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.290627Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.290645Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:43.290650Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:43.290743Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:43.290781Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:43.301692Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:43.301720Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:43.301728Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:43.301733Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:43.301796Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T09:21:43.301804Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.301977Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:43.302003Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T09:21:43.302009Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T09:21:43.303680Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:43.304526Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:43.304615Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.304621Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.304628Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:43.304676Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:43.304680Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.304683Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.304687Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.304722Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:43.304739Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:43.304777Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:43.304788Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.304806Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:43.304816Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T09:21:43.304829Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:43.304842Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:43.304846Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=1 expired=0 2024-11-21T09:21:43.304860Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:43.305972Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:628:2241], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.306018Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:43.306024Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:43.306035Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:43.306120Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:43.306137Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:43.306150Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:629:2214], Recipient [1:594:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:43.306154Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:43.306158Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.306161Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.306170Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:43.306173Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:43.306193Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 
02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:43.306216Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:43.316873Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:43.316902Z node 1 :NODE_BROKER DEBUG: Added node #1026 host2:1001 2024-11-21T09:21:43.316909Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:43.316914Z node 1 :NODE_BROKER DEBUG: Add node #1026 host2:1001 to epoch cache 2024-11-21T09:21:43.316955Z node 1 : ... :43.420743Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.420751Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:665:2253] 2024-11-21T09:21:43.420755Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.420758Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.420777Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:666:2254] 2024-11-21T09:21:43.420781Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.420786Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.481997Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:594:2214], Recipient [1:594:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:43.482021Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:43.482026Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:43.482031Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:43.482045Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:43.482054Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.522965Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T09:21:43.523007Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523014Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523074Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:660:2248] 2024-11-21T09:21:43.523079Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523083Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523120Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:661:2249] 2024-11-21T09:21:43.523123Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523127Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523144Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient 
[1:662:2250] 2024-11-21T09:21:43.523147Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523151Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523157Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:663:2251] 2024-11-21T09:21:43.523160Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523163Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523168Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:664:2252] 2024-11-21T09:21:43.523171Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523175Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523180Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:665:2253] 2024-11-21T09:21:43.523183Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523186Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.523191Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:666:2254] 2024-11-21T09:21:43.523194Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.523198Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:43.533888Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:43.533921Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.533937Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:43.533942Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=2 expired=0 2024-11-21T09:21:43.533975Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.533986Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.533995Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534002Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534008Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534016Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534021Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534027Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534034Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed 
tx 2024-11-21T09:21:43.534273Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:671:2259], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.534298Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.534301Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.534305Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534335Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:673:2261], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.534344Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:43.534346Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:43.534349Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.534376Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:675:2263], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.534392Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T09:21:43.534395Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:43.534398Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T09:21:43.534402Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:43.534404Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T09:21:43.534407Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T09:21:43.534409Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T09:21:43.534419Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2024-11-21T09:21:43.534424Z node 1 :NODE_BROKER DEBUG: Update node #1024 host1:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.545383Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:43.545462Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2024-11-21T09:21:43.545486Z node 1 :NODE_BROKER DEBUG: Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T09:21:43.545491Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T09:21:43.545495Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T09:21:43.545499Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T09:21:43.545502Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:21:43.545619Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:679:2267], Recipient [1:594:2214]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:43.545642Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2024-11-21T09:21:43.545648Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:43.545654Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1026 2024-11-21T09:21:43.545659Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:43.545662Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) is now active 2024-11-21T09:21:43.545665Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) enqueue tx 2024-11-21T09:21:43.545669Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) starts new tx 2024-11-21T09:21:43.545679Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1026 2024-11-21T09:21:43.545686Z node 1 :NODE_BROKER DEBUG: Update node #1026 host2:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2024-11-21T09:21:43.556733Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:43.556811Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1026 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2024-11-21T09:21:43.556836Z node 1 :NODE_BROKER DEBUG: Extended lease of #1026 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T09:21:43.556842Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) completed tx 2024-11-21T09:21:43.556847Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) unlink from parent 2024-11-21T09:21:43.556851Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1026 2024-11-21T09:21:43.556855Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24077, MsgBus: 4093 2024-11-21T09:21:32.169350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660284475228786:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:32.169709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ab/r3tmp/tmp2A1Nqw/pdisk_1.dat 2024-11-21T09:21:32.227468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24077, node 1 2024-11-21T09:21:32.244257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:32.244275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:32.244277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:32.244318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4093 2024-11-21T09:21:32.272245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2024-11-21T09:21:32.272275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:32.273338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:32.300863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.311233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:32.327512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.345568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.356939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:32.508121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660284475230332:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.508150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.539770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.545216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.555139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.569748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.575835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.586670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.598950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660284475230844:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.598969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660284475230849:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.598974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:32.599561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:32.605459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660284475230851:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:32.867694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.874355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.884396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.891135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.897783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.919123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.928351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.943078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.953704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.961261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.976920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:32.990596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.012812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.071795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:33.078694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.087951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.094239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.101671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.115140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.123404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.143532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.198837 ... 3;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:21:43.981612Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:21:43.981627Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:21:43.981636Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:21:43.981650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:21:43.981664Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:21:43.981677Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.981691Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:21:43.981707Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;self_id=[5:7439660332720887343:5773];tablet_id=72075186224038703;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:21:43.982103Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:43.982116Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:43.982125Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:43.982129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:43.982149Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:43.982157Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:43.982166Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:43.982176Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:43.982183Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:43.982191Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:43.982197Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:43.982206Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:43.982233Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:43.982237Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:43.982247Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:43.982254Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.982261Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:43.982270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:43.982285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:43.982294Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:43.982301Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:43.982308Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:43.982360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:43.982367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:43.982372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:43.982374Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:43.982381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:43.982388Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:43.982392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:43.982394Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:43.982398Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:43.982403Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:43.982407Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:43.982409Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:43.982430Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:43.982439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:43.982453Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:43.982462Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.982468Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:43.982475Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:43.982483Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:43.982489Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:43.982498Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:43.982505Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] 
recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.099006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.099034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.099039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.099043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.099051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.099055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.099062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.099128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.109943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.109963Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.112587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.112672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.112691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.114985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.115040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.115145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.116063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.116356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T09:20:40.116361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.116394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.117778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.134970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.135042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.135099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.135139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.135145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.135770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.135802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.135839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.135847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.135851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.135855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.136266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.136279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.136284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.136615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.136623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.136628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, 
at tablet 72057594046678944 2024-11-21T09:20:40.136633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.137229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.137641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.137680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.137839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.137862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.137868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.137924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.137930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.137954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.137965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.138437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.138453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.138485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.138490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.138550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.138556Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.138565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.138568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T09:20:40.138573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.138577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.138580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.138583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.138595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.138600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.138604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797275Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:21:43.797286Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:21:43.797326Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.797332Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.797335Z node 234 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:43.797338Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T09:21:43.797341Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:43.797349Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:43.797412Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T09:21:43.797432Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:21:43.797447Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797451Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:21:43.797456Z node 234 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 
5000008 2024-11-21T09:21:43.797512Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797524Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 1005022349418 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797531Z node 234 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797546Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T09:21:43.797552Z node 234 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T09:21:43.797555Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:43.797562Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:43.797566Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:43.797570Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T09:21:43.797574Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:21:43.797577Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T09:21:43.797579Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T09:21:43.797585Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:21:43.797588Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T09:21:43.797591Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:21:43.797593Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:21:43.797999Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.798026Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:21:43.798032Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:21:43.798040Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:21:43.798043Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:21:43.798356Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:21:43.798370Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T09:21:43.798412Z node 234 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:43.798415Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:43.798448Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:21:43.798470Z node 234 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:43.798478Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [234:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T09:21:43.798482Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [234:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 2024-11-21T09:21:43.798577Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.798584Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.798588Z node 234 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:43.798590Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:21:43.798593Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:21:43.798665Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.798672Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.798674Z node 234 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:21:43.798676Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:21:43.798679Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:21:43.798685Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T09:21:43.798688Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [234:120:2146] 2024-11-21T09:21:43.798700Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:21:43.798703Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:21:43.798708Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:21:43.799097Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.799323Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:21:43.799338Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T09:21:43.799346Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T09:21:43.799386Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:21:43.799625Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:21:43.799629Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:21:43.799671Z node 234 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:21:43.799682Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:21:43.799685Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [234:864:2801] TestWaitNotification: OK eventTxId 1003 >> KqpJoinOrder::TPCDS9-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore >> TLocalTests::TestRemoveTenantWhileResolving |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11165, MsgBus: 14777 2024-11-21T09:21:27.618025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660261041876622:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:27.618167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043d3/r3tmp/tmpqKaItZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11165, node 1 2024-11-21T09:21:27.689249Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:27.692342Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:27.692357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:27.692358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:27.692387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14777 2024-11-21T09:21:27.720304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:27.720329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:27.721327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:27.738531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.747247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.812945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.832116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.845570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:27.944651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660261041878148:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.944712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:27.950571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.957667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:27.970906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.025017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.033817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.040233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.048278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660265336845960:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.048304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660265336845965:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.048306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:28.048873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:28.053276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660265336845967:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:28.326353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.332805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.342107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.355303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.362380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.387177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.394276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.404358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.458917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.467549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.481368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.495438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.503032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.603634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:28.616004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.630563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.647109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.657455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.670593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.684651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.698919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:28.708 ... 72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:43.768695Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:43.768707Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:43.768737Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:43.768747Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.768759Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:43.768768Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:43.768784Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:43.768794Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:43.768804Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:43.768815Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038701;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:43.768890Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:43.768902Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:43.768909Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:43.768913Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:43.768927Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:43.768937Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:43.768946Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:43.768955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:43.768963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:43.768972Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:43.768978Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:43.768990Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:43.769024Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:43.769035Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:43.769052Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:43.769062Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.769073Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:43.769082Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:43.769096Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:43.769105Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:43.769116Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:43.769125Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038689;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:43.769314Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:43.769330Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:43.769339Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:43.769348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:43.769365Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:43.769375Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:43.769385Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:43.769394Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:43.769401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:43.769408Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:43.769412Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:43.769419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:43.769439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:43.769447Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:43.769457Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:43.769464Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:43.769470Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:43.769479Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:43.769499Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:43.769510Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:43.769520Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:43.769527Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TNodeBrokerTest::SingleDomainModeBannedIds >> SystemView::TopPartitionsFields [GOOD] >> SystemView::TopPartitionsFollowers >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::TestListNodes >> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] Test command err: 2024-11-21T09:21:45.765716Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T09:21:45.796438Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T09:21:45.808344Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:45.810178Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:45.810642Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:45.810780Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:45.811141Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:45.811150Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:45.811178Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:45.813553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:45.813600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:45.813612Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:45.813636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:45.813648Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:45.813661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:45.837043Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:45.837093Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:45.847900Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:45.847947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:45.847963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:45.847970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:45.847991Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:45.847996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:45.848001Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:45.848006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:45.858716Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:45.858767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:45.858935Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:45.858945Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:45.859948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:45.860100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:45.860299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/004905/r3tmp/tmpvLK5ML/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T09:21:45.860361Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/004905/r3tmp/tmpvLK5ML/pdisk_1.dat 2024-11-21T09:21:45.860492Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:45.860510Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:45.860521Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:45.860550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:45.860586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 
2024-11-21T09:21:45.860844Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:45.860874Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:45.871618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:45.871742Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:45.871782Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:45.871787Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:45.871807Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:45.871822Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:45.871909Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:45.871918Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:45.871924Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:45.871949Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T09:21:45.872290Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:21:45.872300Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T09:21:45.872406Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T09:21:45.872420Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:45.872426Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:45.872428Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:45.888783Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:45.888804Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:45.888843Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T09:21:45.892712Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:45.892741Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Missing task for /dc-1/users/tenant-1 2024-11-21T09:21:45.892780Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T09:21:45.892795Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): 
Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:45.892868Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:45.892872Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:45.892898Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:396:2337] 2024-11-21T09:21:45.893035Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:396:2337] 2024-11-21T09:21:45.893047Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:45.893052Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:45.893054Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> TDynamicNameserverTest::TestCacheUsage >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::FixedNodeId [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> OlapEstimationRowsCorrectness::TPCDS87 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2024-11-21T09:21:44.902282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:44.902304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:44.910126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T09:21:44.914349Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:44.914847Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:44.914907Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.914914Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.914921Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:44.915279Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:44.916267Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:44.916278Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.916282Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.916286Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.916306Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:44.916333Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:44.916351Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:44.916357Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:44.917801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:21:44.953080Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:44.953126Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.028000Z 2024-11-21T09:21:44.953133Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:44.953143Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.953199Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2252], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.953547Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:44.953557Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:44.953570Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T09:21:44.953659Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:621:2257], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.953700Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:44.953705Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:44.953715Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:44.954394Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:44.954419Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" 
Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:44.954438Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:622:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.954441Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.954445Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.954447Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.954460Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:44.954463Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:44.954546Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:44.954586Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:44.965478Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:44.965503Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T09:21:44.965514Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:44.965519Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T09:21:44.965575Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T09:21:44.965583Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.965713Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:634:2263], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.965742Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:44.965747Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:44.965757Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:44.965850Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:44.965862Z node 1 :NODE_BROKER TRACE: 
Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:44.965875Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:635:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.965879Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.965883Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.965887Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.965898Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:44.965902Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:44.965926Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:44.965962Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:44.976842Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:44.976867Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T09:21:44.976874Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:44.976892Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T09:21:44.976946Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T09:21:44.976957Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.977089Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:640:2268], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.977111Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" } 2024-11-21T09:21:44.977118Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:44.977128Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" 2024-11-21T09:21:44.977461Z node 1 :NODE_BROKER TRACE: Handle TEvTx ... 
NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.977495Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.977499Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.977503Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.977516Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:44.977520Z node 1 :NODE_BROKER DEBUG: Registration request from host3:19001 (not fixed) tenant: /dc-1/yet-another-database 2024-11-21T09:21:44.977548Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host3:19001 to database resolvehost=host3 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:44.977584Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T09:21:44.988354Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:44.988375Z node 1 :NODE_BROKER DEBUG: Added node #1026 host3:19001 2024-11-21T09:21:44.988380Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T09:21:44.988383Z node 1 :NODE_BROKER DEBUG: Add node #1026 host3:19001 to epoch cache 2024-11-21T09:21:44.988429Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T09:21:44.988451Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.988557Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:653:2274], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.988580Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" } 2024-11-21T09:21:44.988585Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:44.988591Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" 2024-11-21T09:21:44.988695Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/yet-another-database TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:44.988708Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database": scope id# <72057594046678944:3>: serviced subdomain# 
72057594046678944:3 2024-11-21T09:21:44.988722Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:654:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.988726Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.988731Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.988734Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.988743Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:44.988747Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/yet-another-database 2024-11-21T09:21:44.988772Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:44.999555Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:44.999617Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T09:21:44.999626Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.999752Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:659:2279], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:44.999798Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:44.999804Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:44.999814Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:44.999914Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:44.999929Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:44.999943Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:660:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 
2024-11-21T09:21:44.999948Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:44.999953Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.999956Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.999965Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:44.999969Z node 1 :NODE_BROKER DEBUG: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:44.999998Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:19001 to database resolvehost=host4 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:45.000028Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=5 2024-11-21T09:21:45.010864Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.010886Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:19001 2024-11-21T09:21:45.010891Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 4 to 5 2024-11-21T09:21:45.010895Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:19001 to epoch cache 2024-11-21T09:21:45.010942Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T09:21:45.010948Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.011044Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:665:2284], Recipient [1:535:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.011061Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:605:2250], Recipient [1:535:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.011066Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.011072Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.011149Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.011161Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:45.011175Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, 
Sender [1:666:2187], Recipient [1:535:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.011179Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.011183Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.011186Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.011212Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.011215Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:45.011238Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=2 authorizedbycertificate=false 2024-11-21T09:21:45.022227Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.022286Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-2" } 2024-11-21T09:21:45.022292Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 20568, MsgBus: 12031 2024-11-21T09:21:24.967690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660249552113664:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:24.967982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ec/r3tmp/tmpes1AgK/pdisk_1.dat 2024-11-21T09:21:25.026400Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20568, node 1 2024-11-21T09:21:25.037431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:25.037446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:25.037448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:25.037488Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12031 2024-11-21T09:21:25.067737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:25.067769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:25.068879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12031 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:25.102553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.104339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:25.309803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660253847081561:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.309827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.339965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.401700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660253847081665:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.401721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.404034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.411744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660253847081742:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.411765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.414187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.423790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660253847081819:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.423816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.423878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660253847081824:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.424536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.428473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660253847081826:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } Trying to start YDB, gRPC: 27598, MsgBus: 9152 2024-11-21T09:21:25.867477Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660252114106525:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:25.867665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043ec/r3tmp/tmpYAszVY/pdisk_1.dat 2024-11-21T09:21:25.876112Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27598, node 2 2024-11-21T09:21:25.887058Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:25.887072Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:25.887073Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:25.887115Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9152 TClient is connected to server localhost:9152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:25.967845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:25.967874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:25.969045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:25.969090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.972584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:25.984973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.999357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:26.010734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.163556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660256409075350:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:26.163577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:26.167679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.175651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:26.186139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... 0340808748070:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:45.060298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:45.063190Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660340808748072:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:45.229901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.237221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.246683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.260526Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.268099Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.293249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.300113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.309636Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.324048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.337325Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.344237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.351246Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.358210Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.456464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:45.463814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.470353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.477799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.484463Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.491330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.498405Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.505589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.512465Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.520237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.533839Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.548188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.561783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.569213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.583418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.596751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.610951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.624895Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.638608Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.652994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.666576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.673794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.688056Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.701666Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.736287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:45.743317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.750275Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.757486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.764064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.779002Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.792446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.799409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.806041Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.820631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
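Note on the block above: the KqpJoinOrder::CanonizedJoinOrderLookupBug run creates several dozen tables, so the schemeshard repeats the same benign warning ("Operation part proposed ok, but propose itself is undo unsafe") once per ESchemeOpCreateTable suboperation. When skimming a log like this, it can be quicker to tally the warnings by suboperation type than to read them line by line. The helper below is a hypothetical standalone utility (not part of ya or YDB tooling) that does that for a saved log such as ya_log.txt; it only assumes the "suboperation type: <name>," pattern visible in these lines.

    // Hypothetical log helper (not part of ya or YDB): tally schemeshard warnings
    // by "suboperation type" so repeated benign warnings are easy to summarize.
    // Usage: ./tally < ya_log.txt
    #include <iostream>
    #include <map>
    #include <string>

    int main() {
        const std::string kMarker = "suboperation type: ";
        std::map<std::string, int> counts;
        std::string line;
        while (std::getline(std::cin, line)) {
            for (size_t pos = line.find(kMarker); pos != std::string::npos;
                 pos = line.find(kMarker, pos + kMarker.size())) {
                size_t start = pos + kMarker.size();
                size_t end = line.find(',', start);  // the type name ends at the next comma
                counts[line.substr(start, end - start)]++;
            }
        }
        for (const auto& [type, n] : counts)
            std::cout << type << ": " << n << '\n';
    }
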
2024-11-21T09:21:45.827940Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.883120Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:45.890582Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2024-11-21T09:21:45.081861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:45.081884Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:45.086725Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:45.087213Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:45.087331Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.087339Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.087355Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:45.087518Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:45.088318Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:45.088331Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.088336Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.088340Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.088395Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:45.088446Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:45.088466Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:45.088473Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:45.120503Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:45.120536Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T09:21:45.120542Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:45.120549Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.130881Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:571:2205], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.131301Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:45.131316Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.131334Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:45.131419Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2207], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.131464Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1" } 2024-11-21T09:21:45.131471Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.131481Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1" 2024-11-21T09:21:45.132197Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.132240Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:45.132257Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:574:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.132261Z node 1 :NODE_BROKER TRACE: StateWork, processing 
event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.132265Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.132268Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.132286Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.132289Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (fixed) tenant: dc-1 2024-11-21T09:21:45.132307Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=NEVER servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:45.132338Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:45.143301Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.143329Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:45.143336Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:45.143341Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:45.143402Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18446744073709551615 Name: "slot-0" } 2024-11-21T09:21:45.143414Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.143553Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:586:2213], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.143581Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:45.143587Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:45.143604Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18446744073709551615 Name: "slot-0" } } 2024-11-21T09:21:45.143653Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:588:2215], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.143669Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T09:21:45.143674Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:45.143679Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T09:21:45.143684Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:45.143688Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T09:21:45.143691Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T09:21:45.143695Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T09:21:45.143708Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2024-11-21T09:21:45.143724Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 
2024-11-21T09:21:45.143738Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 18446744073709551615 Epoch { Id: 1 Version: 2 Start: 24000 End: 3600024000 NextEnd: 7200024000 } } 2024-11-21T09:21:45.143742Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T09:21:45.143746Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T09:21:45.143749Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T09:21:45.143752Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:21:45.143813Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2217], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.143831Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:45.143836Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.143844Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:45.143937Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.143952Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:45.143965Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:591:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.143969Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.143973Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.143976Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.143981Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.143984Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:45.143999Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.144011Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" 
Rack: "3" Unit: "4" } Expire: 18446744073709551615 N ... t2:1001 to epoch cache 2024-11-21T09:21:45.155441Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200024000 Name: "slot-1" } 2024-11-21T09:21:45.155448Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.155569Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:602:2227], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.155613Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1" } 2024-11-21T09:21:45.155620Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.155630Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1" 2024-11-21T09:21:45.155710Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.155722Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:45.155734Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:603:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.155736Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.155739Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.155741Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.155752Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.155755Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (fixed) tenant: dc-1 2024-11-21T09:21:45.155765Z node 1 :NODE_BROKER DEBUG: Fix ID for node: #1025 host2:1001 2024-11-21T09:21:45.166737Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.166764Z node 1 :NODE_BROKER DEBUG: Fix ID for node #1025 host2:1001 2024-11-21T09:21:45.166819Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } 
Expire: 18446744073709551615 Name: "slot-1" } 2024-11-21T09:21:45.166829Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.166961Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2232], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.166983Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T09:21:45.166988Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:45.167003Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2024-11-21T09:21:45.167043Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:610:2234], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.167054Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2024-11-21T09:21:45.167058Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:45.167062Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1025 2024-11-21T09:21:45.167066Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:45.167068Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) is now active 2024-11-21T09:21:45.167070Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) enqueue tx 2024-11-21T09:21:45.167073Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) starts new tx 2024-11-21T09:21:45.167087Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1025 2024-11-21T09:21:45.167099Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:45.167109Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 3 Start: 24000 End: 3600024000 NextEnd: 7200024000 } } 2024-11-21T09:21:45.167112Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) completed tx 2024-11-21T09:21:45.167115Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) unlink from parent 2024-11-21T09:21:45.167117Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1025 2024-11-21T09:21:45.167119Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:21:45.167160Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:612:2236], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.167169Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T09:21:45.167172Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:45.167180Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { 
DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2024-11-21T09:21:45.167211Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:614:2238], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.167226Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:45.167230Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.167235Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:45.167313Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.167321Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:45.167331Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:615:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.167333Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.167336Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.167338Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.167343Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.167345Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:45.167356Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.167364Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2024-11-21T09:21:45.167366Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.167401Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:618:2241], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.167411Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 
2024-11-21T09:21:45.167413Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:45.167415Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1025 2024-11-21T09:21:45.167417Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:45.167419Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) is now active 2024-11-21T09:21:45.167421Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) enqueue tx 2024-11-21T09:21:45.167423Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) starts new tx 2024-11-21T09:21:45.167425Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1025 2024-11-21T09:21:45.167428Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:45.167434Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 3 Start: 24000 End: 3600024000 NextEnd: 7200024000 } } 2024-11-21T09:21:45.167437Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) completed tx 2024-11-21T09:21:45.167439Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) unlink from parent 2024-11-21T09:21:45.167440Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1025 2024-11-21T09:21:45.167442Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2024-11-21T09:21:45.996749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:45.996773Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:46.001312Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:46.001757Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.001839Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.001846Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.001855Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.002062Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.002591Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.002605Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.002610Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.002614Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.002631Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.002672Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:46.002690Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.002697Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.024326Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.024364Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2024-11-21T09:21:46.024371Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:46.024381Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.034717Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:198:2196], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.035262Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039946, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { BannedNodeIds { From: 1025 To: 1032 } } } 2024-11-21T09:21:46.035279Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2024-11-21T09:21:46.035287Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.035291Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.035315Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Execute Config { BannedNodeIds { From: 1025 To: 1032 } } 2024-11-21T09:21:46.035351Z node 1 :NODE_BROKER DEBUG: Update config in database config=BannedNodeIds { From: 1025 To: 1032 } 2024-11-21T09:21:46.046155Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Complete 2024-11-21T09:21:46.046198Z node 1 :NODE_BROKER TRACE: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2024-11-21T09:21:46.046204Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.046293Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:202:2200], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.046306Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.046312Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.046321Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.046348Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:204:2202], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.046374Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.046377Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.046383Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: 
"1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.046951Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.046969Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.046986Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:205:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.046990Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.046993Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.046996Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.047008Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.047010Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.047084Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:46.047114Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:46.058049Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.058076Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:46.058097Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:46.058102Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:46.058162Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2024-11-21T09:21:46.058172Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.058322Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:217:2208], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.058361Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.058371Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 
2024-11-21T09:21:46.058381Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.058465Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.058478Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.058489Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:218:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.058493Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.058498Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.058501Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.058515Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.058519Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.058546Z node 1 :NODE_BROKER DEBUG: Adding node #1033 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:46.058577Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:46.069403Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.069441Z node 1 :NODE_BROKER DEBUG: Added node #1033 host2:1001 2024-11-21T09:21:46.069449Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:46.069453Z node 1 :NODE_BROKER DEBUG: Add node #1033 host2:1001 to epoch cache 2024-11-21T09:21:46.069509Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1033 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } 2024-11-21T09:21:46.069517Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.069637Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:223:2213], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.069671Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.6" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: 
"6" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:2 ... DE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:171:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.322700Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.322706Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.322710Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.322727Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:46.322735Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.353264Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T09:21:46.353287Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.353294Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:46.364136Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:46.364161Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T09:21:46.364173Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.364190Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.364196Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=2 expired=1 2024-11-21T09:21:46.364247Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.364256Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.364391Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:290:2270], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.364420Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.364427Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.364434Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.364476Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:292:2272], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.364496Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:46.364501Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:46.364517Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:46.364554Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:294:2274], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.364579Z node 1 
:NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.364583Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.364593Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.364688Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.364701Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.364713Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:295:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.364717Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.364721Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.364725Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.364738Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.364741Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.364746Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T09:21:46.364759Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.364764Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2024-11-21T09:21:46.364767Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.364899Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:164:2173], Recipient [1:171:2177]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:46.364925Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T09:21:46.364929Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T09:21:46.366832Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:46.367628Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.367697Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.368039Z node 1 :NODE_BROKER 
TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.368050Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.368064Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.368105Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.368108Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.368111Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.368113Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.368178Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.368280Z node 1 :NODE_BROKER DEBUG: Loaded config: BannedNodeIds { From: 1024 To: 1029 } BannedNodeIds { From: 1031 To: 1032 } 2024-11-21T09:21:46.368292Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.368313Z node 1 :NODE_BROKER DEBUG: Added expired node #1024 host1:1001 2024-11-21T09:21:46.368328Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T09:21:46.368338Z node 1 :NODE_BROKER DEBUG: Added node #1030 host3:1001 2024-11-21T09:21:46.368343Z node 1 :NODE_BROKER DEBUG: Loaded node #1030 host3:1001 expiring Thu, 01 Jan 1970 03:00:00 UTC 2024-11-21T09:21:46.368349Z node 1 :NODE_BROKER DEBUG: Added node #1033 host2:1001 2024-11-21T09:21:46.368354Z node 1 :NODE_BROKER DEBUG: Loaded node #1033 host2:1001 expiring Thu, 01 Jan 1970 03:00:00 UTC 2024-11-21T09:21:46.368364Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.368380Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.368385Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=2 expired=1 2024-11-21T09:21:46.368404Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.369384Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:338:2305], Recipient [1:304:2278]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.369434Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:304:2278]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.369441Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.369450Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.369544Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.369558Z node 1 :NODE_BROKER TRACE: Finished resolving 
tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.369571Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:339:2278], Recipient [1:304:2278]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.369575Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.369580Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.369582Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.369590Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.369593Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.369600Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T09:21:46.369608Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.369614Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2024-11-21T09:21:46.369617Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2024-11-21T09:21:46.348347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:46.348371Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:46.352945Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:46.353327Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.353398Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.353403Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.353410Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.353541Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.353940Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.353949Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.353953Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.353956Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.353968Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.354001Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:46.354016Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.354023Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.375243Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.375274Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2024-11-21T09:21:46.375279Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:46.375285Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.467001Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:207:2196], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.467298Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:46.467307Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.467317Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.467359Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:209:2198], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.467383Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.467387Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.467393Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.467861Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.467878Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.467896Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:210:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.467899Z node 1 :NODE_BROKER TRACE: 
StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.467902Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.467904Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.467916Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.467919Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.467985Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:46.468010Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:46.478730Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.478751Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:46.478756Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:46.478759Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:46.478805Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2024-11-21T09:21:46.478814Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.478922Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:223:2203], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.478935Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:221:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:46.478938Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:46.478950Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } } 2024-11-21T09:21:46.478988Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:223:2203], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:46.479030Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:228:2204], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.479040Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:226:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T09:21:46.479044Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:46.479052Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:46.479065Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:228:2204], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:46.479092Z node 1 :NODE_BROKER TRACE: StateWork, 
received event# 269877761, Sender [1:231:2205], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.479102Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:229:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T09:21:46.479105Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:46.479111Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:46.479120Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:231:2205], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:46.479137Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 1 } 2024-11-21T09:21:46.479140Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.479147Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.479176Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 2 } 2024-11-21T09:21:46.479178Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.479182Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.479203Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:233:2207], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.479211Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.479213Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.479216Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.550274Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:168:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.550297Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.550303Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.550307Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.550322Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:46.550329Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.580757Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T09:21:46.580798Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.580804Z node 1 :NODE_BROKER 
DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.580914Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:240:2210], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.580945Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.580966Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.580976Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.581067Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.581081Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.581092Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:241:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.581096Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.581101Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.591845Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:46.591866Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.591881Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.023000Z 2024-11-21T09:21:46.591887Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T09:21:46.591915Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.591924Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.591945Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.591996Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.592000Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.592023Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 
expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:46.592060Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T09:21:46.602790Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.602812Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T09:21:46.602817Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T09:21:46.602820Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T09:21:46.602865Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 10800023000 Name: "slot-1" } 2024-11-21T09:21:46.602871Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.602942Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2024-11-21T09:21:46.602946Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.602954Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.603034Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:250:2218], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.603046Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.603048Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.603051Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.653826Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:168:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.653848Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.653852Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.653855Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.653870Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:46.653877Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.684298Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T09:21:46.684320Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.684325Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T09:21:46.694962Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:46.694983Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T09:21:46.694991Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.5 1970-01-01T02:00:00.023000Z - 
1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.695002Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T09:21:46.695005Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=1 2024-11-21T09:21:46.695026Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T09:21:46.695032Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2024-11-21T09:21:44.982332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:44.982350Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:44.991424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T09:21:44.995107Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T09:21:44.995983Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:44.996052Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.996059Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:44.996066Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:44.996288Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:44.997002Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:44.997031Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:44.997036Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:44.997039Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx FAKE_COORDINATOR: Erasing txId 101 2024-11-21T09:21:44.997167Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:44.997220Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:44.997237Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2024-11-21T09:21:44.997244Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2024-11-21T09:21:45.030919Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:45.030954Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2024-11-21T09:21:45.030959Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:45.030966Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.031026Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:584:2226], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.031360Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:45.031371Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.031383Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2024-11-21T09:21:45.031467Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:597:2231], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.031503Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.031507Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.031518Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.032122Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.032147Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:45.032168Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:598:2187], Recipient [1:537:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.032173Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.032178Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.032182Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.032196Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.032199Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:45.032292Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:45.032331Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:45.043094Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.043113Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T09:21:45.043122Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:45.043127Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T09:21:45.043170Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200026000 Name: "slot-0" } 2024-11-21T09:21:45.043176Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.043297Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:610:2237], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.043328Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.043334Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.043344Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.043431Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.043443Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:45.043457Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:611:2187], Recipient [1:537:2187]: 
NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.043461Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.043466Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.043469Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.043478Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.043482Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:45.043504Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:45.043530Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:45.054311Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.054334Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T09:21:45.054342Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:45.054347Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T09:21:45.054394Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200026000 Name: "slot-1" } 2024-11-21T09:21:45.054406Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.054511Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2242], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.054542Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.054547Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.054575Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.054652Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: ( ... 
processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630513Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.630517Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:656:2256] 2024-11-21T09:21:45.630519Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630521Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.630525Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:657:2257] 2024-11-21T09:21:45.630526Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630528Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.630534Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:658:2258] 2024-11-21T09:21:45.630537Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630540Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.630545Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:659:2259] 2024-11-21T09:21:45.630548Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630551Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.630557Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:660:2260] 2024-11-21T09:21:45.630560Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.630564Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T09:21:45.641325Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:45.641347Z node 1 :NODE_BROKER DEBUG: Remove node #1024 host1:19001 2024-11-21T09:21:45.641352Z node 1 :NODE_BROKER DEBUG: Remove node #1025 host2:19001 2024-11-21T09:21:45.641357Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641372Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.026000Z 2024-11-21T09:21:45.641376Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=2 expired=0 2024-11-21T09:21:45.641400Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641405Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641411Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641415Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641421Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641428Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 
2024-11-21T09:21:45.641434Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641440Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641448Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.641684Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:716:2296], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.641707Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:45.641711Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:45.641715Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2024-11-21T09:21:45.641754Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:718:2298], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.641775Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.641778Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.641783Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.641853Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.641861Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:45.641872Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:719:2187], Recipient [1:537:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.641874Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.641877Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.641880Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.641889Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 
2024-11-21T09:21:45.641891Z node 1 :NODE_BROKER DEBUG: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:45.641907Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host5:19001 to database resolvehost=host5 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:45.641935Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T09:21:45.652663Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.652681Z node 1 :NODE_BROKER DEBUG: Added node #1024 host5:19001 2024-11-21T09:21:45.652686Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T09:21:45.652690Z node 1 :NODE_BROKER DEBUG: Add node #1024 host5:19001 to epoch cache 2024-11-21T09:21:45.652731Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000026000 Name: "slot-0" } 2024-11-21T09:21:45.652737Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:45.652848Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:724:2303], Recipient [1:537:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:45.652864Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:581:2224], Recipient [1:537:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T09:21:45.652869Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:45.652875Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T09:21:45.652952Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:45.652960Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T09:21:45.652969Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:725:2187], Recipient [1:537:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.652975Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:45.652977Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:45.652979Z node 1 :NODE_BROKER 
TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:45.652989Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:45.652991Z node 1 :NODE_BROKER DEBUG: Registration request from host6:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T09:21:45.653009Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host6:19001 to database resolvehost=host6 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:45.653033Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=10 2024-11-21T09:21:45.663835Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:45.663857Z node 1 :NODE_BROKER DEBUG: Added node #1025 host6:19001 2024-11-21T09:21:45.663864Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 9 to 10 2024-11-21T09:21:45.663869Z node 1 :NODE_BROKER DEBUG: Add node #1025 host6:19001 to epoch cache 2024-11-21T09:21:45.663921Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000026000 Name: "slot-1" } 2024-11-21T09:21:45.663928Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TLocalTests::TestAddTenantWhileResolving >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] Test command err: 2024-11-21T09:21:47.368728Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T09:21:47.393761Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T09:21:47.402781Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:47.403865Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:47.404166Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:47.404277Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:21:47.404541Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:47.404547Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:21:47.404567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:21:47.406174Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:21:47.406215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:21:47.406232Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:21:47.406250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:21:47.406261Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
2024-11-21T09:21:47.406273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:47.428715Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:21:47.428760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:47.439431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:21:47.439463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:47.439474Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:21:47.439481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:47.439497Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:21:47.439503Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:47.439507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:21:47.439512Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:47.450252Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:21:47.450310Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:21:47.450482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:21:47.450489Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:21:47.452090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:21:47.452295Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:21:47.452543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/0048d4/r3tmp/tmpOwnc8n/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 
2024-11-21T09:21:47.452623Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/0048d4/r3tmp/tmpOwnc8n/pdisk_1.dat 2024-11-21T09:21:47.452801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:21:47.452824Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:47.452839Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:21:47.452871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:21:47.452915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:21:47.453241Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:21:47.453271Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:21:47.464073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:21:47.464243Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:21:47.464297Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:21:47.464303Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:21:47.464327Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:21:47.464341Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:21:47.464411Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:47.464417Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:47.464421Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:47.464441Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T09:21:47.464735Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:21:47.464741Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T09:21:47.464837Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T09:21:47.464850Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:47.464856Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:47.464859Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 
2024-11-21T09:21:47.481194Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T09:21:47.481215Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T09:21:47.481257Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T09:21:47.486536Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T09:21:47.486581Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:47.486690Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:47.486696Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:47.486710Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:396:2337] 2024-11-21T09:21:47.486754Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T09:21:47.486762Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:21:47.486810Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:21:47.486814Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:21:47.486821Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:402:2339] 2024-11-21T09:21:47.486992Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:396:2337] 2024-11-21T09:21:47.487017Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:47.487023Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:47.487026Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:21:47.487038Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:402:2339] 2024-11-21T09:21:47.487056Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:21:47.487059Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:21:47.487061Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> KqpScan::CustomWindow >> KqpScan::Offset >> KqpScan::SingleKey >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending >> KqpSplit::AfterResultMultiRange+Ascending >> KqpScan::TopSort >> KqpScan::TwoAggregatesOneFullFrameWindow >> KqpSplit::IntersectionLosesRange+Ascending >> KqpScan::EarlyFinish >> KqpScan::RightJoinSimple >> KqpScan::MultipleResults >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> 
KqpScan::UnionBasic >> KqpScan::UnionAggregate >> KqpScan::TaggedScalar >> KqpScan::StreamLookupByPkPrefix >> KqpScan::UnionWithPureExpr >> KqpScan::IsNullPartial >> KqpSplit::BorderKeys+Ascending >> KqpScan::Grep ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14448, MsgBus: 3371 2024-11-21T09:21:24.715232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660248504794454:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:24.715289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043f2/r3tmp/tmpgQSbgY/pdisk_1.dat 2024-11-21T09:21:24.768656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14448, node 1 2024-11-21T09:21:24.780294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:24.780305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:24.780307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:24.780338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3371 2024-11-21T09:21:24.815745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:24.815784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:24.816927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:24.844733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:24.846914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:24.849762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.874353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.890663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:24.900818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:25.026561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252799763150:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.026609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.056602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.065017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.072753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.078763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.086082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.093406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.108544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252799763663:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.108560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.108626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660252799763668:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:25.109330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:25.113645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660252799763670:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:25.370220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.375805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.387344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.393780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.401128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.418979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.424923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.436344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.443092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.450240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.457355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.464222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.471417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.526672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:25.533450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.541654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.555486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.562284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.569011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.576009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:25.583189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:46.088936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:46.092199Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660345645886738:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:46.383708Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.389470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.401350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.415137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.422380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.437381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.443383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.450273Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.505394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.560443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.569316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.576722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.582869Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.633753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:46.640732Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.653597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.660250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.666810Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.674207Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.681302Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.687849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.695049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.702798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.757826Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.765378Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.772067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.779544Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.786139Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.792767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.800073Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.807135Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.813768Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.821203Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.828359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.835065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.842176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.849251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.870323Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:46.875435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.883974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.890998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.897714Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.905024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.912552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.918879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.925986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.941657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:46.947811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.954263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:46.961831Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 >> KqpScan::UnionThree >> SystemView::GroupsFields [GOOD] >> SystemView::DescribeSystemFolder >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 >> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore >> KqpScan::NullInKey >> KqpScan::TaggedScalar [GOOD] >> KqpScan::TooManyComputeActors >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] >> KqpScan::MultipleResults [GOOD] >> KqpScan::MiltiExprWithPure >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps >> KqpScan::UnionBasic [GOOD] >> KqpScan::UnionMixed >> KqpScan::UnionWithPureExpr [GOOD] >> KqpScan::YqlTableSample >> KqpScan::Grep [GOOD] >> KqpScan::GrepByString >> KqpScan::SingleKey [GOOD] >> KqpScan::SimpleWindow >> KqpScan::TopSort [GOOD] >> TNodeBrokerTest::TestListNodes [GOOD] >> KqpScan::TopSortOverSecondaryIndexRead >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] >> KqpSplit::IntersectionLosesRange+Descending >> KqpScan::RightJoinSimple [GOOD] >> KqpScan::RightOnlyJoinSimple >> KqpScan::CustomWindow [GOOD] >> KqpScan::CrossJoinOneColumn >> KqpScan::UnionThree [GOOD] >> KqpScan::UnionSameTable >> KqpScan::Offset [GOOD] >> KqpScan::Order >> KqpScan::StreamLookupByPkPrefix [GOOD] >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization >> KqpScan::IsNullPartial [GOOD] >> KqpScan::Join ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 62565, MsgBus: 19708 2024-11-21T09:21:19.186911Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660226470732318:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:19.186926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00441c/r3tmp/tmplLmDkK/pdisk_1.dat 2024-11-21T09:21:19.237820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62565, node 1 2024-11-21T09:21:19.249357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:19.249369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:19.249371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:19.249401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:19708 TClient is connected to server localhost:19708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:19.287599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:19.287621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:19.288707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:19.319728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.332749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.348083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.364039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.375286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:19.457338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226470733852:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.457375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.484880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.491630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.500514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.507099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.513860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.521123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.529802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226470734352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.529831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660226470734357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.529831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:19.530406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:19.534181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660226470734359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:19.734504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.744519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.756674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.769386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.787130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.797748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.810908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.826173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.839540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.853443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.867597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.881164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.895810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.910060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.923460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.937561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.951515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.960290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.972061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:19.986411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.000918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:20.014 ... 72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.706518Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.706528Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.706544Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.706553Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.706564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.706574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.706587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.706596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.706606Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.706614Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:47.706715Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:47.706728Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:47.706737Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:47.706741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:47.706756Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:47.706759Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:47.706767Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:47.706777Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:47.706784Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:47.706793Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:47.706799Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:47.706808Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.706837Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.706848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.706864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.706873Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.706884Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.706894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.706909Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.706917Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.706928Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.706936Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:47.707418Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:47.707432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:47.707441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:47.707451Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:47.707466Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:47.707475Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:47.707483Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:47.707492Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:47.707500Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:47.707509Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:47.707515Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:47.707525Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.707556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.707566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.707581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.707591Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.707606Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.707615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.707630Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.707638Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.707648Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.707657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpSplit::AfterResultMultiRange+Unspecified >> KqpScan::UnionAggregate [GOOD] >> KqpScan::UdfFailure >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2024-11-21T09:21:46.274310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:46.274333Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:46.279191Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 
2024-11-21T09:21:46.279664Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.279753Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.279760Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.279768Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.279944Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.280906Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.280918Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.280922Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.280926Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.280941Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.280988Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:46.281003Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.281010Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.313102Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.313142Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:46.313149Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:46.313158Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.323447Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:579:2205], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.323691Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.323699Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.323709Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.323765Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:581:2207], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.323794Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:532:2178], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:46.323799Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:46.323810Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:46.324481Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:46.324502Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:46.324520Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:582:2184], Recipient [1:543:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.324525Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:46.324529Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.324533Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.324546Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:46.324550Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:46.324625Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:46.324653Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:46.335523Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:46.335551Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:46.335558Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:46.335563Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T09:21:46.335617Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T09:21:46.335624Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.335797Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:536:2180], Recipient [1:543:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:46.335822Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T09:21:46.335827Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T09:21:46.337353Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:46.338419Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.338506Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.338511Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.338519Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.338550Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.338553Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 
2024-11-21T09:21:46.338555Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.338557Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.338612Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.338630Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.338662Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:46.338671Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.338691Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T09:21:46.338704Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T09:21:46.338714Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.338728Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T09:21:46.338733Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=1 expired=0 2024-11-21T09:21:46.338746Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.339575Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:634:2241], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.339600Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.339617Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339624Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.442592Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:657:2242], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.442660Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:46.442666Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.442677Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.442976Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:660:2243], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443001Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:661:2244], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443026Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:662:2245], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443057Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:663:2246], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443083Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:664:2247], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443109Z node 1 :NODE_BROKER TRACE: StateWork, received event# 
269877761, Sender [1:665:2248], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443121Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:660:2243] 2024-11-21T09:21:46.443124Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.443128Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T09:21:46.443144Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:666:2249], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.443155Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:661:2244] 2024-11-21T09:21:46.443157Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.443161Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-0 ... poch #6.7 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T09:21:47.230676Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:753:2296], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.230683Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:47.230685Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.230688Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.7 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T09:21:47.353797Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:600:2214], Recipient [1:600:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:47.353829Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:47.353842Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:47.353850Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:47.353874Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:47.353886Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.384549Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 7 } 2024-11-21T09:21:47.384572Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384577Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384651Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:660:2243] 2024-11-21T09:21:47.384654Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384657Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384661Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:661:2244] 2024-11-21T09:21:47.384663Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 
2024-11-21T09:21:47.384665Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384669Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:662:2245] 2024-11-21T09:21:47.384671Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384675Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384680Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:663:2246] 2024-11-21T09:21:47.384683Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384686Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384692Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:664:2247] 2024-11-21T09:21:47.384695Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384698Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384702Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:665:2248] 2024-11-21T09:21:47.384705Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384709Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.384714Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:666:2249] 2024-11-21T09:21:47.384716Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.384718Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.395415Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:47.395435Z node 1 :NODE_BROKER DEBUG: Move to new epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395448Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T07:00:00.025000Z 2024-11-21T09:21:47.395452Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #7 nodes=0 expired=0 2024-11-21T09:21:47.395461Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395469Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395476Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395482Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395488Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395494Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395500Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395507Z node 1 
:NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395514Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:47.395727Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:770:2302], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.395750Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:47.395755Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.395760Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395803Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:772:2304], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.395814Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:47.395816Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.395819Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2024-11-21T09:21:47.395845Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:774:2306], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.395860Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T09:21:47.395863Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T09:21:47.395877Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T09:21:47.395905Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:776:2308], Recipient [1:600:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.395923Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:532:2178], Recipient [1:600:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:47.395925Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:47.395932Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:47.396005Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] 
ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:47.396015Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:47.396023Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:777:2214], Recipient [1:600:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:47.396026Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:47.396029Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:47.396031Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:47.396040Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:47.396043Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T09:21:47.396062Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 08:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:47.396088Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T09:21:47.406812Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:47.406834Z node 1 :NODE_BROKER DEBUG: Added node #1026 host2:1001 2024-11-21T09:21:47.406842Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T09:21:47.406847Z node 1 :NODE_BROKER DEBUG: Add node #1026 host2:1001 to epoch cache 2024-11-21T09:21:47.406917Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 28800025000 Name: "slot-0" } 2024-11-21T09:21:47.406926Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] >> KqpSplit::AfterResultMultiRange+Descending >> KqpSplit::BorderKeys+Ascending [GOOD] >> KqpSplit::BorderKeys+Descending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2024-11-21T09:21:46.114679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:46.114703Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:46.119178Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:46.119638Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:46.119734Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.119741Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.119751Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:46.119896Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:46.120574Z node 
1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:46.120586Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.120591Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.120594Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.120649Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:46.120690Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T09:21:46.120707Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.120714Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.152952Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:46.152993Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T09:21:46.152999Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:46.153010Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:46.163379Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:579:2205], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.163724Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:46.163734Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.163749Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.256645Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:604:2206], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.256710Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T09:21:46.256717Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.256731Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257166Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:605:2207], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257183Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:606:2208], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257264Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:607:2209], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257302Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2210], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257326Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2207] 2024-11-21T09:21:46.257331Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257339Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257411Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2208] 2024-11-21T09:21:46.257415Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257421Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257431Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:607:2209] 2024-11-21T09:21:46.257435Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257441Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257459Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:609:2211], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257501Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:610:2212], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257515Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:611:2213], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:46.257536Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:608:2210] 2024-11-21T09:21:46.257540Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257546Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257565Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:609:2211] 2024-11-21T09:21:46.257568Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257574Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257580Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:610:2212] 2024-11-21T09:21:46.257587Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257592Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.257609Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:611:2213] 2024-11-21T09:21:46.257614Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.257619Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.308694Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:543:2184], Recipient [1:543:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.308720Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:46.308727Z node 1 
:NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:46.308732Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:46.308751Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:46.308761Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.2 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:46.339402Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T09:21:46.339421Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339428Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339531Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2207] 2024-11-21T09:21:46.339536Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339541Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339555Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:608:2210] 2024-11-21T09:21:46.339559Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339563Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339569Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:609:2211] 2024-11-21T09:21:46.339572Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339576Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339582Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:610:2212] 2024-11-21T09:21:46.339585Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339589Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339595Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:611:2213] 2024-11-21T09:21:46.339598Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339602Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339608Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2208] 2024-11-21T09:21:46.339611Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339615Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.339621Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:607:2209] 2024-11-21T09:21:46.339625Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:46.339628Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T09:21:46.350268Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:46.350291Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.2 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:46.350306Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 
1970-01-01T02:00:00.024000Z 2024-11-21T09:21:46.350312Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=0 expired=0 2024-11-21T09:21:46.350325Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:46.350352Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T09:21:46.350361Z ... ROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:47.517980Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.548727Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 7 } 2024-11-21T09:21:47.548758Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548765Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548867Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2207] 2024-11-21T09:21:47.548872Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548887Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548917Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2208] 2024-11-21T09:21:47.548920Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548924Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548929Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:607:2209] 2024-11-21T09:21:47.548932Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548936Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548941Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:608:2210] 2024-11-21T09:21:47.548944Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548948Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548953Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:609:2211] 2024-11-21T09:21:47.548956Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548959Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548964Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:610:2212] 2024-11-21T09:21:47.548967Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548970Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T09:21:47.548975Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:611:2213] 2024-11-21T09:21:47.548978Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.548981Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 
2024-11-21T09:21:47.559893Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:47.559917Z node 1 :NODE_BROKER DEBUG: Move to new epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559938Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T07:00:00.024000Z 2024-11-21T09:21:47.559942Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #7 nodes=0 expired=0 2024-11-21T09:21:47.559953Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559959Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559965Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559969Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559973Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559979Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559982Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559987Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.559992Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:47.560161Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:801:2296], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.560184Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:47.560189Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.560193Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.560236Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:803:2298], Recipient [1:543:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:47.560246Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:532:2178], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:47.560248Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.560251Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.714116Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:543:2184], Recipient [1:543:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:47.714136Z node 1 :NODE_BROKER TRACE: StateWork, 
processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T09:21:47.714140Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:47.714143Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:47.714157Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T09:21:47.714164Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.744671Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:543:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 8 } 2024-11-21T09:21:47.744688Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744694Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744782Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:605:2207] 2024-11-21T09:21:47.744787Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744792Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744814Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:606:2208] 2024-11-21T09:21:47.744816Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744818Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744822Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:607:2209] 2024-11-21T09:21:47.744824Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744826Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744829Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:608:2210] 2024-11-21T09:21:47.744831Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744833Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744837Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:609:2211] 2024-11-21T09:21:47.744838Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744840Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744843Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:610:2212] 2024-11-21T09:21:47.744845Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744847Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.744850Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:611:2213] 2024-11-21T09:21:47.744852Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:47.744854Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T09:21:47.755570Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T09:21:47.755592Z node 1 :NODE_BROKER DEBUG: Move to new epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 
2024-11-21T09:21:47.755604Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T08:00:00.024000Z 2024-11-21T09:21:47.755607Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #8 nodes=0 expired=0 2024-11-21T09:21:47.755617Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755622Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755626Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755632Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755636Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755644Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755648Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755653Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755656Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z - 1970-01-01T09:00:00.024000Z 2024-11-21T09:21:47.755661Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3987, MsgBus: 2526 2024-11-21T09:21:33.529815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660286568530494:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00439d/r3tmp/tmpRUDJew/pdisk_1.dat 2024-11-21T09:21:33.570823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:33.590200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3987, node 1 2024-11-21T09:21:33.608360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:33.608374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:33.608376Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:33.608406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2526 TClient is connected to server localhost:2526 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:33.664284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:33.664309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:33.664607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.665383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:33.671308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.687002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.703858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.714333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.895967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660286568531891:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.896015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.902231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.958274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.968964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.977883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.991812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.007060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.029179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660290863499700:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.029200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.029230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660290863499705:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.029873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:34.031629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660290863499707:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:34.220735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.230966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.242774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.257371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.270870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.303356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.309770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.319351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.325936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.333114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.351341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.361764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.375945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.437714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:34.444800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.452587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.467326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.473397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.480106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.487257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.494342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.501070Z ... 72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.829868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.829876Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.829890Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.829898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.829908Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.829916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.829930Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.829938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.829947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.829954Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:47.830886Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:47.830893Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:47.830899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:47.830904Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:47.830908Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:47.830913Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:47.830914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:47.830918Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:47.830925Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:47.830928Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:47.830933Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:47.830935Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:47.830937Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:47.830940Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:47.830947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:47.830948Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:47.830951Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:47.830953Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:47.830958Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:47.830959Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:47.830963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.830963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:47.830969Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:47.830974Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:47.830991Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.830996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.831003Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:47.831007Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:47.831019Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.831023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:47.831028Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.831028Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:47.831039Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.831040Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:47.831044Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.831050Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:47.831060Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.831064Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:47.831068Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.831069Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:47.831079Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.831079Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:47.831082Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:47.831088Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpScan::NullInKey [GOOD] >> KqpScan::NullInKeySuffix >> KqpScan::MiltiExprWithPure [GOOD] >> KqpScan::LongStringCombiner >> KqpScan::UnionMixed [GOOD] >> KqpScan::Order [GOOD] >> KqpScan::TooManyComputeActors [GOOD] >> KqpScan::YqlTableSample [GOOD] >> KqpScan::GrepByString [GOOD] >> KqpScan::GrepLimit >> KqpScan::SimpleWindow [GOOD] >> KqpScan::Join [GOOD] >> KqpScan::Join2 >> KqpSplit::IntersectionLosesRange+Descending [GOOD] >> KqpScan::RightOnlyJoinSimple [GOOD] >> SystemView::DescribeSystemFolder [GOOD] >> KqpScan::UnionSameTable [GOOD] >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TooManyComputeActors [GOOD] Test command err: Trying to start YDB, gRPC: 4196, MsgBus: 32568 2024-11-21T09:21:48.152367Z 
node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660352661871372:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.203620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048bf/r3tmp/tmpUEtI8a/pdisk_1.dat 2024-11-21T09:21:48.233400Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4196, node 1 2024-11-21T09:21:48.279243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.279257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.279258Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.279287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:48.311196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.311230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.312419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32568 TClient is connected to server localhost:32568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.378071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.382810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.404388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.425990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.439628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.508064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352661872772:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.508091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.677467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.691722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352661873288:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352661873293:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.692304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.696390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660352661873295:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.904446Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660352661873599:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6v0cnz5chnxwh0rphzp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIyOWM1YjItMmY5NjJlMTYtODlhMDEzOGMtMWJjYmRkOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.912405Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908903, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 14605, MsgBus: 3942 2024-11-21T09:21:49.015401Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660356544327656:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.015602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048bf/r3tmp/tmpC1NCgr/pdisk_1.dat 2024-11-21T09:21:49.026052Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14605, node 2 2024-11-21T09:21:49.032688Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.032704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.032707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.032752Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3942 TClient is connected to server localhost:3942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.115528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.115557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.116642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.117757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.128960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.137378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.154342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.164955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.358937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356544329212:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.358964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.364413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.371019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.384014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.398956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.412041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.426257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.444317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356544329705:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.444341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.444498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356544329710:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.445387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.452587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356544329712:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.632029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.746872Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmU5NzRmZGMtOWYzZWQ2YTMtYjUwYzNjZjAtYWI4MTgzNmY=, ActorId: [2:7439660356544330225:2479], ActorState: ExecuteState, TraceId: 01jd70e7jw6qw9nppvtjzq8w6y, Create QueryResponse for error on request, msg: 2024-11-21T09:21:49.746992Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909788, txId: 281474976715673] shutting down
: Warning: Type annotation, code: 1030
:7:13: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:8:18: Warning: At function: AssumeColumnOrderPartial, At function: Aggregate, At function: Filter, At function: Coalesce
:9:67: Warning: At function: And
:9:39: Warning: At function: <
:9:46: Warning: At function: -
:9:46: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
: Error: Requested too many execution units: 14, code: 2029 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionMixed [GOOD] Test command err: Trying to start YDB, gRPC: 14168, MsgBus: 27054 2024-11-21T09:21:48.189515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660352670260750:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.189549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004896/r3tmp/tmpIvpfyt/pdisk_1.dat 2024-11-21T09:21:48.251954Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14168, node 1 2024-11-21T09:21:48.276140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:48.290700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.290736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.291822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27054 TClient is connected to server localhost:27054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.384671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.393025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.395608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.421293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.482624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.496530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.526430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352670262305:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.526456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.677505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.691814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352670262819:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352670262824:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.692369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.696326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660352670262826:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.923811Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660352670263161:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6tr3asnzp0v21jv0gb9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkMzA4M2YtOGI0NTY2MGQtOWQxZjBhMDEtNWFiMjUyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.935446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908969, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 23471, MsgBus: 19170 2024-11-21T09:21:49.079051Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660356977619999:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004896/r3tmp/tmp1Sr7tr/pdisk_1.dat 2024-11-21T09:21:49.083653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 23471, node 2 2024-11-21T09:21:49.094232Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:49.095264Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.095275Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.095277Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.095310Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19170 TClient is connected to server localhost:19170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.179023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.179056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.180189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.180839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.184437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.195048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.218938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.229282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.412289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356977621397:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.412397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.415290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.422678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.432188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.486845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.495202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.510092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.530005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356977621904:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.530023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.530062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356977621909:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.530787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.536489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356977621911:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.718781Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909760, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Order [GOOD] Test command err: Trying to start YDB, gRPC: 4919, MsgBus: 7126 2024-11-21T09:21:48.136850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660354615796054:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a2/r3tmp/tmp4FRsm9/pdisk_1.dat 2024-11-21T09:21:48.234807Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:48.239431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.239455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.241511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4919, node 1 2024-11-21T09:21:48.276226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7126 TClient is connected to server localhost:7126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.372341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.391024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.441230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.465604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.482144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.483555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.507738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354615797379:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.507762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.637834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.656006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.680517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354615797892:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354615797897:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.690579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660354615797899:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.923080Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908962, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 22116, MsgBus: 64243 2024-11-21T09:21:49.231979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660358737308935:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.232018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a2/r3tmp/tmpHei2Iq/pdisk_1.dat 2024-11-21T09:21:49.242935Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22116, node 2 2024-11-21T09:21:49.251785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.251797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.251799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.251835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64243 TClient is connected to server localhost:64243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.332146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.332175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.333315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.333958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.339387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.350550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.377743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.390290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.533075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660358737310463:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.533106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.538338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.545826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.558867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.572551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.578955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.586395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.594700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660358737310975:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.594721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.595043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660358737310980:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.595784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.599211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660358737310982:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.795547Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909794, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::YqlTableSample [GOOD] Test command err: Trying to start YDB, gRPC: 8794, MsgBus: 26812 2024-11-21T09:21:48.187063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351859048117:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.187141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b1/r3tmp/tmpS7iZc5/pdisk_1.dat 2024-11-21T09:21:48.276969Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8794, node 1 2024-11-21T09:21:48.303127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.303136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.303138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.303175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26812 2024-11-21T09:21:48.352517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.352545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.353516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.371960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.380101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.388138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.406540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.465944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.521485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.565400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351859049524:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.565425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.642067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.676384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.684575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351859050023:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351859050028:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.685146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660351859050030:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.898612Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660351859050344:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6trdm2gf9av9tjs5pq2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU4MTk1MmMtZDEwMzRjNTMtNzY4ZDgxNjktMTU5ZGNhYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.912380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908898, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 19127, MsgBus: 30120 2024-11-21T09:21:49.051668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355950523542:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b1/r3tmp/tmpagZsl4/pdisk_1.dat 2024-11-21T09:21:49.057945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:49.060620Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19127, node 2 2024-11-21T09:21:49.070420Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.070436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.070438Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.070472Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30120 TClient is connected to server localhost:30120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.153558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.153588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.153948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.154532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.156652Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.168337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.177952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.197287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.208184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.373042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355950524943:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.373079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.379304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.391379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.406265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.420232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.433363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.446669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.454983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355950525448:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.455009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.455010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355950525453:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.455709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.459667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355950525455:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.638779Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439660355950525746:2458], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2024-11-21T09:21:49.638874Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODhiN2ZhYTktMjk5NTE5YjUtZTgwNjFjMDItZmRjYWMzYTY=, ActorId: [2:7439660355950525739:2454], ActorState: ExecuteState, TraceId: 01jd70e7j3843pjm3qcbz10qzh, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 23649, MsgBus: 25051 2024-11-21T09:21:48.137105Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660352352550772:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.137153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d1/r3tmp/tmpwjcZxx/pdisk_1.dat 2024-11-21T09:21:48.238011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:48.238435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.238451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.240072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23649, node 1 2024-11-21T09:21:48.276298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25051 TClient is connected to server localhost:25051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.371994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.380625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.392453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.462656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.478428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.487387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.518615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352352552091:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.518642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.679620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352352552604:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352352552609:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660352352552611:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.952093Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660352352552928:2453] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6v30py330zsyr7eyzn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFlNjU3YWQtMjllOWI5ZmQtYjJmYzkwNDItOTQ4M2VjNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:48.952168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd70e6v30py330zsyr7eyzn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFlNjU3YWQtMjllOWI5ZmQtYjJmYzkwNDItOTQ4M2VjNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:48.962804Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908997, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 5805, MsgBus: 26158 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d1/r3tmp/tmpayN2vp/pdisk_1.dat 2024-11-21T09:21:49.240599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:21:49.241417Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5805, node 2 2024-11-21T09:21:49.257260Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.257275Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.257277Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.257315Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26158 TClient is connected to server localhost:26158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:49.331634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.331656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.332367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.332844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:49.336943Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.342672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.360073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.381756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:49.393869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.556095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356670015383:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.556147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.560391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.566702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.579225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.586550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.593412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.601520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.616375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356670015887:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.616402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.616478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356670015892:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.617237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.620164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356670015894:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.848866Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e7qjetfh74pck9d1hq2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODEzOTk1MGMtMWUzNWZhYzYtNWI2ODEwZWYtYzc4MGI3NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:49.859972Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909893, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SimpleWindow [GOOD] Test command err: Trying to start YDB, gRPC: 13707, MsgBus: 13089 2024-11-21T09:21:48.136827Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660354720307813:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b6/r3tmp/tmpgi4rTp/pdisk_1.dat 2024-11-21T09:21:48.214987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13707, node 1 2024-11-21T09:21:48.240354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.240378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.242283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.279486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.279499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.279500Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.279529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13089 TClient is connected to server localhost:13089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.408863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.412544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.426638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.491468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.503267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.510374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.531174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354720309139:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.531198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.676067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.684605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354720309642:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660354720309647:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.685115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660354720309649:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.916608Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908955, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 7784, MsgBus: 9622 2024-11-21T09:21:49.219529Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660356281852072:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.219712Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b6/r3tmp/tmpXbWE09/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7784, node 2 2024-11-21T09:21:49.243009Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:49.243834Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.243852Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.243854Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.243877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9622 TClient is connected to server localhost:9622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.319869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.319896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.320967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.321635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.322497Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.331062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.345591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.364968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.374708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.530758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356281853618:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.530782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.535994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.541830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.551084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.558089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.613414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.621238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.629508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356281854123:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.629547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.629582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356281854128:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.630185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.634135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356281854130:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.868183Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909907, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RightOnlyJoinSimple [GOOD] Test command err: Trying to start YDB, gRPC: 9237, MsgBus: 10495 2024-11-21T09:21:48.136775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351662023153:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d3/r3tmp/tmp80PY8L/pdisk_1.dat 2024-11-21T09:21:48.230821Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9237, node 1 2024-11-21T09:21:48.238580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.238604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.239659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.277501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.277515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.277516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.277543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10495 TClient is connected to server localhost:10495 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.382119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.389236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.412412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.432316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.492925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.514884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351662024547:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.514912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.679551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351662025060:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351662025065:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.690198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660351662025067:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.894138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.966002Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660351662025628:2479] TxId: 281474976715674. Ctx: { TraceId: 01jd70e6vw6awdm8d6npawk1c9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJhZjZmNGYtZTAxN2RjZjgtOWM4YjgxZDEtNGZlZDU3NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.972393Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909011, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 19237, MsgBus: 4205 2024-11-21T09:21:49.217335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355716277212:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048d3/r3tmp/tmptTQruB/pdisk_1.dat 2024-11-21T09:21:49.221677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:49.229240Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19237, node 2 2024-11-21T09:21:49.235797Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.235811Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.235812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.235838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4205 TClient is connected to server localhost:4205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.273127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.281975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.318890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.318917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.320080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.337898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.357628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.415263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.519744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355716278593:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.519766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.525390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.532679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.544337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.551349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.558121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.565607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.581248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355716279095:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.581273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355716279100:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.581276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.581939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.585609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355716279102:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:49.768326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.848977Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909893, txId: 281474976710673] shutting down >> KqpScan::AggregateNoColumn >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] >> KqpScan::TwoAggregatesOneFullFrameWindow [GOOD] >> KqpScan::TwoAggregatesTwoWindows >> KqpScan::CrossJoinOneColumn [GOOD] >> KqpScan::CrossJoinCount >> SystemView::SystemViewFailOps [GOOD] >> KqpScan::PrunePartitionsByLiteral >> KqpScan::NullInKeySuffix [GOOD] >> KqpScan::NoTruncate >> KqpScan::GrepLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionSameTable [GOOD] Test command err: Trying to start YDB, gRPC: 21668, MsgBus: 15973 2024-11-21T09:21:48.436047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351066825531:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.436312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004893/r3tmp/tmpEAHNWp/pdisk_1.dat 2024-11-21T09:21:48.481439Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21668, node 1 2024-11-21T09:21:48.494539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.494549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.494551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.494573Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15973 TClient is connected to server localhost:15973 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:48.536955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.536984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.538029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.538482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.549750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.611123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.625843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.636062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.737826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351066827087:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.737852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.760901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.766393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.774322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.781435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.788838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.802969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.818350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351066827590:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.818393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.818428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351066827595:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.819307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.822840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660351066827597:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:49.070053Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660355361795242:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6yv04mqx7h13tdxckh6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU1NjRjMDUtNjUxOTk4ZTktYTQ5YzRiMzktMTQ0YzVjOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:49.074992Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909116, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 8193, MsgBus: 21950 2024-11-21T09:21:49.309203Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355546270465:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.309382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004893/r3tmp/tmpZgPiu6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8193, node 2 2024-11-21T09:21:49.326345Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:49.330061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.330078Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.330081Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.330130Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21950 TClient is connected to server localhost:21950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.409378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.409402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.410488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.411684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.412979Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.415351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.429367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.449142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.461419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.626158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355546272005:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.626200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.630074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.636154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.649371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.656056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.711812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.719626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.735831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355546272511:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.735858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.735858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355546272516:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.736597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.739371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355546272518:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.980447Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910026, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::DescribeSystemFolder [GOOD] Test command err: 2024-11-21T09:21:36.130710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660302843876176:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.130758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00358a/r3tmp/tmpBCf1YD/pdisk_1.dat 2024-11-21T09:21:36.250657Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:36.250924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.250943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.253995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21343, node 1 2024-11-21T09:21:36.335633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.335654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.335656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.335690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:36.420638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.428542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:36.506916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660302843876552:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.506945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.507302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660302843876579:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.508042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.509728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660302843876581:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:21:36.729882Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660302843876661:2295] TxId: 281474976715661. Ctx: { TraceId: 01jd70dtp922tzkprvph53da9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFmODgxYTAtNTRiN2YxZjctOWVkMjdkYjktYWQ3ODY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:36.730975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70dtp922tzkprvph53da9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFmODgxYTAtNTRiN2YxZjctOWVkMjdkYjktYWQ3ODY5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.750837Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660302843876668:2308], owner: [1:7439660302843876664:2306], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:36.751180Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660302843876668:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:36.753277Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660302843876668:2308], row count: 1, finished: 1 2024-11-21T09:21:36.753303Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660302843876668:2308], owner: [1:7439660302843876664:2306], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:36.758249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180896726, txId: 281474976715660] shutting down 2024-11-21T09:21:37.783051Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660307138844012:2314] TxId: 281474976715663. Ctx: { TraceId: 01jd70dvz597arw998mhja0vp6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI1ZWI0MjktNGQzZDQwYTktMTQwNzgwOWYtYTI0MzFjZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:37.783128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70dvz597arw998mhja0vp6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI1ZWI0MjktNGQzZDQwYTktMTQwNzgwOWYtYTI0MzFjZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:37.783653Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660307138844019:2322], owner: [1:7439660307138844015:2320], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:37.783792Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660307138844019:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:37.783972Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660307138844019:2322], row count: 1, finished: 1 2024-11-21T09:21:37.783993Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660307138844019:2322], owner: [1:7439660307138844015:2320], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:37.784459Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180897782, txId: 281474976715662] shutting down 2024-11-21T09:21:38.803364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70dwz1ess9h498ye9ej8sq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ4ZWYxMWMtYWIyZTU2Ny1kZmYxYWI4Mi0xOWRmMGI1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:38.803943Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660311433811352:2333], owner: [1:7439660311433811348:2331], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:38.804069Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660311433811352:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.804160Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660311433811352:2333], row count: 1, finished: 1 2024-11-21T09:21:38.804169Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660311433811352:2333], owner: [1:7439660311433811348:2331], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:38.804688Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898803, txId: 281474976715664] shutting down 2024-11-21T09:21:39.824823Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70dxz09gqcw9s9eg54y9pa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzViNWIwNDYtZDAzZGI1ZTAtNjVkNjU1MTctNTdlYzkxYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:39.825539Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660315728778685:2344], owner: [1:7439660315728778681:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:39.825900Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660315728778685:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:39.825999Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660315728778685:2344], row count: 1, finished: 1 2024-11-21T09:21:39.826019Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660315728778685:2344], owner: [1:7439660315728778681:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:39.826363Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180899824, txId: 281474976715666] shutting down 2024-11-21T09:21:40.845342Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70dyyv5nxs11kr49rp67zr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM1ZWI0YWItYmY0ZTYzOC1iNTJjODY4MC00ZTM2MDU0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:40.845801Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660320023746018:2355], owner: [1:7439660320023746014:2353], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:40.845910Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660320023746018:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:40.845990Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660320023746018:2355], row count: 1, finished: 1 2024-11-21T09:21:40.846009Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660320023746018:2355], owner: [1:7439660320023746014:2353], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T09:21:40.846384Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180900845, txId: 281474976715668] shutting down 2024-11-21T09:21:41.130806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439660302843876176:2253];send_to=[0:730719953665 ... 
n id: 0, table id: [72057594046644480:1:0:ds_groups] 2024-11-21T09:21:47.613043Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180907611, txId: 281474976715670] shutting down 2024-11-21T09:21:48.623769Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439660352670996933:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.623933Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00358a/r3tmp/tmpnzFmna/pdisk_1.dat 2024-11-21T09:21:48.637780Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21578, node 7 2024-11-21T09:21:48.658826Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.658840Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.658843Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.658893Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.724508Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.724540Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.726094Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.727150Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.731264Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.739603Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439660350766756920:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.739902Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:48.740907Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.741318Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.741334Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:21:48.741588Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.741612Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.742429Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2024-11-21T09:21:48.742441Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2024-11-21T09:21:48.742583Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.742626Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.797861Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.802663Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439660350780458486:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.802714Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:48.804282Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.804673Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.804708Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T09:21:48.805278Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.805297Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.806162Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2024-11-21T09:21:48.806175Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2024-11-21T09:21:48.806368Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.806532Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.973874Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.036030Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439660356965965473:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.036030Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439660356965965484:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.036051Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.036578Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.040642Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439660356965965487:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T09:21:49.130208Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70e6zbcmzwvsb5y1zjd8f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRhNjI4NzYtYmVkNmRjMWUtZGI4NWM1MTMtYjBjMWFjN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:49.135987Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.216325Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70e74m6sa2msjg96m3p96h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRhNjI4NzYtYmVkNmRjMWUtZGI4NWM1MTMtYjBjMWFjN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:49.220966Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.301108Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70e77a77z185fpwk808xrj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRhNjI4NzYtYmVkNmRjMWUtZGI4NWM1MTMtYjBjMWFjN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Path not found 2024-11-21T09:21:49.311116Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T09:21:49.311383Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:49.311439Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T09:21:49.311481Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:49.311501Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T09:21:49.311596Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:49.311787Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T09:21:49.311886Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:49.312769Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439660350780458710:2101], Type=268959746 2024-11-21T09:21:49.312781Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439660350780458710:2101], Type=268959746 2024-11-21T09:21:49.312784Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439660350780458710:2101], Type=268959746 2024-11-21T09:21:49.312787Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439660350780458710:2101], Type=268959746 2024-11-21T09:21:49.740534Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 12591, MsgBus: 18236 2024-11-21T09:21:48.136830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660350921215263:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ae/r3tmp/tmpYCjfpr/pdisk_1.dat 2024-11-21T09:21:48.217902Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12591, node 1 2024-11-21T09:21:48.239192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.239223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.240319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.278519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.278531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.278533Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.278560Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:18236 TClient is connected to server localhost:18236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.382452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.384683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.395317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.462601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.520358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.577536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.597747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350921216608:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.597772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.702232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.711077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.765772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.775916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350921217126:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.775944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.775953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350921217131:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.776561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.780061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660350921217133:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.959299Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660350921217445:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70e6wf75q1c092sb5nr34a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThkMjRiNGMtMmI3NzFhNGMtODBmOGNjYTktYTFiZjdjYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.960956Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909004, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 30724, MsgBus: 7190 2024-11-21T09:21:49.214042Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660357425500269:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ae/r3tmp/tmpVX9RUY/pdisk_1.dat 2024-11-21T09:21:49.219060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:49.224382Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30724, node 2 2024-11-21T09:21:49.239821Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.239838Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.239840Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.239877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7190 TClient is connected to server localhost:7190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.315906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.315928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.316141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.320590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.323444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.383486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.400800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.412400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.515719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660357425501651:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.515744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.521116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.527480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.538067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.593270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.600412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.607287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.626700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660357425502165:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.626731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660357425502171:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.626735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.627355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.629067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660357425502173:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.780099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.786336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.796601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":4,"Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"No estimate","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Fk1","Key"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"TopBy":"row.Fk1","Name":"Top","Limit":"2"}],"Node Type":"Top"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Node Type":"Merge","SortColumns":["Fk1 (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Fk1","Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpSplit::AfterResolve+Ascending >> KqpSplit::BorderKeys+Descending [GOOD] >> KqpScan::SecondaryIndexCustomColumnOrder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupTryGetDataBeforeSchemeInitialization [GOOD] Test command 
err: Trying to start YDB, gRPC: 9810, MsgBus: 21016 2024-11-21T09:21:48.170228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660350923393680:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.170729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a3/r3tmp/tmpsX2o7Y/pdisk_1.dat 2024-11-21T09:21:48.253172Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9810, node 1 2024-11-21T09:21:48.270339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.270376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.271435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.278880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.278892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.278893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.278915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21016 TClient is connected to server localhost:21016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.379797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.382963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.387258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.427828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.449005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.463882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.536300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350923395072:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.536324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.675811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.684569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350923395585:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660350923395590:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.685124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660350923395592:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.894994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.928995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.999493Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660350923396281:2494] TxId: 281474976715676. Ctx: { TraceId: 01jd70e6x0a13wp946x1e3dm69, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZiODEwMDgtMzk4OWJmYzItNDZmOWFiOTctMWI0ZDk2YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:49.002293Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909046, txId: 281474976715675] shutting down 2024-11-21T09:21:49.531994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:21:49.532031Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:21:49.532057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a3/r3tmp/tmpKkfkm3/pdisk_1.dat 2024-11-21T09:21:49.614349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.627781Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:49.670052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.670086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.680568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.785668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:608:2517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.785702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.787105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.002512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:758:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.002551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.002606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.003591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.191500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:765:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to NKikimr::NTxProxy::TResolveTablesActor Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to KQP_TABLE_RESOLVER 2024-11-21T09:21:50.396058Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70e7xj0hvyc3xvx2n32ax6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWVmNmJjZDMtN2YwYmMyODItYWM4N2U1Yy03MzJiZDg2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to KQP_STREAM_LOOKUP_ACTOR 2024-11-21T09:21:50.398535Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715660] shutting down Captured TEvTxProxySchemeCache::TEvResolveKeySetResult from NKikimr::NSchemeBoard::(anonymous namespace)::TAccessCheckerResolve to NKikimr::NTxProxy::TResolveTablesActor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::SystemViewFailOps [GOOD] Test command err: 2024-11-21T09:21:36.290344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660300184779073:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.290562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003576/r3tmp/tmp4jhvHO/pdisk_1.dat 2024-11-21T09:21:36.350724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4849, node 1 2024-11-21T09:21:36.386873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.386887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.386889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.386931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:36.391288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.391316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.395462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7082 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:36.470113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.480655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.608407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660300184779981:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.608430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660300184779973:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.608451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.609196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.613030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660300184779987:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:36.731098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70dttz752k0dmbte2d5psc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWExMjk0MGUtYjRiMGEzZDItYzRkYjkxYS1mZTExNzgyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.766124Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660300184780131:2315] TxId: 281474976715663. Ctx: { TraceId: 01jd70dtzg8c7g35rwqy65x4ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDcyMDFhYjQtNTNlNmFlZGQtMjBmODllYTgtY2E1OTgwODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:36.766192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70dtzg8c7g35rwqy65x4ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDcyMDFhYjQtNTNlNmFlZGQtMjBmODllYTgtY2E1OTgwODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.766874Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660300184780138:2323], owner: [1:7439660300184780134:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T09:21:36.767074Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660300184780138:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:36.767188Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660300184780138:2323], row count: 1, finished: 1 2024-11-21T09:21:36.767200Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660300184780138:2323], owner: [1:7439660300184780134:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T09:21:36.768115Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180896765, txId: 281474976715662] shutting down 2024-11-21T09:21:37.638767Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660306820282676:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:37.638911Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003576/r3tmp/tmpw3kcWO/pdisk_1.dat 2024-11-21T09:21:37.653996Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6055, node 6 2024-11-21T09:21:37.665863Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:37.665878Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:37.665880Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:37.665931Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:37.739348Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.739390Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:37.740437Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.741172Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:37.953158Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660306820283248:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.953219Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.953278Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660306820283284:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:37.954183Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:21:37.958595Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660306820283286:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:21:38.048821Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70dvyhdt1hsmj0aejsvvtn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=YWVmYzk0OWEtZTU2Njg1ODYtNDdhNGZiODgtOGEzMzAzNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:38.049524Z node 6 :SYSTEM_VIEWS INFO: Scan started, actor: [6:7439660311115250668:2308], owner: [6:7439660311115250664:2306], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:38.049671Z node 6 :SYSTEM_VIEWS INFO: Scan prepared, actor: [6:7439660311115250668:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.049782Z node 6 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [6:7439660311115250668:2308], row count: 0, finished: 1 2024-11-21T09:21:38.049796Z node 6 :SYSTEM_VIEWS INFO: Scan finished, actor: [6:7439660311115250668:2308], owner: [6:7439660311115250664:2306], scan id: 0, table id: [720575940466 ... 351687129309:2377], owner: [7:7439660351687129305:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.736259Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660351687129309:2377], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:48.736337Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660351687129309:2377], row count: 2, finished: 1 2024-11-21T09:21:48.736350Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660351687129309:2377], owner: [7:7439660351687129305:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.736800Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908735, txId: 281474976715672] shutting down 2024-11-21T09:21:48.748964Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd70e6p14x3cwx5gbx3cg8fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDFjMDNmNzYtNTcwNjU1Zi04ZmQyOWU3MS1jMjIzYjUxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:48.749438Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660351687129341:2386], owner: [7:7439660351687129337:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.749519Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660351687129341:2386], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:48.749597Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660351687129341:2386], row count: 3, finished: 1 2024-11-21T09:21:48.749606Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660351687129341:2386], owner: [7:7439660351687129337:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.750009Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908748, txId: 281474976715674] shutting down 2024-11-21T09:21:48.762789Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715677. 
Ctx: { TraceId: 01jd70e6pecmxnf9p9a0ag5dza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NjYwYzY3NDAtMWFjMTEzODMtNmM4MTMyYTUtZmY0ZTJmNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:48.763202Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660351687129373:2395], owner: [7:7439660351687129369:2393], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.763422Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660351687129373:2395], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:48.763492Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660351687129373:2395], row count: 2, finished: 1 2024-11-21T09:21:48.763507Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660351687129373:2395], owner: [7:7439660351687129369:2393], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.763922Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908762, txId: 281474976715676] shutting down 2024-11-21T09:21:48.777089Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd70e6pw9x8ye5enm8gdfgqe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjcxNGNlOTgtM2JmYzNhNjQtNjJhY2E2YTgtNWExNjQzZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:48.777638Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660351687129405:2404], owner: [7:7439660351687129401:2402], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.777827Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660351687129405:2404], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:48.777927Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660351687129405:2404], row count: 3, finished: 1 2024-11-21T09:21:48.777956Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660351687129405:2404], owner: [7:7439660351687129401:2402], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T09:21:48.778458Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908776, txId: 281474976715678] shutting down 2024-11-21T09:21:49.139894Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439660355222547316:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.139939Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003576/r3tmp/tmpIUdfIY/pdisk_1.dat 2024-11-21T09:21:49.155220Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5012, node 8 2024-11-21T09:21:49.180190Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.180223Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.180226Z node 8 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2024-11-21T09:21:49.180277Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.241207Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.241244Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.243868Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.244026Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:440, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp:774, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp:406, code: 200200 2024-11-21T09:21:49.476419Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439660355222548117:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.476437Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp:406, code: 200200 2024-11-21T09:21:49.482501Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439660355222548138:2304] TxId# 281474976715663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2024-11-21T09:21:49.483196Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439660355222548138:2304] TxId# 281474976715663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats]
: Error: Bulk upsert to table '/Root/.sys/partition_stats'is not supported. Table is a system view
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system, source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, code: 200200
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:31, code: 200200 >> KqpSplit::AfterResultMultiRange+Descending [GOOD] >> SystemView::TopPartitionsTables [GOOD] >> SystemView::TopPartitionsRanges >> KqpScan::Join2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 5785, MsgBus: 15434 2024-11-21T09:21:48.136823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351734291813:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004899/r3tmp/tmp2A7PFw/pdisk_1.dat 2024-11-21T09:21:48.229673Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:48.238486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.238508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.239593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5785, node 1 2024-11-21T09:21:48.277428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.277444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.277446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.277482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15434 TClient is connected to server localhost:15434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.388306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.390914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.400906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.465403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.480146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.487683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.512750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351734293145:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.512777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.676943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.691670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351734293658:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.691714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351734293663:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.692202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.696355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660351734293665:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.950149Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660351734293979:2453] TxId: 281474976715672. Ctx: { TraceId: 01jd70e6ts0habjmyxqjve1a9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5MWNlNGItNGQ4MTZmNmYtMmZlYWJlYTMtNWU1NmVlMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:48.950253Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e6ts0habjmyxqjve1a9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5MWNlNGItNGQ4MTZmNmYtMmZlYWJlYTMtNWU1NmVlMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:49.269082Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908997, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19146, MsgBus: 15435 2024-11-21T09:21:49.447980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355497287689:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.447996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004899/r3tmp/tmpyAy0ei/pdisk_1.dat 2024-11-21T09:21:49.458244Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19146, node 2 2024-11-21T09:21:49.465597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.465608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.465610Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.465651Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15435 TClient is connected to server localhost:15435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.548184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.548233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.549413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.550580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.562508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.570556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.587042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.597179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.744663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355497289241:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.744715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.747800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.754274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.761464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.767841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.775402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.782168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.839362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355497289755:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.839396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.839413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355497289760:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.840173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.844683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355497289762:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.064896Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e7xp41gz9914vv5mavs3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTIyZTljODktYTFiNDFkMTEtNzZjZTY3MzItZjlmN2ZjZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:50.376441Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910110, txId: 281474976715671] shutting down >> KqpScan::LongStringCombiner [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepLimit [GOOD] Test command err: Trying to start YDB, gRPC: 9368, MsgBus: 14216 2024-11-21T09:21:48.214963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353431419809:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.216482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a4/r3tmp/tmpJgF0bZ/pdisk_1.dat 2024-11-21T09:21:48.292757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9368, node 1 2024-11-21T09:21:48.308129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.308141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.308143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.308175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:48.315384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.315407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.316539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14216 TClient is connected to server localhost:14216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.378242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.381570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.384683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.449240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.480831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.492323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.570288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353431421347:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.570323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.679574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353431421850:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353431421855:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353431421857:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.919370Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660353431422195:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70e6tzddpt43hgxqgy5vbd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM5ZTU5MmQtNDBkYTA1MDctNDFkNzQxMzEtNGVkOWJiZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.924735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908962, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 4714, MsgBus: 62343 2024-11-21T09:21:49.102012Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660356502699598:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.102046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a4/r3tmp/tmpnS0z5T/pdisk_1.dat 2024-11-21T09:21:49.111651Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4714, node 2 2024-11-21T09:21:49.122891Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.122906Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.122908Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.122954Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62343 TClient is connected to server localhost:62343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.202678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.202711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.203820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.205035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.208775Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.215144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.277380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.302054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.314581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.424858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356502701143:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.424891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.431456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.437261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.492269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.502644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.516744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.530456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.539102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356502701661:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.539135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.539137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356502701666:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.539946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.543854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356502701668:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.750869Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909795, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 27686, MsgBus: 19568 2024-11-21T09:21:50.017129Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660363266696933:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.017401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a4/r3tmp/tmpofQRAf/pdisk_1.dat 2024-11-21T09:21:50.026319Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27686, node 3 2024-11-21T09:21:50.036039Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.036055Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.036057Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.036119Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19568 TClient is connected to server localhost:19568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.117129Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.117176Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:21:50.118413Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.119537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.125676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.135921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:50.157151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.169109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.342594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363266698480:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.342619Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.347664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.353942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.362766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.369901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.377501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.391648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.399593Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363266698981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.399616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.399633Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363266698986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.400255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.404275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660363266698988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.579566Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910621, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 1302, MsgBus: 16700 2024-11-21T09:21:48.209991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660352534697556:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048cc/r3tmp/tmp3rwhlg/pdisk_1.dat 2024-11-21T09:21:48.238866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:48.261061Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1302, node 1 2024-11-21T09:21:48.276047Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:48.302996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.303031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.304160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16700 TClient is connected to server localhost:16700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.394570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.396801Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.404808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.468453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.489609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.505078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.541121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352534698947:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.541140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.637691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.709157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.718096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.726532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352534699462:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.726568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.726574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352534699467:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.727145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.731251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660352534699469:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.918525Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660352534699780:2453] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6v6d0bet3hjb8b55bbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5NGNjMGEtMWU0ZmQwOWItNzgwMGEwMDMtNWY3NDAzODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:48.918620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd70e6v6d0bet3hjb8b55bbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5NGNjMGEtMWU0ZmQwOWItNzgwMGEwMDMtNWY3NDAzODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:49.438819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908962, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 30606, MsgBus: 28107 2024-11-21T09:21:49.689387Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660358992118593:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.689563Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048cc/r3tmp/tmpYYVgAb/pdisk_1.dat 2024-11-21T09:21:49.698328Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30606, node 2 2024-11-21T09:21:49.707437Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.707454Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.707456Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.707498Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28107 TClient is connected to server localhost:28107 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.791331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.791355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.791664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.792534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.803353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.812542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.831800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.842165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.011744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660363287087438:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.011785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.015623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.023373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.034214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.041504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.048146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.055528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.071451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660363287087939:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.071475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.071553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660363287087944:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.072325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.075468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660363287087946:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.263543Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e8578ymnfrebaba2gyd4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQwZDlmZDktMzhiZTM3YjQtMWE1ODgyY2QtODJiMGNlMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:50.615833Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910306, txId: 281474976715671] shutting down >> KqpScan::UdfFailure [GOOD] >> KqpScan::EarlyFinish [GOOD] >> KqpScan::DropRedundantSortByPk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 64600, MsgBus: 14448 2024-11-21T09:21:48.136794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353106302108:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c2/r3tmp/tmpw5DtQu/pdisk_1.dat 2024-11-21T09:21:48.231740Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64600, node 1 2024-11-21T09:21:48.239264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.239287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.240397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.276157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14448 TClient is connected to server localhost:14448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.371961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.383082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.403533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.422012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.435554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.506015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353106303455:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.506041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.663185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.717141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.726727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353106303973:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.726750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353106303978:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.726751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.727319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.730906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353106303980:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.987773Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660353106304310:2453] TxId: 281474976710672. Ctx: { TraceId: 01jd70e6w12mcwfdcrrkghchss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWViOTc4YWItMWU5OTQyZmUtNzAxZTRiMzAtYjY4N2M2OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:48.987842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd70e6w12mcwfdcrrkghchss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWViOTc4YWItMWU5OTQyZmUtNzAxZTRiMzAtYjY4N2M2OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:49.409935Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909032, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 23451, MsgBus: 61140 2024-11-21T09:21:49.634970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660358083045648:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.635301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c2/r3tmp/tmpIQ1oWV/pdisk_1.dat 2024-11-21T09:21:49.643465Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23451, node 2 2024-11-21T09:21:49.653463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.653477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.653479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.653516Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61140 TClient is connected to server localhost:61140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.734878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.734913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.736076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.737196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.747633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.755870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.774522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.784838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.974094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660358083047209:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.974168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.978798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.986754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.999533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.006792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.020823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.034336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.043188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362378014997:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.043213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.043214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362378015002:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.043796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.047642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660362378015004:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.275575Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e84418tyejkm6yq1003n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzBmZTg5NWYtOTkwNGVhMWUtNWM5ZjFlNjItYTgzOGVjYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:50.622297Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910320, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join2 [GOOD] Test command err: Trying to start YDB, gRPC: 22541, MsgBus: 64062 2024-11-21T09:21:48.186110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351891451841:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.186191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048db/r3tmp/tmp1OCqQB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22541, node 1 2024-11-21T09:21:48.255339Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:48.276097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:48.286270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.286311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.287465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64062 TClient is connected to server localhost:64062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.416641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.420713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.425395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.486994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.503090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.512056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.531203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351891453227:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.531236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.680601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351891453731:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660351891453736:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660351891453738:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.903078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.951309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180908997, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 24631, MsgBus: 22553 2024-11-21T09:21:49.254858Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660356528724123:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.255018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048db/r3tmp/tmpv2hp0G/pdisk_1.dat 2024-11-21T09:21:49.271638Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24631, node 2 2024-11-21T09:21:49.283743Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.283755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.283757Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.283793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22553 TClient is connected to server localhost:22553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.362512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.362546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.362872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.363664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:49.367560Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.371145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.384265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.402504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.413135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.590778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356528725664:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.590806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.596011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.602531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.614301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.621131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.628388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.635055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.643932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356528726165:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.643955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356528726170:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.643959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.644607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.648700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356528726172:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.815498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.895980Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909935, txId: 281474976715673] shutting down 2024-11-21T09:21:49.962783Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910005, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 28072, MsgBus: 9917 2024-11-21T09:21:50.158915Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660362584202825:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.158932Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048db/r3tmp/tmpSE9UsC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28072, node 3 2024-11-21T09:21:50.175322Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:50.175787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.175804Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.175807Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.175860Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9917 TClient is connected to server localhost:9917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.259023Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.259058Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.260097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.261328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.272056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.280228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.295964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.308553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.450264Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660362584204358:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.450281Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.456470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.463437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.475704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.482520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.496806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.510371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.519480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660362584204871:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.519509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.519577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660362584204876:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.520333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.523573Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660362584204878:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.687679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.787967Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910824, txId: 281474976715673] shutting down >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore [GOOD] >> KqpScan::TwoAggregatesTwoWindows [GOOD] >> KqpScan::SqlInParameter >> KqpFlowControl::FlowControl_Unlimited >> KqpScan::SelfJoin3xSameLabels >> KqpRequestContext::TraceIdInErrorMessage >> KqpScan::PrunePartitionsByLiteral [GOOD] >> KqpScan::PrunePartitionsByExpr >> KqpSplit::AfterResult+Descending >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::LongStringCombiner [GOOD] Test command err: Trying to start YDB, gRPC: 32533, MsgBus: 65094 2024-11-21T09:21:48.137112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353703576927:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.137143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a9/r3tmp/tmpgmxn96/pdisk_1.dat 2024-11-21T09:21:48.218083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32533, node 1 2024-11-21T09:21:48.239577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.239599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.242130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.278548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.278560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.278561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.278589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65094 TClient is connected to server localhost:65094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.371975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.388977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.420698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.483819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.495045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.516672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353703578254:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.516701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.649112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.683446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.692601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353703578768:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.692624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.692669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353703578773:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.693208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.696025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353703578775:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:48.897536Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439660353703579068:2458], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 2024-11-21T09:21:48.897633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTYxY2IwM2ItMWNiNGNlZTctNzVhYTg1NzktYWYzNDQ1NWY=, ActorId: [1:7439660353703579061:2454], ActorState: ExecuteState, TraceId: 01jd70e6tp950egvda550ek2zw, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 Trying to start YDB, gRPC: 9904, MsgBus: 27935 2024-11-21T09:21:49.016690Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355135701094:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.017120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a9/r3tmp/tmpihWCGh/pdisk_1.dat 2024-11-21T09:21:49.029035Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9904, node 2 2024-11-21T09:21:49.041220Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.041243Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.041246Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.041287Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27935 TClient is connected to server localhost:27935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.119287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.119321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.119613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.120325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.128244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.137617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:49.158212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.175525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.348479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355135702648:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.348513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.354446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.361458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.369631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.376529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.384628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.400815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.415751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355135703149:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.415777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.415825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355135703154:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.416366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.424819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355135703156:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.603425Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439660355135703448:2458], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 2024-11-21T09:21:49.603940Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjZmNmEyMGMtNTczODIzNDUtYmY5N2M3MzEtM2ExNjI1YzQ=, ActorId: [2:7439660355135703441:2454], ActorState: ExecuteState, TraceId: 01jd70e7gw12mnrynmbsf03zws, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029
: Error: Execution, code: 1060
:3:13: Error: Scan query should have a single result set., code: 2029 Trying to start YDB, gRPC: 9836, MsgBus: 13646 2024-11-21T09:21:49.943629Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660356106687093:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.943667Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048a9/r3tmp/tmps6VyWB/pdisk_1.dat 2024-11-21T09:21:49.962003Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9836, node 3 2024-11-21T09:21:49.968189Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.968226Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.968229Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.968268Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13646 TClient is connected to server localhost:13646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.043921Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.043948Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.045064Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.047295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.050471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.063456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:50.080354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.094348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.278663Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660360401655939:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.278704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.284824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.291919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.300578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.314817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.321329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.328511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.337355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660360401656441:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.337387Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.337474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660360401656446:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.338271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.341452Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660360401656448:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.772760Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910642, txId: 281474976715671] shutting down >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore [GOOD] >> KqpSplit::AfterResolve+Unspecified >> KqpScan::DecimalColumn >> KqpScan::CrossJoinCount [GOOD] >> KqpSplit::AfterResolve+Ascending [GOOD] >> KqpSplit::AfterResolve+Descending >> KqpScan::NoTruncate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UdfFailure [GOOD] Test command err: Trying to start YDB, gRPC: 18422, MsgBus: 17492 2024-11-21T09:21:48.139480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353419333135:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.139615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c6/r3tmp/tmpqoWRnR/pdisk_1.dat 2024-11-21T09:21:48.220407Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:48.240544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.240572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.244498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18422, node 1 2024-11-21T09:21:48.278694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.278708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.278709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.278738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17492 TClient is connected to server localhost:17492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.391719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.394195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.396747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.461978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.493900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.506127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.528703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353419334543:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.528725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.679483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353419335047:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353419335052:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353419335054:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.166123Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909011, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 10000, MsgBus: 14062 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c6/r3tmp/tmpKKnHXq/pdisk_1.dat 2024-11-21T09:21:49.435065Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:49.435060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 10000, node 2 2024-11-21T09:21:49.446908Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.446919Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.446921Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.446948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14062 TClient is connected to server localhost:14062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:49.525681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.525710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.526561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.526721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.532524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.542434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.560858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.570739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.733266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356709160399:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.733302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.737622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.775417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.784002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356709160902:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.784029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660356709160907:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.784035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.784643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.788306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660356709160909:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.020500Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660356709161224:2461], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd70e7w9f46s2ksy55799hms. SessionId : ydb://session/3?node_id=2&id=MmFmMTYwNDAtMjQ5ZDg5MWEtOTVmNTg5YzMtNTNlMzg1NzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2024-11-21T09:21:50.020763Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660356709161226:2462], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MmFmMTYwNDAtMjQ5ZDg5MWEtOTVmNTg5YzMtNTNlMzg1NzM=. TraceId : 01jd70e7w9f46s2ksy55799hms. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T09:21:50.022117Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmFmMTYwNDAtMjQ5ZDg5MWEtOTVmNTg5YzMtNTNlMzg1NzM=, ActorId: [2:7439660356709161195:2454], ActorState: ExecuteState, TraceId: 01jd70e7w9f46s2ksy55799hms, Create QueryResponse for error on request, msg: 2024-11-21T09:21:50.022243Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910026, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 10187, MsgBus: 16833 2024-11-21T09:21:50.335955Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660363142165132:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.336293Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048c6/r3tmp/tmp9DfRWP/pdisk_1.dat 2024-11-21T09:21:50.345640Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10187, node 3 2024-11-21T09:21:50.356307Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.356320Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.356321Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.356361Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16833 TClient is connected to server localhost:16833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.438038Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.438072Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.438563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.439147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.443667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.455227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.473091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.483506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.630181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363142166669:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.630207Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.635812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.642667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.698917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.706332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.713426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.720405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.728388Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363142167182:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.728414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.728419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660363142167187:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.729074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.733342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660363142167189:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.944943Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439660363142167503:2461], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=OTFjNzVkNzgtMWUyNGVjYmUtYjBkNGJhZjktYjg0NWY1MA==. CustomerSuppliedId : . TraceId : 01jd70e8teasa848937gbnzm91. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2024-11-21T09:21:50.945053Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439660363142167504:2462], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd70e8teasa848937gbnzm91. SessionId : ydb://session/3?node_id=3&id=OTFjNzVkNzgtMWUyNGVjYmUtYjBkNGJhZjktYjg0NWY1MA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T09:21:50.945335Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTFjNzVkNzgtMWUyNGVjYmUtYjBkNGJhZjktYjg0NWY1MA==, ActorId: [3:7439660363142167474:2454], ActorState: ExecuteState, TraceId: 01jd70e8teasa848937gbnzm91, Create QueryResponse for error on request, msg: 2024-11-21T09:21:50.945458Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910992, txId: 281474976715671] shutting down >> KqpScan::AggregateNoColumn [GOOD] >> KqpScan::AggregateEmptyCountStar >> KqpSplit::IntersectionLosesRange+Unspecified >> KqpSplit::ChoosePartition+Ascending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TwoAggregatesTwoWindows [GOOD] Test command err: Trying to start YDB, gRPC: 32419, MsgBus: 25884 2024-11-21T09:21:48.136868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353285276046:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ca/r3tmp/tmpkbtsGr/pdisk_1.dat 2024-11-21T09:21:48.227058Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32419, node 1 2024-11-21T09:21:48.240622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.240647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.241813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.278194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.278208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.278210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.278242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25884 TClient is connected to server localhost:25884 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:48.371991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.388143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.403292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.422734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.480237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.512089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353285277374:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.512128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.648105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.680828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353285277887:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.680906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353285277892:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353285277894:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.023332Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660357580245548:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70e6v2b7zpbzzndaex11se, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhkZmNlYzEtZDVjNzM3MDktNjFhZjYxYWYtZmNjNmZhYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:50.303811Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909067, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19494, MsgBus: 62795 2024-11-21T09:21:50.512905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660362381637169:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.512923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048ca/r3tmp/tmpGsVuTI/pdisk_1.dat 2024-11-21T09:21:50.524557Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19494, node 2 2024-11-21T09:21:50.532321Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.532334Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.532336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.532375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62795 TClient is connected to server localhost:62795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.613432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.613459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.614604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.615743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.626709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.636020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.651955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.661883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.800531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362381638715:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.800559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.804784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.811525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.818715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.832519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.839040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.845893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.854913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362381639231:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.854931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362381639236:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.854940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.855616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.859473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660362381639238:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.160463Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911202, txId: 281474976715671] shutting down >> KqpScan::StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 21166, MsgBus: 16533 2024-11-21T09:21:49.391212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660358116758291:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.391274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488f/r3tmp/tmpDSF822/pdisk_1.dat 2024-11-21T09:21:49.468922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21166, node 1 2024-11-21T09:21:49.479877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.479891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.479894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.479929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16533 2024-11-21T09:21:49.495177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.495214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.496272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.534493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.546149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.609911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.626648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.635773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.723979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660358116759697:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.724002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.757006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.763705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.775542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.781901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.788926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.796583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.804560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660358116760201:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.804589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.804602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660358116760206:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.805220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.813446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660358116760208:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.030776Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660362411727828:2453] TxId: 281474976715672. Ctx: { TraceId: 01jd70e7wt9t5794w50d85ra16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc3ZmRjMTMtNGQyM2M5N2QtZjFjNjUwM2MtZjE2NWRlNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:50.030874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e7wt9t5794w50d85ra16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc3ZmRjMTMtNGQyM2M5N2QtZjFjNjUwM2MtZjE2NWRlNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:50.339761Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910075, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 6451, MsgBus: 31743 2024-11-21T09:21:50.474758Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660360381904296:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.474775Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488f/r3tmp/tmpCBzSJQ/pdisk_1.dat 2024-11-21T09:21:50.486109Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6451, node 2 2024-11-21T09:21:50.494497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.494512Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.494514Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.494554Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31743 TClient is connected to server localhost:31743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:50.574926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.574956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.576023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.577172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.583031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.591736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.608519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.617836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.768780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660360381905831:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.768824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.771405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.777236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.790256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.797218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.803988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.811615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.827087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660360381906343:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.827106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660360381906348:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.827113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.827642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.831424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660360381906350:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.071133Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70e8x896asdka028zhtg6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRjYTYxZTQtNTFmOTI1NDMtNjEyYTM5N2UtNTg4NWJhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:51.357690Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911118, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17094, MsgBus: 26512 2024-11-21T09:21:33.944311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660288045714050:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:33.944341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004387/r3tmp/tmph6tBeQ/pdisk_1.dat 2024-11-21T09:21:34.007734Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17094, node 1 2024-11-21T09:21:34.013712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:34.013726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:34.013729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:34.013782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26512 2024-11-21T09:21:34.046426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:34.046459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:34.047575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26512 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:34.075645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.081009Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:34.085120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.101825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:34.124436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.135523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.265952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660292340682892:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.265980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.301306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.307798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.319274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.326299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.333878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.347829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.363998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660292340683406:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.364015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.364017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660292340683411:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.364646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:34.368716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660292340683413:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:34.584985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.592091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.647590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.656016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.663705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.688517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.694647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.704192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.711068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.718853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.724909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.732279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.741578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.809905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:34.865721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.872475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.878872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.886028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.893604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.899827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.906889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 0359049574451:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.288924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.291679Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660359049574453:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.560461Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.568241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.580065Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.596093Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.607849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.632250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.687808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.698974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.754232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.762005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.776558Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.789550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.796387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.865054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:49.928092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.937095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.952490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.960390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.972794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.985969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.999647Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.014152Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.070116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.084336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.097825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.104650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.119533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.133108Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.147409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.161173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.175221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.189084Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.203670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.217136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.231847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.245109Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.258700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.281158Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T09:21:50.288272Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.300669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.314787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.370033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.377343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.384251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.391138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.446851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.463111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T09:21:50.470394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.483522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.497281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::NoTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 5287, MsgBus: 22505 2024-11-21T09:21:48.942111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660351964010648:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.942371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004892/r3tmp/tmpqM9ZC7/pdisk_1.dat 2024-11-21T09:21:49.010228Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5287, node 1 2024-11-21T09:21:49.023800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.023817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.023819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.023855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22505 2024-11-21T09:21:49.042982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.043011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.044107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:49.088729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.091668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.102535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:49.121553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.139993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.153560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.270880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660356258979488:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.270908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.305120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.311518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.320047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.334861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.348707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.405127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.417010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660356258980006:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.417033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.417216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660356258980011:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.417744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.424258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660356258980013:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.612735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.656837Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660356258980427:2471] TxId: 281474976715674. Ctx: { TraceId: 01jd70e7j8fbc2ze3v6t477f5b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzEwZDgwOGYtNTExMWQ4Y2YtNWVhNWFmZTAtMWM4ZmY1ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:49.659263Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909704, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 14342, MsgBus: 7562 2024-11-21T09:21:49.828739Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660358547426147:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.828995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004892/r3tmp/tmpFQqDCN/pdisk_1.dat 2024-11-21T09:21:49.843605Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14342, node 2 2024-11-21T09:21:49.850608Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.850630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.850633Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.850682Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7562 TClient is connected to server localhost:7562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:49.929331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.929367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.930371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.931669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.934033Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.935907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.946116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.964098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.973383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.168565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362842395000:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.168604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.174607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.181650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.188040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.195283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.202721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.217064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.232764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362842395492:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.232794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660362842395497:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.232797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.233425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.236365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660362842395499:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:50.397940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.443086Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910488, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 27743, MsgBus: 32577 2024-11-21T09:21:50.736633Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660361122967987:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.736648Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004892/r3tmp/tmpLn8lgq/pdisk_1.dat 2024-11-21T09:21:50.745655Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27743, node 3 2024-11-21T09:21:50.754405Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.754416Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.754417Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.754451Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32577 TClient is connected to server localhost:32577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.838605Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.838648Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.838990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.839580Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.850332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.859437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.878908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.935641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.053349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660365417936857:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.053383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.058574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.064256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.069826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.077122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.083944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.091819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.106939Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660365417937352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.106960Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660365417937357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.106960Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.107651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.111280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660365417937359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.319795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.372580Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911412, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 11383, MsgBus: 61512 2024-11-21T09:21:48.136816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660353374955577:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:48.136852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489c/r3tmp/tmpYJtLD8/pdisk_1.dat 2024-11-21T09:21:48.220578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11383, node 1 2024-11-21T09:21:48.239082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.239106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.242849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:48.279450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.279458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.279459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.279482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61512 TClient is connected to server localhost:61512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:48.380414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.385491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.463845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.481572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.496335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.519769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353374956911:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.519790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.639097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.647990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.654795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.669544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.679475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353374957425:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.679568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660353374957430:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.681028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660353374957432:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:48.984339Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660353374957769:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70e6v15kbqnxtem7n1c8j3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmMwZDE3MDQtZjBiZGJjNTEtNDRjYWJhYjAtYTZkYTQ2MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:48.998516Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909032, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 7941, MsgBus: 13973 2024-11-21T09:21:49.227257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660355231552218:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:49.227274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489c/r3tmp/tmpiM9vuq/pdisk_1.dat 2024-11-21T09:21:49.238752Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7941, node 2 2024-11-21T09:21:49.249208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.249222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.249224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.249257Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13973 TClient is connected to server localhost:13973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.328030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.328056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.329080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:49.330000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:49.334000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:49.335157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.344898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.361277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.371220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:49.546171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355231553777:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.546194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.551230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.557689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.565591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.572102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.579141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.586829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.598688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355231554277:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.598727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.598780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660355231554282:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:49.599415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:49.606642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660355231554284:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:49.799544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.023687Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180909942, txId: 281474976715673] shutting down 2024-11-21T09:21:50.399742Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910124, txId: 281474976715676] shutting down Trying to start YDB, gRPC: 25426, MsgBus: 21702 2024-11-21T09:21:50.538673Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660359655307242:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.538695Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00489c/r3tmp/tmpjbcKOk/pdisk_1.dat 2024-11-21T09:21:50.550201Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25426, node 3 2024-11-21T09:21:50.558357Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.558373Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.558375Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.558419Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21702 TClient is connected to server localhost:21702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.638828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.638854Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.639966Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:50.641209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.645566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.658425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.676715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.687081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.832793Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660359655308801:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.832826Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.838520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.845088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.853317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.860128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.867468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.881981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.897219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660359655309303:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.897257Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.897263Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660359655309308:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.897948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.901472Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660359655309310:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.109378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.407013Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911230, txId: 281474976715673] shutting down >> KqpScan::Join3 >> KqpSplit::StreamLookupSplitAfterFirstResult >> KqpScan::SecondaryIndexCustomColumnOrder [GOOD] >> KqpScan::SelectExistsUnexpected >> KqpScan::IsNull >> KqpScan::LeftSemiJoinSimple >> KqpScan::CrossJoin >> KqpScan::SqlInParameter [GOOD] >> KqpScan::SqlInLiteral >> KqpScan::SelfJoin3xSameLabels [GOOD] >> KqpScan::SelfJoin3x >> KqpFlowControl::FlowControl_Unlimited [GOOD] >> KqpFlowControl::FlowControl_BigLimit >> KqpScan::DropRedundantSortByPk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore [GOOD] >> KqpScan::DqSourceLiteralRange Test command err: Trying to start YDB, gRPC: 23175, MsgBus: 29050 2024-11-21T09:21:34.261193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660293379230962:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:34.261239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004378/r3tmp/tmpZJrq3f/pdisk_1.dat 2024-11-21T09:21:34.306815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23175, node 1 2024-11-21T09:21:34.320457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:34.320472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:34.320473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:34.320513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29050 TClient is connected to server localhost:29050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:21:34.360094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:34.360124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:34.361258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:34.387634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.396284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:34.404501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.464313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.523563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.536171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:34.577025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660293379232376:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.577049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.610002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.618471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.627018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.634149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.640909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.647861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.659138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660293379232869:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.659164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.659198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660293379232874:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:34.659922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:34.661528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660293379232876:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:34.909803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.914905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.969572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.977247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.031813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.051960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.058073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.068085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.075476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.082042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.089208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.095944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.103059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.160731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T09:21:35.170956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.181110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.186870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.194270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.200964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.207730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T09:21:35.215224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... 72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:49.635386Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:49.635397Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:49.635412Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:49.635421Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:49.635432Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:49.635441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:49.635456Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:49.635465Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:49.635474Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:49.635482Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:49.635534Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:49.635544Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:49.635552Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:49.635556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:49.635570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:49.635579Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:49.635587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:49.635596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:49.635603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:49.635612Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:49.635617Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:49.635622Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:49.635692Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:49.635702Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:49.635716Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:49.635725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:49.635735Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:49.635744Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:49.635758Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:49.635766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:49.635776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:49.635784Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038700;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:49.636581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:49.636593Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:49.636603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:49.636608Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:49.636623Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:49.636633Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:49.636642Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:49.636651Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:49.636660Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:49.636670Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:49.636676Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:49.636680Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:49.636712Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:49.636723Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:49.636741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:49.636749Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:49.636761Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:49.636770Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:49.636786Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:49.636795Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:49.636806Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:49.636814Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038699;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpScan::PrunePartitionsByExpr [GOOD] >> KqpSplit::AfterResolve+Unspecified [GOOD] >> KqpSplit::AfterResult+Ascending >> KqpScan::PureExpr >> KqpSplit::ChoosePartition+Descending >> KqpSplit::AfterResolve+Descending [GOOD] >> OlapEstimationRowsCorrectness::TPCDS87 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS96 >> KqpScan::AggregateEmptyCountStar [GOOD] >> KqpScan::AggregateEmptySum >> KqpScan::RightSemiJoinSimple >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] >> KqpSplit::StreamLookupDeliveryProblem >> KqpScan::DecimalColumn [GOOD] >> KqpScan::DqSourceFullScan >> SystemView::QueryStatsAllTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::PrunePartitionsByExpr [GOOD] Test command err: Trying to start YDB, gRPC: 22761, MsgBus: 28935 2024-11-21T09:21:50.683312Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660361689960493:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.683631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004889/r3tmp/tmpSj9AA0/pdisk_1.dat 2024-11-21T09:21:50.746668Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22761, node 1 2024-11-21T09:21:50.754908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.754919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.754921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.754956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28935 2024-11-21T09:21:50.783823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.783867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.785030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.800823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.810463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.829129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.845233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.857236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.015878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660365984929339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.015903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.043616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.050220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.063640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.070787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.077367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.084189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.092943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660365984929844:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.092960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.092986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660365984929849:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.093669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.097587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660365984929851:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:51.310739Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660365984930176:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70e95zewa3sb6gphcv1176, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjMzMmZjOTktNmJmYWQ1ZDAtZjQyZDczMDAtZWNkY2Y2NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:51.313223Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911356, txId: 281474976710671] shutting down 2024-11-21T09:21:51.330285Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660365984930231:2465] TxId: 281474976710674. Ctx: { TraceId: 01jd70e96jepn7b0bg09eyv8r7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFmNjU3NzktOTBiNzMyODUtNjM3ODFmMDAtMzhkNGRkZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:51.331727Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911377, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 2280, MsgBus: 17645 2024-11-21T09:21:51.555309Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660365264078093:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004889/r3tmp/tmpHldBG6/pdisk_1.dat 2024-11-21T09:21:51.569564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 2280, node 2 2024-11-21T09:21:51.573610Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.573623Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.573625Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.573664Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:51.574136Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17645 TClient is connected to server localhost:17645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:51.656829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.656859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.657186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.657845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:51.659692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.673974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.690010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.701475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.889297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660365264079484:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.889327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.893748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.900414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.910792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.924757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.931456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.945654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.955771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660365264079987:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.955795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.955797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660365264079992:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.956445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.966134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660365264079994:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.176546Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912217, txId: 281474976715671] shutting down >> KqpScan::AggregateNoColumnNoRemaps >> KqpSplit::AfterResult+Descending [GOOD] >> KqpSplit::AfterResult+Unspecified >> KqpSplit::UndeliveryOnFinishedRead >> KqpScan::StreamLookup [GOOD] >> KqpScan::StreamLookupByFullPk >> KqpScan::IsNull [GOOD] >> KqpScan::GrepRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResolve+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 62475, MsgBus: 16009 2024-11-21T09:21:50.817557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660363368683749:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.817885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004886/r3tmp/tmpkq9PAa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62475, node 1 2024-11-21T09:21:50.886996Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:50.889800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.889811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.889813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.889856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16009 2024-11-21T09:21:50.917793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.917824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.918904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.936250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:50.946024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.008487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.026877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.036179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.126988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367663652578:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.127018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.153880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.161649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.168440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.175350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.230134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.238746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.254564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367663653093:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.254595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.254656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367663653098:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.255309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.258748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660367663653100:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:51.457109Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660367663653413:2453] TxId: 281474976710672. Ctx: { TraceId: 01jd70e9aj45azggq5jv5w0ahy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk5MjFkNDYtMzE3NzI0NDEtNGRhZWRhMTUtZWY1YWFmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:51.457195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd70e9aj45azggq5jv5w0ahy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk5MjFkNDYtMzE3NzI0NDEtNGRhZWRhMTUtZWY1YWFmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:51.465854Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911503, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 6064, MsgBus: 1996 2024-11-21T09:21:51.712018Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660364592436370:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.712047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004886/r3tmp/tmpnknKWC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6064, node 2 2024-11-21T09:21:51.729382Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:51.731555Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.731580Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.731582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.731621Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1996 TClient is connected to server localhost:1996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:51.812288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.812324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.813420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:51.815068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.826783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.834827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.853874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.865320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.004684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368887405215:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.004707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.010575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.017486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.029554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.035997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.046721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.057205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.066102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368887405726:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.066130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368887405731:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.066132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.066670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.070227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660368887405733:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.285551Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ea4e6xfcyxmecxf9mwca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZmNzViYWEtMTM2ZDFhZi0yNDkwMjEwNy1kYTQyOTI3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:52.295416Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912329, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::QueryStatsAllTables [GOOD] Test command err: 2024-11-21T09:21:36.273028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660299998528521:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.273126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00355f/r3tmp/tmpBMt705/pdisk_1.dat 2024-11-21T09:21:36.383982Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:36.389259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.389280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.391268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7515, node 1 2024-11-21T09:21:36.421079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.421088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.421089Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.421117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:36.505869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.513736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.585304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.616549Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2024-11-21T09:21:36.616605Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2024-11-21T09:21:36.621849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299998529450:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.621876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.621940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299998529462:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.622793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.629048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660299998529464:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:21:36.715665Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000014867FAF98E8 2024-11-21T09:21:36.715697Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7439660299998529432:2306], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA3MWQ5NjItODVhOWI2MjQtZDNmNTE2Y2MtODJiZDllMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2024-11-21T09:21:36.715748Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2024-11-21T09:21:36.715763Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7439660299998529432:2306], queueSize: 1 2024-11-21T09:21:36.715883Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7439660299998529432:2306], compileActor: [1:7439660299998529551:2316] 2024-11-21T09:21:36.762340Z node 1 :KQP_YQL INFO: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.761 INFO ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [KQP] kqp_host.cpp:1338: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! 
$17 $2)) ) 2024-11-21T09:21:36.762562Z node 1 :KQP_YQL TRACE: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.762 TRACE ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! (Commit! $17 $2) (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2024-11-21T09:21:36.762627Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.762 DEBUG ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 45us 2024-11-21T09:21:36.764674Z node 1 :KQP_YQL INFO: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.764 INFO ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [RESULT] yql_result_provider.cpp:1416: RewriteIO 2024-11-21T09:21:36.765892Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.765 DEBUG ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 1.14ms 2024-11-21T09:21:36.766047Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.766 DEBUG ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [perf] yql_expr_constraint.cpp:3134: Execution of [ConstraintTransformer::DoTransform] took 98us 2024-11-21T09:21:36.766226Z node 1 :KQP_YQL TRACE: TraceId: 01jd70dtvc3a4y2n7wtj1n7ypv, SessionId: CompileActor 2024-11-21 09:21:36.766 TRACE ydb-core-sys_view-ut_kqp(pid=1960806, tid=0x00007F1030E72640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (DataSource '"kikimr" '"db")) (let $2 (Key '('table (String '"Root/.sys/pg_tables")))) (let $3 (DataSink 'result)) (let $4 (PgType 'bool)) (let $5 (PgType 'name)) (let $6 (ListType (StructType '('"hasindexes" $4) '('"hasrules" $4) '('"hastriggers" $4) '('"rowsecurity" $4) '('"schemaname" $5) '('"tablename" $5) '('"tableowner" $5) '('"tablespace" $5)))) (let ... 
th status: LookupError; 2024-11-21T09:21:49.911487Z node 46 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22014, node 46 2024-11-21T09:21:49.929392Z node 46 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:49.929409Z node 46 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:49.929411Z node 46 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:49.929466Z node 46 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:49.998271Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:49.998316Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:49.999504Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:49.999513Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:50.005395Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.235018Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439660361644461922:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.235038Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439660361644461933:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.235045Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.235832Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.241389Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7439660361644461936:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:50.330927Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70e84ta5st95zcmg3t69b6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=ZGQ0NzZkODItY2I3NmUxYmQtOWEzNTAyYi02YWYzY2E5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:50.343189Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70e87we83w7y62merppgff, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=NDY4MDg4OWItODY0M2M2YmMtYWY2NzBiY2YtZjI2NmZiMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:50.343764Z node 46 :SYSTEM_VIEWS INFO: Scan started, actor: [46:7439660361644462081:2323], owner: [46:7439660361644462077:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T09:21:50.343982Z node 46 :SYSTEM_VIEWS INFO: Scan prepared, actor: [46:7439660361644462081:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:50.344075Z node 46 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [46:7439660361644462081:2323], row count: 1, finished: 1 2024-11-21T09:21:50.344086Z node 46 :SYSTEM_VIEWS INFO: Scan finished, actor: [46:7439660361644462081:2323], owner: [46:7439660361644462077:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T09:21:50.344933Z node 46 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180910342, txId: 281474976715662] shutting down 2024-11-21T09:21:51.376679Z node 51 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[51:7439660367038129363:2135];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00355f/r3tmp/tmpU1ntWJ/pdisk_1.dat 2024-11-21T09:21:51.380517Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:51.392554Z node 51 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6143, node 51 2024-11-21T09:21:51.415617Z node 51 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.415645Z node 51 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.415647Z node 51 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.415685Z node 51 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.476435Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.476473Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.478068Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:51.480422Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.485628Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.733132Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439660367038130181:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.733133Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439660367038130189:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.733157Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.733912Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.738279Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [51:7439660367038130195:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:51.853795Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70e9kmeatzck584tm5v9ts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=M2JlMTE4YTUtN2Q4NzViOWMtNDNlNzJjMC0xZDJiNWM5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:51.866877Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70e9qg8d7hrdhmhw68ps6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=MTQ4OGM0NS0xOWFlMGQzMi00MTQxNjMwMS1hOTIyY2RjZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:51.867407Z node 51 :SYSTEM_VIEWS INFO: Scan started, actor: [51:7439660367038130340:2323], owner: [51:7439660367038130336:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T09:21:51.867604Z node 51 :SYSTEM_VIEWS INFO: Scan prepared, actor: [51:7439660367038130340:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:51.867770Z node 51 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [51:7439660367038130340:2323], row count: 1, finished: 1 2024-11-21T09:21:51.867782Z node 51 :SYSTEM_VIEWS INFO: Scan finished, actor: [51:7439660367038130340:2323], owner: [51:7439660367038130336:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T09:21:51.868346Z node 51 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911866, txId: 281474976715662] shutting down >> KqpScan::Effects >> KqpScan::StreamExecuteScanQueryCancelation >> KqpSplit::ChoosePartition+Ascending [GOOD] >> KqpSplit::BorderKeys+Unspecified >> KqpScan::JoinSimple >> KqpScan::LeftSemiJoinSimple [GOOD] >> KqpScan::JoinWithParams >> KqpScan::SelectExistsUnexpected [GOOD] >> KqpScan::SqlInLiteral [GOOD] >> KqpScan::SelfJoin3x [GOOD] >> KqpFlowControl::FlowControl_BigLimit [GOOD] >> KqpFlowControl::FlowControl_SmallLimit >> KqpScan::Join3 [GOOD] >> KqpScan::Join3TablesNoRemap >> KqpScan::DqSourceLiteralRange [GOOD] >> KqpSplit::StreamLookupSplitAfterFirstResult [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead >> KqpScan::PureExpr [GOOD] >> KqpScan::RestrictSqlV0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SqlInLiteral [GOOD] Test command err: Trying to start YDB, gRPC: 27204, MsgBus: 8354 2024-11-21T09:21:51.482292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660364780457168:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.482351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487f/r3tmp/tmpwlAaX2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27204, node 1 2024-11-21T09:21:51.560704Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:51.562205Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.562220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T09:21:51.562222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.562251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8354 2024-11-21T09:21:51.582437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.582467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.583595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.623938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.626019Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.630113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:51.695380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.715584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:51.728951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.805828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364780458569:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.805862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.837874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.843772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.854502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.861179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.868247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.874855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.884870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364780459070:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.884906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.884949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364780459075:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.885634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.888454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660364780459077:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.133789Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660369075426712:2464] TxId: 281474976715673. Ctx: { TraceId: 01jd70e9z58s587v18v8qpd47t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIwZDMwZTgtM2M3ZDJiODgtNDdhZjYxYjMtMTY1ZjVhYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.138487Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912175, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 17202, MsgBus: 1586 2024-11-21T09:21:52.360514Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660368634956462:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.360531Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487f/r3tmp/tmpOrtSms/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17202, node 2 2024-11-21T09:21:52.376744Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:52.378011Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.378036Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.378038Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.378069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1586 TClient is connected to server localhost:1586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.460531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.460556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.461773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.462897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.474907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.492623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.509620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.520121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.660838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368634957998:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.660864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.666137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.672520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.680555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.686899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.694312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.701625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.717842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368634958509:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.717871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.717945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660368634958514:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.718635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.721440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660368634958516:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.974662Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913015, txId: 281474976715672] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelectExistsUnexpected [GOOD] Test command err: Trying to start YDB, gRPC: 9158, MsgBus: 3162 2024-11-21T09:21:50.832032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660362698769593:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.832297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004882/r3tmp/tmptAq6Hl/pdisk_1.dat 2024-11-21T09:21:50.884464Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9158, node 1 2024-11-21T09:21:50.894677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.894686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.894687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.894715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3162 TClient is connected to server localhost:3162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:50.933288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.933316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:50.934428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.963040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.968728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:51.031550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.050911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.061420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.181630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660366993738457:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.181666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.208602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.214549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.224089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.231146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.286346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.295040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.310009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660366993738975:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.310031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660366993738980:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.310036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.310754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.314369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660366993738982:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.523159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.531952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.539980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.728603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.828014Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911874, txId: 281474976715681] shutting down 2024-11-21T09:21:51.853379Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911895, txId: 281474976715683] shutting down 2024-11-21T09:21:51.904446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911944, txId: 281474976715685] shutting down 2024-11-21T09:21:51.962729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912007, txId: 281474976715687] shutting down Trying to start YDB, gRPC: 5186, MsgBus: 2050 2024-11-21T09:21:52.141320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660370931080386:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.141337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004882/r3tmp/tmpVSNh9K/pdisk_1.dat 2024-11-21T09:21:52.152292Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5186, node 2 2024-11-21T09:21:52.162681Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.162695Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.162697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.162746Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2050 TClient is connected to server localhost:2050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.241450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.241482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.242583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.244124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.250687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.261161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.281168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.290778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.466392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370931081932:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.466445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.472329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.479507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.492137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.505576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.512672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.526790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.542332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370931082433:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.542372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.542379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370931082438:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.543070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.546483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660370931082440:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.737085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.851713Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912833, txId: 281474976715673] shutting down 2024-11-21T09:21:52.935478Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912917, txId: 281474976715676] shutting down >> KqpScan::AggregateEmptySum [GOOD] >> KqpScan::RightSemiJoinSimple [GOOD] >> KqpScan::SecondaryIndex >> KqpSplit::StreamLookupDeliveryProblem [GOOD] >> KqpScan::DqSourceFullScan [GOOD] >> KqpScan::DqSource ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelfJoin3x [GOOD] Test command err: Trying to start YDB, gRPC: 4746, MsgBus: 20483 2024-11-21T09:21:51.509834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660367452715314:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.509908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004879/r3tmp/tmpDbkOc2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4746, node 1 2024-11-21T09:21:51.582314Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:51.589053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.589063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.589065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.589092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20483 2024-11-21T09:21:51.610174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.610205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.611361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
waiting... 2024-11-21T09:21:51.646353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.649506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.660508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.721943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.742594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.751918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.819006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367452716708:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.819053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.854441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.862925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.875129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.882920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.937194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.945313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.954874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367452717224:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.954907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.954915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367452717229:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.955634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.958395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660367452717231:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:52.128245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.235973Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660371747685058:2479] TxId: 281474976710674. Ctx: { TraceId: 01jd70ea0yb293kn0h9srwrc4y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDEzMDE0NTktODAyMDdjZDYtYzRlYmZhZGQtZTM4ZmNhZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.263483Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912280, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 27938, MsgBus: 7703 2024-11-21T09:21:52.376839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660371743867821:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.377105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004879/r3tmp/tmpywM7We/pdisk_1.dat 2024-11-21T09:21:52.384106Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27938, node 2 2024-11-21T09:21:52.393681Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.393698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.393700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.393741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7703 TClient is connected to server localhost:7703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.476975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.477009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.478213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.479272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.481047Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:52.489087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.501525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.518186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.533370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.690842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371743869385:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.690875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.695174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.701880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.715879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.722680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.736889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.750508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.759012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371743869887:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.759037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.759046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371743869894:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.759655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.763809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660371743869896:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.952755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.078817Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913106, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSourceLiteralRange [GOOD] Test command err: Trying to start YDB, gRPC: 2483, MsgBus: 24674 2024-11-21T09:21:48.198056Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660352119887384:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b8/r3tmp/tmpnmCwOp/pdisk_1.dat 2024-11-21T09:21:48.242523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:48.261823Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2483, node 1 2024-11-21T09:21:48.276272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:48.276281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:48.276282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:48.276318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24674 2024-11-21T09:21:48.337453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:48.337488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:48.338568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:48.371961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:48.380178Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:48.389331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.461864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:48.481001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:48.491191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.555518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352119888737:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.555540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.632693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.638872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.649001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.655112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.662688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.670758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:48.684646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352119889243:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660352119889248:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.684673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:48.685205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:48.689916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660352119889250:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [[[2];[200u];["Value3"]];[[3];[300u];["Value4"]]] 2024-11-21T09:21:51.015311Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911055, txId: 281474976715771] shutting down Trying to start YDB, gRPC: 26279, MsgBus: 18608 2024-11-21T09:21:51.366890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660366268005294:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b8/r3tmp/tmpOJWcEJ/pdisk_1.dat 2024-11-21T09:21:51.374975Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:51.377730Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26279, node 2 2024-11-21T09:21:51.388338Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.388352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.388354Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.388392Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18608 TClient is connected to server localhost:18608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:21:51.469427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.469458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.469775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.470432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:51.472227Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.477704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T09:21:51.489102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.510216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.524644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.727825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660366268006716:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.727855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.734361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.743597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.749424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.756584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.763523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.770438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.787005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660366268007207:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.787038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660366268007212:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.787040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.787705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.790087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660366268007214:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 15894, MsgBus: 29974 2024-11-21T09:21:52.480619Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660368625266610:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.480848Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0048b8/r3tmp/tmpZ2YNjI/pdisk_1.dat 2024-11-21T09:21:52.495104Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15894, node 3 2024-11-21T09:21:52.505829Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.505845Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.505848Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.505893Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29974 TClient is connected to server localhost:29974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.580959Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.580989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.582069Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.589501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.596274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.604431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.622722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.633071Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.804091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660368625268153:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.804143Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.807565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.813402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.827395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.833845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.841092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.848623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.864454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660368625268654:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.864482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.864483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660368625268659:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.865179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.868684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660368625268661:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.049928Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.102153Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913148, txId: 281474976715673] shutting down 2024-11-21T09:21:53.126349Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913169, txId: 281474976715675] shutting down >> KqpScan::GrepRange [GOOD] >> KqpScan::GrepNonKeyColumns >> KqpSplit::AfterResult+Ascending [GOOD] >> KqpSplit::ChoosePartition+Descending [GOOD] >> KqpSplit::ChoosePartition+Unspecified >> KqpScan::JoinWithParams [GOOD] >> KqpScan::LMapFunction >> KqpScan::StreamLookupByFullPk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 17319, MsgBus: 19585 2024-11-21T09:21:51.875894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660364588571332:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.876054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004852/r3tmp/tmpHwQLvM/pdisk_1.dat 2024-11-21T09:21:51.933333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17319, node 1 2024-11-21T09:21:51.945861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.945877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.945879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.945933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19585 2024-11-21T09:21:51.977436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.977464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.978561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.009653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.014439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.029630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.046943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.057944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.181313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368883540178:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.181347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.210778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.218418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.228122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.239437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.246595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.260246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.268850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368883540691:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.268856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368883540696:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.268886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.269478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.273212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660368883540698:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.456175Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660368883541013:2453] TxId: 281474976715672. Ctx: { TraceId: 01jd70ea9157yjsjrcmctwb290, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgwZDlhYzgtYzk2MzY3MGEtOWNmN2I5ZmUtMjRlZjk4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:52.456261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ea9157yjsjrcmctwb290, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgwZDlhYzgtYzk2MzY3MGEtOWNmN2I5ZmUtMjRlZjk4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:52.466752Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912504, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 25122, MsgBus: 17743 2024-11-21T09:21:52.749494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660371992317912:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.749515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004852/r3tmp/tmpT5k15g/pdisk_1.dat 2024-11-21T09:21:52.763727Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25122, node 2 2024-11-21T09:21:52.773009Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.773021Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.773023Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.773080Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17743 TClient is connected to server localhost:17743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.851432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.851474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.851794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.852482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.857553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.866069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.883406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.940093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.057387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376287286765:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.057433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.061560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.067781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.079815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.085895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.093360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.100317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.108144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376287287259:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.108175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.108183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376287287264:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.108745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.113278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660376287287266:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.283936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.337329Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70eb4a8pqd4h1whbvehtzq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTk4YWJiNjktOWY1Y2ExMWQtZTI2MmQyZmEtNmExMTkyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:53.338379Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70eb4a8pqd4h1whbvehtzq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTk4YWJiNjktOWY1Y2ExMWQtZTI2MmQyZmEtNmExMTkyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:53.338867Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70eb4a8pqd4h1whbvehtzq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTk4YWJiNjktOWY1Y2ExMWQtZTI2MmQyZmEtNmExMTkyNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:53.379174Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd70eb5x6n7vnjnn20jgh79h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2NiOTU2MGMtM2FmYTNhYWMtYmMzOTcyOC1iYzQyNGE0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- 2024-11-21T09:21:53.381110Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913421, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 18861, MsgBus: 8012 2024-11-21T09:21:50.518552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660360672288442:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:50.518770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488c/r3tmp/tmpt8oznX/pdisk_1.dat 2024-11-21T09:21:50.574500Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18861, node 1 2024-11-21T09:21:50.583165Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:50.583177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:50.583179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:50.583232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8012 TClient is connected to server localhost:8012 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:21:50.619658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:50.619682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:50.620824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:50.651313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.660841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.677525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:50.696783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.717119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:50.815488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660360672289981:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.815521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.850854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.856826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.867428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.874614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.881523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.895793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:50.904163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660360672290494:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.904196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660360672290499:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.904220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:50.904979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:50.908227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660360672290501:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:51.148280Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660364967258123:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70e8yq2pz7nerjk4ppgnt5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JkY2UxM2YtYWM2MmI3OWItNmI1YzNhNGQtZjUwYjllOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:51.528362Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660364967258211:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd70e8yq2pz7nerjk4ppgnt5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JkY2UxM2YtYWM2MmI3OWItNmI1YzNhNGQtZjUwYjllOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:51.529360Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180911195, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 64889, MsgBus: 11429 2024-11-21T09:21:51.795110Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660366945258151:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.795128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488c/r3tmp/tmpZrNN7B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64889, node 2 2024-11-21T09:21:51.810581Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:51.810596Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.810599Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.810601Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.810655Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11429 TClient is connected to server localhost:11429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:51.895583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.895615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.896620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:51.897353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.909528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.918530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.936595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.946359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.126312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371240226996:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.126343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.130664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.136660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.148384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.155226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.162895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.178231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.192309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371240227498:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.192339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.192340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660371240227503:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.193041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.196775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660371240227505:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.523784Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912504, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 24097, MsgBus: 17027 2024-11-21T09:21:52.700534Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660371548685875:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.700562Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00488c/r3tmp/tmp6QR2rG/pdisk_1.dat 2024-11-21T09:21:52.709783Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24097, node 3 2024-11-21T09:21:52.719669Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.719682Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.719684Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.719718Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17027 TClient is connected to server localhost:17027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.801267Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.801307Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.802413Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.803567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.809778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.821555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.839287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.849453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.989548Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660371548687412:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.989584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.995278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.002044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.009138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.016288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.024481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.037857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.053424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375843655219:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.053456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.053484Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375843655224:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.054162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.057592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660375843655226:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.349360Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913316, txId: 281474976715671] shutting down >> KqpScan::AggregateNoColumnNoRemaps [GOOD] >> KqpScan::AggregateWithFunction >> KqpScan::Effects [GOOD] >> KqpScan::FullFrameWindow >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> OlapEstimationRowsCorrectness::TPCH21 >> KqpScan::JoinSimple [GOOD] >> KqpScan::Join4 >> KqpScan::Limit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 63544, MsgBus: 25920 2024-11-21T09:21:51.603896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660365692472504:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.604248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004862/r3tmp/tmpmk68S9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63544, node 1 2024-11-21T09:21:51.660502Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:51.662511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.662515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.662517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.662551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25920 TClient is connected to server localhost:25920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:51.705709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.705732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.706810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:21:51.736313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.738232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.747982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.763901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.786559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.797912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.959597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660365692474064:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.959619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.991515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.998088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.008187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.015083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.029591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.043693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.060683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369987441866:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.060716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.060754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369987441871:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.061575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.070517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660369987441873:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.253715Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660369987442184:2453] TxId: 281474976715672. Ctx: { TraceId: 01jd70ea3h07qtxgkjfcd4ja4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlkM2FmOWItZDQ4MGRjYWYtMmE5OTBhNzQtOGU5Y2M0YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:52.253826Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ea3h07qtxgkjfcd4ja4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlkM2FmOWItZDQ4MGRjYWYtMmE5OTBhNzQtOGU5Y2M0YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:52.264582Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912301, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19853, MsgBus: 62681 2024-11-21T09:21:52.478349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660370217578274:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004862/r3tmp/tmp9kkiIl/pdisk_1.dat 2024-11-21T09:21:52.486606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:52.488361Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19853, node 2 2024-11-21T09:21:52.499005Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.499033Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.499035Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.499081Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62681 TClient is connected to server localhost:62681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.580761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.580795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.581124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.581912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.584898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.597108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.615176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.625607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.803526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370217579685:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.803555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.808518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.815391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.827348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.834193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.840713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.848298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.856884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370217580187:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.856906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.856927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370217580192:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.857489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.861078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660370217580194:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.045291Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70eaw4dcrgbvdsq92as6pw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzY0ZGM5MGEtYjU0MzYyZWItNjMwMzU3OGItZTIzMmU2Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:53.478469Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913092, txId: 281474976715671] shutting down >> KqpScan::RestrictSqlV0 [GOOD] >> KqpSplit::AfterResult+Unspecified [GOOD] >> KqpScan::Join3TablesNoRemap [GOOD] >> KqpScan::Join3Tables >> KqpSplit::BorderKeys+Unspecified [GOOD] >> KqpFlowControl::FlowControl_SmallLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupByFullPk [GOOD] Test command err: Trying to start YDB, gRPC: 15278, MsgBus: 1699 2024-11-21T09:21:51.902411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660366954940286:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.902428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00484d/r3tmp/tmp0AWcPR/pdisk_1.dat 2024-11-21T09:21:51.952925Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15278, node 1 2024-11-21T09:21:51.968454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.968471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.968474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.968519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1699 TClient is connected to server localhost:1699 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:52.003530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.003554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.004659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.009901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.021408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.037667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.055643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.065645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.240787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371249909138:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.240815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.269546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.276406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.289053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.343627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.351339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.358477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.415712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371249909659:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.415765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.415769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371249909664:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.416578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.420310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660371249909666:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:52.619723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.710693Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660371249910246:2487] TxId: 281474976710675. Ctx: { TraceId: 01jd70eah11ve77p7w9cawh6hc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk0OTE0NS03YWYwYzlhZS04ZmFiOWMzOC05MjI1ZGI4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.714715Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912756, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 11548, MsgBus: 21694 2024-11-21T09:21:52.989340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660368153329836:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.989423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00484d/r3tmp/tmpjFUdTD/pdisk_1.dat 2024-11-21T09:21:52.998804Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11548, node 2 2024-11-21T09:21:53.008178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.008191Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.008195Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.008243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21694 TClient is connected to server localhost:21694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:53.089528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.089561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.090661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.091875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.094306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.107809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.124103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.136572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.287876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372448298665:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.287897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.293596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.300336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.310245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.316724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.323978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.331488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.346726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372448299175:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.346750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.346763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372448299180:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.347393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.351574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660372448299182:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.517431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.550724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.597368Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913638, txId: 281474976715675] shutting down >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] >> KqpScan::GrepNonKeyColumns [GOOD] >> KqpSplit::UndeliveryOnFinishedRead [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading >> KqpScan::CrossJoin [GOOD] >> KqpScan::CountDistinct ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 24948, MsgBus: 61132 2024-11-21T09:21:51.593817Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660367126068321:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.594322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004873/r3tmp/tmpHWSgGG/pdisk_1.dat 2024-11-21T09:21:51.649138Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24948, node 1 2024-11-21T09:21:51.659759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.659775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.659778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.659812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61132 TClient is connected to server localhost:61132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:21:51.693406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.693442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:51.694509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.720951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.723499Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.736966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.799145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:51.815625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.827330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.895418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367126069712:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.895445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.919356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.925451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.938205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.945167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.952858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.965837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.974895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367126070205:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.974914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.974973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367126070210:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.975690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.979439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660367126070212:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2024-11-21T09:21:52.196411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ea1pd87jsw6vc9eph3hc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYyOGQ3ZTEtOWFmYjc2OWQtYjNkMjM3MmUtYzFhMjI5MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:52.704635Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912245, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 21682, MsgBus: 17094 2024-11-21T09:21:52.888570Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660371368868020:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.888858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004873/r3tmp/tmpyj7oA0/pdisk_1.dat 2024-11-21T09:21:52.897291Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21682, node 2 2024-11-21T09:21:52.906567Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.906579Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.906581Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.906627Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17094 TClient is connected to server localhost:17094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.990804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.990827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.991593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.991863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:21:53.000463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.008151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.025815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.037996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.221830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375663836867:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.221868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.227659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.234354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.247486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.254062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.261508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.267995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.285408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375663837369:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.285438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.285525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375663837374:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.286186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.295518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660375663837376:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.475358Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70eb9qc90sb5rkzvvvcsy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVjZWNiZjAtOGUyOWJiYmUtZjE3ZmJiZDgtNDg3ODZlZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:53.886442Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913519, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpFlowControl::FlowControl_SmallLimit [GOOD] Test command err: Trying to start YDB, gRPC: 5373, MsgBus: 18021 2024-11-21T09:21:51.510605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660364729724950:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.510668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487c/r3tmp/tmpkf6zAy/pdisk_1.dat 2024-11-21T09:21:51.564422Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5373, node 1 2024-11-21T09:21:51.572588Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.572601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.572602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.572630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18021 2024-11-21T09:21:51.610249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.610273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.611325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18021 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.639329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.643580Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.652142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.668339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.688401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.701191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.861118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364729726358:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.861164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.867257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.873720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.882433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.937788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.945628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.953170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:51.961073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364729726872:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.961110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.961132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660364729726877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.961842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:51.965851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660364729726879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.117837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.199348Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660369024694736:2479] TxId: 281474976715674. Ctx: { TraceId: 01jd70ea0mekt90p7myj0kc4kn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJkNTVjMGUtYzZhNTQyYzQtNmQwZDlmNzItZDMwMzIxYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.212029Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912245, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 24264, MsgBus: 15979 2024-11-21T09:21:52.408819Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660370694163942:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.409067Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487c/r3tmp/tmpFTfdhg/pdisk_1.dat 2024-11-21T09:21:52.420748Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24264, node 2 2024-11-21T09:21:52.430930Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.430943Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.430946Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.430993Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15979 TClient is connected to server localhost:15979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.509331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.509360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.510452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.511747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.518394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.527183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.546703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.556783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.721561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370694165482:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.721601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.727654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.734428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.742798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.750149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.757407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.764628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.780327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370694165987:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.780356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660370694165992:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.780360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.780961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.784076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660370694165994:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.982936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.072192Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913106, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 2069, MsgBus: 10166 2024-11-21T09:21:53.326429Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660375032115031:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.326748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00487c/r3tmp/tmpB1tMUK/pdisk_1.dat 2024-11-21T09:21:53.337040Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2069, node 3 2024-11-21T09:21:53.344974Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.344989Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.344991Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.345033Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10166 TClient is connected to server localhost:10166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.426805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.426831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.427977Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.429071Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.429741Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:53.433000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.444079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.462114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.471714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.611161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375032116565:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.611201Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.616798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.623425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.632646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.646542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.653381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.660301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.668782Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375032117077:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.668811Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.668851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375032117082:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.669520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.673531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660375032117084:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.838760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.930828Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913960, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RestrictSqlV0 [GOOD] Test command err: Trying to start YDB, gRPC: 11392, MsgBus: 10231 2024-11-21T09:21:52.576374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660369145674829:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.576394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00479c/r3tmp/tmpSfQVg1/pdisk_1.dat 2024-11-21T09:21:52.636471Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11392, node 1 2024-11-21T09:21:52.646342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.646359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.646361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.646404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10231 2024-11-21T09:21:52.678085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.678107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.679179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.704334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.713367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.772162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.789446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.799928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.884996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369145676387:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.885023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.915733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.921636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.977042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.988156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.994862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.002061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.010516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373440644199:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.010547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.010549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373440644204:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.011153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.015215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660373440644206:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:53.194430Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660373440644506:2454] TxId: 281474976710672. Ctx: { TraceId: 01jd70eb126bbbbdytntr1a54a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ2ZDdiZTItZjJjNDk4NTQtNzQyOGY5OTctMTNiYmNiNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.197275Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913193, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 61244, MsgBus: 23857 2024-11-21T09:21:53.478464Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660373463279749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.478545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00479c/r3tmp/tmpq9BTst/pdisk_1.dat 2024-11-21T09:21:53.487044Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61244, node 2 2024-11-21T09:21:53.495738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.495749Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.495751Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.495783Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23857 TClient is connected to server localhost:23857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.578685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.578719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.579806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.581488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.592798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.601595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.620131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.629589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.785706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660373463281299:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.785773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.790729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.797797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.807496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.814783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.828563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.835579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.851554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660373463281801:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.851575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.851599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660373463281806:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.852309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.855389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660373463281808:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.039182Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439660377758249405:2458], status: GENERIC_ERROR, issues:
:1:0: Error: V0 syntax is disabled 2024-11-21T09:21:54.039274Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzYyYTU4N2ItNWZhNjZjMTktNDI3M2JmMDgtZjc1NzQ3YmY=, ActorId: [2:7439660377758249398:2454], ActorState: ExecuteState, TraceId: 01jd70ebvm05aw7ysa3y7ne15t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:1:0: Error: V0 syntax is disabled >> KqpScan::DqSource [GOOD] >> KqpScan::LMapFunction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 4132, MsgBus: 5455 2024-11-21T09:21:51.856631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660364767955113:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.856648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485b/r3tmp/tmpe34Cxw/pdisk_1.dat 2024-11-21T09:21:51.917048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4132, node 1 2024-11-21T09:21:51.929936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.929964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.929966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.929997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5455 2024-11-21T09:21:51.958342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.958375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:51.959421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.993170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.006319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.022639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.040703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.050871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.186261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369062923959:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.186307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.220371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.226322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.232950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.239267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.246185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.253851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.268853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369062924471:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.268899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.268917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660369062924476:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.269541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.273478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660369062924478:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.484656Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660369062924790:2453] TxId: 281474976715672. Ctx: { TraceId: 01jd70eaancc9c3m9wmra4c42m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM2YWQyYzYtOWJhMGNhNmItZGU5NGYzMGYtOGM2NDEzYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:52.484747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70eaancc9c3m9wmra4c42m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM2YWQyYzYtOWJhMGNhNmItZGU5NGYzMGYtOGM2NDEzYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:52.927006Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912525, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 2043, MsgBus: 19109 2024-11-21T09:21:53.124286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660374142512665:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.124417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485b/r3tmp/tmpPKSga2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2043, node 2 2024-11-21T09:21:53.140086Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:53.142096Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.142121Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.142124Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.142181Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19109 TClient is connected to server localhost:19109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:53.224403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.224435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.227635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.227748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.228856Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:53.230106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.241007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.256234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.269701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.455428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374142514221:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.455446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.461012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.466793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.478400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.485698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.492392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.498894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.508760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374142514711:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.508778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374142514716:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.508781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.509427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.512835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660374142514718:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.685915Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ebga6m9f27rztmhk1hdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWI0YWQxOWEtNTA1NTUyNzAtNTI3Nzk1ZmMtMTcyZGJjZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715674 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:53.984947Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913729, txId: 281474976715671] shutting down >> KqpScan::SecondaryIndex [GOOD] >> KqpScan::Limit [GOOD] >> KqpScan::LimitOverSecondaryIndexRead >> KqpScan::AggregateWithFunction [GOOD] >> KqpScan::BoolFlag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] Test command err: Trying to start YDB, gRPC: 19482, MsgBus: 7255 2024-11-21T09:21:52.193609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660370642693325:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.193788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ac/r3tmp/tmpzPHITP/pdisk_1.dat 2024-11-21T09:21:52.253335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19482, node 1 2024-11-21T09:21:52.268390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.268406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.268408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.268447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7255 2024-11-21T09:21:52.294508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.294533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.295543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.327787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.335079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.397743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.417287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.430189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.525728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370642694872:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.525816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.533863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.540290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.554356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.561208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.616107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.624802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.632610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370642695387:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.632650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.632661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370642695392:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.633457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.637357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660370642695394:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.833911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.894579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70eap913vqvr28npavasy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI3NmIxYTgtODA4YTAxNmYtNGEyYzQ0MDAtYmJlMTUxNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:52.895877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70eap913vqvr28npavasy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI3NmIxYTgtODA4YTAxNmYtNGEyYzQ0MDAtYmJlMTUxNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:52.896358Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70eap913vqvr28npavasy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI3NmIxYTgtODA4YTAxNmYtNGEyYzQ0MDAtYmJlMTUxNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:52.946137Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660370642695935:2482] TxId: 281474976715676. Ctx: { TraceId: 01jd70ear26vb9txdfzeqf740y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRmM2U0NzMtZmEyMjkxMDEtZWMzNThhZjAtYjBlYTlkNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:52.946227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd70ear26vb9txdfzeqf740y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRmM2U0NzMtZmEyMjkxMDEtZWMzNThhZjAtYjBlYTlkNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715677 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:53.332417Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912994, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 17316, MsgBus: 26064 2024-11-21T09:21:53.477173Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660375656040924:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.477525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047ac/r3tmp/tmpRS7SCN/pdisk_1.dat 2024-11-21T09:21:53.486829Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17316, node 2 2024-11-21T09:21:53.495756Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.495769Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.495771Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.495810Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26064 TClient is connected to server localhost:26064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.579133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.579159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.579510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.580141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.583272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.639438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.658979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.669447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.789322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375656042480:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.789358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.795346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.802407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.814781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.828466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.835269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.849852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.865677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375656042984:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.865708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375656042989:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.865721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.866413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.869403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660375656042991:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.029955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.085102Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ebvm08840kx84w6y01wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk5MjhhMTAtMzFhMDdjNWMtNWQwOGJiOTYtYWIyNDg4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:54.086494Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70ebvm08840kx84w6y01wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk5MjhhMTAtMzFhMDdjNWMtNWQwOGJiOTYtYWIyNDg4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:54.087132Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70ebvm08840kx84w6y01wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk5MjhhMTAtMzFhMDdjNWMtNWQwOGJiOTYtYWIyNDg4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:54.125868Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd70ebx93fsf9m00y7w6f48s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTM5NjUyOGUtNDQ2OGVlNDctNmYzNWFmZjgtMWFmYjQ2M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- 2024-11-21T09:21:54.127719Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914170, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepNonKeyColumns [GOOD] Test command err: Trying to start YDB, gRPC: 21711, MsgBus: 8515 2024-11-21T09:21:52.192538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660368223828705:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.192809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047af/r3tmp/tmpfwipWF/pdisk_1.dat 2024-11-21T09:21:52.250061Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21711, node 1 2024-11-21T09:21:52.258837Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.258856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.258859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.258903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8515 TClient is connected to server localhost:8515 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:21:52.294051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.294093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:52.295161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.324379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.335395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.351083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.366559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.376803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.502675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368223830254:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.502706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.532516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.539317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.547158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.554369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.561204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.568085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.576939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368223830769:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.576960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.576970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368223830774:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.577616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.581111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660368223830776:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.735030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.779928Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912826, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 27569, MsgBus: 24995 2024-11-21T09:21:53.082652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660374063180266:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.082953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047af/r3tmp/tmpggCduZ/pdisk_1.dat 2024-11-21T09:21:53.091201Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27569, node 2 2024-11-21T09:21:53.102061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.102076Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.102079Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.102121Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24995 TClient is connected to server localhost:24995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.182884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.182926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.184009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.184741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.188241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.199897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.215180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.230226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.396654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374063181825:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.396676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.402822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.408932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.414711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.422431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.429469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.443647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.451949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374063182315:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.451976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.451997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660374063182320:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.452792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.456374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660374063182322:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.656815Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913701, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 25846, MsgBus: 29669 2024-11-21T09:21:53.782859Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660372451790337:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.783022Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047af/r3tmp/tmp5PJ4Af/pdisk_1.dat 2024-11-21T09:21:53.797508Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25846, node 3 2024-11-21T09:21:53.803258Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.803273Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.803275Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.803311Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29669 TClient is connected to server localhost:29669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.883298Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.883330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.884454Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.885657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.889159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.899586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.917624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.927963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.073761Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660376746759169:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.073797Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.079990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.086538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.094336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.100982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.108034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.115165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.123847Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660376746759680:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.123870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.123876Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660376746759685:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.124528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.129107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660376746759687:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.309838Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914352, txId: 281474976715671] shutting down >> KqpScan::Join4 [GOOD] >> KqpScan::JoinLeftOnly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::LMapFunction [GOOD] Test command err: Trying to start YDB, gRPC: 24404, MsgBus: 25095 2024-11-21T09:21:52.325020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660367906673777:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.325186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a8/r3tmp/tmpl1CFVS/pdisk_1.dat 2024-11-21T09:21:52.382365Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24404, node 1 2024-11-21T09:21:52.400464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.400478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.400481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.400514Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25095 2024-11-21T09:21:52.426211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.426241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.427327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.460120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.470086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.485220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:21:52.509437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.520375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.652330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367906675325:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.652360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.685910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.692339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.701771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.716278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.730003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.743539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.751528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367906675836:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.751549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.751555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367906675841:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.752160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.756298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660367906675843:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:52.963784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.024902Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660372201643704:2479] TxId: 281474976710674. Ctx: { TraceId: 01jd70eatx764a6tw4a78w5rrf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzNDA0YTQtYjU4NjEwNjAtYzNlNTY0YjAtMmJiNWM4YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.028988Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913071, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 25563, MsgBus: 3792 2024-11-21T09:21:53.197688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660375620888365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.197940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a8/r3tmp/tmpeTu8RI/pdisk_1.dat 2024-11-21T09:21:53.211944Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25563, node 2 2024-11-21T09:21:53.224957Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.224969Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.224971Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.225009Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3792 TClient is connected to server localhost:3792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:53.298161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.298189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.299327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.300508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.310276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.318948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.333051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.346915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.486117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375620889903:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.486139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.491899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.498027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.506133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.513242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.519963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.527142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.535927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375620890415:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.535941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375620890420:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.535948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.536548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.540260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660375620890422:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.735903Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913778, txId: 281474976715671] shutting down 2024-11-21T09:21:53.759769Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913806, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 13591, MsgBus: 21666 2024-11-21T09:21:53.890237Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660375103626044:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.890425Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a8/r3tmp/tmpitu8Td/pdisk_1.dat 2024-11-21T09:21:53.904842Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13591, node 3 2024-11-21T09:21:53.910624Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.910642Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.910644Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.910688Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21666 TClient is connected to server localhost:21666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.990468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.990500Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.991538Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.992329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.185383Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660379398593936:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.185411Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.188279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.244426Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660379398594036:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.244461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.244489Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660379398594041:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.245068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.247014Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660379398594043:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:54.352764Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914394, txId: 281474976715662] shutting down [[[2];[1000];["Dogecoin"]];[[4];[1];["XTC"]];[[5];[2];["Cardano"]];[[6];[3];["Tether"]]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSource [GOOD] Test command err: Trying to start YDB, gRPC: 2833, MsgBus: 29731 2024-11-21T09:21:51.698240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660367144136930:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:51.698501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485f/r3tmp/tmpaBLIQa/pdisk_1.dat 2024-11-21T09:21:51.754566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2833, node 1 2024-11-21T09:21:51.765304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:51.765318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:51.765321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:51.765376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29731 TClient is connected to server localhost:29731 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:51.798646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:51.798677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:51.799694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:51.832813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:51.836455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:51.838993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.904070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.963543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:51.976543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.042601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371439105787:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.042638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.076829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.084199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.092109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.099208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.106381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.113602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.121230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371439106280:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.121260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.121271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660371439106285:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.121860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.126556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660371439106287:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.310606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.368475Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660371439106751:2472] TxId: 281474976715674. Ctx: { TraceId: 01jd70ea6gefj60v8w90s85js3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODgyMzk3ZjgtYThiYWRhNzMtYTMyZWMxYjctM2NkYTAyY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.438641Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912413, txId: 281474976715673] shutting down 2024-11-21T09:21:52.476081Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660371439106832:2484] TxId: 281474976715676. Ctx: { TraceId: 01jd70ea9q06nh0xe434cny11f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2ODNmMTctMjE5YjEyMjAtNzQzODdkOTYtYmI4ZjM0YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.530141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912518, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 7215, MsgBus: 9307 2024-11-21T09:21:52.789188Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660371338466018:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.789212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485f/r3tmp/tmpBWGoYO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7215, node 2 2024-11-21T09:21:52.804764Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:52.806692Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.806704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.806705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.806761Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9307 TClient is connected to server localhost:9307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.889666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.889697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.890798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:52.891971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.902786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.911917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.929498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.945160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.122709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375633434867:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.122739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.127763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.135018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.142593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.149669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.163981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.177932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.193879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375633435369:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.193908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375633435374:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.193909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.194652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.197236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660375633435376:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.346250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1879, MsgBus: 6505 2024-11-21T09:21:53.693472Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660375527193785:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.693775Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00485f/r3tmp/tmpLXh48A/pdisk_1.dat 2024-11-21T09:21:53.701792Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1879, node 3 2024-11-21T09:21:53.710698Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.710720Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.710722Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.710758Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6505 TClient is connected to server localhost:6505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.794307Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.794340Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.795441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.796065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.807234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.816838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.835742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.846706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.989336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660375527195336:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.989386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.994254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.002387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.010058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.017312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.024600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.039647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.054323Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660379822163134:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.054365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.054389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660379822163139:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.055060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.058398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660379822163141:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.251442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.312129Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914352, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 18013, MsgBus: 25893 2024-11-21T09:21:52.732138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660369158858448:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.732155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478d/r3tmp/tmpTcf4Z2/pdisk_1.dat 2024-11-21T09:21:52.789616Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18013, node 1 2024-11-21T09:21:52.801040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.801058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.801060Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.801099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25893 2024-11-21T09:21:52.833822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.833855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.834917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.845864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:52.857685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.871288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.886391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.897688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.040393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373453827318:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.040429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.068029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.073634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.086424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.092899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.099916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.107175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.115593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373453827820:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.115618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.115624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373453827825:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.116303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.120515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660373453827827:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:53.301399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.371557Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660373453828392:2479] TxId: 281474976710674. Ctx: { TraceId: 01jd70eb5hfdc5r3r7m58vxsnj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWM1OGI3YTEtYzM0ZjQ1ZGUtZjA2MWUyMDYtYWFmNzc4NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.373724Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913414, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 63105, MsgBus: 1041 2024-11-21T09:21:53.598138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660375082575883:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.598382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00478d/r3tmp/tmpi9D2QY/pdisk_1.dat 2024-11-21T09:21:53.608398Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63105, node 2 2024-11-21T09:21:53.618101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.618111Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.618114Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.618167Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1041 TClient is connected to server localhost:1041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:53.698552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.698598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.699694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.700949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.713050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.721236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.740423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.752380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.896634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375082577422:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.896663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.901709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.908575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.919747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.933456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.940182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.947084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.955676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375082577934:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.955699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660375082577939:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.955706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.956351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.960504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660375082577941:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.141508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.149092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.157709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.353656Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914394, txId: 281474976715677] shutting down 2024-11-21T09:21:54.390702Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914436, txId: 281474976715679] shutting down 2024-11-21T09:21:54.409172Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914450, txId: 281474976715681] shutting down 2024-11-21T09:21:54.453519Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914499, txId: 281474976715683] shutting down >> RemoteTopicReader::ReadTopic >> KqpScan::Join3Tables [GOOD] >> KqpSplit::ChoosePartition+Unspecified [GOOD] |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] >> KqpScan::CountDistinct [GOOD] >> KqpScan::Counters |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::ChoosePartition+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 17484, MsgBus: 61459 2024-11-21T09:21:52.591080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660368949192150:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.591146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004795/r3tmp/tmpdAaiMY/pdisk_1.dat 2024-11-21T09:21:52.639087Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17484, node 1 2024-11-21T09:21:52.649074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.649091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.649092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.649121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61459 TClient is connected to server localhost:61459 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:52.691685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.691713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.692897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.699214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.709389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.769530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.787423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.796145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.931633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660368949193549:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.931661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.966317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.020918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.030095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.036481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.043953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.051244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.059794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373244161361:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.059821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.059830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660373244161366:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.060402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.064039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660373244161368:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:53.268900Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660373244161682:2453] TxId: 281474976710672. Ctx: { TraceId: 01jd70eb378a8wprnfans6r40s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRlNDk0NGQtZDg5M2Y1ZGUtNTA2YWRkYWMtMmIzNDBiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:53.268983Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd70eb378a8wprnfans6r40s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRlNDk0NGQtZDg5M2Y1ZGUtNTA2YWRkYWMtMmIzNDBiMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:53.699630Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913316, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 3823, MsgBus: 16623 2024-11-21T09:21:53.874871Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660374787765577:2232];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004795/r3tmp/tmpzGUBRQ/pdisk_1.dat 2024-11-21T09:21:53.880172Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:53.881842Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3823, node 2 2024-11-21T09:21:53.892704Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.892719Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.892722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.892762Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16623 TClient is connected to server localhost:16623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:53.974786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.974831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.975877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.976674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.980079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.990816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.008182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.018544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.199473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379082734229:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.199511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.203083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.209221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.220367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.227366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.234123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.242079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.257064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379082734730:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.257088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.257093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379082734735:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.257740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.261192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660379082734737:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.422778Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ec7df9s1fq6cw0ywkneh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWJlZGM2NDItNzFjM2NhNjAtNDQxNTcxYzMtMTk4YzQ5Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715673 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2024-11-21T09:21:54.897770Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914471, txId: 281474976715671] shutting down >> KqpScan::FullFrameWindow [GOOD] >> KqpScan::EmptySet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join3Tables [GOOD] Test command err: Trying to start YDB, gRPC: 14811, MsgBus: 9561 2024-11-21T09:21:52.086898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660370512255555:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.087110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b2/r3tmp/tmp4DRbuI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14811, node 1 2024-11-21T09:21:52.143329Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:52.147480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.147494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.147496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.147531Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9561 2024-11-21T09:21:52.188160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.188189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.189315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9561 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:52.214676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.220728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:52.227738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.244404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.262229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.271934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.396192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370512257106:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.396243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.429808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.436104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.442105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.449669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.463334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.470557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.488765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370512257621:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.488794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660370512257626:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.488806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.489537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.497577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660370512257628:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:52.696653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.838429Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660370512258211:2479] TxId: 281474976715674. Ctx: { TraceId: 01jd70eajs2qks63q81a7y2fh6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc0NGI1NTItMTZhMDk0ZmItN2RiZWE0NjQtZTBjYTQ0OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:52.955218Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180912882, txId: 281474976715673] shutting down 2024-11-21T09:21:53.050481Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660374807225700:2508] TxId: 281474976715676. Ctx: { TraceId: 01jd70easwaxczayprsp6v3hbc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQxYmMyZDgtMmVhN2Y4NTQtMzMyNDIxYzItNTRjNGQxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.161021Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913092, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 19621, MsgBus: 7394 2024-11-21T09:21:53.355734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660372182563025:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.355802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b2/r3tmp/tmpHlYZwl/pdisk_1.dat 2024-11-21T09:21:53.365403Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19621, node 2 2024-11-21T09:21:53.375620Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.375633Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.375636Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.375677Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7394 TClient is connected to server localhost:7394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.455825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.455857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.456959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.458113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.462129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 wa ... 1:53.489333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.499824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.685277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372182564581:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.685313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.691093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.697116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.709989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.715934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.723019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.730306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.738394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372182565072:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.738424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.738424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660372182565077:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.738919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.743349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660372182565079:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.925990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.012102Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914051, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 61752, MsgBus: 25745 2024-11-21T09:21:54.276059Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660378389061197:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.276245Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047b2/r3tmp/tmpMrM6nj/pdisk_1.dat 2024-11-21T09:21:54.284571Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61752, node 3 2024-11-21T09:21:54.294867Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.294881Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.294883Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.294926Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25745 TClient is connected to server localhost:25745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.377932Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.377965Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.378251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.378932Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.381241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:54.390556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.407306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.416872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.545119Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660378389062734:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.545151Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.549383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.556194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.563595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.570062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.577352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.584522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.600316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660378389063241:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.600357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660378389063246:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.600360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.601079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.604731Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660378389063248:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.764458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.880533Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914919, txId: 281474976715673] shutting down 2024-11-21T09:21:54.956444Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914996, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] Test command err: 2024-11-21T09:21:53.350856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:21:53.351246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:21:53.351265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004780/r3tmp/tmpUp4HJL/pdisk_1.dat 2024-11-21T09:21:53.457272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.472939Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:53.514998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.515027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.525457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:53.645100Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.644 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [KQP] kqp_host.cpp:1338: Compiled query: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/Test"))) (Void) $3)) ) 2024-11-21T09:21:53.645322Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.645 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 23us 2024-11-21T09:21:53.646192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:609:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.646231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.647149Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 579us 2024-11-21T09:21:53.647202Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_constraint.cpp:3134: Execution of [ConstraintTransformer::DoTransform] took 25us 2024-11-21T09:21:53.647213Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 5us 2024-11-21T09:21:53.647250Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_csee.cpp:612: Execution of [EliminateCommonSubExpressions] took 33us 2024-11-21T09:21:53.647669Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('columnConstrains '())) (let $3 '('"Key" (OptionalType (DataType 'Uint64)) $2 '())) (let $4 '('"Value" (OptionalType (DataType 'String)) $2 '())) (let $5 (KiCreateTable! world $1 '"/Root/Test" '($3 $4) '('"Key") '() '() '() '() '() '() '"table" '"false" '0 '0)) (return (Commit! 
$5 $1 '('('"mode" '"flush")))) ) 2024-11-21T09:21:53.647679Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:59: Begin, root #75 2024-11-21T09:21:53.647683Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #75, status: Ok 2024-11-21T09:21:53.647753Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:466: Register async execution for node #74 2024-11-21T09:21:53.647762Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.647 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:87: Finish, output #75, status: Async 2024-11-21T09:21:53.648338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.864617Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #74 2024-11-21T09:21:53.864648Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #74 2024-11-21T09:21:53.864656Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:59: Begin, root #75 2024-11-21T09:21:53.864661Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #75, status: Ok 2024-11-21T09:21:53.864671Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:577: Node #75 finished execution 2024-11-21T09:21:53.864684Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:594: Node #75 created 0 trackable nodes: 2024-11-21T09:21:53.864688Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:87: Finish, output #75, status: Ok 2024-11-21T09:21:53.864692Z node 1 
:KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=YWYyMDRjZDktYmIyOGYyMWYtNGMyNzg5MTYtNmU4ZmRkMTY= 2024-11-21 09:21:53.864 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #75 2024-11-21T09:21:53.865731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.865751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.865798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:717:2593], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.866530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.053152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:719:2595], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:21:54.086749Z node 1 :KQP_YQL INFO: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.086 INFO ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [KQP] kqp_host.cpp:1338: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/Test"))) (PersistableRepr '((AsStruct '('"Key" (Uint32 '"201")) '('"Value" (String '"Value1"))) (AsStruct '('"Key" (Uint32 '"202")) '('"Value" (String '"Value2"))) (AsStruct '('"Key" (Uint32 '"203")) '('"Value" (String '"Value3"))) (AsStruct '('"Key" (Uint32 '"803")) '('"Value" (String '"Value3"))))) '('('mode 'replace)))) ) 2024-11-21T09:21:54.086845Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.086 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 21us 2024-11-21T09:21:54.087725Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.087 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] type_ann_expr.cpp:44: Execution of [TypeAnnotationTransformer::DoTransform] took 222us 2024-11-21T09:21:54.087852Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.087 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_constraint.cpp:3134: Execution of [ConstraintTransformer::DoTransform] took 85us 2024-11-21T09:21:54.087868Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.087 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9BC0) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 8us 2024-11-21T09:21:54.087922Z node 1 :KQP_YQL DEBUG: TraceId: 01jd70ebp93qznbh2fra9p1h1g, SessionId: CompileActor 2024-11-21 09:21:54.087 DEBUG ydb-core-kqp-ut-scan(pid=1985385, tid=0x00007F7DC7CB9B ... sk: 1. Tasks execution finished 2024-11-21T09:21:54.289597Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:861:2691], TxId: 281474976715662, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTM5OTMyZWItZDExNGVmZi1mZDQ4YjlkNS04OTg3NmJiNA==. TraceId : 01jd70ebxzeyasmgbdzyn3c906. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T09:21:54.289623Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2024-11-21T09:21:54.289650Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:21:54.289776Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:862:2692], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTM5OTMyZWItZDExNGVmZi1mZDQ4YjlkNS04OTg3NmJiNA==. TraceId : 01jd70ebxzeyasmgbdzyn3c906. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T09:21:54.289782Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:862:2692], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTM5OTMyZWItZDExNGVmZi1mZDQ4YjlkNS04OTg3NmJiNA==. TraceId : 01jd70ebxzeyasmgbdzyn3c906. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T09:21:54.289787Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T09:21:54.289790Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. Tasks execution finished 2024-11-21T09:21:54.289794Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:862:2692], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTM5OTMyZWItZDExNGVmZi1mZDQ4YjlkNS04OTg3NmJiNA==. TraceId : 01jd70ebxzeyasmgbdzyn3c906. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T09:21:54.289801Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2024-11-21T09:21:54.289809Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:21:54.290011Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down Trying to start YDB, gRPC: 10539, MsgBus: 15458 2024-11-21T09:21:54.450321Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660379784215085:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.450426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004780/r3tmp/tmpGmHLXF/pdisk_1.dat 2024-11-21T09:21:54.460174Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10539, node 2 2024-11-21T09:21:54.474197Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.474208Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.474210Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.474243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15458 TClient is connected to server localhost:15458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.552645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.552693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.553015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.553691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.560980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.568974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:54.587806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.599507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.721260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379784216628:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.721289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.725339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.732444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.745588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.759458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.765983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.773015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.781666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379784217133:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.781690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.781710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379784217138:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.782252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.786407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660379784217140:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.946637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.017582Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70ecrc86tp9h7ffbsd5975, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGJjMzRlODktM2VkNTJmN2UtYTAxZGI0NTgtYzYyMDg2NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:55.019434Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70ecrc86tp9h7ffbsd5975, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGJjMzRlODktM2VkNTJmN2UtYTAxZGI0NTgtYzYyMDg2NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:55.020042Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70ecrc86tp9h7ffbsd5975, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGJjMzRlODktM2VkNTJmN2UtYTAxZGI0NTgtYzYyMDg2NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:55.070183Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd70ecteddyhcbdgac64pagw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzJhZTdkZjgtZTVjZGI0YjEtOThmZjYzZDctMTRiNzlmYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715677 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2024-11-21T09:21:55.081251Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915115, txId: 281474976715675] shutting down >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> KqpScan::LimitOverSecondaryIndexRead [GOOD] >> KqpScan::Like >> KqpScan::BoolFlag [GOOD] >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-false >> KqpScan::JoinLeftOnly [GOOD] >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::Fake [GOOD] >> KqpScan::StreamExecuteScanQueryCancelation [GOOD] >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::BoolFlag [GOOD] Test command err: Trying to start YDB, gRPC: 15253, MsgBus: 2018 2024-11-21T09:21:52.877010Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660368312263181:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.877206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004781/r3tmp/tmppu7NdT/pdisk_1.dat 2024-11-21T09:21:52.934361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15253, node 1 2024-11-21T09:21:52.949008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.949017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.949019Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.949067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2018 2024-11-21T09:21:52.978497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.978525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.979590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.011586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.015623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.030342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.049003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.058792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.166564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660372607232019:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.166591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.196231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.202754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.212577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.219707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.233280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.240286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.296175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660372607232534:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.296197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.296257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660372607232539:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.296894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.302286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660372607232541:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.521345Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660372607232867:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70eb9k4jh0zmnyqh5qg9ew, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ5NGUwZi04Nzc0NjhjMC0yMjViMWUxMC00YTAwZGQ0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.839040Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913568, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 62087, MsgBus: 6746 2024-11-21T09:21:53.962494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660375759835325:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.962736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004781/r3tmp/tmpVe72Uu/pdisk_1.dat 2024-11-21T09:21:53.972289Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62087, node 2 2024-11-21T09:21:53.981423Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.981438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.981440Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.981478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6746 TClient is connected to server localhost:6746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.062969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.062997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.064078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.064777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:54.066670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.081172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.096113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.109078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.257877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380054804145:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.257924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.261938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.267972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.322563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.332575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.338941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.346157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.355015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380054804660:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.355046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.355078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380054804665:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.355648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.359108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660380054804667:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.660590Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914625, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 7730, MsgBus: 62977 2024-11-21T09:21:54.859894Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660377057557536:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.860048Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004781/r3tmp/tmpxdQnW2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7730, node 3 2024-11-21T09:21:54.873162Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:54.874715Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.874725Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.874727Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.874757Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62977 TClient is connected to server localhost:62977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.960506Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.960536Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.961597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.961782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.971468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.980361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:55.000988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.013711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.156012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660381352526394:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.156056Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.161327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.168868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.179806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.193450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.200547Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.207671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.223817Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660381352526896:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.223865Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.223888Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660381352526901:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.224658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:55.228006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660381352526903:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.406239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.462911Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915507, txId: 281474976715675] shutting down >> KqpScan::Counters [GOOD] >> KqpScan::EmptySet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::JoinLeftOnly [GOOD] Test command err: Trying to start YDB, gRPC: 6707, MsgBus: 11546 2024-11-21T09:21:53.172379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660376100491312:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.172530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004766/r3tmp/tmpZoyIsc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6707, node 1 2024-11-21T09:21:53.229831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:53.233654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.233673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.233675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.233707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11546 TClient is connected to server localhost:11546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:53.273221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.273250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.274331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.281097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.289355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.352556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.411536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.423874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.524766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376100492876:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.524795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.559167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.565857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.575844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.583193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.590332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.597201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.606004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376100493381:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.606062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376100493386:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.606063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.606855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.610386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660376100493388:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:53.762701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.825772Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660376100493945:2479] TxId: 281474976710674. Ctx: { TraceId: 01jd70ebkx9wsffvmstrn21pat, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ0NTJmODItODEyNWZiNzItZTE2NWM3MjktNzM2ZDQzNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:53.830189Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913869, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 8927, MsgBus: 20182 2024-11-21T09:21:54.051990Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660376899129609:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.052021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004766/r3tmp/tmpZKuZzn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8927, node 2 2024-11-21T09:21:54.068016Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:54.069906Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.069920Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.069921Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.069951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20182 TClient is connected to server localhost:20182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:54.152356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.152388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.153435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.154686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.157252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.165005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.184360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.193657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.333211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376899131147:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.333230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.339220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.345126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.353124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.360024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.366630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.373952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.383631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376899131660:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.383648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660376899131665:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.383660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.384327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.387117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660376899131667:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.561834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.596746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.651187Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914695, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 6344, MsgBus: 10984 2024-11-21T09:21:54.961849Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660378218724591:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.962010Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004766/r3tmp/tmpxx5LUP/pdisk_1.dat 2024-11-21T09:21:54.976591Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6344, node 3 2024-11-21T09:21:54.982315Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.982328Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.982330Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.982374Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10984 TClient is connected to server localhost:10984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:55.062146Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:55.062178Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:55.063263Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:55.063981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:55.064973Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:21:55.070681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.080097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.098526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.110409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.272058Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382513693428:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.272088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.277586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.284705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.298563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.312441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.319147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.326211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.334921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382513693933:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.334952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.334953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382513693938:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.335576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:55.339548Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660382513693940:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.514075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.595113Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915633, txId: 281474976715673] shutting down 2024-11-21T09:21:55.630052Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915668, txId: 281474976715675] shutting down |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::Fake [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Counters [GOOD] Test command err: Trying to start YDB, gRPC: 14597, MsgBus: 24679 2024-11-21T09:21:52.382758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660367830528170:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:52.382915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a2/r3tmp/tmpQZJh49/pdisk_1.dat 2024-11-21T09:21:52.453541Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14597, node 1 2024-11-21T09:21:52.462356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:52.462370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:52.462372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:52.462412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24679 2024-11-21T09:21:52.484122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:52.484149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:52.485216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24679 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:52.517917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.520901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.525143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.542016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.559418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.569077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:52.703353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367830529720:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.703399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.733604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.739152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.750663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.764613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.819909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.827348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:52.835621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367830530234:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.835647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.835646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660367830530239:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:52.836275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:52.840286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660367830530241:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:53.009167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.085871Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660372125498103:2479] TxId: 281474976710674. Ctx: { TraceId: 01jd70eaweamrprwa1vqqem7rw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiZjY2NzEtNzUwMmNlMzEtZjdhMDkzOWQtZmUwNTFjMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:54.168944Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913134, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 32621, MsgBus: 17136 2024-11-21T09:21:54.531970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660379209719539:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.532002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a2/r3tmp/tmp2GhvV4/pdisk_1.dat 2024-11-21T09:21:54.545989Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32621, node 2 2024-11-21T09:21:54.553389Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.553402Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.553404Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.553462Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17136 TClient is connected to server localhost:17136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:54.632478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.632509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.633586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.634838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.638543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.647424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.663573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.675933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.786308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379209721097:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.786337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.790549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.796359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.808516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.814969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.822000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.829157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.838090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379209721599:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.838109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.838154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660379209721604:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.838767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.842063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660379209721606:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.122383Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915108, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 12069, MsgBus: 21705 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0047a2/r3tmp/tmpYpmSBr/pdisk_1.dat 2024-11-21T09:21:55.403343Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:55.405027Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12069, node 3 2024-11-21T09:21:55.412701Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:55.412716Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:55.412718Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:55.412754Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21705 TClient is connected to server localhost:21705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:55.495787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:55.495836Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:55.496908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:55.497294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.509842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.518416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.535101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:55.544041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.666775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382807794290:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.666796Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.670627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.677501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.690721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.704600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.711747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.725891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.733746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382807794794:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.733776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660382807794799:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.733776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.734264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:55.738503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660382807794801:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.946776Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180915990, txId: 281474976715671] shutting down >> PartitionStats::Collector >> KqpScan::Like [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet [GOOD] Test command err: Trying to start YDB, gRPC: 32560, MsgBus: 32518 2024-11-21T09:21:53.159225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660376197059037:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.159443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00476b/r3tmp/tmpE8xCBM/pdisk_1.dat 2024-11-21T09:21:53.213283Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32560, node 1 2024-11-21T09:21:53.223501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.223523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.223525Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.223559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32518 TClient is connected to server localhost:32518 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:53.260073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TClient::Ls response: 2024-11-21T09:21:53.260099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:53.261277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.291887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.300538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:53.363627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.382563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.395534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.468264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376197060605:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.468301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.502664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.508087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.520562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.527054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.534342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.541434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.549619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376197061106:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.549645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376197061111:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.549648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.550331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.554598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660376197061113:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.753408Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439660376197061408:2458], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 2024-11-21T09:21:53.753519Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjUxMWVkY2MtZWQ4YThhYmEtYTFhZDU3YzEtNzY5YzI3NTg=, ActorId: [1:7439660376197061401:2454], ActorState: ExecuteState, TraceId: 01jd70ebjf3cmzcasf9qney2df, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 30167, MsgBus: 29483 2024-11-21T09:21:54.040462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660380215824250:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.040491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00476b/r3tmp/tmpALhy8z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30167, node 2 2024-11-21T09:21:54.057552Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:54.057737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.057747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.057748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.057785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29483 TClient is connected to server localhost:29483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.140779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.140805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.141869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.143046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.154902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.163648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
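The PRECONDITION_FAILED compile errors above (code 2029) come from submitting a data-modifying statement through the scan-query interface, which is read-only. A minimal YQL sketch of the kind of statement that triggers this rejection (table and column names are hypothetical; the actual test query text is not shown in the log):

    -- Hypothetical DML sent as a scan query; KQP rejects it at compile time
    -- with "Scan query cannot have data modifications., code: 2029".
    UPSERT INTO `/Root/SomeTable` (Key, Value) VALUES (1u, "one");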
2024-11-21T09:21:54.182383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.193643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.316281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380215825787:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.316302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.320820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.327614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.339289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.346160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.353348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.359979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.368498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380215826290:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.368524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.368529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660380215826295:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.369112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.373594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660380215826297:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.421421Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914646, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 10565, MsgBus: 8399 2024-11-21T09:21:55.577768Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660383076643324:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:55.577798Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00476b/r3tmp/tmpudOtuX/pdisk_1.dat 2024-11-21T09:21:55.588852Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10565, node 3 2024-11-21T09:21:55.598212Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:55.598223Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:55.598224Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:55.598262Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8399 TClient is connected to server localhost:8399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:55.680893Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:55.680929Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:55.681140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.682043Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:55.693258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.702656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
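The recurring KQP_WORKLOAD_SERVICE warnings about the missing `default` resource pool reflect lazy pool bootstrap: the first query finds no pool (NOT_FOUND), schemeshard then proposes ESchemeOpCreateResourcePool, and TPoolCreatorActor retries until the transaction settles ("completed, doublechecking"). A pool can also be created explicitly; a minimal sketch assuming the CREATE RESOURCE POOL statement, where the option names and values are illustrative assumptions, not taken from this run:

    -- Illustrative only: explicit pool creation with assumed options
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed option name
        QUEUE_SIZE = 100              -- assumed option name
    );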
2024-11-21T09:21:55.716129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.725994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.830898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660383076644872:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.830929Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.835169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.890104Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.900442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.907224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.962388Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.970377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.978351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660383076645391:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.978378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.978382Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660383076645396:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.978896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:55.983432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660383076645398:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:56.145921Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180916144, txId: 281474976715671] shutting down >> PartitionStats::Collector [GOOD] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Like [GOOD] Test command err: Trying to start YDB, gRPC: 20972, MsgBus: 5197 2024-11-21T09:21:54.133511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660379171766011:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.133664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004762/r3tmp/tmp6U8Dpo/pdisk_1.dat 2024-11-21T09:21:54.186647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20972, node 1 2024-11-21T09:21:54.197752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.197762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.197764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.197797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5197 TClient is connected to server localhost:5197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:21:54.234129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.234160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:54.235204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.263071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.274606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.289983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.306281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.316038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.429653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660379171767554:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.429700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.452085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.458786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.464864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.472187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.479189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.486318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:54.495063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660379171768059:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.495087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.495129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660379171768064:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:54.495818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:54.498956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660379171768066:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:54.680699Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660379171768387:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70ecfbfb146dg0vw2nz1gf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk5YTc5MTEtMzk1MzRjMS1lYTM2MjVlNi03ZmEzYmVjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:54.682148Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180914723, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 11976, MsgBus: 16220 2024-11-21T09:21:54.802992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660379464460576:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:54.803009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004762/r3tmp/tmpEO1hbt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11976, node 2 2024-11-21T09:21:54.817874Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:54.819740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:54.819751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:54.819752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:54.819774Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16220 TClient is connected to server localhost:16220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:54.903257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:54.903292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:54.904433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:54.905605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:54.908399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.921282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.937222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:54.949103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.093545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660383759429411:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default ... art proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.136831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.145543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660383759429922:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.145566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.145570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660383759429927:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.146187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:55.150496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660383759429929:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:55.307305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.314282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.326719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":4,"Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"No estimate","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 22169, MsgBus: 6736 2024-11-21T09:21:55.686300Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660383285464316:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:55.686683Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004762/r3tmp/tmpR13Vjy/pdisk_1.dat 2024-11-21T09:21:55.695192Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22169, node 3 
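The query plan JSON above describes a limited range read over `SecondaryComplexKeys/Index/indexImplTable` (range "Fk1 (1)", Limit 2) followed by a TableLookup into `SecondaryComplexKeys` by Key. A query shape consistent with that plan, written as a hypothetical YQL sketch (the actual test query text does not appear in the log):

    -- Hypothetical query consistent with the plan:
    -- index range read on Fk1, then lookup of the main table by Key
    SELECT Fk1, Fk2, Key, Value
    FROM `/Root/SecondaryComplexKeys` VIEW `Index`
    WHERE Fk1 = 1
    LIMIT 2;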
2024-11-21T09:21:55.705137Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:55.705151Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:55.705153Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:55.705198Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6736 TClient is connected to server localhost:6736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:55.788582Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:55.788619Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:55.788967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.789618Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:55.801034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.810261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.828290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.837887Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:55.967985Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660383285465855:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.968014Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.972516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.977897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:55.983812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.038865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.094039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.103645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.111729Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660387580433668:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.111755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.111762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439660387580433673:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.112385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:56.116928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439660387580433675:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:56.298388Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2024-11-21T09:18:40.728497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:40.728542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:40.728550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037e6/r3tmp/tmpC0WWoS/pdisk_1.dat 2024-11-21T09:18:40.801651Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16447, node 1 2024-11-21T09:18:40.887447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:40.887461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:40.887463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:40.887488Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:40.891198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:40.964744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:40.964771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:40.975815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24115 2024-11-21T09:18:41.377239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:42.090914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:42.090940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:42.123283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:18:42.123806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:42.168155Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:42.173813Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:42.173833Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:42.177697Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:42.177724Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:42.177742Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:42.177745Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:42.177749Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:42.177752Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:42.177756Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:42.177760Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:42.177817Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:42.336828Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:42.336851Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1814:2548], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:42.337752Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1826:2557] 2024-11-21T09:18:42.338387Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1847:2569] 2024-11-21T09:18:42.338427Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1847:2569], schemeshard id = 72075186224037889 2024-11-21T09:18:42.339196Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared1 2024-11-21T09:18:42.341875Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:42.341884Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:42.341891Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2024-11-21T09:18:42.342599Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:42.342615Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:42.343775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:42.344861Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:42.344880Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:42.346702Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:42.357986Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:42.390560Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:42.484487Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:42.658902Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:18:43.275847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:18:43.904530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:43.904556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:43.936888Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:43.937408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:44.002647Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:44.008597Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2024-11-21T09:18:44.008620Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2024-11-21T09:18:44.013309Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2024-11-21T09:18:44.013336Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2024-11-21T09:18:44.013347Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:44.013350Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:44.013353Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:44.013357Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2024-11-21T09:18:44.013360Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:44.013363Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2024-11-21T09:18:44.013428Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2024-11-21T09:18:44.110740Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2024-11-21T09:18:44.110764Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3049:2552], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2024-11-21T09:18:44.111699Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3057:2558] 2024-11-21T09:18:44.112954Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3096:2578] 2024-11-21T09:18:44.112984Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3096:2578], schemeshard id = 72075186224038899 2024-11-21T09:18:44.114027Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Shared2 2024-11-21T09:18:44.116571Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:44.116582Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:44.116589Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Shared2/.metadata/_statistics 2024-11-21T09:18:44.117659Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:44.117675Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:44.119520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2024-11-21T09:18:44.120157Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2024-11-21T09:18:44.120172Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2024-11-21T09:18:44.122131Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:44.133193Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:44.154708Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2024-11-21T09:18:44.238909Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2024-11-21T09:18:44.373092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:18:45.052809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:18:45.451945Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:45.650692Z node 3 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T09:18:45.650710Z node 3 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: ... 
ISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [3:12360:5550], StatRequests.size() = 1 2024-11-21T09:21:49.379103Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T09:21:49.379137Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:21:49.442920Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:4980:3114], schemeshard count = 1 2024-11-21T09:21:49.972650Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038899 2024-11-21T09:21:49.972693Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224038899 2024-11-21T09:21:49.972840Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 25 2024-11-21T09:21:49.985089Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T09:21:51.183396Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [3:12433:5582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:51.183527Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T09:21:51.183537Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [3:12433:5582], StatRequests.size() = 1 2024-11-21T09:21:51.685335Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T09:21:51.685388Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:51.685402Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2024-11-21T09:21:51.685407Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T09:21:51.685548Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2024-11-21T09:21:51.686519Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:51.687840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12451:5208], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.687865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12461:5213], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.687878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:51.692094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2024-11-21T09:21:51.711602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12465:5216], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2024-11-21T09:21:52.027717Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12593:5284]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:52.027833Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:21:52.027851Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12595:5286] 2024-11-21T09:21:52.027862Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12595:5286] 2024-11-21T09:21:52.028080Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:12596:5287] 2024-11-21T09:21:52.028121Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12595:5286], server id = [2:12596:5287], tablet id = 72075186224038907, status = OK 2024-11-21T09:21:52.028136Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:12596:5287], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:21:52.028145Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:21:52.028236Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:52.028253Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12593:5284], StatRequests.size() = 1 2024-11-21T09:21:52.046197Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDVlMTEzZC1iNTQ3OTRjZC1iM2NhMTljMi1mZmQzMmQzNA==, TxId: 2024-11-21T09:21:52.046224Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDVlMTEzZC1iNTQ3OTRjZC1iM2NhMTljMi1mZmQzMmQzNA==, TxId: 2024-11-21T09:21:52.046384Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T09:21:52.069050Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T09:21:52.069075Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:52.111333Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2024-11-21T09:21:52.111371Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:21:52.185773Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12595:5286], schemeshard count = 1 2024-11-21T09:21:52.589574Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:21:52.621998Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:52.622028Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T09:21:52.655176Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T09:21:52.655207Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 238.000000s, at schemeshard: 72075186224037899 2024-11-21T09:21:52.655427Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T09:21:52.689822Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T09:21:53.040797Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12665:5603]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:53.040936Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T09:21:53.040949Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12665:5603], StatRequests.size() = 1 2024-11-21T09:21:54.620658Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12738:5624]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:54.620750Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T09:21:54.620760Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12738:5624], StatRequests.size() = 1 2024-11-21T09:21:55.035437Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T09:21:55.035465Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:55.056629Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038909 2024-11-21T09:21:55.056671Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 188.000000s, at schemeshard: 72075186224038909 2024-11-21T09:21:55.056766Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038909, stats size# 26 2024-11-21T09:21:55.080450Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T09:21:55.719251Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2024-11-21T09:21:55.719431Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:21:55.719618Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T09:21:55.730220Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:55.730251Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:55.730261Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T09:21:55.730267Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:21:55.730382Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared1 2024-11-21T09:21:55.731183Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:55.735972Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=N2Y4OTY2ODUtYzE1Zjg5ZDItY2JjNmY5YWYtODQzYjU0YmU=, TxId: 2024-11-21T09:21:55.735993Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=N2Y4OTY2ODUtYzE1Zjg5ZDItY2JjNmY5YWYtODQzYjU0YmU=, TxId: 2024-11-21T09:21:55.736159Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:55.758467Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T09:21:55.758492Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:56.123937Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:12830:5666]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:56.124036Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T09:21:56.124046Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:12830:5666], StatRequests.size() = 1 2024-11-21T09:21:56.124189Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:12832:5358]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:56.125304Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:21:56.125363Z node 2 :STATISTICS DEBUG: [72075186224038907] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T09:21:56.125369Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:21:56.125438Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:56.125452Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:12832:5358], StatRequests.size() = 1 >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> PartitionStats::CollectorOverload [GOOD] >> SystemView::TopPartitionsFollowers [GOOD] >> SystemView::TabletsShards |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> RemoteTopicReader::ReadTopic [GOOD] >> SystemView::TabletsShards [GOOD] |96.7%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 31735, MsgBus: 15388 2024-11-21T09:21:29.479646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660272478133854:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:29.479731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043b6/r3tmp/tmpv30ioP/pdisk_1.dat 2024-11-21T09:21:29.535862Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31735, node 1 2024-11-21T09:21:29.551336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:29.551347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:29.551349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:29.551382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15388 2024-11-21T09:21:29.579908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:29.579934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:29.580851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:29.613909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.619974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.635092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:29.653076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.665346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:29.804968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272478135247:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.804994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.835332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.840711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.853400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.860035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.915162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.926060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:29.939356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272478135759:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.939380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.939462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660272478135764:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:29.940118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:29.944572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660272478135766:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:30.173264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:30.194327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:21:30.194394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:21:30.194441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:21:30.194462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:21:30.194479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:21:30.194500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:21:30.194503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:21:30.194512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:21:30.194519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:21:30.194536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:21:30.194549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:21:30.194555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:21:30.194566Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:21:30.194571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:21:30.194581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:21:30.194589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439660276773103558:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:21:30.194596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7439660276773103563:2474];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11 ... 72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:56.212850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:56.212854Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:56.212869Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:56.212872Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:56.212896Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:56.212900Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:56.212914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:56.212917Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:56.212926Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:56.212931Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038696;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:56.213352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:56.213358Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:56.213367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:56.213372Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:56.213387Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:56.213391Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:56.213400Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:56.213404Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:56.213413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:56.213418Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:56.213424Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:56.213428Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:56.213457Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:56.213462Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:56.213478Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:56.213482Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:56.213493Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:56.213498Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:56.213512Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:56.213516Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:56.213526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:56.213529Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038702;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:56.213645Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:56.213650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:56.213659Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:56.213663Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:56.213678Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:56.213682Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:56.213691Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:56.213695Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:56.213703Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:56.213707Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:56.213713Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:56.213717Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:56.213747Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:56.213751Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:56.213766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:56.213770Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:56.213781Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:56.213786Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:56.213800Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:56.213804Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:56.213813Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:56.213816Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2024-11-21T09:21:55.216760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660383522388751:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:55.216781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004281/r3tmp/tmpaIQsI9/pdisk_1.dat 2024-11-21T09:21:55.290990Z 
node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12196 TServer::EnableGrpc on GrpcPort 18711, node 1 2024-11-21T09:21:55.319532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:55.319568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:55.320582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:55.362335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:55.362348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:55.362350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:55.362389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:55.452278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:56.222810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T09:21:56.313332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660387817356882:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.313335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660387817356874:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.313352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:56.313992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T09:21:56.315148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660387817356888:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:21:56.726862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.785246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.840056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.894872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:21:56.956394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T09:21:57.164255Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Handshake: worker# [1:7439660383522389312:2278] 2024-11-21T09:21:57.164935Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Create read session: session# [1:7439660392112324747:2277] 2024-11-21T09:21:57.165044Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T09:21:57.179685Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 9b Codec: RAW }] } } 2024-11-21T09:21:57.179825Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T09:21:57.198058Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324746:2685] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 9b Codec: RAW }] } } 2024-11-21T09:21:57.296666Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324862:2727] Handshake: worker# [1:7439660383522389312:2278] 2024-11-21T09:21:57.297232Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324862:2727] Create read session: session# [1:7439660392112324863:2277] 2024-11-21T09:21:57.297295Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324862:2727] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T09:21:57.299373Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439660392112324862:2727] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 9b Codec: RAW }] } } >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestCopyRangeWorks >> 
KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2024-11-21T09:21:36.880051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660301551737813:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.880119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034e4/r3tmp/tmpmxBkhB/pdisk_1.dat 2024-11-21T09:21:36.939126Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26473, node 1 2024-11-21T09:21:36.954831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.954859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.954861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.954904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:36.979095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.979126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:4745 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:37.040626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:37.053031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:37.062603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:37.071433Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660304543244932:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:37.071627Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:37.073229Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660307077233598:2198];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:37.073613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.073644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:21:37.074947Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T09:21:37.075419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.075748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.075756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:37.076133Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:37.076662Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T09:21:37.076936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.083314Z node 5 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T09:21:37.083330Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T09:21:37.087157Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7439660304543244939:2058], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T09:21:37.087232Z node 5 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T09:21:37.087293Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T09:21:37.087299Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T09:21:37.087377Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T09:21:37.087382Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2024-11-21T09:21:37.087387Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2024-11-21T09:21:37.087391Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T09:21:37.087395Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2024-11-21T09:21:37.087398Z node 5 :SYSTEM_VIEWS 
DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2024-11-21T09:21:37.087402Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2024-11-21T09:21:37.087404Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2024-11-21T09:21:37.087407Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2024-11-21T09:21:37.087410Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2024-11-21T09:21:37.087412Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2024-11-21T09:21:37.087420Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2024-11-21T09:21:37.087422Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2024-11-21T09:21:37.087425Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2024-11-21T09:21:37.087428Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2024-11-21T09:21:37.087431Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2024-11-21T09:21:37.087434Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2024-11-21T09:21:37.087436Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2024-11-21T09:21:37.087451Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T09:21:37.000000Z 2024-11-21T09:21:37.087474Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:37.087670Z node 5 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [5:7439660304543244939:2058], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2024-11-21T09:21:37.087699Z node 5 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [5:7439660304543244939:2058], database# /Root/Tenant1, no sysview processor 2024-11-21T09:21:37.089259Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2024-11-21T09:21:37.129263Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2024-11-21T09:21:37.130284Z node 5 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2024-11-21T09:21:37.130731Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2024-11-21T09:21:37.133821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:37.142540Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660304713524652:2197];send_to=[0:7307199536658146131:7762515]; waiting... 
2024-11-21T09:21:37.145460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.145483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:37.145645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:37.145658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:37.146499Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:37.147777Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:21:37.147897Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:21:37.148132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.148364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:37.171074Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2024-11-21T09:21:37.176917Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2024-11-21T09:21:37.177784Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:37.177812Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T09:21:37.181718Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Complete 2024-11-21T09:21:37.181743Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInit::Execute 2024-11-21T09:21:37.181822Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T09:21:37.181826Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval metrics: query count# 0 2024-11-21T09:21:37.181831Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval query tops: total query count# 0 2024-11-21T09:21:37.181835Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T09:21:37.181840Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 6, result count# 0 20 ... 
.773929Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T09:21:56.773937Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097830:2573], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.773939Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.773944Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037889, from:72057594046644480 is reset 2024-11-21T09:21:56.773967Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T09:21:56.774011Z node 10 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [10:7439660344092628819:2281], Recipient [10:7439660344092628831:2296]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:56.774033Z node 10 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T09:21:56.774099Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:56.774128Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439660342677097777:2522], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:56.774130Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:56.774131Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T09:21:56.774136Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097832:2574], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774136Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T09:21:56.774137Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774138Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037890, from:72057594046644480 is reset 2024-11-21T09:21:56.774145Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097833:2575], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774146Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774147Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037891, from:72057594046644480 is reset 2024-11-21T09:21:56.774154Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097829:2572], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774155Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774156Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2024-11-21T09:21:56.774161Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097835:2577], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774163Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 
2024-11-21T09:21:56.774164Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037893, from:72057594046644480 is reset 2024-11-21T09:21:56.774169Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439660342677097834:2576], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774171Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:21:56.774172Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037892, from:72057594046644480 is reset 2024-11-21T09:21:56.774221Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:21:56.774302Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439660346250711162:2060], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T09:21:56.774356Z node 10 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [10:7439660344092628818:2280], Recipient [10:7439660344092628830:2295]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:56.774387Z node 10 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T09:21:56.774381Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439660346250711162:2060], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T09:21:56.774631Z node 10 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268829696, Sender [10:7439660344092628530:2111], Recipient [10:7439660344092628574:2270]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:56.774787Z node 10 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.775347Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439660344092628292:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T09:21:56.775398Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439660344092628292:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T09:21:56.775479Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439660342677097908:2631], Recipient [6:7439660338382129994:2199]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:56.775489Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:21:56.775491Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T09:21:57.828180Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439660390797876149:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:57.828200Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034e4/r3tmp/tmpbr9AnA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29235, node 11 2024-11-21T09:21:57.843571Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:57.850196Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:57.850212Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:57.850214Z node 11 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2024-11-21T09:21:57.850254Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:57.928565Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:57.928593Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:57.929610Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:57.930344Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:57.932807Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:58.065220Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439660395092844189:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:58.065235Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439660395092844200:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:58.065240Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:58.065813Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:21:58.067204Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439660395092844203:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:21:58.172048Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70efnv193bkv6nqjt348bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YjFiZmYyNTctZGE2YTRhM2YtOGRjZDJhNzItZDA5NjJmOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:58.172679Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7439660395092844291:2318], owner: [11:7439660395092844287:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:58.172902Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7439660395092844291:2318], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:58.173106Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7439660395092844291:2318], row count: 3, finished: 1 2024-11-21T09:21:58.173125Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7439660395092844291:2318], owner: [11:7439660395092844287:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T09:21:58.173984Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918171, txId: 281474976715661] shutting down >> TKeyValueTest::TestConcatWorks >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk |96.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 13337, MsgBus: 16716 2024-11-21T09:21:53.174072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660376153504314:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:53.174247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004775/r3tmp/tmpKcWWr0/pdisk_1.dat 2024-11-21T09:21:53.227701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13337, node 1 2024-11-21T09:21:53.241226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:53.241238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:53.241240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:53.241284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16716 TClient is connected to server localhost:16716 2024-11-21T09:21:53.275007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:53.275029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:21:53.276091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:53.306120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.316285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.378270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.395661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.404978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:53.468382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376153505856:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.468402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.500136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.506018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.513231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.519931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.527063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.534494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:53.542776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376153506349:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.542802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.542814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660376153506354:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:53.543446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:53.547415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660376153506356:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:21:53.776952Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660376153506702:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70ebjzfm9d67n9t61xby6q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:21:54.759475Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T09:21:54.759546Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660376153506702:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70ebjzfm9d67n9t61xby6q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T09:21:54.760720Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506712:2465], TxId: 281474976715672, task: 5. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. CustomerSuppliedId : . TraceId : 01jd70ebjzfm9d67n9t61xby6q. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.760896Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=, ActorId: [1:7439660376153506662:2454], ActorState: ExecuteState, TraceId: 01jd70ebjzfm9d67n9t61xby6q, Create QueryResponse for error on request, msg: 2024-11-21T09:21:54.760992Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506713:2466], TxId: 281474976715672, task: 6. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. TraceId : 01jd70ebjzfm9d67n9t61xby6q. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.760997Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180913820, txId: 281474976715671] shutting down 2024-11-21T09:21:54.761034Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506707:2461], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd70ebjzfm9d67n9t61xby6q. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.761046Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506714:2467], TxId: 281474976715672, task: 7. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. TraceId : 01jd70ebjzfm9d67n9t61xby6q. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.761087Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506715:2468], TxId: 281474976715672, task: 8. Ctx: { TraceId : 01jd70ebjzfm9d67n9t61xby6q. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.761121Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506716:2469], TxId: 281474976715672, task: 9. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. TraceId : 01jd70ebjzfm9d67n9t61xby6q. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439660376153506702:2454], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T09:21:54.761136Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660376153506708:2462], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWFlNzE0MjAtMzExMThmNzQtM2U5MWM5YzktMmYwMmQ3N2Y=. TraceId : 01jd70ebjzfm9d67n9t61xby6q. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:74396603761535067 ... KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917054, txId: 281474976715715] shutting down 2024-11-21T09:21:57.026742Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917068, txId: 281474976715717] shutting down 2024-11-21T09:21:57.047701Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917089, txId: 281474976715719] shutting down 2024-11-21T09:21:57.070497Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917110, txId: 281474976715721] shutting down 2024-11-21T09:21:57.093608Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917138, txId: 281474976715723] shutting down 2024-11-21T09:21:57.116050Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917159, txId: 281474976715725] shutting down 2024-11-21T09:21:57.136173Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917180, txId: 281474976715727] shutting down 2024-11-21T09:21:57.155127Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917201, txId: 281474976715729] shutting down 2024-11-21T09:21:57.176226Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917222, txId: 281474976715731] shutting down 2024-11-21T09:21:57.195042Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917236, txId: 281474976715733] shutting down 2024-11-21T09:21:57.217831Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917264, txId: 281474976715735] shutting down 2024-11-21T09:21:57.237039Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917278, txId: 281474976715737] shutting down 2024-11-21T09:21:57.255042Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917299, txId: 281474976715739] shutting down 2024-11-21T09:21:57.273042Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917313, txId: 281474976715741] shutting down 2024-11-21T09:21:57.290360Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917334, txId: 281474976715743] shutting down 2024-11-21T09:21:57.309339Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917355, txId: 281474976715745] shutting down 2024-11-21T09:21:57.330996Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917376, txId: 281474976715747] shutting down 2024-11-21T09:21:57.356004Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732180917397, txId: 281474976715749] shutting down 2024-11-21T09:21:57.375938Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917418, txId: 281474976715751] shutting down 2024-11-21T09:21:57.399312Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917439, txId: 281474976715753] shutting down 2024-11-21T09:21:57.422812Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917467, txId: 281474976715755] shutting down 2024-11-21T09:21:57.442329Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917488, txId: 281474976715757] shutting down 2024-11-21T09:21:57.462204Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917502, txId: 281474976715759] shutting down 2024-11-21T09:21:57.482584Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917523, txId: 281474976715761] shutting down 2024-11-21T09:21:57.500187Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917544, txId: 281474976715763] shutting down 2024-11-21T09:21:57.518096Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917558, txId: 281474976715765] shutting down 2024-11-21T09:21:57.536497Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917579, txId: 281474976715767] shutting down 2024-11-21T09:21:57.554021Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917600, txId: 281474976715769] shutting down 2024-11-21T09:21:57.572697Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917614, txId: 281474976715771] shutting down 2024-11-21T09:21:57.592502Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917635, txId: 281474976715773] shutting down 2024-11-21T09:21:57.612648Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917656, txId: 281474976715775] shutting down 2024-11-21T09:21:57.631761Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917677, txId: 281474976715777] shutting down 2024-11-21T09:21:57.650058Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917691, txId: 281474976715779] shutting down 2024-11-21T09:21:57.669407Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917712, txId: 281474976715781] shutting down 2024-11-21T09:21:57.689476Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917733, txId: 281474976715783] shutting down 2024-11-21T09:21:57.706386Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917747, txId: 281474976715785] shutting down 2024-11-21T09:21:57.746834Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917789, txId: 281474976715787] shutting down 2024-11-21T09:21:57.766375Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917810, 
txId: 281474976715789] shutting down 2024-11-21T09:21:57.786612Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917831, txId: 281474976715791] shutting down 2024-11-21T09:21:57.809499Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917852, txId: 281474976715793] shutting down 2024-11-21T09:21:57.827954Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917873, txId: 281474976715795] shutting down 2024-11-21T09:21:57.847769Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917894, txId: 281474976715797] shutting down 2024-11-21T09:21:57.866488Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917908, txId: 281474976715799] shutting down 2024-11-21T09:21:57.885229Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917929, txId: 281474976715801] shutting down 2024-11-21T09:21:57.905003Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917950, txId: 281474976715803] shutting down 2024-11-21T09:21:57.923659Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917964, txId: 281474976715805] shutting down 2024-11-21T09:21:57.943432Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180917985, txId: 281474976715807] shutting down 2024-11-21T09:21:57.961677Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918006, txId: 281474976715809] shutting down 2024-11-21T09:21:57.980414Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918020, txId: 281474976715811] shutting down 2024-11-21T09:21:57.998779Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918041, txId: 281474976715813] shutting down 2024-11-21T09:21:58.017306Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918062, txId: 281474976715815] shutting down 2024-11-21T09:21:58.037141Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918076, txId: 281474976715817] shutting down 2024-11-21T09:21:58.055208Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918097, txId: 281474976715819] shutting down 2024-11-21T09:21:58.071418Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918118, txId: 281474976715821] shutting down 2024-11-21T09:21:58.092115Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918132, txId: 281474976715823] shutting down 2024-11-21T09:21:58.112634Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918153, txId: 281474976715825] shutting down 2024-11-21T09:21:58.131496Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918174, txId: 281474976715827] shutting down 2024-11-21T09:21:58.152541Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918195, txId: 
281474976715829] shutting down 2024-11-21T09:21:58.177480Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918216, txId: 281474976715831] shutting down 2024-11-21T09:21:58.197233Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918237, txId: 281474976715833] shutting down 2024-11-21T09:21:58.215719Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918258, txId: 281474976715835] shutting down 2024-11-21T09:21:58.239695Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918279, txId: 281474976715837] shutting down 2024-11-21T09:21:58.257525Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918300, txId: 281474976715839] shutting down 2024-11-21T09:21:58.275373Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918321, txId: 281474976715841] shutting down 2024-11-21T09:21:58.293898Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918335, txId: 281474976715843] shutting down 2024-11-21T09:21:58.311708Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918356, txId: 281474976715845] shutting down 2024-11-21T09:21:58.329048Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918370, txId: 281474976715847] shutting down 2024-11-21T09:21:58.346554Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180918391, txId: 281474976715849] shutting down >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2024-11-21T09:21:58.683642Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.683904Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T09:21:58.687669Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.687685Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T09:21:58.688524Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.688547Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2024-11-21T09:21:58.688505Z ErrorReason# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2024-11-21T09:21:58.687791Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.687825Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1732180918687 ErrorReason# 2024-11-21T09:21:58.690448Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.690496Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1732180918690 ErrorReason# 2024-11-21T09:21:58.691580Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T09:21:58.691599Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1732180918691 ErrorReason# >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 30266, MsgBus: 14650 2024-11-21T09:21:33.488383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660289678734358:2201];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T09:21:33.515839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0043a2/r3tmp/tmps1YHU1/pdisk_1.dat 2024-11-21T09:21:33.541782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30266, node 1 2024-11-21T09:21:33.560714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:33.560724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:33.560726Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:33.560767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14650 TClient is connected to server localhost:14650 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:21:33.618068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:33.618090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:21:33.619402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:21:33.635380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.640236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:21:33.649574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.688769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.708348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:33.722352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:33.785643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660289678735737:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.785671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.813914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.820263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.829386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.835747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.850713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.874813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:21:33.892876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660289678736242:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.892908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.893038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660289678736247:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:33.893692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:21:33.898868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660289678736249:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:21:34.088664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:21:34.101451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:21:34.101527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:21:34.101570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:21:34.101594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:21:34.101615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:21:34.101635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:21:34.101657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:21:34.101681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T09:21:34.101705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T09:21:34.101728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T09:21:34.101750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T09:21:34.101771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7439660293973704087:2478];tablet_id=72075186224037932;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T09:21:34.102598Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439660293973704081:2472];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:21:34.102624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439660293973704081:2472];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:21:34.102658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439660293973704081:2472];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:21:34.102672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439660293973704081:2472];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:21:34.102687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7439660293973704081:247 ... 38693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:57.478594Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:57.478595Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:57.478603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:57.478614Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:57.478621Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:57.478629Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:57.478630Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:57.478634Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:57.478642Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:57.478646Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:57.478650Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:57.478652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:57.478660Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:57.478661Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:57.478664Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:57.478665Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:57.478670Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:57.478673Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:57.478680Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:57.478690Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:57.478699Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:57.478700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:57.478704Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:57.478708Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038693;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:57.478719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:57.478725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:57.478735Z 
node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:57.478739Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:57.478753Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:57.478762Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:57.478763Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T09:21:57.478767Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T09:21:57.478773Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:57.478776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T09:21:57.478776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T09:21:57.478779Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T09:21:57.478792Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T09:21:57.478802Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T09:21:57.478809Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T09:21:57.478813Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T09:21:57.478822Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T09:21:57.478825Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T09:21:57.478831Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T09:21:57.478836Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T09:21:57.478860Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T09:21:57.478868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T09:21:57.478881Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T09:21:57.478887Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T09:21:57.478898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T09:21:57.478907Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T09:21:57.478921Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T09:21:57.478929Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T09:21:57.478938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T09:21:57.478941Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038703;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropExternalTableWithReboots |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> BasicStatistics::TwoDatabases [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2024-11-21T09:18:43.166299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:18:43.166351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:18:43.166356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0037cb/r3tmp/tmpWYrjo6/pdisk_1.dat 2024-11-21T09:18:43.238580Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20933, node 1 2024-11-21T09:18:43.323368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:18:43.323379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:18:43.323382Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:18:43.323430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:18:43.327155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:18:43.400809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:43.400833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:43.411763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8422 2024-11-21T09:18:43.833805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:18:44.531272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:44.531298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:44.573711Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:18:44.574345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:44.618214Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:44.625482Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T09:18:44.625499Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T09:18:44.629658Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T09:18:44.629681Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T09:18:44.629692Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:44.629696Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:44.629699Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:44.629703Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T09:18:44.629706Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:44.629710Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T09:18:44.629763Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T09:18:44.808439Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:44.808461Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1816:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T09:18:44.809795Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1828:2558] 2024-11-21T09:18:44.810899Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1848:2571] 2024-11-21T09:18:44.811064Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1848:2571], schemeshard id = 72075186224037889 2024-11-21T09:18:44.811838Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database1 2024-11-21T09:18:44.815148Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:44.815158Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:44.815166Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2024-11-21T09:18:44.816180Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:44.816197Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:44.817618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T09:18:44.818629Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T09:18:44.818646Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T09:18:44.820429Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:44.831344Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:44.852554Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T09:18:44.955778Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T09:18:45.099626Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T09:18:45.746748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:18:46.380373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:46.380393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:46.412482Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:18:46.413024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:46.468716Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:18:46.472792Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2024-11-21T09:18:46.472816Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2024-11-21T09:18:46.477534Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2024-11-21T09:18:46.477608Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2024-11-21T09:18:46.477619Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T09:18:46.477622Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2024-11-21T09:18:46.477626Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2024-11-21T09:18:46.477629Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2024-11-21T09:18:46.477633Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2024-11-21T09:18:46.477636Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2024-11-21T09:18:46.477687Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2024-11-21T09:18:46.575997Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2024-11-21T09:18:46.576021Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3052:2550], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2024-11-21T09:18:46.577170Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3062:2558] 2024-11-21T09:18:46.578206Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3096:2574] 2024-11-21T09:18:46.578252Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3096:2574], schemeshard id = 72075186224038899 2024-11-21T09:18:46.579648Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Database2 2024-11-21T09:18:46.581826Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:18:46.581843Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:18:46.581966Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T09:18:46.581970Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T09:18:46.581977Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database2/.metadata/_statistics 2024-11-21T09:18:46.584110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2024-11-21T09:18:46.584742Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2024-11-21T09:18:46.584756Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2024-11-21T09:18:46.586674Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T09:18:46.597766Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:18:46.640396Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2024-11-21T09:18:46.713066Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2024-11-21T09:18:46.867460Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T09:18:47.579066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3454:3326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:47.579099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:18:47.581600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Ope ... equestId[ 120 ] 2024-11-21T09:21:53.599217Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:10098:4708], StatRequests.size() = 1 2024-11-21T09:21:54.234694Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224038899 2024-11-21T09:21:54.234746Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 188.000000s, at schemeshard: 72075186224038899 2024-11-21T09:21:54.234910Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 49 2024-11-21T09:21:54.246706Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T09:21:55.446756Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10159:4734]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:55.446833Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T09:21:55.446842Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10159:4734], StatRequests.size() = 1 2024-11-21T09:21:55.949718Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T09:21:55.949752Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:55.949764Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2024-11-21T09:21:55.949769Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T09:21:55.949890Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2024-11-21T09:21:55.950772Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:55.951942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10175:4376], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.951965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10186:4381], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.952064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:55.955579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2024-11-21T09:21:55.971274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10189:4384], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2024-11-21T09:21:56.201690Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10310:4449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:56.201780Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T09:21:56.201791Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10312:4451] 2024-11-21T09:21:56.201800Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10312:4451] 2024-11-21T09:21:56.201981Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10312:4451], server id = [2:10313:4452], tablet id = 72075186224038907, status = OK 2024-11-21T09:21:56.201993Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:10313:4452] 2024-11-21T09:21:56.202015Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:10313:4452], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T09:21:56.202020Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T09:21:56.202039Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T09:21:56.202051Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10310:4449], StatRequests.size() = 1 2024-11-21T09:21:56.215096Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzQ1N2M2NzItN2UxMTQ5OWYtMjJhYzM3OTAtNGI2ZjAxOTA=, TxId: 2024-11-21T09:21:56.215119Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzQ1N2M2NzItN2UxMTQ5OWYtMjJhYzM3OTAtNGI2ZjAxOTA=, TxId: 2024-11-21T09:21:56.215233Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T09:21:56.226966Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T09:21:56.226988Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:56.310386Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2024-11-21T09:21:56.310416Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T09:21:56.393460Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10312:4451], schemeshard count = 1 2024-11-21T09:21:56.911716Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T09:21:56.922175Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:21:56.922210Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:56.922221Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T09:21:56.922227Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:56.922333Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database1 2024-11-21T09:21:56.922960Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:56.926885Z node 3 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 3 2024-11-21T09:21:56.927082Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NWY5MWJmNDctM2Y2ZjlhODAtZDFjYTY0ZjEtZDkyNTA1MmI=, TxId: 2024-11-21T09:21:56.927094Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NWY5MWJmNDctM2Y2ZjlhODAtZDFjYTY0ZjEtZDkyNTA1MmI=, TxId: 2024-11-21T09:21:56.927232Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T09:21:56.938740Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T09:21:56.938762Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:21:57.386483Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10411:4770]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:57.386603Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T09:21:57.386615Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10411:4770], StatRequests.size() = 1 2024-11-21T09:21:59.063184Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10480:4798]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:21:59.063270Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T09:21:59.063279Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10480:4798], StatRequests.size() = 1 2024-11-21T09:21:59.481035Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T09:21:59.481061Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T09:21:59.481070Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 4] is data table. 2024-11-21T09:21:59.481073Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 4] 2024-11-21T09:21:59.481177Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2024-11-21T09:21:59.482037Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T09:21:59.486236Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODAzOTA3MTQtNzUyYmYzNDgtNzA4YjY4Y2EtMmJhZjYxMzg=, TxId: 2024-11-21T09:21:59.486254Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODAzOTA3MTQtNzUyYmYzNDgtNzA4YjY4Y2EtMmJhZjYxMzg=, TxId: 2024-11-21T09:21:59.486381Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T09:21:59.498363Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 4] 2024-11-21T09:21:59.498389Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T09:22:00.138340Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2024-11-21T09:22:00.138569Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T09:22:00.138711Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T09:22:00.149309Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T09:22:00.149342Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T09:22:00.468494Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10566:4812]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:22:00.468577Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T09:22:00.468588Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10566:4812], StatRequests.size() = 1 2024-11-21T09:22:00.468739Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10568:4537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T09:22:00.469751Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T09:22:00.469764Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10568:4537], StatRequests.size() = 1 |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace 
=========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.099188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.099211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.099216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.099221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.099233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.099237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.099254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.099337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.109631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.109662Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.111880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.111987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.112015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.114404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.114483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.114576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.114753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.115508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T09:20:40.115741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.115753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.115759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.115795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.117088Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.131903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.131969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.132051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.132056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.132664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.132674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.132677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.132937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132945Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.133235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.133963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.134005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.134164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.134183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.134189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.134242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.134248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.134272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.134280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.134583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.134589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.134617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.134621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T09:20:40.134683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.134687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.134694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.134697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.134701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.134704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.134707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.134709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.134716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.134720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.134723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 46678944, LocalPathId: 4] was 3 2024-11-21T09:22:00.604180Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409549 2024-11-21T09:22:00.605196Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605248Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:00.605561Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605603Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:00.605709Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605715Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:00.605725Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:00.605767Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T09:22:00.605784Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:22:00.605808Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605812Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T09:22:00.605820Z node 203 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T09:22:00.605858Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605875Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 121 RawX2: 871878363235 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605881Z node 203 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605897Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.605904Z node 203 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T09:22:00.605907Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:22:00.605915Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:00.605922Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:00.605927Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T09:22:00.605933Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T09:22:00.605936Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T09:22:00.605940Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T09:22:00.605947Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:00.605952Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T09:22:00.605956Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:22:00.605959Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:22:00.606438Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.606536Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.607009Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:22:00.607023Z node 203 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T09:22:00.607038Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:22:00.607043Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:22:00.607072Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:00.607119Z node 203 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:00.607124Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:00.607148Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:22:00.607167Z node 203 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.607172Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [203:204:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T09:22:00.607176Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [203:204:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T09:22:00.607312Z node 203 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.607325Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.607330Z node 203 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:22:00.607336Z node 203 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:22:00.607342Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:00.607412Z node 203 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.607420Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.607424Z node 203 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T09:22:00.607428Z node 203 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 
72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:22:00.607432Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:00.607441Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T09:22:00.607445Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [203:125:2150] 2024-11-21T09:22:00.607495Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:00.607500Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:00.607513Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:00.608043Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.608355Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T09:22:00.608377Z node 203 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T09:22:00.608387Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T09:22:00.608440Z node 203 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:22:00.608760Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:00.608768Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:22:00.608825Z node 203 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:00.608842Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:00.608847Z node 203 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [203:865:2802] TestWaitNotification: OK eventTxId 1003 |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] >> TExternalTableTestReboots::ParallelCreateDrop |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::TopPartitionsRanges [GOOD] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TopPartitionsRanges [GOOD] Test command err: 2024-11-21T09:21:36.154887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660299321261348:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.154996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00353f/r3tmp/tmps4APxU/pdisk_1.dat 2024-11-21T09:21:36.249479Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:36.253421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.253443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.254515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24426, node 1 2024-11-21T09:21:36.333265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.333278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.333284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.333322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:21:36.424948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.540198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299321261789:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.540239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.540411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299321261816:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.541172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.543492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:21:36.543551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660299321261818:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:21:36.729888Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660299321261898:2294] TxId: 281474976715661. Ctx: { TraceId: 01jd70dtp90yqaz0cmaxn9x3ja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYzEwZmMtNDUzZjFjZTUtYjA3OGY4ZDItMzk2OTllZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:36.730953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70dtp90yqaz0cmaxn9x3ja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYzEwZmMtNDUzZjFjZTUtYjA3OGY4ZDItMzk2OTllZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.753825Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660299321261905:2308], owner: [1:7439660299321261901:2306], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:36.754057Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660299321261905:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:36.755480Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660299321261905:2308], row count: 1, finished: 1 2024-11-21T09:21:36.755511Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660299321261905:2308], owner: [1:7439660299321261901:2306], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:36.759550Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180896728, txId: 281474976715660] shutting down 2024-11-21T09:21:37.783091Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660303616229249:2314] TxId: 281474976715663. Ctx: { TraceId: 01jd70dvz5fe7ysmqs48qh38er, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RkNzIwOGUtODc4MGNlZWQtY2ZiZDI3OGUtMjE3YTE5ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:37.783134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70dvz5fe7ysmqs48qh38er, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RkNzIwOGUtODc4MGNlZWQtY2ZiZDI3OGUtMjE3YTE5ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:37.783662Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660303616229256:2322], owner: [1:7439660303616229252:2320], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:37.783898Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660303616229256:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:37.784011Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660303616229256:2322], row count: 1, finished: 1 2024-11-21T09:21:37.784031Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660303616229256:2322], owner: [1:7439660303616229252:2320], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:37.784456Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180897782, txId: 281474976715662] shutting down 2024-11-21T09:21:38.802010Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70dwz1bnhrd2w7zey2aqkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNiMmUwNWMtNWM2NWIyMmEtMWMwNzFlODItM2E0MDhkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:38.802918Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660307911196589:2333], owner: [1:7439660307911196585:2331], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:38.804401Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660307911196589:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:38.804528Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660307911196589:2333], row count: 1, finished: 1 2024-11-21T09:21:38.804545Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660307911196589:2333], owner: [1:7439660307911196585:2331], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:38.805097Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180898801, txId: 281474976715664] shutting down 2024-11-21T09:21:39.824004Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70dxyyfd0m1az7h6406b2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE4M2MxOTctOGQ5OGMwMGUtYmM2YWIwNWQtYWYxMTE0MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:21:39.824690Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660312206163922:2344], owner: [1:7439660312206163918:2342], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:39.824869Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660312206163922:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:39.825012Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660312206163922:2344], row count: 1, finished: 1 2024-11-21T09:21:39.825032Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660312206163922:2344], owner: [1:7439660312206163918:2342], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:39.825608Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180899823, txId: 281474976715666] shutting down 2024-11-21T09:21:40.844891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70dyyt5pph1518t0y2ap4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc1M2E1MzMtOTY3OGZkN2QtYjMyNDExYy03YTRkZTBlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:40.845534Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660316501131255:2355], owner: [1:7439660316501131251:2353], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:40.845682Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660316501131255:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:40.845775Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660316501131255:2355], row count: 1, finished: 1 2024-11-21T09:21:40.845789Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660316501131255:2355], owner: [1:7439660316501131251:2353], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T09:21:40.846322Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180900844, txId: 281474976715668] shutting down 2024-11-21T09:21:41.155248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439660299321261348:2190];send_to=[0:7307199536658 ... ceId: 01jd70ek990d8yywyxmc050yd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTNjMTczMTktZGVlYjliOTktNWMxNzY0OTktMjYwMWFhZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:01.736366Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180921671, txId: 281474976715676] shutting down 2024-11-21T09:22:01.749822Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd70ekc95ea7zzk5xspt7kt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2JmNDI0NWItZGVkZGQxNDMtODg1ZWIwYmUtOWYzYzMwYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:01.750231Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660408439933740:2427], owner: [7:7439660408439933736:2425], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.750404Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660408439933740:2427], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T09:22:01.750658Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732180921000000 Rank: 3 } InclusiveFrom: false To { IntervalEndUs: 1732180921000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 152, next# 2024-11-21T09:22:01.750738Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660408439933740:2427], row count: 2, finished: 1 2024-11-21T09:22:01.750757Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660408439933740:2427], owner: [7:7439660408439933736:2425], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.751204Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180921749, txId: 281474976715679] shutting down 2024-11-21T09:22:01.764422Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd70ekcq4w7kemk5pj4vte8d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzM0YmYwMjItN2Q0YjZhY2MtYjMzNzQyODktZjBjN2FiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:01.764822Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660408439933773:2437], owner: [7:7439660408439933769:2435], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.765002Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660408439933773:2437], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T09:22:01.765205Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732180921000000 Rank: 3 } InclusiveFrom: true To { IntervalEndUs: 1732180921000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 228, next# 2024-11-21T09:22:01.765304Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660408439933773:2437], row count: 3, finished: 1 2024-11-21T09:22:01.765318Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660408439933773:2437], owner: [7:7439660408439933769:2435], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.765760Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180921764, txId: 281474976715681] shutting down 2024-11-21T09:22:01.779268Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd70ekd64mpg0kn5p612xtaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzBiNTA1ODgtNWNhMjQ1YzItNjAwZjgzZGYtY2NjODRiNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:01.779713Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660408439933806:2447], owner: [7:7439660408439933802:2445], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.779869Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660408439933806:2447], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T09:22:01.780086Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732180921000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732180921000000 Rank: 3 } InclusiveTo: false Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 153, next# 2024-11-21T09:22:01.780177Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660408439933806:2447], row count: 2, finished: 1 2024-11-21T09:22:01.780195Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660408439933806:2447], owner: [7:7439660408439933802:2445], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.780701Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180921779, txId: 281474976715683] shutting down 2024-11-21T09:22:01.792705Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd70ekdnayetk90yeaf0b60k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTAyYmY5YTUtY2I4MWY2MzItZjkyZWQwNzAtOWVmNzFiMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:01.793151Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439660408439933839:2457], owner: [7:7439660408439933835:2455], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.793292Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439660408439933839:2457], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T09:22:01.793505Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732180921000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732180921000000 Rank: 3 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 229, next# 2024-11-21T09:22:01.793619Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439660408439933839:2457], row count: 3, finished: 1 2024-11-21T09:22:01.793637Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439660408439933839:2457], owner: [7:7439660408439933835:2455], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T09:22:01.794160Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180921792, txId: 281474976715685] shutting down 2024-11-21T09:22:01.795157Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T09:22:01.795357Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:01.795399Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T09:22:01.795540Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7439660365935866776:2055], processor id# 72075186224037899, database# 
/Root/Tenant2 2024-11-21T09:22:01.795530Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:01.795591Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7439660365935866776:2055], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T09:22:01.795589Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439660367041074406:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T09:22:01.795620Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439660367041074406:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T09:22:01.795803Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T09:22:01.795905Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:01.795927Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T09:22:01.796048Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:01.796768Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439660366567762911:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T09:22:01.796825Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439660366567762911:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T09:22:01.796863Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796886Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796888Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796890Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796893Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796895Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796897Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796899Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439660365935867008:2109], Type=268959746 2024-11-21T09:22:01.796901Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796904Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796905Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796907Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796909Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered 
Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796911Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796918Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.796922Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439660367041074628:2106], Type=268959746 2024-11-21T09:22:01.797020Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439660363749774315:2058], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T09:22:01.797056Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439660363749774315:2058], database# /Root/Tenant2, processor id# 72075186224037899 >> TMonitoringTests::InvalidActorId |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.097257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.097289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.097298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.097308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.097312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.097319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.097391Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.106550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.106573Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.108160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.108255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.108280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.110915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.110977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.112369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.113105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.114379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.115701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.115706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.115731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.116838Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.129363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.130082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.130668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.130678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.130681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:40.131023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.131361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.131383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.131928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.132313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.132806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.132960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132984Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.133034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.133066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.133448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.133480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.133532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133538Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.133545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.133549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.133558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.133565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.133574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.133578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.133582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
6678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.160820Z node 260 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:02.160823Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T09:22:02.160826Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:22:02.161051Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.161060Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.161062Z node 260 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:02.161065Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T09:22:02.161067Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:02.161076Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T09:22:02.161245Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T09:22:02.161270Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:22:02.161301Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161304Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T09:22:02.161308Z node 260 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T09:22:02.161400Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161417Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 1116691499113 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161423Z node 260 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 
281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161446Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161456Z node 260 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T09:22:02.161460Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:22:02.161469Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:02.161477Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:02.161482Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T09:22:02.161489Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:22:02.161493Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T09:22:02.161497Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T09:22:02.161505Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:02.161512Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T09:22:02.161515Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T09:22:02.161519Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:22:02.161847Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:22:02.161857Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409548 2024-11-21T09:22:02.161884Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:02.161894Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162142Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T09:22:02.162188Z node 260 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.162192Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.162219Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:02.162241Z node 260 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.162245Z node 260 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [260:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T09:22:02.162250Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [260:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2024-11-21T09:22:02.162352Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162359Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162363Z node 260 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:02.162365Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T09:22:02.162368Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:22:02.162432Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162438Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162440Z node 260 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:02.162442Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:22:02.162445Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:02.162451Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T09:22:02.162454Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [260:120:2146] 2024-11-21T09:22:02.162483Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:02.162486Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:02.162492Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:02.162793Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.162990Z node 
260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:02.163003Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:02.163010Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T09:22:02.163017Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 TestWaitNotification wait txId: 1004 2024-11-21T09:22:02.163263Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:02.163269Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:02.163324Z node 260 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:02.163334Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:02.163338Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [260:965:2904] TestWaitNotification: OK eventTxId 1004 |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |96.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpUniqueIndex::InsertNullInPk >> KqpIndexes::UpsertWithNullKeysSimple >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap >> KqpUniqueIndex::UpdateOnNullInComplexFk >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> KqpUniqueIndex::ReplaceFkAlreadyExist >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex >> KqpIndexes::WriteWithParamsFieldOrder >> KqpUniqueIndex::UpsertExplicitNullInComplexFk >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly >> KqpIndexes::UpdateIndexSubsetPk >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] [GOOD] >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-false [GOOD] >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UpdateDeletePlan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2024-11-21T09:21:56.011974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.011995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.012000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.012011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.024715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.024738Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.026940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.027045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.027069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.029340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.029394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.030956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036636Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.051108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2024-11-21T09:21:56.051176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.051282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.051290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.051960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.051980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.051983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.052340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.052594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.052607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.053015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.053324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.054120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.054291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.054373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.054409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.054751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.054793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.054867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.054880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.054883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.054887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.054891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.054894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.054896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.054904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.054908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.054911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
5772Z node 41 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:05.785793Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 134 -> 135 2024-11-21T09:22:05.785819Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:05.785828Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:05.786154Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:05.786164Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:05.786186Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:05.786221Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:05.786226Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:22:05.786234Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:05.786272Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:05.786277Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T09:22:05.786280Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 135 -> 240 2024-11-21T09:22:05.786395Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:05.786406Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:05.786410Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:05.786414Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:22:05.786418Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:05.786675Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:05.786696Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 
2024-11-21T09:22:05.786701Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:05.786705Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:22:05.786708Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:05.786719Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:22:05.787073Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:05.787082Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:05.787091Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:05.787093Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:05.787097Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:22:05.787101Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:05.787104Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:05.787106Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:05.787113Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:05.787180Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:05.787188Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:05.787199Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:22:05.787243Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:05.787246Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:05.787253Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:05.787327Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:05.787545Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:05.787905Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:05.787918Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
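Aside (not part of the test output): the schemeshard records above all share one layout — an ISO-8601 timestamp, "node <id>", a ":<COMPONENT> <LEVEL>:" tag, then the message — while a few lines (FAKE_COORDINATOR, "Leader for TabletID ...", TestWaitNotification) carry no timestamp. What follows is a minimal sketch for filtering such a flattened log by severity; the regex and the helper name records_by_level are assumptions inferred only from the records shown here, not part of any YDB tooling.

# Sketch only: filter a flattened schemeshard log by level (regex inferred from the records above).
import re
import sys

# Assumed record shape (group names are mine):
#   2024-11-21T09:22:05.787918Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: <message text>
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>[A-Z_]+) (?P<level>[A-Z]+): "
)

def records_by_level(text, levels=("WARN", "ERROR")):
    # Each record runs from its own header match to the start of the next one;
    # untimestamped lines (FAKE_COORDINATOR and friends) are simply left attached
    # to the preceding record's message.
    matches = list(RECORD.finditer(text))
    for current, nxt in zip(matches, matches[1:] + [None]):
        if current.group("level") not in levels:
            continue
        end = nxt.start() if nxt else len(text)
        yield (current.group("ts"), current.group("node"),
               current.group("component"), current.group("level"),
               text[current.end():end].strip())

if __name__ == "__main__":
    # Usage: python3 filter_log.py < junit_or_stdout_dump.txt
    for ts, node, component, level, message in records_by_level(sys.stdin.read()):
        print(f"{ts} node {node} {component} {level}: {message}")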
TestWaitNotification wait txId: 1002 2024-11-21T09:22:05.787956Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:22:05.787962Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2024-11-21T09:22:05.787975Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:05.787978Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:22:05.788041Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:22:05.788080Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:22:05.788084Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:341:2333] 2024-11-21T09:22:05.788102Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:05.788114Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:05.788117Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:341:2333] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:05.788174Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:05.788197Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2024-11-21T09:22:05.788248Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:22:05.788301Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:05.788318Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T09:22:05.788380Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 8003, MsgBus: 23955 2024-11-21T09:22:05.037630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660423694594913:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.037733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017f0/r3tmp/tmpgPINPz/pdisk_1.dat 2024-11-21T09:22:05.088004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8003, node 1 2024-11-21T09:22:05.102640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.102656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.102658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.102697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23955 TClient is connected to server localhost:23955 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:05.139113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.139139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.140201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.170187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.178842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.191342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.205775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.217572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.296406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423694596459:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.296435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.318737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.324313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.336233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.342403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.350272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.357383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.365234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423694596951:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.365249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.365288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423694596956:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.365804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.370279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660423694596958:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.511489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::RacySplitAndDropTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:41.435096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:41.435112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.435116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:41.435119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:41.435128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:41.435131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:41.435137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:41.435187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:41.441966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:41.441981Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.443398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:41.443457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:41.443475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:41.445100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:41.445146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:41.445236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.445362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.445761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.445929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.445935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.445943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:41.445947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.445951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:41.445972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:41.446770Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:41.457226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:41.457279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.457320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:41.457354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:41.457359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.457834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.457854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:41.457878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.457884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:41.457887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:41.457890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:41.458147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.458156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:41.458158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:41.458385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.458390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.458393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.458397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.458754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:41.459022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:41.459052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:41.459165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:41.459180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:41.459184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.459222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:41.459225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:41.459243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:41.459251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:41.459500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:41.459504Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:41.459525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:41.459528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:41.459571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:41.459575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:41.459583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:41.459586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.459588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:41.459591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:41.459594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:41.459595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:41.459602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:41.459606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:41.459608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:05.085052Z node 229 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:22:05.085057Z node 229 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T09:22:05.085062Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:05.085284Z node 229 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:05.085299Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:05.085303Z node 229 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:22:05.085308Z node 229 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:22:05.085312Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T09:22:05.085325Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 2/3, is published: true 2024-11-21T09:22:05.085772Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:22:05.085781Z node 229 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:05.085828Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:22:05.085846Z node 229 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 3/3 2024-11-21T09:22:05.085849Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:22:05.085852Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 3/3, is published: true 2024-11-21T09:22:05.085855Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 3/3 2024-11-21T09:22:05.085858Z node 229 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:22:05.085863Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:22:05.085880Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:22:05.085883Z node 229 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:1 2024-11-21T09:22:05.085885Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:1 2024-11-21T09:22:05.085888Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:05.085890Z node 229 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:2 2024-11-21T09:22:05.085892Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:2 2024-11-21T09:22:05.085897Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:05.086177Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:05.086183Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:05.086190Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:05.086194Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:05.086197Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:05.086287Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:22:05.086300Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:22:05.087133Z node 229 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:05.087686Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 705 RawX2: 983547513388 } TabletId: 72075186233409549 State: 4 2024-11-21T09:22:05.087706Z node 229 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:22:05.087769Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 708 RawX2: 983547513389 } TabletId: 72075186233409550 State: 4 2024-11-21T09:22:05.087774Z node 229 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:22:05.088366Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:05.088488Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:05.088522Z node 229 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T09:22:05.088572Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:22:05.088624Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 2 Forgetting tablet 72075186233409549 2024-11-21T09:22:05.088668Z node 229 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T09:22:05.089096Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T09:22:05.089126Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409550 2024-11-21T09:22:05.089440Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:05.089446Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:05.089455Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:05.090052Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:22:05.090064Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T09:22:05.090160Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:22:05.090166Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T09:22:05.090175Z node 229 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2024-11-21T09:22:05.090239Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:05.090244Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T09:22:05.090255Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:22:05.090257Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:22:05.090307Z node 229 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:05.090330Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:05.090335Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [229:992:2868] 2024-11-21T09:22:05.090360Z node 229 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:22:05.090378Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:22:05.090381Z node 229 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [229:992:2868] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 
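Aside (not produced by ya make): the unittest report interleaves per-test verdict markers of the form ">> Suite::Test [STATUS]" with the raw logs, and repeats the same marker in each "------- [TM] ..." error-block header. Below is a small sketch that tallies those markers; the marker pattern and the tally_verdicts helper are assumptions based solely on the markers visible in this report.

# Sketch only: count test verdicts from a flattened report like the one above.
import re
import sys
from collections import Counter

# Assumed marker shape, e.g. ">> KqpIndexes::UpdateDeletePlan [GOOD]".
# Names may themselves contain brackets (e.g. "...RacySplitAndDropTable[TabletReboots]").
VERDICT = re.compile(r">> (?P<name>\S+) \[(?P<status>[A-Z]+)\]")

def tally_verdicts(text):
    # De-duplicate by test name first, since the error-block header repeats the marker.
    latest = {}
    for match in VERDICT.finditer(text):
        latest[match.group("name")] = match.group("status")
    return Counter(latest.values()), latest

if __name__ == "__main__":
    counts, latest = tally_verdicts(sys.stdin.read())
    for status, count in counts.most_common():
        print(f"{status}: {count}")
    for name, status in sorted(latest.items()):
        if status != "GOOD":
            print(f"  {status}  {name}")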
2024-11-21T09:22:05.090478Z node 229 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:22:05.090526Z node 229 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 61us result status StatusPathDoesNotExist 2024-11-21T09:22:05.090569Z node 229 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpIndexes::UpdateDeletePlan [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 1770, MsgBus: 14911 2024-11-21T09:22:04.701242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660422877984343:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.701258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001972/r3tmp/tmp6t5b2F/pdisk_1.dat 2024-11-21T09:22:04.756713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1770, node 1 2024-11-21T09:22:04.802037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.802072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.803169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T09:22:04.807536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14911 TClient is connected to server localhost:14911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.888432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.898204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.964862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.975899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.986654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.015283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427172953181:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.015334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.126335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.153791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.166561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427172953686:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427172953693:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.168604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660427172953695:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.364097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1152, MsgBus: 27092 2024-11-21T09:22:05.758074Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660424036001316:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.758121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001972/r3tmp/tmpCLWsZg/pdisk_1.dat 2024-11-21T09:22:05.765243Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1152, node 2 2024-11-21T09:22:05.773845Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.773862Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.773864Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.773901Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27092 TClient is connected to server localhost:27092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.859895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.859924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.860283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.860933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.869673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.878516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.895182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.903819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.018795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428330970145:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.018816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.021550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.027978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.036187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.043398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.097903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.106235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.114757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428330970661:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.114787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428330970666:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.114787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.115423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.119232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660428330970668:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.287796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 8705, MsgBus: 4774 2024-11-21T09:22:04.699253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660421456584441:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.699301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a44/r3tmp/tmpSRc5e3/pdisk_1.dat 2024-11-21T09:22:04.757107Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8705, node 1 2024-11-21T09:22:04.800055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.800083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.801128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.807631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4774 TClient is connected to server localhost:4774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.889272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.898786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:04.964978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.977737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.986240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.018352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660425751553072:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.018378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.126182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.153814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.166544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660425751553577:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660425751553585:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.168623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660425751553587:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.363327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.537261Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660425751554854:2571], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd70eq1r4gvza7nb9a3v74y3. SessionId : ydb://session/3?node_id=1&id=ZGMxNzNjYTMtZTJkYTRjNmYtYTJiNDY3ODItZTI3Y2VhNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:05.537452Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660425751554855:2572], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZGMxNzNjYTMtZTJkYTRjNmYtYTJiNDY3ODItZTI3Y2VhNzI=. TraceId : 01jd70eq1r4gvza7nb9a3v74y3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439660425751554851:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:05.537635Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGMxNzNjYTMtZTJkYTRjNmYtYTJiNDY3ODItZTI3Y2VhNzI=, ActorId: [1:7439660425751554574:2512], ActorState: ExecuteState, TraceId: 01jd70eq1r4gvza7nb9a3v74y3, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11844, MsgBus: 14807 2024-11-21T09:22:05.758327Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660424268583901:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.758362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001a44/r3tmp/tmprhyt1K/pdisk_1.dat 2024-11-21T09:22:05.766676Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11844, node 2 2024-11-21T09:22:05.776862Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.776890Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.776893Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.776937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14807 TClient is connected to server localhost:14807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.858811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.858838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.859941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.860637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.871972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.879458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.895374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.904823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.018079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428563552737:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.018101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.022244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.028628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.036256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.043530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.050090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.057308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.066233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428563553237:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.066259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.066292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428563553242:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.066812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.069974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660428563553244:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.225041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleave [GOOD] Test command err: Trying to start YDB, gRPC: 18791, MsgBus: 11277 2024-11-21T09:22:04.882656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660421799956539:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.882809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0018c5/r3tmp/tmpNT42Av/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18791, node 1 2024-11-21T09:22:04.935535Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:04.937951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.937963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.937965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.937994Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11277 TClient is connected to server localhost:11277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.983684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.983713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.985344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.013832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.023826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.085655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.099704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.109478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.198380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426094925405:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.198440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.203139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.208387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.217164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.223608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.230996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.238449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.245974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426094925895:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.246001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426094925900:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.246007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.246499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.251246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660426094925902:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.469088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 245 cpu_time_us: 245 } query_phases { duration_us: 1629 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 1222 affected_shards: 1 } query_phases { duration_us: 264 cpu_time_us: 264 } query_phases { duration_us: 606 cpu_time_us: 836 } query_phases { duration_us: 2584 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 951 affected_shards: 2 } compilation { duration_us: 44080 cpu_time_us: 41797 } process_cpu_time_us: 924 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732180925526,\"TaskId\":1,\"Host\":\"ghrun-qcxhsi27zq\",\"ComputeTimeUs\":43}],\"CpuTimeUs\":138}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1732180925526,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":138,\"Max\":138,\"Min\":138}},\"CTE Name\":\"precompute_1_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1732180925526,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732180925526,\"TaskId\":2,\"Host\":\"ghrun-qcxhsi27zq\",\"ComputeTimeUs\":27}],\"CpuTimeUs\":123}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1732180925526,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":123,\"Max\":123,\"Min\":123}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1732180925524,\"Host\":\"ghrun-qcxhsi27zq\",\"OutputRows\":1,\"StartTimeMs\":1732180925523,\"ComputeTimeUs\":5,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":1,\"DstStageId\":2,\"Bytes\":29}],\"TaskId\":1,\"OutputBytes\":29}],\"PeakMemoryUsageBytes\":131072,\"DurationUs\":1000,\"CpuTimeUs\":248}],\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":29,\"Max\":29,\"Min\":29}},\"Name\":\"19\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":0,\"StageDurationUs\":1000,\"BaseTimeMs\":1732180925523,\"CpuTimeUs\":{\"Count\":1,\"Sum\":172,\"Max\":172,\"Min\":172},\"OutputBytes\":{\"Count\":1,\"Sum ... l))\n(let $41 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $27 (lambda \'($94) (AsStruct \'(\'\"Key\" (Member $94 \'\"Key\")) \'(\'\"fk1\" (Member $94 \'\"fk1\")) \'(\'\"fk3\" (Member $94 \'\"fk3\"))))) (lambda \'($95) (block \'(\n (let $96 (AsStruct \'(\'\"Key\" (Member $95 \'\"Key\"))))\n (return (IfPresent (Lookup %kqp%tx_result_binding_1_0 $96) (lambda \'($97) (Just \'($96 $97 (Or (AggrNotEquals (Member $95 \'\"fk1\") (Member $97 \'\"fk1\")) (AggrNotEquals (Member $95 \'\"fk3\") (Member $97 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $6 $38 $40)))))\n)))) (lambda \'($98) (Nth $98 \'0)) (lambda \'($99) \'((Nth $99 \'1) (Nth $99 \'2))) $24)))) \'(\'(\'\"_logical_id\" \'2838) \'(\'\"_id\" \'\"8d92c9b6-fa08cc01-e73d01a1-2b89c886\"))))\n(let $42 (DqCnValue (TDqOutput $41 \'0)))\n(let $43 (KqpTxResultBinding $39 \'1 \'0))\n(let $44 (KqpPhysicalTx \'($41) \'($42) \'($33 \'($37 $43)) $11))\n(let $45 \'\"%kqp%tx_result_binding_2_0\")\n(let $46 (DictType $6 (TupleType $38 $40)))\n(let $47 %kqp%tx_result_binding_2_0)\n(let $48 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $27 (lambda \'($100) (AsStruct \'(\'\"Key\" (Member $100 \'\"Key\")) \'(\'\"fk1\" (Member $100 \'\"fk1\")) \'(\'\"fk3\" (Member $100 \'\"fk3\"))))) (lambda \'($101) (block \'(\n (let $102 \'(\'\"Key\" (Member $101 \'\"Key\")))\n (let $103 \'(\'\"fk1\" (Member $101 \'\"fk1\")))\n (let $104 \'(\'\"fk3\" (Member $101 \'\"fk3\")))\n (return (IfPresent (Lookup $47 (AsStruct $102)) (lambda \'($105) (If (Nth $105 \'1) (Just (AsStruct $102 $103 \'(\'\"fk2\" (Member (Nth $105 \'0) \'\"fk2\")) $104)) (Nothing (OptionalType $20)))) (Just (AsStruct $102 $103 \'(\'\"fk2\" (Nothing $18)) $104))))\n)))))) \'(\'(\'\"_logical_id\" \'3419) \'(\'\"_id\" \'\"d3de26ed-27522107-4e3be7da-5bd2fef3\"))))\n(let $49 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $47) (lambda \'($106) (Nth (Nth $106 \'1) \'1))) (lambda \'($107) (block \'(\n (let $108 (Nth (Nth $107 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $107 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $108 \'\"fk1\")) \'(\'\"fk2\" (Member $108 \'\"fk2\")) \'(\'\"fk3\" 
(Member $108 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'3433) \'(\'\"_id\" \'\"4c2e409-89883555-7b50ba63-2dfd7b96\"))))\n(let $50 (DqCnUnionAll (TDqOutput $48 \'0)))\n(let $51 (lambda \'($109) $109))\n(let $52 (DqPhyStage \'($50) $51 \'(\'(\'\"_logical_id\" \'3529) \'(\'\"_id\" \'\"a358c615-735236b6-612cb463-55ddfbd4\"))))\n(let $53 (DqCnUnionAll (TDqOutput $49 \'0)))\n(let $54 (DqPhyStage \'($53) $51 \'(\'(\'\"_logical_id\" \'3544) \'(\'\"_id\" \'\"5496cad9-a8d972d1-920cf00f-dfb20ad4\"))))\n(let $55 \'($48 $49 $52 $54))\n(let $56 (DqCnResult (TDqOutput $52 \'0) \'()))\n(let $57 (DqCnResult (TDqOutput $54 \'0) \'()))\n(let $58 (KqpTxResultBinding $46 \'2 \'0))\n(let $59 (KqpPhysicalTx $55 \'($56 $57) \'($33 \'($45 $58)) $11))\n(let $60 \'\"%kqp%tx_result_binding_1_1\")\n(let $61 (DqPhyStage \'() (lambda \'() (block \'(\n (let $110 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $15 (Iterator %kqp%tx_result_binding_1_1) $110 \'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'4059) \'(\'\"_id\" \'\"be96d30d-8c84ba8-94440f9d-10b509fb\"))))\n(let $62 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $63 \'\"%kqp%tx_result_binding_3_1\")\n(let $64 (ListType $20))\n(let $65 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $62 (Iterator %kqp%tx_result_binding_3_1)))) \'(\'(\'\"_logical_id\" \'4073) \'(\'\"_id\" \'\"50008652-77df585e-8ab2fd02-5f873e5f\"))))\n(let $66 \'\"%kqp%tx_result_binding_3_0\")\n(let $67 (DqPhyStage \'() (lambda \'() (block \'(\n (let $111 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $62 (Iterator %kqp%tx_result_binding_3_0) $111 \'())))\n))) \'(\'(\'\"_logical_id\" \'4087) \'(\'\"_id\" \'\"7fa51aae-9cf281c5-79b9f1c5-8b446982\"))))\n(let $68 \'($61 $65 $67))\n(let $69 (KqpTxResultBinding $5 \'1 \'1))\n(let $70 (KqpTxResultBinding $64 \'\"3\" \'0))\n(let $71 (KqpTxResultBinding $64 \'\"3\" \'1))\n(let $72 \'(\'($60 $69) \'($66 $70) \'($63 $71)))\n(let $73 (KqpPhysicalTx $68 \'() $72 \'($35 \'(\'\"with_effects\"))))\n(let $74 \'($12 $36 $44 $59 $73))\n(return (KqpPhysicalQuery $74 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 51596 total_cpu_time_us: 46239 2024-11-21T09:22:05.600983Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 2516, MsgBus: 27672 2024-11-21T09:22:05.771076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660427344737517:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.771108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0018c5/r3tmp/tmp8qNXts/pdisk_1.dat 2024-11-21T09:22:05.779217Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2516, node 2 2024-11-21T09:22:05.788739Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.788752Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.788753Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.788795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:27672 TClient is connected to server localhost:27672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.871536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.871560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.872697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.873393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.883634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.891825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.908676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.917904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.099904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431639706360:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.100000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.103114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.109049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.120020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.126797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.134330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.141020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.150280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431639706861:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.150308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431639706866:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.150315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.151058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.154725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660431639706868:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.298380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 30907, MsgBus: 10103 2024-11-21T09:22:04.699291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660422004953571:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.699345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0019ff/r3tmp/tmphxhFZI/pdisk_1.dat 2024-11-21T09:22:04.757862Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30907, node 1 2024-11-21T09:22:04.799733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.799766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.800858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807538Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.807572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10103 TClient is connected to server localhost:10103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.888419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.898190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:04.965080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.981556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.993843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.015143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426299922202:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.015170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.125976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.154010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.166533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426299922706:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660426299922711:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.168640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660426299922713:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.362324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.383883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:22:05.395978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.450585Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29435, MsgBus: 9249 2024-11-21T09:22:05.768796Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660425620415836:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.768955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0019ff/r3tmp/tmpEMLUSN/pdisk_1.dat 2024-11-21T09:22:05.777800Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29435, node 2 2024-11-21T09:22:05.787684Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.787699Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.787701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.787745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9249 TClient is connected to server localhost:9249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.871339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.871367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.871621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.872426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.882538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.891152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.906577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.918033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.040720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429915384679:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.040749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.044928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.050974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.057314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.063840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.071017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.078257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.086798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429915385182:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.086827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.086903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429915385187:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.087648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.091520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660429915385189:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.247348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.336354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T09:22:06.345782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 20443, MsgBus: 22855 2024-11-21T09:22:05.059483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660424064922579:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.059898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017dd/r3tmp/tmp0Y9MEQ/pdisk_1.dat 2024-11-21T09:22:05.107527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20443, node 1 2024-11-21T09:22:05.113074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.113089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.113090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.113127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22855 TClient is connected to server localhost:22855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.156270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.161142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.161170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.162267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.167060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.181261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.195991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.204072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.309460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424064924120:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.309487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.330781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.336163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.390611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.399070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.405817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.413151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.421289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424064924636:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.421308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.421315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424064924641:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.421788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.425954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660424064924643:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:05.609963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.673984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 5330, MsgBus: 23598 2024-11-21T09:22:05.948588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660426190395154:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.948670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017dd/r3tmp/tmplSPfuy/pdisk_1.dat 2024-11-21T09:22:05.956006Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5330, node 2 2024-11-21T09:22:05.964368Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.964379Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.964380Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.964413Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23598 TClient is connected to server localhost:23598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:06.048921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:06.048957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:06.050033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:06.050749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:06.058844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.067124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.084161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.094362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.199393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430485363994:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.199436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.203049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.209812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.217843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.225168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.232464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.239156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.247854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430485364490:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.247883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.247952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430485364495:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.248692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.252721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660430485364499:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.431789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan [GOOD] Test command err: Trying to start YDB, gRPC: 2591, MsgBus: 1116 2024-11-21T09:22:04.699334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660419646085398:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.699394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001993/r3tmp/tmprnVft2/pdisk_1.dat 2024-11-21T09:22:04.757371Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2591, node 1 2024-11-21T09:22:04.799877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.799906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.800915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.807614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1116 TClient is connected to server localhost:1116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.888594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.898217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:04.965114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.977375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.986358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.015205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423941054028:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.015230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.126196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.154001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.166537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423941054535:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.166750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660423941054542:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.168623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660423941054548:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.362891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.462306Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T09:22:05.618988Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eq3h5jwr8yycpxj86s4f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.620713Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, ActorId: [1:7439660423941054833:2454], ActorState: ExecuteState, TraceId: 01jd70eq3h5jwr8yycpxj86s4f, Create QueryResponse for error on request, msg: 2024-11-21T09:22:05.767821Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eq7x8bem0rb2r28gdjhp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.767887Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, ActorId: [1:7439660423941054833:2454], ActorState: ExecuteState, TraceId: 01jd70eq7x8bem0rb2r28gdjhp, Create QueryResponse for error on request, msg: 2024-11-21T09:22:05.777952Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T09:22:05.824377Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660423941055485:2587], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=. TraceId : 01jd70eqar80e8v90db09w66fm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:05.824444Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660423941055486:2588], TxId: 281474976715706, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=. CustomerSuppliedId : . TraceId : 01jd70eqar80e8v90db09w66fm. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439660423941055482:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:05.824674Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, ActorId: [1:7439660423941054833:2454], ActorState: ExecuteState, TraceId: 01jd70eqar80e8v90db09w66fm, Create QueryResponse for error on request, msg: 2024-11-21T09:22:05.910420Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqc1fr2tmgcvhdfkeryr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.910479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, ActorId: [1:7439660423941054833:2454], ActorState: ExecuteState, TraceId: 01jd70eqc1fr2tmgcvhdfkeryr, Create QueryResponse for error on request, msg: 2024-11-21T09:22:06.048762Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqer5ac5dtn0x2wsfhrs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:06.048841Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWQ5Zjc4NGYtNjhmYWRiNDgtZDczY2Q4NjktZjY0MTJjZmM=, ActorId: [1:7439660423941054833:2454], ActorState: ExecuteState, TraceId: 01jd70eqer5ac5dtn0x2wsfhrs, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29299, MsgBus: 5654 2024-11-21T09:22:06.175668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660428632480211:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:06.175688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001993/r3tmp/tmpc4eH5A/pdisk_1.dat 2024-11-21T09:22:06.186597Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29299, node 2 2024-11-21T09:22:06.193239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:06.193265Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:06.193267Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:06.193307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5654 TClient is connected to server localhost:5654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:06.275891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:06.275915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:06.277020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:06.278167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.290238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.299541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.313425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.323874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.455946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428632481765:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.455971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.459611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.465566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.477139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.484270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.490971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.498048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.506399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428632482268:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.506423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660428632482273:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.506424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.506934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.511121Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660428632482275:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.655752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 6615, MsgBus: 19725 2024-11-21T09:22:04.699235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660419724967369:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.699252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001971/r3tmp/tmptNJxyc/pdisk_1.dat 2024-11-21T09:22:04.758361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6615, node 1 2024-11-21T09:22:04.800076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.800103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.801122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.807591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19725 TClient is connected to server localhost:19725 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.889777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.898781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:04.965139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.976703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.986269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.015149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424019935994:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.015171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.126262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.161770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.170214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424019936507:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.170243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.170305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424019936512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.171306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660424019936514:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:05.363116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.541163Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eq0ncprdyxb5z8hhedqp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2ZGRjOWUtZjZmNzdmMzItZTFlZmRhOWUtYmJhOGI3Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.542812Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ2ZGRjOWUtZjZmNzdmMzItZTFlZmRhOWUtYmJhOGI3Yzg=, ActorId: [1:7439660424019937498:2512], ActorState: ExecuteState, TraceId: 01jd70eq0ncprdyxb5z8hhedqp, Create QueryResponse for error on request, msg: 2024-11-21T09:22:05.610411Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eq38etpp5bbfa3xhz280, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2ZGRjOWUtZjZmNzdmMzItZTFlZmRhOWUtYmJhOGI3Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.610471Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ2ZGRjOWUtZjZmNzdmMzItZTFlZmRhOWUtYmJhOGI3Yzg=, ActorId: [1:7439660424019937498:2512], ActorState: ExecuteState, TraceId: 01jd70eq38etpp5bbfa3xhz280, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16763, MsgBus: 16603 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001971/r3tmp/tmpePcP4m/pdisk_1.dat 2024-11-21T09:22:05.767341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:05.767590Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16763, node 2 2024-11-21T09:22:05.776319Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.776338Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.776340Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.776369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16603 TClient is connected to server localhost:16603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.858380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.858413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.859499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.860533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.871517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.879085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.892846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.903903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.011785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431542805909:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.011831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.017051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.023493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.036252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.043383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.098132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.106667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.114741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431542806425:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.114765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660431542806430:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.114768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.115398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.119391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660431542806432:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.314766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.504626Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqz82q832xy7xpe4g70y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDljOWM3OTQtNThiNDU2ZjYtNGUzMWI3Y2EtZDU4YTI5ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:06.504706Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDljOWM3OTQtNThiNDU2ZjYtNGUzMWI3Y2EtZDU4YTI5ZmQ=, ActorId: [2:7439660431542807445:2512], ActorState: ExecuteState, TraceId: 01jd70eqz82q832xy7xpe4g70y, Create QueryResponse for error on request, msg: 2024-11-21T09:22:06.561837Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70er1a66wa7a4v2htky389, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDljOWM3OTQtNThiNDU2ZjYtNGUzMWI3Y2EtZDU4YTI5ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:06.561913Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDljOWM3OTQtNThiNDU2ZjYtNGUzMWI3Y2EtZDU4YTI5ZmQ=, ActorId: [2:7439660431542807445:2512], ActorState: ExecuteState, TraceId: 01jd70er1a66wa7a4v2htky389, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete [GOOD] Test command err: Trying to start YDB, gRPC: 25687, MsgBus: 21062 2024-11-21T09:22:04.941017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660420275591087:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.941036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017f6/r3tmp/tmpeZqqRy/pdisk_1.dat 2024-11-21T09:22:04.995905Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25687, node 1 2024-11-21T09:22:05.007577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.007590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.007592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.007619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21062 TClient is connected to server localhost:21062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:22:05.041535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.041574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:05.042620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.073294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.082759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.144008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.159606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.169414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.235422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424570559924:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.235452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.258965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.265438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.272805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.327761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.336062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.343238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.352561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424570560430:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.352582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660424570560435:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.352590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.353188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.356126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660424570560437:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.537706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13111, MsgBus: 23063 2024-11-21T09:22:05.804788Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660425592702299:2248];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017f6/r3tmp/tmpoZvngQ/pdisk_1.dat 2024-11-21T09:22:05.809514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:05.812807Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13111, node 2 2024-11-21T09:22:05.822213Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.822226Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.822227Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.822260Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23063 TClient is connected to server localhost:23063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.907288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.907327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.907617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.908437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.919489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
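Note on the warnings above: the repeated "Resource pool default not found or you don't have access permissions" messages, followed by ESchemeOpCreateResourcePool and the "Transaction ... completed, doublechecking" retry, come from the workload service lazily creating the default resource pool the first time a query runs against a fresh database; they are expected start-up noise, not test failures. The same transient-error pattern is normally absorbed on the client side with the SDK retry helper. A minimal sketch, assuming the in-tree C++ SDK headers and an illustrative endpoint/database (neither is taken from the log):

    // Minimal sketch, not the test's own code: header paths, endpoint and database
    // are assumptions; adjust them to your SDK layout and cluster.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        auto config = TDriverConfig()
            .SetEndpoint("localhost:2136")   // hypothetical endpoint; the tests above use ephemeral ports
            .SetDatabase("/Root");
        TDriver driver(config);
        TTableClient client(driver);

        // RetryOperationSync re-runs the lambda on retryable statuses
        // (OVERLOADED, UNAVAILABLE, ABORTED, ...), the client-side counterpart
        // of the service's "Scheduled retry" seen in the log.
        TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
            auto result = session.ExecuteDataQuery(
                "SELECT 1;",
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
            return result;
        });

        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }
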
2024-11-21T09:22:05.927927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.945144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.960456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.115474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429887670946:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.115520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.119912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.126077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.134475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.140926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.196795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.204789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.219934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429887671453:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.219959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.219971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660429887671458:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.220496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.224653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660429887671460:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.414466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.939981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.940001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.940006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.940011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.940023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.940027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.940034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.940123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.947921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.947942Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.949958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.950036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.950058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.952437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.952516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 2024-11-21T09:21:56.952610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.952777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.953367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.953580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.953587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.953594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.953599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.953603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.953629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.954549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.968500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.968558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.968614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.968663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.968671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.969354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T09:21:56.969378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.969382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.969682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.969967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.969978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.969983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.970357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.970684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.970720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.970835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.970854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.970858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.970892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.970896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.970931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.970940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.971220Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.971226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.971247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.971250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.971294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.971298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.971307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.971309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.971313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.971316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.971319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.971321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.971328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.971332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.971334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
6039Z node 41 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:06.676059Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 134 -> 135 2024-11-21T09:22:06.676075Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:06.676080Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:06.676352Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:06.676359Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:06.676375Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:06.676390Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:06.676393Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T09:22:06.676396Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:06.676421Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:06.676425Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T09:22:06.676428Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 135 -> 240 2024-11-21T09:22:06.676506Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:06.676512Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:06.676514Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:06.676517Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T09:22:06.676520Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:06.676711Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:06.676722Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 
2024-11-21T09:22:06.676724Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:06.676727Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:22:06.676729Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:06.676736Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:22:06.676972Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:06.676978Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:06.676986Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:06.676988Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:06.676991Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:22:06.676995Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:06.676997Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:06.677000Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:06.677006Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:06.677068Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677075Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:06.677088Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:22:06.677148Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677154Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:06.677163Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:06.677253Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:06.677444Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:06.677772Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677782Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
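The trace above is the force-drop path of an external subdomain: the schemeshard publishes the updated paths to the scheme board, finishes the operation (state 135 -> 240), and TTxCleanDroppedSubDomains / TTxCleanDroppedPaths physically remove the dropped path, after which describing /MyRoot/USER_0 returns StatusPathDoesNotExist while /MyRoot itself is still described successfully (see the DescribeScheme results just below). The same check can be made from a client through the scheme API; a sketch under the assumption that the in-tree C++ SDK is available, with an illustrative endpoint:

    // Sketch: verify from a client that a dropped subdomain path no longer resolves.
    // Endpoint and database are assumptions; the paths mirror the test above.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    #include <util/stream/output.h>

    int main() {
        NYdb::TDriver driver(NYdb::TDriverConfig()
            .SetEndpoint("localhost:2136")
            .SetDatabase("/MyRoot"));
        NYdb::NScheme::TSchemeClient scheme(driver);

        // Once TTxCleanDroppedPaths has removed USER_0, this describe is expected to fail
        // (the server answers StatusPathDoesNotExist, surfaced as a non-success status).
        auto dropped = scheme.DescribePath("/MyRoot/USER_0").GetValueSync();
        Cout << "USER_0 resolvable: " << (dropped.IsSuccess() ? "yes" : "no") << Endl;

        // The database root itself must still be describable.
        auto root = scheme.DescribePath("/MyRoot").GetValueSync();
        Cout << "MyRoot resolvable: " << (root.IsSuccess() ? "yes" : "no") << Endl;

        driver.Stop(true);
        return 0;
    }
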
TestWaitNotification wait txId: 1002 2024-11-21T09:22:06.677811Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:22:06.677815Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2024-11-21T09:22:06.677823Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:06.677825Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:22:06.677864Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677878Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:22:06.677881Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:341:2333] 2024-11-21T09:22:06.677892Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677899Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:06.677902Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:341:2333] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:06.677940Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:06.677956Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 24us result status StatusPathDoesNotExist 2024-11-21T09:22:06.677980Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:22:06.678010Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:06.678025Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 15us result status StatusSuccess 2024-11-21T09:22:06.678082Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 4766, MsgBus: 63519 2024-11-21T09:22:04.699261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660422755212036:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.699280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001988/r3tmp/tmpHzKK6f/pdisk_1.dat 2024-11-21T09:22:04.756818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4766, node 1 2024-11-21T09:22:04.799982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.800004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.801101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:04.807570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.807581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.807582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.807614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63519 TClient is connected to server localhost:63519 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.888417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.899378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.965099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.975872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.986319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.015470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427050180663:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.015492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.126229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.132486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.139860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.147877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.161647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.169908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427050181178:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.169933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.169943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427050181183:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.170519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.174237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660427050181185:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.363208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.839926Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqad3bb5v2d1ywckdnq9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:05.840993Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, ActorId: [1:7439660427050182210:2512], ActorState: ExecuteState, TraceId: 01jd70eqad3bb5v2d1ywckdnq9, Create QueryResponse for error on request, msg: 2024-11-21T09:22:06.439489Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqwr8v7pc96pmv8gsdmg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:06.439562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, ActorId: [1:7439660427050182210:2512], ActorState: ExecuteState, TraceId: 01jd70eqwr8v7pc96pmv8gsdmg, Create QueryResponse for error on request, msg: 2024-11-21T09:22:06.517548Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eqzm75vv0rx4yhx07wdf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:06.517611Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, ActorId: [1:7439660427050182210:2512], ActorState: ExecuteState, TraceId: 01jd70eqzm75vv0rx4yhx07wdf, Create QueryResponse for error on request, msg: 2024-11-21T09:22:06.608788Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70er4e4bjsvc15k6taggz7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2024-11-21T09:22:06.608857Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBhNzRmMGYtODZjMjA3NjYtNTQyZWE2MDgtZWEyNTk0ZjA=, ActorId: [1:7439660427050182210:2512], ActorState: ExecuteState, TraceId: 01jd70er4e4bjsvc15k6taggz7, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] Test command err: Trying to start YDB, gRPC: 3635, MsgBus: 63312 2024-11-21T09:22:04.765401Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660423462892640:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:04.765525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001920/r3tmp/tmpCbl4aw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3635, node 1 2024-11-21T09:22:04.821342Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:04.823042Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:04.823048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:04.823049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:04.823072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63312 2024-11-21T09:22:04.866243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:04.866279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:04.867330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:04.896861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.908308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
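The "TKqpLiteralExecuter, TKqpEnsure failed" errors above are the expected outcome of KqpUniqueIndex::UpdateOnNullInComplexFk: KQP plans a runtime precondition check over the unique secondary index, and when an update would duplicate an existing index key the ensure fails and the session answers the request with an error instead of committing. From a client this surfaces as a failed query status (typically PRECONDITION_FAILED; treat the exact code as an assumption here). A sketch with illustrative table and column names:

    // Sketch: how the TKqpEnsure failure surfaces on the client side.
    // Table and column names are illustrative; the status check is an assumption.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    #include <util/stream/output.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig().SetEndpoint("localhost:2136").SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // An upsert that would duplicate a key already covered by a unique secondary index.
        auto result = session.ExecuteDataQuery(
            "UPSERT INTO MainTable (Key, Fk) VALUES (2u, 1000000000u);",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();

        if (result.GetStatus() == EStatus::PRECONDITION_FAILED) {
            // The violated constraint is reported in the issues; the transaction is not committed.
            Cerr << result.GetIssues().ToString() << Endl;
        }

        driver.Stop(true);
        return result.IsSuccess() ? 0 : 1;
    }
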
2024-11-21T09:22:04.969388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.987540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:04.997285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.035847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427757861482:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.035878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.120295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.175841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.181960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.236068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.290882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.301167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.309749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427757862003:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.309775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.309776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427757862008:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.310274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.314466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660427757862010:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.493648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.601115Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439660427757862591:2504], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:22:05.601188Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg0NGNjYWQtOWRhZTAyMzUtOGU0M2MxZjItN2U3YjhjYzg=, ActorId: [1:7439660427757862304:2454], ActorState: ExecuteState, TraceId: 01jd70eq4ybyv0fx7vr10v5gm9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:22:05.603661Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439660427757862597:2507], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2024-11-21T09:22:05.603864Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg0NGNjYWQtOWRhZTAyMzUtOGU0M2MxZjItN2U3YjhjYzg=, ActorId: [1:7439660427757862304:2454], ActorState: ExecuteState, TraceId: 01jd70eq518mz2te18s2yrvn9f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:22:05.651405Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 17095, MsgBus: 13528 2024-11-21T09:22:06.023916Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660430621451402:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:06.023935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001920/r3tmp/tmpJbPqwv/pdisk_1.dat 2024-11-21T09:22:06.033996Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17095, node 2 2024-11-21T09:22:06.043249Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:06.043263Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:06.043265Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:06.043295Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13528 TClient is connected to server localhost:13528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:06.126733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:06.126761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:06.127020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
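The two SCHEME_ERROR compile failures above show the two ways an index read is rejected before execution: /Root/TestTable/Index/indexImplTable is a private implementation table that cannot be addressed directly, and a VIEW clause must name an index that actually exists on the table (WrongView does not). A valid read goes through the index by name with SELECT ... FROM table VIEW index. A sketch with a hypothetical index and column name (fk_index, fk), since the log only names the test table and its index path:

    // Sketch: reading through a secondary index with the VIEW clause instead of
    // addressing the private .../Index/indexImplTable. Index and column names
    // (fk_index, fk) are hypothetical; endpoint and database are assumptions.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    #include <util/stream/output.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig().SetEndpoint("localhost:2136").SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // VIEW <index_name> resolves the secondary index by name; naming an index
        // that does not exist (like WrongView above) is rejected with SCHEME_ERROR
        // at compile time, before anything is executed.
        auto result = session.ExecuteDataQuery(
            "SELECT * FROM TestTable VIEW fk_index WHERE fk = 42;",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();

        if (!result.IsSuccess()) {
            Cerr << result.GetIssues().ToString() << Endl;
        }

        driver.Stop(true);
        return result.IsSuccess() ? 0 : 1;
    }
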
2024-11-21T09:22:06.127798Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:06.127874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:06.138112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.145950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.162573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.172467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.304944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430621452949:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.304972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.308948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.314667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.323041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.330457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.337350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.344092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:06.352851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430621453453:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.352888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.352933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660430621453458:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.353499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:06.357484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660430621453460:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:06.542559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::ReplaceFkPartialColumnSet >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> KqpUniqueIndex::UpdateFkSameValue >> KqpIndexes::MultipleModifications >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin >> KqpUniqueIndex::InsertFkAlreadyExist >> KqpIndexes::ExplainCollectFullDiagnostics >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpMultishardIndex::SortedRangeReadDesc >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpMultishardIndex::DataColumnUpsertMixedSemantic+StreamLookup >> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD] >> KqpUniqueIndex::UpdateFkAlreadyExist >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnWrite-StreamLookup >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> KqpIndexMetadata::HandleNotReadyIndex >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TPersQueueTest::TxCounters [GOOD] >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> KqpUniqueIndex::InsertFkAlreadyExist [GOOD] >> KqpUniqueIndex::InsertFkDuplicate >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk-StreamLookup >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin >> KqpMultishardIndex::DataColumnUpsertMixedSemantic+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnSelect-StreamLookup >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnWrite+StreamLookup >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SortByPk+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T09:19:22.591272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439659726544771504:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:22.591325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:19:22.621654Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:22.625717Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T09:19:22.626484Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439659725993305829:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:19:22.626545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/003edd/r3tmp/tmptbEeTO/pdisk_1.dat 2024-11-21T09:19:22.649328Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31835, node 1 2024-11-21T09:19:22.660130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/jptk/003edd/r3tmp/yandex1Uv0Mu.tmp 2024-11-21T09:19:22.660139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/jptk/003edd/r3tmp/yandex1Uv0Mu.tmp 2024-11-21T09:19:22.660184Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/jptk/003edd/r3tmp/yandex1Uv0Mu.tmp 2024-11-21T09:19:22.660239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:19:22.663516Z INFO: TTestServer started on Port 5701 GrpcPort 31835 TClient is connected to server localhost:5701 PQClient connected to localhost:31835 === TenantModeEnabled() = 0 === Init PQ - start server on port 31835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:19:22.691364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:22.691397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:22.692878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:22.729931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:19:22.729954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:19:22.730970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:19:22.731231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:19:22.736572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:19:22.736618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.736668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T09:19:22.736732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:19:22.736741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:19:22.737347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:19:22.737358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T09:19:22.737361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T09:19:22.737696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T09:19:22.737808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T09:19:22.737822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T09:19:22.737826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T09:19:22.738019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.738029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.738033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T09:19:22.738038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:19:22.738704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:22.739003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T09:19:22.739034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T09:19:22.739489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180762788, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:19:22.739518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439659726544772026 RawX2: 4294969628 } } Step: 1732180762788 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:19:22.739527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T09:19:22.739571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T09:19:22.739580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T09:19:22.739611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:19:22.739621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T09:19:22.739888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:19:22.739899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:19:22.739942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:19:22.739950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439659726544772075:2368], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T09:19:22.739958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:19:22.739968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T09:19:22.739978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T09:19:22.739985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:19:22.739990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T09:19:22.739993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T09:19:22.739995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T09:19:22.739997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T09:19:22.740006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:19:22.740018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T09:19:22.740021Z node 1 :FLAT_TX_SCHEMESHARD D ... rtition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:22:08.382371Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T09:22:08.382377Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7439660440013297440:2454] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T09:22:08.382378Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T09:22:08.382443Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T09:22:08.382467Z node 32 :PERSQUEUE INFO: new Cookie 123|8d01df23-f012922c-90604633-68e6d5b7_0 generated for partition 0 topic 'topic' owner 123 2024-11-21T09:22:08.382533Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|8d01df23-f012922c-90604633-68e6d5b7_0 2024-11-21T09:22:08.383175Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T09:22:08.383231Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2024-11-21T09:22:08.383242Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7439660440013297446:2456]: Bootstrap 2024-11-21T09:22:08.383514Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7439660440013297446:2456]: Request location 2024-11-21T09:22:08.383554Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439660440013297455:2457] connected; active server actors: 1 2024-11-21T09:22:08.383570Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2024-11-21T09:22:08.383575Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7439660440013297446:2456]: Got location 2024-11-21T09:22:08.383591Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439660440013297455:2457] disconnected; active server actors: 1 2024-11-21T09:22:08.383594Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439660440013297455:2457] disconnected no session 2024-11-21T09:22:08.384066Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|8d01df23-f012922c-90604633-68e6d5b7_0 grpc read done: success: 0 data: 2024-11-21T09:22:08.384077Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|8d01df23-f012922c-90604633-68e6d5b7_0 grpc read failed 2024-11-21T09:22:08.384081Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|8d01df23-f012922c-90604633-68e6d5b7_0 grpc closed 2024-11-21T09:22:08.384083Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|8d01df23-f012922c-90604633-68e6d5b7_0 is DEAD 2024-11-21T09:22:08.384181Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:22:08.384233Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T09:22:08.384240Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T09:22:08.384338Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2024-11-21T09:22:08.384359Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:40348 2024-11-21T09:22:08.384362Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:40348 proto=topic topic=topic durationSec=0 2024-11-21T09:22:08.384376Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T09:22:08.384385Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2024-11-21T09:22:08.384534Z node 32 
:PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T09:22:08.384570Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T09:22:08.384576Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T09:22:08.384578Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T09:22:08.384582Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7439660440013297460:2459] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2024-11-21T09:22:08.384585Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2024-11-21T09:22:08.384686Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T09:22:08.384707Z node 32 :PERSQUEUE INFO: new Cookie 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 generated for partition 0 topic 'topic' owner 123 2024-11-21T09:22:08.384775Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 2024-11-21T09:22:08.385219Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:22:08.385295Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:22:08.385346Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T09:22:08.385357Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:22:08.385429Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T09:22:08.385544Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=ODMxZGViMmMtZTU5YWQwY2ItNDRlZmUxYzItNGU5MTkyZmI= TxId: 01jd70esvm74xn798r3qw2feqs WriteId: {32, 281474976715675} 2024-11-21T09:22:08.385998Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715675}, 100000}, State: StateInit] bootstrapping {0, {32, 
281474976715675}, 100000} [32:7439660440013297472:2461] 2024-11-21T09:22:08.386376Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715675}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976715675}, 100000} generation 1 [32:7439660440013297472:2461] 2024-11-21T09:22:08.386425Z node 32 :PERSQUEUE INFO: new Cookie 123|d9ab2b0f-9f7f82a2-fa908741-c68478f_0 generated for partition {0, {32, 281474976715675}, 100000} topic 'topic' owner 123 2024-11-21T09:22:08.386711Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.386726Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.386731Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.386736Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.388793Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.389228Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.389240Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.389243Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T09:22:08.485760Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read done: success: 0 data: 2024-11-21T09:22:08.485776Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc read failed 2024-11-21T09:22:08.485783Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 grpc closed 2024-11-21T09:22:08.485787Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|8a1876a-1f5bf884-718d2f1e-2012acde_0 is DEAD 2024-11-21T09:22:08.485942Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:22:08.485955Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T09:22:08.487310Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 281474976715676 Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=180000: 0
    bin=200: 0
    bin=2000: 3
    bin=30000: 0
    bin=500: 0
    bin=5000: 1
    bin=60000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=10240: 2
    bin=102400: 0
    bin=1048576: 0
    bin=10485760: 0
    bin=20480: 1
    bin=204800: 0
    bin=2097152: 0
    bin=5120: 0
    bin=51200: 0
    bin=524288: 0
    bin=5242880: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=10: 0
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=20: 0
    bin=2500: 0
    bin=5: 0
    bin=50: 0
    bin=500: 0
    bin=5000: 0
    bin=999999: 0
>> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams [GOOD] >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] >> KqpUniqueIndex::InsertFkDuplicate [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> KqpMultishardIndex::SortByPk-StreamLookup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 6232, MsgBus: 25646 2024-11-21T09:22:07.317723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660432720848801:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:07.317779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001789/r3tmp/tmpBydXHX/pdisk_1.dat 2024-11-21T09:22:07.364754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6232, node 1 2024-11-21T09:22:07.373151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:07.373163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:07.373165Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:07.373191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25646 TClient is connected to server localhost:25646 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:07.418511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:07.418541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:07.419568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:07.431368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:07.439340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.453197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.469103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.478616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.560354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432720850138:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.560382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.581421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.587102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.596963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.603844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.658411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.666825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.675271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432720850655:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.675290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.675338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432720850660:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.675835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:07.680364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660432720850662:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:07.862420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 16557, MsgBus: 24529 2024-11-21T09:22:08.209636Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660440303424731:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.209668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001789/r3tmp/tmpGMREx4/pdisk_1.dat 2024-11-21T09:22:08.218185Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16557, node 2 2024-11-21T09:22:08.228649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.228669Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.228671Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.228711Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24529 TClient is connected to server localhost:24529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.311479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.311509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.311897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.312516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.316752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.326253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.342235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.354907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.528317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660440303426290:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.528345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.531914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.538132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.549010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.556186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.562626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.569872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.579242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660440303426791:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.579271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.579273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660440303426796:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.579836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.583029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660440303426798:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.760117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.923880Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70etavecrx2s1qs16cpdht, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:08.925155Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, ActorId: [2:7439660440303427789:2512], ActorState: ExecuteState, TraceId: 01jd70etavecrx2s1qs16cpdht, Create QueryResponse for error on request, msg: 2024-11-21T09:22:09.001455Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70etcyeaj53n2wzh71s5bh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:09.001520Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, ActorId: [2:7439660440303427789:2512], ActorState: ExecuteState, TraceId: 01jd70etcyeaj53n2wzh71s5bh, Create QueryResponse for error on request, msg: 2024-11-21T09:22:09.005377Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70etfb0xjm9px4smjmzh1x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:09.005439Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, ActorId: [2:7439660440303427789:2512], ActorState: ExecuteState, TraceId: 01jd70etfb0xjm9px4smjmzh1x, Create QueryResponse for error on request, msg: 2024-11-21T09:22:09.077301Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70etff8267nqae0pa21dx6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2024-11-21T09:22:09.077373Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzZTlkOTUtOTc5Njk2MjMtODgzYTg0ODUtNzMzZDkxYQ==, ActorId: [2:7439660440303427789:2512], ActorState: ExecuteState, TraceId: 01jd70etff8267nqae0pa21dx6, Create QueryResponse for error on request, msg: >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams [GOOD] Test command err: Trying to start YDB, gRPC: 16852, MsgBus: 21503 2024-11-21T09:22:08.063503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440163079337:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.063845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001749/r3tmp/tmpfG5bKe/pdisk_1.dat 2024-11-21T09:22:08.116081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16852, node 1 2024-11-21T09:22:08.125370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.125385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.125387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.125415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21503 TClient is connected to server localhost:21503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.164800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.164823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.165990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.170956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.182718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.197630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.216275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.228718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.351737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440163080885:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.351800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.381232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.386507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.440674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.450877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.458085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.464713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.473310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440163081401:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440163081406:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.478292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440163081408:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:08.656977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7307, MsgBus: 2698 2024-11-21T09:22:08.942988Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660437966645905:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.943140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001749/r3tmp/tmpRrinaF/pdisk_1.dat 2024-11-21T09:22:08.950011Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7307, node 2 2024-11-21T09:22:08.959037Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.959047Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.959049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.959078Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2698 TClient is connected to server localhost:2698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.043478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.043509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.044520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.044755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.056080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.064344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.080813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.089429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.229492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442261614729:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.229521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.233964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.240177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.249356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.255567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.263229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.270298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.278773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442261615242:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.278806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.278857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442261615247:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.279474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.283403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660442261615249:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.480960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 10434, MsgBus: 1925 2024-11-21T09:22:08.120274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440657084434:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.120495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001719/r3tmp/tmpVfSrm6/pdisk_1.dat 2024-11-21T09:22:08.173073Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10434, node 1 2024-11-21T09:22:08.179349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.179363Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.179365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.179398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1925 TClient is connected to server localhost:1925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.221965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) 2024-11-21T09:22:08.221991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.223036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.245308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.254470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.315595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.333676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.343755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.406901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440657085988:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.406922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.427135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.432444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.443668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.450106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.457587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.465226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.472999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440657086478:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440657086483:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.473540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.477992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440657086485:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.658113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.664633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25512, MsgBus: 61820 2024-11-21T09:22:08.975321Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660437168672125:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.975543Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001719/r3tmp/tmpstDw6D/pdisk_1.dat 2024-11-21T09:22:08.983478Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25512, node 2 2024-11-21T09:22:08.993217Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.993233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.993235Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.993276Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61820 TClient is connected to server localhost:61820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.077479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.077508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.077856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.078570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.079880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.135104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.154127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.166600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.240391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660441463640962:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.240416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.246250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.252782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.263029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.269753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.277040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.283694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.292289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660441463641453:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.292312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.292361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660441463641458:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.292904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.297291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660441463641460:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.444359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.451855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14496, MsgBus: 14187 2024-11-21T09:22:07.331072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660436351614807:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:07.331089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017c2/r3tmp/tmpDzxtBG/pdisk_1.dat 2024-11-21T09:22:07.373900Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14496, node 1 2024-11-21T09:22:07.385513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:07.385527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:07.385528Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:07.385565Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14187 TClient is connected to server localhost:14187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:07.431741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:07.431765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:07.432690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.432853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T09:22:07.440967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.454239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.469194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.478454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.584798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436351616353:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.584825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.608636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.663120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.673939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.680981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.688138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.694921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.703717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436351616869:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.703751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436351616874:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.703751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.704460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:07.708558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660436351616876:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:07.855878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 3852, MsgBus: 26362 2024-11-21T09:22:08.223723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660438768562455:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.224010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017c2/r3tmp/tmpTjZrk0/pdisk_1.dat 2024-11-21T09:22:08.233740Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3852, node 2 2024-11-21T09:22:08.248290Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.248305Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.248307Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.248363Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26362 TClient is connected to server localhost:26362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.324150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.324177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.325272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.326486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.334858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.342753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T09:22:08.364984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.374903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.522953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438768563991:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.522978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.525148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.530973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.542034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.548599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.556042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.563288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.571874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438768564503:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.571887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438768564508:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.571899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.572496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.576353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660438768564510:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.729180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.425048Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660443063534913:2716], TxId: 281474976715731, task: 1. Ctx: { TraceId : 01jd70etvpe73qjg0mvc60zpdj. SessionId : ydb://session/3?node_id=2&id=Y2ViZmY1ZGItZTZhZDFhNWUtNDkyYWNjYjItZGQ3YmVlNjY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:09.425108Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660443063534914:2717], TxId: 281474976715731, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Y2ViZmY1ZGItZTZhZDFhNWUtNDkyYWNjYjItZGQ3YmVlNjY=. TraceId : 01jd70etvpe73qjg0mvc60zpdj. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439660443063534910:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:09.425402Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ViZmY1ZGItZTZhZDFhNWUtNDkyYWNjYjItZGQ3YmVlNjY=, ActorId: [2:7439660438768564790:2454], ActorState: ExecuteState, TraceId: 01jd70etvpe73qjg0mvc60zpdj, Create QueryResponse for error on request, msg: >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnSelect-StreamLookup [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 14095, MsgBus: 61387 2024-11-21T09:22:08.109157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440378053827:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.109297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001724/r3tmp/tmpciB9sc/pdisk_1.dat 2024-11-21T09:22:08.154140Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14095, node 1 2024-11-21T09:22:08.163462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.163477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.163478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.163515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61387 TClient is connected to server localhost:61387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.210307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.210338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.211488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.218074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.228438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.244913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.261222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.271134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.401473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440378055376:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.401495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.422210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.428238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.437228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.443665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.450960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.458432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.466402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440378055897:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.466402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440378055892:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.466413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.467111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.471357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440378055899:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:08.641095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.762977Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660440378057077:2551], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NzAxZDg5ZjktYzk4MmYxNDUtMWVhNzNlYTktNDg2NTE0YzQ=. TraceId : 01jd70et6q8yb38tmjad57n3yw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:08.763043Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660440378057078:2552], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01jd70et6q8yb38tmjad57n3yw. SessionId : ydb://session/3?node_id=1&id=NzAxZDg5ZjktYzk4MmYxNDUtMWVhNzNlYTktNDg2NTE0YzQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439660440378057074:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:08.763267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzAxZDg5ZjktYzk4MmYxNDUtMWVhNzNlYTktNDg2NTE0YzQ=, ActorId: [1:7439660440378056878:2512], ActorState: ExecuteState, TraceId: 01jd70et6q8yb38tmjad57n3yw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2491, MsgBus: 27887 2024-11-21T09:22:08.991724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660440043957111:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.991900Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001724/r3tmp/tmpQp5yXi/pdisk_1.dat 2024-11-21T09:22:08.999900Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2491, node 2 2024-11-21T09:22:09.009410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.009421Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.009423Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.009450Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27887 TClient is connected to server localhost:27887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.092037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.092069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.093178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.093899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.104116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.112491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.130177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.139101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.272815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444338925936:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.272843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.277641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.282337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.291435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.297753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.304949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.312128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.320650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444338926439:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.320676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.320717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444338926444:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.321280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.325335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660444338926446:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.517211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.685982Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660444338927692:2551], TxId: 281474976715677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzI4Mjk4NjEtYmM4Y2EyYWUtZWI5N2Q4M2QtNDNlMmFlNg==. CustomerSuppliedId : . TraceId : 01jd70ev3dewd01yj8bjjma9y8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T09:22:09.686101Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660444338927693:2552], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd70ev3dewd01yj8bjjma9y8. SessionId : ydb://session/3?node_id=2&id=MzI4Mjk4NjEtYmM4Y2EyYWUtZWI5N2Q4M2QtNDNlMmFlNg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660444338927689:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:09.686225Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzI4Mjk4NjEtYmM4Y2EyYWUtZWI5N2Q4M2QtNDNlMmFlNg==, ActorId: [2:7439660444338927484:2512], ActorState: ExecuteState, TraceId: 01jd70ev3dewd01yj8bjjma9y8, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 20768, MsgBus: 27447 2024-11-21T09:22:08.174932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660439414604295:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.175089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001712/r3tmp/tmpCvwSVU/pdisk_1.dat 2024-11-21T09:22:08.238872Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20768, node 1 2024-11-21T09:22:08.248587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.248602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.248603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.248625Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27447 2024-11-21T09:22:08.277505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.277533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.278682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.316393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.326336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.388081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.403320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.416560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.461287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439414605848:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.461309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.487845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.493874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.548301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.556088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.563361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.569810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.578477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439414606350:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.578497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.578558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439414606355:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.579163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.583391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660439414606357:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.770214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 17955, MsgBus: 19055 2024-11-21T09:22:09.056786Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660445012463788:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.056939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001712/r3tmp/tmpb4cojr/pdisk_1.dat 2024-11-21T09:22:09.071346Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17955, node 2 2024-11-21T09:22:09.076197Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.076224Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.076226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.076265Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19055 TClient is connected to server localhost:19055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.157155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.157187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.158284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.158977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.169625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.177770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.194468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.203478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.390451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660445012465344:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.390489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.394940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.400598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.410177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.417119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.423473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.430860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.439564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660445012465834:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.439585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660445012465839:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.439590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.440154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.444390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660445012465841:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.583729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... |96.8%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] >> KqpUniqueIndex::InsertNullInComplexFk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 21235, MsgBus: 15632 2024-11-21T09:22:08.041470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440505986906:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.041750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001731/r3tmp/tmpBkV3Ed/pdisk_1.dat 2024-11-21T09:22:08.086320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21235, node 1 2024-11-21T09:22:08.095175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.095188Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.095190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.095225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15632 TClient is connected to server localhost:15632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.143033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.143059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.144160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.165018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.172846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.235282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.252502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.262914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.333836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440505988462:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.333867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.369750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.376472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.388538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.394844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.449504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.458127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.466490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440505988977:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.466505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.466538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440505988982:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.467115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.471501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440505988984:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.660785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.668355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26874, MsgBus: 3159 2024-11-21T09:22:09.113468Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660443781017932:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.113640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001731/r3tmp/tmpMDXGqi/pdisk_1.dat 2024-11-21T09:22:09.121621Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26874, node 2 2024-11-21T09:22:09.131485Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.131499Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.131501Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.131552Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3159 TClient is connected to server localhost:3159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.215184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.215220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.215991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.216264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:22:09.226971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.234343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.250563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.260219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.394366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443781019473:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.394396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.399056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.404455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.410141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.416904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.424070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.479453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.488365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443781019989:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.488385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.488386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443781019994:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.489028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.493324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660443781019996:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.641333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.649099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 4487, MsgBus: 27653 2024-11-21T09:22:08.034649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440114756508:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.034879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001762/r3tmp/tmpyCVnOV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4487, node 1 2024-11-21T09:22:08.080039Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:08.084297Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.084318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.084320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.084365Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27653 TClient is connected to server localhost:27653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.135853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.135879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.137009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.156797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.160614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.222267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.240926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.250097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.317596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440114758058:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.317646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.354632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.361370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.374072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.380841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.388089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.394643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.403364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440114758550:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.403395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.403428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440114758556:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.403970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.408437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440114758558:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.565053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19970, MsgBus: 63548 2024-11-21T09:22:08.920598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660440683258671:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.920758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001762/r3tmp/tmpCS3QHn/pdisk_1.dat 2024-11-21T09:22:08.929510Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19970, node 2 2024-11-21T09:22:08.939115Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.939125Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.939126Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.939159Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63548 TClient is connected to server localhost:63548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.023141Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.023179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.023550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.024187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.032780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.040670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.059403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.067962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.186913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444978227534:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.186941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.190682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.196394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.207323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.214036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.269016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.277134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.285739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444978228040:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.285768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.285831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444978228045:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.286364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.290005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660444978228047:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.461278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.467490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.480175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 10217, MsgBus: 12286 2024-11-21T09:22:07.460893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660432366845409:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:07.461005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001786/r3tmp/tmpQLO9ai/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10217, node 1 2024-11-21T09:22:07.509900Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:07.514630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:07.514641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:07.514643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:07.514667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12286 TClient is connected to server localhost:12286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:07.558593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:07.562183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:07.562212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:07.563340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:07.571410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.631775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.647234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.657091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:07.721304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432366846964:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.721331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.741583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.747766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.758508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.813127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.867765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.876926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:07.885063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432366847482:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.885081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.885080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660432366847487:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:07.885692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:07.889946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660432366847489:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:08.048475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19004, MsgBus: 27789 2024-11-21T09:22:08.356913Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660438528958494:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.356961Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001786/r3tmp/tmp3RUjRk/pdisk_1.dat 2024-11-21T09:22:08.367360Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19004, node 2 2024-11-21T09:22:08.377214Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.377233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.377236Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.377279Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27789 TClient is connected to server localhost:27789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.459718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.459746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.459975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.460820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:08.460942Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:08.464996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.473132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.490353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.500946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.659313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438528960074:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.659352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.663575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.668983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.675203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.681781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.689182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.696147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.704132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438528960569:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.704153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.704180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660438528960574:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.704696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.709315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660438528960576:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.882368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22003, MsgBus: 21440 2024-11-21T09:22:08.233212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660440046999537:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.233361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016e4/r3tmp/tmpW9w3EK/pdisk_1.dat 2024-11-21T09:22:08.285337Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22003, node 1 2024-11-21T09:22:08.296767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.296781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.296783Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.296823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21440 TClient is connected to server localhost:21440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.334662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.334693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.335877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.363248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.374568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.434469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.447910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.458884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.524303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440047001089:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.524333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.547965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.602221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.611866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.618433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.626065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.633063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.641012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440047001598:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.641029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440047001603:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.641031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.641682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.646304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440047001605:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.807852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 16315, MsgBus: 8732 2024-11-21T09:22:09.306051Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660442558261212:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.306198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016e4/r3tmp/tmpO9bz2Y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16315, node 2 2024-11-21T09:22:09.319933Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:09.321994Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.322003Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.322004Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.322030Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8732 TClient is connected to server localhost:8732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.406332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.406365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.407437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.408084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.408915Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:09.415755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.425216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.442952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.453087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.578922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442558262747:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.578963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.583137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.589437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.598788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.605752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.612969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.619763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.628301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442558263250:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.628324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.628369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442558263255:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.628924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.633274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660442558263257:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.799522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26370, MsgBus: 65268 2024-11-21T09:22:08.241591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660436700578218:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.241745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00170a/r3tmp/tmp6jT6YY/pdisk_1.dat 2024-11-21T09:22:08.296443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26370, node 1 2024-11-21T09:22:08.307727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.307740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.307743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.307778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65268 TClient is connected to server localhost:65268 2024-11-21T09:22:08.341711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.341735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:08.342927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.356154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.358446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:08.363271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.424915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.439517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.451180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.534328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436700579750:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.534356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.559332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.565058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.577422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.583824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.638641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.646675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.655563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436700580265:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.655589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660436700580270:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.655592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.656307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.660592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660436700580272:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:08.832038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 62942, MsgBus: 13052 2024-11-21T09:22:09.124017Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660442493124826:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.124037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00170a/r3tmp/tmptPOuTr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62942, node 2 2024-11-21T09:22:09.138134Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:09.139738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.139753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.139755Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.139796Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13052 TClient is connected to server localhost:13052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.224420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.224459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.225518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.226662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.234595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.242862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.258529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.270742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.464622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442493126390:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.464646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.468157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.474652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.487271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.494133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.500754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.508237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.523388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442493126882:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.523406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.523410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660442493126887:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.523930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.528353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660442493126889:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.693403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.699268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.711097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.838906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 6638, MsgBus: 12572 2024-11-21T09:22:08.349612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660439734869311:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.349639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016a1/r3tmp/tmpQU5rsp/pdisk_1.dat 2024-11-21T09:22:08.405170Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6638, node 1 2024-11-21T09:22:08.411066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.411079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.411081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.411113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12572 TClient is connected to server localhost:12572 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.450981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.451006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.452109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.480081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.484898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.545392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.562210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.570734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.604867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439734870861:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.604908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.630851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.636970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.647047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.654658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.661009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.668253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.676366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439734871354:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.676387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660439734871359:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.676393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.677003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.681234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660439734871361:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.858083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2111, MsgBus: 22288 2024-11-21T09:22:09.416129Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660444141083020:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.416147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016a1/r3tmp/tmpCWMpL8/pdisk_1.dat 2024-11-21T09:22:09.426463Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2111, node 2 2024-11-21T09:22:09.431386Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.431396Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.431398Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.431430Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22288 TClient is connected to server localhost:22288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.516465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.516490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.517561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.518153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.529854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.537436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.553093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.565403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.666188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444141084547:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.666205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.670395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.676176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.682833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.689990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.697059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.703903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.711928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444141085040:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.711953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.711961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444141085045:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.712484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.717396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660444141085047:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.882680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 19860, MsgBus: 20399 2024-11-21T09:22:07.965574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660436347064240:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:07.965785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00174f/r3tmp/tmp4YnVoh/pdisk_1.dat 2024-11-21T09:22:08.019574Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19860, node 1 2024-11-21T09:22:08.025170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.025186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.025188Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.025221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20399 TClient is connected to server localhost:20399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.066471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.066495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.067610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:08.095670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.098822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:08.158444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.172800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.185853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.257581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440642033085:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.257608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.285100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.290905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.296700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.304078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.359540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.367234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.376143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440642033602:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.376162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.376178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660440642033607:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.376786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.380164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660440642033609:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.536524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 23317, MsgBus: 22849 2024-11-21T09:22:09.039027Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660444764333972:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.039046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00174f/r3tmp/tmpfZS1MV/pdisk_1.dat 2024-11-21T09:22:09.049818Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23317, node 2 2024-11-21T09:22:09.059473Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.059493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.059496Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.059532Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22849 TClient is connected to server localhost:22849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.139217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.139257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.140305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.141408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.146629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.154104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.172921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.182916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.315410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444764335528:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.315457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.318990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.324860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.333244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.339723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.394219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.403156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.411933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444764336044:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.411961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660444764336049:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.411972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.412595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.416489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660444764336051:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.579779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] |96.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex >> KqpIndexes::UpdateOnReadColumns [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3626, MsgBus: 14057 2024-11-21T09:22:08.292179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660438950679606:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:08.292378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016b4/r3tmp/tmps2WC28/pdisk_1.dat 2024-11-21T09:22:08.339937Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3626, node 1 2024-11-21T09:22:08.351934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:08.351948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:08.351950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:08.351982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14057 TClient is connected to server localhost:14057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:08.393641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:08.393667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:08.394796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:08.420033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.428581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.442268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.456396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.465348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:08.558570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660438950681154:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.558592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.584053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.589448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.597706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.604898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.611597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.618653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:08.627408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660438950681655:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.627427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.627438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660438950681660:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:08.627905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:08.632028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660438950681662:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:08.802594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 24199, MsgBus: 1464 2024-11-21T09:22:09.162778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660443646747515:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:09.162794Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0016b4/r3tmp/tmpRs8oys/pdisk_1.dat 2024-11-21T09:22:09.170706Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24199, node 2 2024-11-21T09:22:09.180941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:09.180955Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:09.180957Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:09.181016Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1464 TClient is connected to server localhost:1464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:09.263048Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:09.263077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:09.264096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:09.265318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.274940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:09.283069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.299529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.308538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.415743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443646749054:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.415762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.418375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.423335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.430650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.437962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.444802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.451824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.460585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443646749547:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.460613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.460640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660443646749552:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.461167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.465124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660443646749554:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.618355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.301565Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660447941720081:2752], TxId: 281474976715731, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjY4YjA0NC1mZGI3Y2FjZi1mMGQ3NjdjYS0yMTAzODZh. TraceId : 01jd70evq280ypk97x7ncvjc7e. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:10.301627Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660447941720083:2753], TxId: 281474976715731, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjY4YjA0NC1mZGI3Y2FjZi1mMGQ3NjdjYS0yMTAzODZh. TraceId : 01jd70evq280ypk97x7ncvjc7e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660447941720078:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:10.301886Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjY4YjA0NC1mZGI3Y2FjZi1mMGQ3NjdjYS0yMTAzODZh, ActorId: [2:7439660443646749835:2454], ActorState: ExecuteState, TraceId: 01jd70evq280ypk97x7ncvjc7e, Create QueryResponse for error on request, msg: >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> KqpIndexes::NullInIndexTableNoDataRead >> KqpMultishardIndex::DataColumnSelect+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 26155, MsgBus: 2505 2024-11-21T09:22:05.171131Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660427580636411:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:05.171309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d5/r3tmp/tmp9NBNOj/pdisk_1.dat 2024-11-21T09:22:05.213076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26155, node 1 2024-11-21T09:22:05.225280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:05.225290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:05.225291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:05.225319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2505 TClient is connected to server localhost:2505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:05.272957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:05.272990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:05.274055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:05.297949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:05.307950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.368093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.384175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.392294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:05.451697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427580637955:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.451734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.474348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.479841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.490023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.544910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.599539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.608842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:05.617229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427580638472:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.617250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660427580638477:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.617254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:05.617803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:05.622288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660427580638479:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:05.768262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1639, MsgBus: 12330 2024-11-21T09:22:06.046161Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660427986946990:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:06.046322Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d5/r3tmp/tmpRvzw9w/pdisk_1.dat 2024-11-21T09:22:06.060589Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1639, node 2 2024-11-21T09:22:06.065610Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:06.065621Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:06.065623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:06.065648Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12330 TClient is connected to server localhost:12330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:06.146415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:06.146440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:06.147543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:06.148330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.150497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:06.162557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.178615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.191018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:06.329442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660427986948518:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.329469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:06.335279Z node 2 :FLAT_TX_SCHEMESHARD W ... boperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:09.456415Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660441494513731:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.456440Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.461101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.466791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.480296Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.487306Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.493637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.500967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.509687Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660441494514244:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.509710Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.509711Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439660441494514249:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:09.510238Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:09.514102Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439660441494514251:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:09.684159Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.691326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:09.704260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8576, MsgBus: 11609 2024-11-21T09:22:10.082950Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660448139864749:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:10.083185Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0017d5/r3tmp/tmpDmu7tJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8576, node 6 2024-11-21T09:22:10.097719Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:10.098743Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:10.098753Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:10.098755Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:10.098789Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11609 TClient is connected to server localhost:11609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:10.183074Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:10.183106Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:10.184153Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:10.185418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:10.187837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.202472Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.217174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.230195Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.335461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660448139866275:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.335481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.341701Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.397394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.404453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.411344Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.425840Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.439889Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.455532Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660448139866790:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.455565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.455568Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439660448139866795:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.456352Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:10.459368Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439660448139866797:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:10.627235Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.634722Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.642182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpIndexes::UpsertMultipleUniqIndexes >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate >> KqpIndexes::SecondaryIndexOrderBy2 >> KqpIndexes::SelectConcurentTX >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex [GOOD] >> KqpIndexes::DeleteByIndex >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:59.639726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:59.639753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:59.639758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:59.639765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:59.639816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:59.639821Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:59.639830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:59.639907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:59.654528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:59.654554Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:59.656950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:59.657057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:59.657099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:59.660774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:59.660892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:59.662405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:59.663284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:59.664725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:59.666486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:59.666501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:59.666514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:59.666520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:59.666525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:59.666560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:59.667810Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:59.682852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:59.683568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.683632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:59.683674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:59.683680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.684376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:59.684415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:59.684468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.684480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:59.684485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:59.684490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:59.684798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.684817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:59.684820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:59.685120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.685126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.685130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:59.685134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:59.685538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:59.685837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:59.687795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:59.688036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:59.688061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:59.688068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:59.688149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:59.688157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:59.688187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:59.688199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:59.688623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:59.688634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:59.688673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:59.688681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:59.688758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:59.688765Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:59.688776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:59.688780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:59.688786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:59.688791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:59.688796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:59.688800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:59.688810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:59.688816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:59.688821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
.570524Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T09:22:11.570627Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:11.570639Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:11.570644Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T09:22:11.570665Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:11.570674Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T09:22:11.570689Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:11.570694Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:11.570698Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:11.570851Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.570901Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:22:11.571123Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:11.571127Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:11.571144Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:22:11.571153Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:11.571163Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:11.571168Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T09:22:11.571171Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:22:11.571173Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:22:11.571203Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:22:11.571208Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:22:11.571215Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:22:11.571217Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:11.571220Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:22:11.571223Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:11.571226Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:22:11.571228Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:22:11.571235Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:11.571237Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:11.571240Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T09:22:11.571242Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:22:11.571244Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:11.571246Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:22:11.571280Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571286Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571288Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:11.571291Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:22:11.571293Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:11.571332Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:11.571335Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:11.571340Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:11.571376Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571381Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571383Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:11.571386Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:22:11.571388Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:11.571456Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571462Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571464Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:11.571466Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:11.571468Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:11.571473Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:22:11.571981Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.571998Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:11.572122Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:11.572161Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:22:11.572186Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:11.572190Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:11.572259Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:11.572271Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:11.572274Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:383:2375] TestWaitNotification: OK eventTxId 1004 
2024-11-21T09:22:11.572319Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:11.572337Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 23us result status StatusPathDoesNotExist 2024-11-21T09:22:11.572357Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexReplace >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true >> KqpMultishardIndex::DataColumnSelect+StreamLookup [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 29290, MsgBus: 31246 2024-11-21T09:22:10.357242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660448544448629:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:10.357408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00166b/r3tmp/tmpgftH1i/pdisk_1.dat 2024-11-21T09:22:10.420027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29290, node 1 2024-11-21T09:22:10.431526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:10.431541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:10.431543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:10.431579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31246 2024-11-21T09:22:10.458587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2024-11-21T09:22:10.458616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:10.459716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:10.498107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.501719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.517612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.576093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.587184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:10.633191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660448544450191:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.633214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.656714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.711648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.719185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.725825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.780774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.788850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:10.845469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660448544450713:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.845484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.845499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660448544450718:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:10.846084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:10.851508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660448544450721:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:10.985453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 24737, MsgBus: 13967 2024-11-21T09:22:11.426224Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660452974668491:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.426257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00166b/r3tmp/tmpeorCCn/pdisk_1.dat 2024-11-21T09:22:11.433264Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24737, node 2 2024-11-21T09:22:11.441900Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.441914Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.441916Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.441959Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13967 TClient is connected to server localhost:13967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.528217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.528243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.528448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.529121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.537035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:11.544935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.562389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.574134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.681324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660452974670040:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.681358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.684767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.690484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.699190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.705679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.713086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.720004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.728098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660452974670541:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.728119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.728155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660452974670546:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.728653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.733210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660452974670549:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.879685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6904, MsgBus: 28261 2024-11-21T09:22:11.297581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660451048879507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.297714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001639/r3tmp/tmp9h4bOo/pdisk_1.dat 2024-11-21T09:22:11.340331Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6904, node 1 2024-11-21T09:22:11.351868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.351880Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.351882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.351909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28261 TClient is connected to server localhost:28261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.398460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.398487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.399553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.422904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.434981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:11.448834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.465325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.476288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.578125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660451048881059:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.578149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.601642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.607672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.614909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.622098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.628628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.636292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.644620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660451048881564:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.644638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660451048881569:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.644644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.645129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.648880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660451048881571:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.787652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.929580Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70ex8wb5brpnkwe5sxs303, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk2N2IwMjktMmJlMzI1NS02YzljZmUxNS05NTQ0MDRhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:11.930642Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjk2N2IwMjktMmJlMzI1NS02YzljZmUxNS05NTQ0MDRhZg==, ActorId: [1:7439660451048882557:2512], ActorState: ExecuteState, TraceId: 01jd70ex8wb5brpnkwe5sxs303, Create QueryResponse for error on request, msg: >> KqpIndexes::UniqAndNoUniqSecondaryIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:00.486803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:00.486823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:00.486826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:00.486830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:00.486834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:00.486837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:00.486842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:00.486898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:00.494348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:00.494362Z node 
1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:00.496016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:00.496084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:00.496115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:00.497959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:00.498020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:00.498087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.498265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:00.498748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.498961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:00.498968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.498978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:00.498983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:00.498987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:00.499017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:00.499938Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:00.510583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:00.510654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.510709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:00.510745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:00.510749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.511426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.511453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:00.511507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.511515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:00.511518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:00.511522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:00.511908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.511918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:00.511922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:00.512185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.512191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.512195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.512201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.512611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:00.512902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:00.512942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:00.513083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.513102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:00.513107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.513151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
1:0 128 -> 240 2024-11-21T09:22:00.513155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.513182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:00.513190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:00.513544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:00.513554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:00.513599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.513602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:00.513668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.513673Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:00.513683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:00.513686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.513690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:00.513693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.513697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:00.513699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:00.513708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:00.513712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:00.513715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ode 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.195323Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.195326Z node 49 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:12.195328Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:22:12.195330Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:22:12.195336Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T09:22:12.195677Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T09:22:12.195693Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T09:22:12.195767Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:12.195778Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 210453399658 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:12.195784Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1006:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T09:22:12.195808Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:12.195816Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 128 -> 240 2024-11-21T09:22:12.195835Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:12.195840Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:12.195844Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:12.195936Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.196120Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.196609Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T09:22:12.196630Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:12.196672Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:12.196692Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:12.196732Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:12.196736Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T09:22:12.196740Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2024-11-21T09:22:12.196742Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 3 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T09:22:12.196818Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T09:22:12.196828Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T09:22:12.196845Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T09:22:12.196863Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:22:12.196879Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T09:22:12.196885Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:22:12.196889Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T09:22:12.196893Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T09:22:12.196911Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:12.196914Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:12.196918Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2024-11-21T09:22:12.196923Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T09:22:12.196925Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:12.196927Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:22:12.197030Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197038Z node 49 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197041Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:12.197045Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:22:12.197048Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:12.197124Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:12.197128Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:12.197139Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:12.197184Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197190Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197193Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:12.197195Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T09:22:12.197197Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:12.197566Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197591Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.197596Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:12.197602Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:12.197608Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:12.197627Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T09:22:12.198197Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 
2024-11-21T09:22:12.198229Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:12.198258Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:12.198500Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T09:22:12.198557Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T09:22:12.198562Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T09:22:12.198631Z node 49 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T09:22:12.198649Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T09:22:12.198654Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [49:442:2434] TestWaitNotification: OK eventTxId 1006 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] Test command err: Trying to start YDB, gRPC: 29981, MsgBus: 4722 2024-11-21T09:22:10.978155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660449296375998:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:10.978275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00165c/r3tmp/tmpjLdxKh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29981, node 1 2024-11-21T09:22:11.031954Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:11.033100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.033113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.033115Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.033157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4722 TClient is connected to server localhost:4722 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.076745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.079132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.079152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.080334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.086887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.148425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.162761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.175449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.240362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660453591344848:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.240398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.264593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.270750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.279201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.286285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.293297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.300085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.308585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660453591345339:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.308620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.308654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660453591345344:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.309283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.313492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660453591345346:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:11.461167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.601463Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 17016, MsgBus: 2564 2024-11-21T09:22:11.838222Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660449417765961:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.838307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00165c/r3tmp/tmp6notb4/pdisk_1.dat 2024-11-21T09:22:11.846007Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17016, node 2 2024-11-21T09:22:11.853934Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.853944Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.853945Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.853975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2564 TClient is connected to server localhost:2564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.940422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.940454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.940817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.941432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.946475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:11.958898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.973389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.986640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.095767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453712734804:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.095794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.099563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.106113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.119497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.126391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.133059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.140449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.149046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453712735305:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.149074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453712735310:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.149086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.149755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.153264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660453712735312:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.322453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] >> KqpSysColV0::InnerJoinTables >> KqpIndexes::DeleteByIndex [GOOD] >> KqpSysColV1::StreamSelectRowById >> KqpSystemView::Sessions >> KqpIndexes::NullInIndexTable [GOOD] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> KqpIndexes::SecondaryIndexReplace [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14575, MsgBus: 24243 2024-11-21T09:22:11.210313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660452823268349:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.210512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001654/r3tmp/tmpq3dH0k/pdisk_1.dat 2024-11-21T09:22:11.250077Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14575, node 1 2024-11-21T09:22:11.261776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.261789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.261791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.261817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24243 TClient is connected to server localhost:24243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:11.311560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.311591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.312652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.333458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.336410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.398318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.414376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.427426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.463549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452823269898:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.463585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.485724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.491266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.503476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.510473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.516584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.523882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.532424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452823270389:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.532444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452823270394:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.532449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.533066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.537191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660452823270396:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.669305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18386, MsgBus: 14449 2024-11-21T09:22:12.079968Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660455225083061:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.080136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001654/r3tmp/tmpoa9uOR/pdisk_1.dat 2024-11-21T09:22:12.087392Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18386, node 2 2024-11-21T09:22:12.095631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.095642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.095645Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.095671Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14449 TClient is connected to server localhost:14449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.181944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.181969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.182313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.182987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.189425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:12.197739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.211486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.225372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.343883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660455225084600:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.343921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.346234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.352315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.364155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.370794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.377702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.385488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.393808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660455225085103:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.393824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.393885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660455225085108:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.394620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.398280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660455225085110:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.562475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.764771Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70ey2sf15y2sa0kxdabvwq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjFkZGIwMmYtMWJkNmQ4MzQtOGQwZDIwMGMtMmRkYmQ4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:12.765763Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjFkZGIwMmYtMWJkNmQ4MzQtOGQwZDIwMGMtMmRkYmQ4Yw==, ActorId: [2:7439660455225086146:2512], ActorState: ExecuteState, TraceId: 01jd70ey2sf15y2sa0kxdabvwq, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5212, MsgBus: 5659 2024-11-21T09:22:11.221830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660449554042997:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.221845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001650/r3tmp/tmpJC28yo/pdisk_1.dat 2024-11-21T09:22:11.262678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5212, node 1 2024-11-21T09:22:11.275407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.275421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.275423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.275455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5659 TClient is connected to server localhost:5659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:11.323107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.323128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.324228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.346575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.350839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.411886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.427015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.435677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.478573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449554044546:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.478600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.506719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.511477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.523755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.530802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.538119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.544801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.553503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449554045037:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.553528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.553530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449554045042:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.554140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.558414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660449554045044:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.704357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 135 cpu_time_us: 135 } query_phases { duration_us: 268 cpu_time_us: 268 } query_phases { duration_us: 1186 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 998 affected_shards: 1 } query_phases { duration_us: 221 cpu_time_us: 221 } query_phases { duration_us: 903 cpu_time_us: 903 } query_phases { duration_us: 486 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 595 } query_phases { duration_us: 280 cpu_time_us: 280 } query_phases { duration_us: 692 cpu_time_us: 692 } query_phases { duration_us: 448 cpu_time_us: 621 } query_phases { duration_us: 1397 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 652 affected_shards: 2 } compilation { duration_us: 80460 cpu_time_us: 78239 } process_cpu_time_us: 2508 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732180931800,\"TaskId\":1,\"Host\":\"ghrun-qcxhsi27zq\",\"ComputeTimeUs\":27}],\"CpuTimeUs\":109}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1732180931800,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":109,\"Max\":109,\"Min\":109}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1732180931800,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732180931800,\"TaskId\":2,\"Host\":\"ghrun-qcxhsi27zq\",\"ComputeTimeUs\":22}],\"CpuTimeUs\":89}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1732180931800,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":89,\"Max\":89,\"Min\":89}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":35,\"Subplan Name\":\"CTE 
precompute_8_1\",\"Plans\":[{\"PlanNodeId\":34,\"Plans\":[{\"PlanNodeId\":33,\"Plans\":[{\"PlanNodeId\":32,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1}],\"Iterator\":\"Map\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"E-Rows\":\"1\",\"Predicate\":\"Nth\",\"Name\":\"Filter\",\"E-Size\":\"5\",\"E-Cost\":\"0\"}],\"Node Type\":\"ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732180931799,\"StartTimeMs\":1732180931799,\"TaskId\":2,\"Host\":\"ghrun-qcxhsi27zq\",\"ComputeTimeUs\":3,\"OutputChannels\":[{\"ChannelId\":2,\"DstStageId\":3}]}],\"PeakMemoryUsageBytes\":65536,\"CpuTimeUs\":118}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"34\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"Tasks\":1,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"BaseTimeMs\":1732180931799,\"CpuTimeUs\":{\"Count\":1,\"Sum\":81,\"Max\":81,\"Min\":81}}}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Collect\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1732180931799,\"Host\":\"ghrun-qcxhsi27zq\",\"StartTimeMs\":1732180931799,\"ComputeTimeUs\":1,\"InputChannels\":[{\"ChannelId\":2,\"SrcStageId\":1}],\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":4,\"Dst ... 2 \'\"fk1\")) \'(\'\"fk3\" (Member $172 \'\"fk3\"))))) (lambda \'($173) (block \'(\n (let $174 (AsStruct \'(\'\"Key\" (Member $173 \'\"Key\"))))\n (return (IfPresent (Lookup $42 $174) (lambda \'($175) (Just \'($174 $175 (Or (AggrNotEquals (Member $173 \'\"fk1\") (Member $175 \'\"fk1\")) (AggrNotEquals (Member $173 \'\"fk3\") (Member $175 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $40 $52)))))\n)))) (lambda \'($176) (Nth $176 \'0)) (lambda \'($177) \'((Nth $177 \'1) (Nth $177 \'2))) $31)))) \'(\'(\'\"_logical_id\" \'5220) \'(\'\"_id\" \'\"69bcfa8d-5976d790-1bfc23ae-c4c8995c\"))))\n(let $90 (DqPhyStage \'() (lambda \'() (ToStream (Just $88))) \'(\'(\'\"_logical_id\" \'5233) \'(\'\"_id\" \'\"4c13b86b-6eb27e89-16ba7301-b96605bb\"))))\n(let $91 (DqCnValue (TDqOutput $89 \'0)))\n(let $92 (DqCnValue (TDqOutput $90 \'0)))\n(let $93 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $94 \'($87 $93))\n(let $95 (KqpPhysicalTx \'($89 $90) \'($91 $92) \'($47 $94) $3))\n(let $96 \'\"%kqp%tx_result_binding_7_0\")\n(let $97 (DictType $19 (TupleType $40 $52)))\n(let $98 %kqp%tx_result_binding_7_0)\n(let $99 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $88 (lambda \'($178) (AsStruct \'(\'\"Key\" (Member $178 \'\"Key\")) \'(\'\"fk1\" (Member $178 \'\"fk1\")) \'(\'\"fk3\" (Member $178 \'\"fk3\"))))) (lambda \'($179) (block \'(\n (let $180 \'(\'\"Key\" (Member $179 \'\"Key\")))\n (let $181 \'(\'\"fk1\" (Member $179 \'\"fk1\")))\n (let $182 \'(\'\"fk3\" (Member $179 \'\"fk3\")))\n (return (IfPresent (Lookup $98 (AsStruct $180)) (lambda \'($183) (If (Nth $183 \'1) (Just (AsStruct $180 $181 \'(\'\"fk2\" (Member (Nth $183 \'0) \'\"fk2\")) $182)) (Nothing (OptionalType $27)))) (Just (AsStruct $180 $181 $43 $182))))\n)))))) \'(\'(\'\"_logical_id\" \'5855) \'(\'\"_id\" \'\"a7460fc7-53b1ff4d-5f53512c-207becce\"))))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $98) (lambda \'($184) (Nth (Nth $184 \'1) \'1))) (lambda \'($185) (block \'(\n (let $186 (Nth (Nth $185 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $185 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $186 \'\"fk1\")) \'(\'\"fk2\" (Member $186 \'\"fk2\")) 
\'(\'\"fk3\" (Member $186 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'5869) \'(\'\"_id\" \'\"2155d6e9-f0820f0-dc1cfaa-e9124df0\"))))\n(let $101 (DqCnUnionAll (TDqOutput $99 \'0)))\n(let $102 (lambda \'($187) $187))\n(let $103 (DqPhyStage \'($101) $102 \'(\'(\'\"_logical_id\" \'5965) \'(\'\"_id\" \'\"bdae3226-7b2cd818-b0b6e624-d9eeb184\"))))\n(let $104 (DqCnUnionAll (TDqOutput $100 \'0)))\n(let $105 (DqPhyStage \'($104) $102 \'(\'(\'\"_logical_id\" \'5980) \'(\'\"_id\" \'\"4629b97e-29ed6fb7-f0210c7f-4d8ec66b\"))))\n(let $106 \'($99 $100 $103 $105))\n(let $107 (DqCnResult (TDqOutput $103 \'0) \'()))\n(let $108 (DqCnResult (TDqOutput $105 \'0) \'()))\n(let $109 (KqpTxResultBinding $97 \'\"7\" \'0))\n(let $110 (KqpPhysicalTx $106 \'($107 $108) \'($94 \'($96 $109)) $3))\n(let $111 \'\"%kqp%tx_result_binding_7_1\")\n(let $112 (DqPhyStage \'() (lambda \'() (block \'(\n (let $188 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $22 (Iterator %kqp%tx_result_binding_7_1) $188 \'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'6495) \'(\'\"_id\" \'\"2732ab9c-c8dbe210-d8a1f380-60e1602c\"))))\n(let $113 \'\"%kqp%tx_result_binding_8_1\")\n(let $114 (ListType $27))\n(let $115 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $64 (Iterator %kqp%tx_result_binding_8_1)))) \'(\'(\'\"_logical_id\" \'6509) \'(\'\"_id\" \'\"150ab364-e3c36649-52951ac0-d3008cf0\"))))\n(let $116 \'\"%kqp%tx_result_binding_8_0\")\n(let $117 (DqPhyStage \'() (lambda \'() (block \'(\n (let $189 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $64 (Iterator %kqp%tx_result_binding_8_0) $189 \'())))\n))) \'(\'(\'\"_logical_id\" \'6523) \'(\'\"_id\" \'\"e14b87a9-cb42f115-65888261-938b86ce\"))))\n(let $118 \'($112 $115 $117))\n(let $119 (KqpTxResultBinding $11 \'\"7\" \'1))\n(let $120 (KqpTxResultBinding $114 \'\"8\" \'0))\n(let $121 (KqpTxResultBinding $114 \'\"8\" \'1))\n(let $122 \'(\'($111 $119) \'($116 $120) \'($113 $121)))\n(let $123 (KqpPhysicalTx $118 \'() $122 \'($36 \'(\'\"with_effects\"))))\n(let $124 \'($4 $17 $38 $48 $61 $76 $86 $95 $110 $123))\n(return (KqpPhysicalQuery $124 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 87975 total_cpu_time_us: 86112 2024-11-21T09:22:11.896530Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 8024, MsgBus: 16066 2024-11-21T09:22:12.095419Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660454965598477:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.095722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001650/r3tmp/tmpPgiVIY/pdisk_1.dat 2024-11-21T09:22:12.101934Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8024, node 2 2024-11-21T09:22:12.110909Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.110922Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.110923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.110955Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16066 
TClient is connected to server localhost:16066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.195496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.195519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.196689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.197369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.206029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.212313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.226000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.238864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.337290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660454965600013:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.337321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.339457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.345046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.357013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.363985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.370830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.377579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.386669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660454965600507:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.386691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.386692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660454965600512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.387231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.391463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660454965600514:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.584792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpSysColV0::InnerJoinSelectAsterisk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::NullInIndexTable [GOOD] Test command err: Trying to start YDB, gRPC: 32423, MsgBus: 18006 2024-11-21T09:22:11.269468Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660450848896799:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.269724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001646/r3tmp/tmpjSRxN9/pdisk_1.dat 2024-11-21T09:22:11.307731Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32423, node 1 2024-11-21T09:22:11.318271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.318284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.318285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.318316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18006 TClient is connected to server localhost:18006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.370393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.370421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.371455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.392327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.405583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:11.467747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.482924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.491875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.531019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450848898347:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.531038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.556483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.562095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.573095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.579827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.586771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.594103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.602046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450848898838:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.602072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450848898843:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.602078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.602652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.607160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660450848898845:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.773226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.779409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.790436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4872, MsgBus: 8734 2024-11-21T09:22:12.131487Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660457091043888:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.131734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001646/r3tmp/tmpxyPY7x/pdisk_1.dat 2024-11-21T09:22:12.140149Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4872, node 2 2024-11-21T09:22:12.149067Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.149080Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.149081Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.149113Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8734 TClient is connected to server localhost:8734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.232044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.232065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.233255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.233866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:12.236613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.245801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.260755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.274096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.393791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457091045419:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.393828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.398398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.403854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.412633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.419856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.427286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.433877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.442355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457091045923:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.442376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.442442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457091045928:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.443103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.447521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660457091045930:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.613747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.620265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.630622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:00.692397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:00.692429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:00.692435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:00.692440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:00.692447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:00.692451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:00.692460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:00.692547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:00.703696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:00.703720Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:00.706122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:00.706226Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:00.706268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:00.708949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:00.709038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:00.709147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.709356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:00.709995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.710271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:00.710282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.710295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:00.710302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:00.710307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:00.710346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:00.711647Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:00.728828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:00.728931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.729004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:00.729054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:00.729062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.729854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.729883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:00.729947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.729961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:00.729965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:00.729970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:00.730357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.730367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:00.730371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:00.730662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.730670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.730676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.730684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.731245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:00.731686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:00.731737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:00.731926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:00.731948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:00.731955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.732015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:00.732021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:00.732053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:00.732066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:00.732480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:00.732491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:00.732530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:00.732538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:00.732624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:00.732630Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:00.732643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:00.732647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.732652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:00.732657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:00.732662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:00.732665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:00.732675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:00.732681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:00.732684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
.726139Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000007 2024-11-21T09:22:12.726333Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:12.726349Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:12.726355Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T09:22:12.726385Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:12.726397Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2024-11-21T09:22:12.726418Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:12.726425Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:12.726430Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:12.726571Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.726796Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T09:22:12.727024Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:12.727028Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:12.727049Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:12.727061Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:12.727077Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:12.727081Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T09:22:12.727085Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2024-11-21T09:22:12.727089Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T09:22:12.727130Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T09:22:12.727135Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T09:22:12.727144Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T09:22:12.727148Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:22:12.727152Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T09:22:12.727157Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T09:22:12.727161Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T09:22:12.727164Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T09:22:12.727174Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:12.727177Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:12.727181Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2024-11-21T09:22:12.727185Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T09:22:12.727188Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:12.727192Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:22:12.727225Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727231Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727233Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:22:12.727236Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:22:12.727238Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:12.727279Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:12.727283Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:12.727290Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:12.727315Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727320Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727323Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:22:12.727326Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T09:22:12.727330Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:12.727373Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727379Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.727381Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T09:22:12.727384Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:12.727386Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:12.727390Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T09:22:12.727803Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.728034Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:12.728046Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T09:22:12.728057Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T09:22:12.728099Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T09:22:12.728104Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T09:22:12.728156Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:22:12.728170Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T09:22:12.728174Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [50:443:2435] TestWaitNotification: OK eventTxId 1005 
2024-11-21T09:22:12.728255Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:12.728277Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 30us result status StatusPathDoesNotExist 2024-11-21T09:22:12.728305Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 29690, MsgBus: 26078 2024-11-21T09:22:11.335662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660452095602092:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.335843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001631/r3tmp/tmpj5KWJd/pdisk_1.dat 2024-11-21T09:22:11.381927Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29690, node 1 2024-11-21T09:22:11.392758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.392770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.392771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.392791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26078 TClient is connected to server localhost:26078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.436721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.436760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.437875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.466901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.477613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.538757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.553268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.562094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.605745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452095603637:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.605773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.626258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.631180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.642823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.650121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.656662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.664072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.672943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452095604127:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.672960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.672982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660452095604132:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.673591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.677159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660452095604134:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.833401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.839296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.845625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.996897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1609, MsgBus: 24308 2024-11-21T09:22:12.401246Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660457960078686:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.401568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001631/r3tmp/tmpQMMIn4/pdisk_1.dat 2024-11-21T09:22:12.407984Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1609, node 2 2024-11-21T09:22:12.415751Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.415761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.415762Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.415782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24308 TClient is connected to server localhost:24308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:12.501633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.501658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.502737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.502888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.508820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.518399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.533905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.547129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.657413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457960080213:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.657441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.662104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.667628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.678874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.685788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.693100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.699801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.708619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457960080715:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.708635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457960080720:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.708644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.709159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.713253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660457960080722:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.852767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::UpsertNoIndexColumns [GOOD] >> KqpSystemView::NodesRange1 >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2024-11-21T09:20:50.162773Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660102593593967:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:50.162811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00378e/r3tmp/tmpUhilpy/pdisk_1.dat 2024-11-21T09:20:50.231622Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24210, node 1 2024-11-21T09:20:50.263592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:20:50.263628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:20:50.265053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:20:50.275913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:20:50.275925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:20:50.275927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:20:50.275966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:20:50.336283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.337379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.337989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:20:50.338040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:20:50.338047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:20:50.338430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:20:50.338577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:20:50.339028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850386, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.340503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:20:50.340576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:20:50.341003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:20:50.341037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:20:50.341049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:20:50.341061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:20:50.341071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:20:50.341080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:20:50.341466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:20:50.341483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:20:50.341486Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:20:50.341499Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:20:50.424000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660102593594759:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.424021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.529546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.529860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.529873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.530365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T09:20:50.546561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850596, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.552443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:20:50.557291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660102593594974:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.557313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:20:50.561256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.561377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:20:50.561386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:20:50.561790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2024-11-21T09:20:50.565194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180850610, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:20:50.566777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 Fast forward 1m partitions 2 Fast forward 1m 2024-11-21T09:20:55.162881Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439660102593593967:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:20:55.162918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2024-11-21T09:21:00.644411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:00.644627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:21:00.659260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:21:00.661772Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T09:21:00.661878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found partitions 1 2024-11-21T09:21:02.574749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:21:02.574817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:21:02.575606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2024-11-21T09:21:02.578905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732181342626, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:21:02.580321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 1 2024-11-21T09:21:02.581690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T09:21:02.582284Z node 1 :FLAT_TX_SCHEME ... 
1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.490729Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T09:22:12.490732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.490735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:12.490739Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2024-11-21T09:22:12.490742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T09:22:12.490768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.490779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.490781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:12.490783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:22:12.490784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2024-11-21T09:22:12.490790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2024-11-21T09:22:12.490792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7439660454780917128:2709] 2024-11-21T09:22:12.491078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.491082Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:118:1:12288:10032:0] ] return ack processed 2024-11-21T09:22:12.491092Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T09:22:12.491092Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:118:1:12288:10032:0] ] return ack processed 2024-11-21T09:22:12.491094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:12.491105Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 
1 siblings to be activated: wait to activation from: 2024-11-21T09:22:12.491106Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2024-11-21T09:22:12.491130Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2024-11-21T09:22:12.491325Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:22:12.491339Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:22:12.491413Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7439660454780917186:2712], serverId# [1:7439660454780917190:4702], sessionId# [0:0:0] 2024-11-21T09:22:12.491436Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7439660454780917191:2714], serverId# [1:7439660454780917194:4705], sessionId# [0:0:0] 2024-11-21T09:22:12.491493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439660145543268368 RawX2: 4503603922340144 } TabletId: 72075186224037890 State: 4 2024-11-21T09:22:12.491505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:12.491537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439660145543268368 RawX2: 4503603922340144 } TabletId: 72075186224037890 State: 4 2024-11-21T09:22:12.491543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:12.491615Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:22:12.491635Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T09:22:12.491659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439660446190982173 RawX2: 4503603922340473 } TabletId: 72075186224037891 State: 4 2024-11-21T09:22:12.491667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:12.491682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439660446190982172 RawX2: 4503603922340472 } TabletId: 72075186224037892 State: 4 2024-11-21T09:22:12.491689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:12.491959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T09:22:12.491967Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2024-11-21T09:22:12.491982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T09:22:12.491986Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 
state Offline 2024-11-21T09:22:12.491995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T09:22:12.492002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T09:22:12.492002Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2024-11-21T09:22:12.492004Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2024-11-21T09:22:12.493138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.493174Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2024-11-21T09:22:12.493184Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2024-11-21T09:22:12.493191Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2024-11-21T09:22:12.493234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T09:22:12.493283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.493302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T09:22:12.493303Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2024-11-21T09:22:12.493318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T09:22:12.493324Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2024-11-21T09:22:12.493329Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T09:22:12.493337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T09:22:12.493343Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T09:22:12.493355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2024-11-21T09:22:12.493373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T09:22:12.493381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2024-11-21T09:22:12.493390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:22:12.493555Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T09:22:12.493566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2024-11-21T09:22:12.493587Z node 1 
:HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2024-11-21T09:22:12.493673Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2024-11-21T09:22:12.493685Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2024-11-21T09:22:12.493992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T09:22:12.494002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T09:22:12.494014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T09:22:12.494018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T09:22:12.494019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T09:22:12.494022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T09:22:12.494025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T09:22:12.494033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace [GOOD] Test command err: Trying to start YDB, gRPC: 10137, MsgBus: 2473 2024-11-21T09:22:11.457770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660450990540214:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.458106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001610/r3tmp/tmpDjlV7h/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10137, node 1 2024-11-21T09:22:11.510707Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:11.511002Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.511010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.511012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.511044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2473 TClient is connected to server localhost:2473 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:11.558671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.558701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.559748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.581501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.585505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.645806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.663866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.673299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.743983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450990541763:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.744014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.778921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.785063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.797315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.803816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.810791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.818572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.833231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450990542264:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.833255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.833293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450990542269:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.833875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.838363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660450990542271:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.979313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16163, MsgBus: 20124 2024-11-21T09:22:12.314078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660457931051967:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.314131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001610/r3tmp/tmpy0qCqw/pdisk_1.dat 2024-11-21T09:22:12.320899Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16163, node 2 2024-11-21T09:22:12.329375Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.329388Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.329390Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.329420Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20124 TClient is connected to server localhost:20124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.415726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.415745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.416000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.416849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.424001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:12.431043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.445541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.455526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.581575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457931053502:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.581594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.586057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.591014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.602072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.608572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.615890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.623085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.631073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457931054006:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.631092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660457931054011:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.631098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.631662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.636141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660457931054013:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.778069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.847231Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpSystemView::ReadSuccess >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 4333, MsgBus: 12238 2024-11-21T09:22:11.390369Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660450518388028:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.390590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00161e/r3tmp/tmpYavcoc/pdisk_1.dat 2024-11-21T09:22:11.429805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4333, node 1 2024-11-21T09:22:11.442451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.442465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.442466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.442496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12238 TClient is connected to server localhost:12238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:11.491522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.491550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.492641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.513929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.517942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.577333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.595512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.604096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.652336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450518389576:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.652367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.684737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.690751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.698682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.753300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.761647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.768936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.777484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450518390082:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.777510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.777512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660450518390087:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.778012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.781834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660450518390089:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.972570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.210550Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70exfvavbk2f55cvsrj5ys, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJhY2MyODktN2M5ODhiOTItNWY2ZjE5YzUtYTI0Y2U0MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:12.212367Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTJhY2MyODktN2M5ODhiOTItNWY2ZjE5YzUtYTI0Y2U0MzM=, ActorId: [1:7439660450518390384:2454], ActorState: ExecuteState, TraceId: 01jd70exfvavbk2f55cvsrj5ys, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11284, MsgBus: 23516 2024-11-21T09:22:12.459162Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660456028860076:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.459357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00161e/r3tmp/tmpnUxNrq/pdisk_1.dat 2024-11-21T09:22:12.466530Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11284, node 2 2024-11-21T09:22:12.475926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.475949Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.475951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.475995Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23516 TClient is connected to server localhost:23516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:12.561582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.561615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.561871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.562621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.570897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.577806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.591710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.603345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.720951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660456028861642:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.720972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.724016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.729404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.735107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.741837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.749135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.756463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.764471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660456028862142:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.764497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.764502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660456028862147:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.765039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.769299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660456028862149:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.959384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.966775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.980979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns [GOOD] Test command err: Trying to start YDB, gRPC: 12003, MsgBus: 24806 2024-11-21T09:22:11.457313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660449747736185:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:11.457620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00162a/r3tmp/tmpPouMdr/pdisk_1.dat 2024-11-21T09:22:11.508620Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12003, node 1 2024-11-21T09:22:11.518005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:11.518018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:11.518021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:11.518065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24806 TClient is connected to server localhost:24806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:11.558812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:11.558836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:11.559887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:11.586733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.592440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.651696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.665327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.673255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:11.730468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449747737740:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.730487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.752613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.758646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.769576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.775590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.783188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.790267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:11.798320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449747738231:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.798357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.798372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660449747738236:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:11.798913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:11.803192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660449747738238:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:11.966404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2066, MsgBus: 25517 2024-11-21T09:22:12.327149Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660453793416490:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.327200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00162a/r3tmp/tmpqMKQ63/pdisk_1.dat 2024-11-21T09:22:12.336300Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2066, node 2 2024-11-21T09:22:12.345733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.345750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.345752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.345791Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25517 TClient is connected to server localhost:25517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.427165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.427200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.428235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.429404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.432907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:12.441782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.458442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.470034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.560710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453793418024:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.560730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.565447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.570987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.580807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.587555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.595103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.601770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:12.610363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453793418516:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.610386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660453793418521:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.610389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:12.610903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:12.615382Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660453793418523:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:12.777126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSysColV1::UpdateAndDelete >> KqpSysColV0::InnerJoinTables [GOOD] >> KqpSysColV0::UpdateAndDelete [GOOD] >> KqpSystemView::PartitionStatsParametricRanges >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::FailNavigate >> KqpSystemView::Sessions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] 2024-11-21T09:22:03.379854Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] 2024-11-21T09:22:08.407935Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:624:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:627:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:628:2057] recipient: [4:626:2543] Leader for TabletID 72057594037927937 is [4:629:2544] sender: [4:630:2057] recipient: [4:626:2543] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:629:2544] Leader for TabletID 72057594037927937 is [4:629:2544] sender: [4:699:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:624:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:627:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:628:2057] recipient: [5:626:2543] Leader for TabletID 72057594037927937 is [5:629:2544] sender: [5:630:2057] recipient: [5:626:2543] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:629:2544] Leader for TabletID 72057594037927937 is [5:629:2544] sender: [5:699:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:625:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:628:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:629:2057] recipient: [6:627:2543] Leader for TabletID 72057594037927937 is [6:630:2544] sender: [6:631:2057] recipient: [6:627:2543] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:630:2544] Leader for TabletID 72057594037927937 is [6:630:2544] sender: [6:700:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] >> KqpSysColV1::SelectRange >> KqpSystemView::PartitionStatsRange3 >> KqpSysColV1::SelectRowAsterisk >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 10131, MsgBus: 19112 2024-11-21T09:22:13.050736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660458060213509:2221];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.050774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00235f/r3tmp/tmpyrAOPj/pdisk_1.dat 2024-11-21T09:22:13.120897Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10131, node 1 2024-11-21T09:22:13.150774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.150813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.151841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.163750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.163764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.163766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.163799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19112 TClient is connected to server localhost:19112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:13.246092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.257612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.321496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.332198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.344780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.364012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458060214870:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.364029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.457499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.463864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.477341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.483998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.538743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.547231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.555919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458060215386:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.555945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458060215391:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.555950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.556527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.560009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660458060215393:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 8697, MsgBus: 14624 2024-11-21T09:22:13.050887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660459390655603:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.050973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00232b/r3tmp/tmpbAxVNH/pdisk_1.dat 2024-11-21T09:22:13.123666Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8697, node 1 2024-11-21T09:22:13.151527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.151557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.152620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.163739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.163751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.163753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.163782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14624 TClient is connected to server localhost:14624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.245693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.257609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.277186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:13.291438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.302833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.347586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660459390656934:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.347611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.457435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.463420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.517913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.572118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.582254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.589381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.598234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660459390657452:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.598269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.598297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660459390657457:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.598902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.602379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660459390657459:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:13.824700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180933854, txId: 281474976710671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:19:01.000827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:19:01.000845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:01.000848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:19:01.000851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:19:01.000854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:19:01.000856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:19:01.000862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:19:01.000917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:19:01.007894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:19:01.007905Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:19:01.009285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:19:01.009347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:19:01.009375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:19:01.011196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:19:01.011252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:19:01.011306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T09:19:01.011453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:01.011899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:01.012084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:01.012091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:01.012100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:19:01.012104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:01.012110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:19:01.012137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:19:01.013074Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:19:01.023448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:19:01.023495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.023530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:19:01.023559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:19:01.023564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.023947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:01.023965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:19:01.023996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.024003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:19:01.024005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T09:19:01.024009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:19:01.024293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.024299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:19:01.024302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:19:01.024514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.024520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.024523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:01.024528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:19:01.024899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:19:01.025216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:19:01.025245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:19:01.025357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:19:01.025374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:19:01.025378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:01.025407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:19:01.025411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:19:01.025429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:19:01.025436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:19:01.025703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:19:01.025709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:19:01.025733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:19:01.025736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:19:01.025790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:19:01.025796Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:19:01.025805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:19:01.025809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:01.025815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:19:01.025818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:19:01.025821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:19:01.025823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:19:01.025830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:19:01.025833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:19:01.025836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
rQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:13.308337Z node 176 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:22:13.308380Z node 176 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 51us result status StatusSuccess 2024-11-21T09:22:13.308495Z node 176 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:13.318861Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:22:13.318897Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T09:22:13.318913Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T09:22:13.318929Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T09:22:13.318952Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732180933305874 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732180933305874 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:22:13.319122Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { 
Records [{ Order: 3 Group: 1732180933305874 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T09:22:13.320290Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T09:22:13.320316Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T09:22:13.320363Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T09:22:13.320427Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 2027, MsgBus: 63336 2024-11-21T09:22:13.161399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660458095838311:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.161570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022dd/r3tmp/tmpn9oQNz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2027, node 1 2024-11-21T09:22:13.215412Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:13.220688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.220702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.220703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.220753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63336 TClient is connected to server localhost:63336 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:13.263161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.263193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.264245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.293339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.306440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.367491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.385514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.395485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.423541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458095839860:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.423569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.457435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.464115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.470430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.477248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.484790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.490866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.502542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458095840351:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.502562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.502571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660458095840356:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.503816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.511309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660458095840358:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSystemView::FailResolve |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 18285, MsgBus: 18737 2024-11-21T09:22:13.438978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660461243014566:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.439136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022c4/r3tmp/tmpoIvyfC/pdisk_1.dat 2024-11-21T09:22:13.487783Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18285, node 1 2024-11-21T09:22:13.501087Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.501099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.501100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.501134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18737 TClient is connected to server localhost:18737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:13.539973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.540000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.541084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.570099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.574417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:13.636408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.651270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.659553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.712227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461243016109:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.712267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.737130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.743058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.750316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.757155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.764369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.771215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.780654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461243016600:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.780680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.780743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461243016605:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.781487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.784701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660461243016607:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 62304, MsgBus: 25031 2024-11-21T09:22:13.050663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660461642297694:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.050864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002314/r3tmp/tmp0f0ZpU/pdisk_1.dat 2024-11-21T09:22:13.126226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62304, node 1 2024-11-21T09:22:13.151063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.151085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.152139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.163717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.163732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.163734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.163777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25031 TClient is connected to server localhost:25031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.245701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.257574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:13.321492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.331261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.344508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.363763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461642299084:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.363784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.457437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.512443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.519103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.573972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.582457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.596777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.611915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461642299601:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.611946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.611952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660461642299606:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.612553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.616236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660461642299608:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 1 ydb-cpp-sdk/2.6.2 2024-11-21T09:22:14.205976Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180934201, txId: 281474976710683] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:02.146934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:02.146965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.146971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:02.146976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:02.146982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:02.146986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:02.146994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.147079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:02.159177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:02.159205Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.161863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:02.161986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:02.162035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:02.165212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:02.165317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:02.165426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.165659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.166433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.166731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.166744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.166757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:02.166764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.166771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:02.166819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:02.168458Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.188621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:02.188691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.188742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:02.188788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:02.188796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.189535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.189559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:02.189607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.189617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:02.189621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:02.189626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:02.190112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.190120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:02.190125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:02.190459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.190466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.190471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.190478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.191128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:02.191532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:02.191587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:02.191772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.191794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:02.191800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.191864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:02.191872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.191897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:02.191911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.192325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.192335Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.192362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.192366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:02.192430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.192436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:02.192446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:02.192450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.192456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:02.192461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.192466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:02.192471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:02.192480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:02.192486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:02.192491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
.109328Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T09:22:14.109545Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:14.109577Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:14.109586Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1006:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T09:22:14.109612Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:14.109621Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 128 -> 240 2024-11-21T09:22:14.109648Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:14.109654Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:14.109658Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:14.109840Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110027Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T09:22:14.110192Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:14.110196Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:14.110214Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:14.110224Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:14.110235Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:14.110237Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T09:22:14.110240Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2024-11-21T09:22:14.110255Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2024-11-21T09:22:14.110290Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T09:22:14.110294Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T09:22:14.110301Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T09:22:14.110303Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:22:14.110306Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T09:22:14.110310Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T09:22:14.110312Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T09:22:14.110314Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T09:22:14.110321Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:14.110326Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:14.110329Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2024-11-21T09:22:14.110331Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T09:22:14.110333Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:14.110336Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:22:14.110376Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110383Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110385Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:14.110388Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:22:14.110390Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:14.110437Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:14.110440Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:14.110445Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:14.110491Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110497Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110499Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:14.110501Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T09:22:14.110504Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:14.110577Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110583Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.110586Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T09:22:14.110590Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:14.110592Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:14.110597Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T09:22:14.111046Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.111075Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:14.111099Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T09:22:14.111286Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T09:22:14.111332Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T09:22:14.111336Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T09:22:14.111379Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T09:22:14.111391Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T09:22:14.111393Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [50:442:2434] TestWaitNotification: OK eventTxId 1006 
2024-11-21T09:22:14.111438Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:14.111455Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 25us result status StatusPathDoesNotExist 2024-11-21T09:22:14.111476Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 16634, MsgBus: 32405 2024-11-21T09:22:13.855917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660459262412077:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.856075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002299/r3tmp/tmpvqMnQl/pdisk_1.dat 2024-11-21T09:22:13.902444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16634, node 1 2024-11-21T09:22:13.910555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.910565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.910566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.910590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32405 TClient is connected to server localhost:32405 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.955860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.957273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.957298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.958354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.966841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.029534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.048360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.059345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.128159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463557380930:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.128182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.147009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.153063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.163010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.170103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.176997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.184174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.193095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463557381423:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.193131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.193136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463557381428:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.193774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.197152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660463557381430:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:14.349531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.361964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd70ezpg27j8fq50bwmz1x8w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZmNjY3NDUtNzk0NTRkODQtMWUwMjI0ZDEtZTMyYzU1OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:14.363754Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180934361, txId: 281474976710672] shutting down >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpSysColV1::SelectRowById >> KqpSystemView::FailNavigate [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSysColV1::SelectRange [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 11016, MsgBus: 26966 2024-11-21T09:22:14.334923Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660463417504374:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.335151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002000/r3tmp/tmpTcWtst/pdisk_1.dat 2024-11-21T09:22:14.385521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11016, node 1 2024-11-21T09:22:14.393321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.393333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.393334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.393360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26966 TClient is connected to server localhost:26966 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.435614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.435646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.436819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.465396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.471636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.487247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.507123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.517437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.622329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463417505914:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.622356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.656030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.661739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.674473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.681080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.688187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.695371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.704008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463417506429:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.704018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463417506434:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.704028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.704631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.708138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660463417506436:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:14.886573Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439660463417506732:3485], for# user0@builtin, access# DescribeSchema 2024-11-21T09:22:14.886594Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439660463417506732:3485], for# user0@builtin, access# DescribeSchema 2024-11-21T09:22:14.888281Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439660463417506729:2459], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:22:14.888371Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I3ZWYxYjMtZTAyOTZjNDktOTFhYmNlODYtM2NlNzJiOTQ=, ActorId: [1:7439660463417506722:2455], ActorState: ExecuteState, TraceId: 01jd70f074a81jcnv0evazst03, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 7907, MsgBus: 26925 2024-11-21T09:22:14.193340Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660464258022792:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.193360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0021de/r3tmp/tmpURA5JO/pdisk_1.dat 2024-11-21T09:22:14.237888Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7907, node 1 2024-11-21T09:22:14.249707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.249721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.249722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.249757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26925 TClient is connected to server localhost:26925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.294380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.294407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.295487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:14.295607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:14.307182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.369667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.388773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.396686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.476802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464258024338:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.476831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.509531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.515116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.526935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.581610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.590139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.597176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.606406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464258024855:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.606434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.606465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464258024860:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.607065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.609875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660464258024862:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:14.840584Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180934835, txId: 281474976710671] shutting down >> KqpSystemView::NodesRange1 [GOOD] >> KqpSystemView::QueryStatsScan >> KqpSystemView::FailResolve [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 16975, MsgBus: 18968 2024-11-21T09:22:14.378544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660462760490270:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.378781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f89/r3tmp/tmpQKiA1e/pdisk_1.dat 2024-11-21T09:22:14.435470Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16975, node 1 2024-11-21T09:22:14.441125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.441140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.441142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.441174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18968 TClient is connected to server localhost:18968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.480038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.480072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.481330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.489948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.499070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:14.512926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.532372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.541828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.696972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462760491827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.697000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.734714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.741236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.796006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.807260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.813782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.821187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.830035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462760492341:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.830066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462760492346:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.830067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.830698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.834173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660462760492348:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:02.926126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:02.926146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.926149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:02.926153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:02.926157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:02.926160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:02.926167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.926225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:02.933951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:02.933967Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.935570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:02.935639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:02.935676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:02.937584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:02.937650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:02.937729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.937888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.938394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.938592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.938600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.938608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:02.938612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.938616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:02.938641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:02.939575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.950904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:02.950963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:02.951037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:02.951042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:02.951627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:02.951637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:02.951640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:02.951940Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:02.951951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:02.952218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.952225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.952229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.952233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.952642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:02.952965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:02.952999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:02.953120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.953141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:02.953146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.953190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:02.953195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.953215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:02.953223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.953507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.953513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.953536Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.953540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:02.953588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.953592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:02.953599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:02.953602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.953605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:02.953609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.953612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:02.953614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:02.953621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:02.953624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:02.953627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... .936547Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T09:22:14.936808Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:14.936838Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:14.936848Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T09:22:14.936913Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:14.936931Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T09:22:14.936959Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:14.936968Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:14.936975Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:14.937323Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.937447Z 
node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:22:14.937939Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:14.937951Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:14.937983Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:22:14.938001Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:14.938020Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:14.938025Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T09:22:14.938030Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:22:14.938033Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:22:14.938095Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:22:14.938103Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:22:14.938117Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:22:14.938140Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:14.938146Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:22:14.938151Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:14.938156Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:22:14.938160Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:22:14.938171Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:14.938175Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:14.938181Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T09:22:14.938185Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:22:14.938188Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:14.938191Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:22:14.938259Z node 50 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938269Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938274Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:14.938278Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:22:14.938285Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:14.938370Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:14.938377Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:14.938387Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:14.938447Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938456Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938459Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:14.938462Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:22:14.938466Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:14.938590Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938603Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.938607Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:14.938611Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:14.938615Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:14.938625Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:22:14.939701Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.939736Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:14.939917Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:14.940007Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:22:14.940068Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:14.940088Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:14.940155Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:14.940173Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:14.940177Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:383:2375] TestWaitNotification: OK eventTxId 1004 2024-11-21T09:22:14.940281Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:14.940314Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 46us result status StatusPathDoesNotExist 2024-11-21T09:22:14.940354Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 26623, MsgBus: 2176 2024-11-21T09:22:14.419626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660465464681805:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.419842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f81/r3tmp/tmpGfXal8/pdisk_1.dat 2024-11-21T09:22:14.467188Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26623, node 1 2024-11-21T09:22:14.480455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.480469Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.480471Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.480507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2176 TClient is connected to server localhost:2176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.520097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.520127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.521284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.546317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.558847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.620167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.639027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.650046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:14.733853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660465464683343:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.733879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.765143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.771415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.779195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.785959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.793130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.800438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.808759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660465464683844:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.808781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.808810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660465464683849:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.809377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.813279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660465464683851:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:15.022063Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180935020, txId: 281474976715671] shutting down >> KqpSystemView::PartitionStatsRange2 >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::PartitionStatsSimple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 62400, MsgBus: 23313 2024-11-21T09:22:12.765595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660456335147353:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:12.765772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001609/r3tmp/tmpMJZzJG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62400, node 1 2024-11-21T09:22:12.820658Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:12.820811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:12.820821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:12.820822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:12.820852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23313 TClient is connected to server localhost:23313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:12.863709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.866164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:12.866184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:12.867286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:12.875116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:12.889746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.903435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:12.911600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.066767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660460630116212:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.066831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.072406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.127074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.134029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.141230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.147836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.155138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.164332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660460630116727:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.164363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.164386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660460630116732:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:13.165040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:13.168421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660460630116734:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:13.299835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:13.471242Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eyrvf12hqgcmn7bdsncb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:13.473046Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2, ActorId: [1:7439660460630117001:2454], ActorState: ExecuteState, TraceId: 01jd70eyrvf12hqgcmn7bdsncb, Create QueryResponse for error on request, msg: 2024-11-21T09:22:13.510043Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660460630117451:2532], TxId: 281474976710679, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd70eyv289zh91sf70vbrhnr. SessionId : ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:13.510105Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439660460630117452:2533], TxId: 281474976710679, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2. TraceId : 01jd70eyv289zh91sf70vbrhnr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439660460630117448:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:13.510320Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2, ActorId: [1:7439660460630117001:2454], ActorState: ExecuteState, TraceId: 01jd70eyv289zh91sf70vbrhnr, Create QueryResponse for error on request, msg: 2024-11-21T09:22:13.591628Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70eyw8btejhzw0emp58pya, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:13.591692Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBmNDk1Ni05YmE0MWM0ZC04OGI2MTBkYS1iZGM2N2I2, ActorId: [1:7439660460630117001:2454], ActorState: ExecuteState, TraceId: 01jd70eyw8btejhzw0emp58pya, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29432, MsgBus: 25447 2024-11-21T09:22:13.861709Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660458074510915:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.861730Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001609/r3tmp/tmpiPrE1b/pdisk_1.dat 2024-11-21T09:22:13.872304Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29432, node 2 2024-11-21T09:22:13.883096Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.883109Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.883111Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.883144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25447 TClient is connected to server localhost:25447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:13.964559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.964591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.964962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.965645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.971624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.982185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.041490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.052336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.134283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660462369479770:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.134317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.136730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.192960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.205916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.260611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.268423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.275450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.284378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660462369480289:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.284410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.284481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660462369480294:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.285138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.288736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660462369480296:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:14.446826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.619523Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70ezwe6c0ykjmhw1mczz81, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:14.619604Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=, ActorId: [2:7439660462369480577:2454], ActorState: ExecuteState, TraceId: 01jd70ezwe6c0ykjmhw1mczz81, Create QueryResponse for error on request, msg: 2024-11-21T09:22:14.664282Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660462369481011:2530], TxId: 281474976715679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=. TraceId : 01jd70ezyx0mxxxxeyqhnh98at. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T09:22:14.664351Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439660462369481012:2531], TxId: 281474976715679, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd70ezyx0mxxxxeyqhnh98at. SessionId : ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439660462369481008:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T09:22:14.664634Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=, ActorId: [2:7439660462369480577:2454], ActorState: ExecuteState, TraceId: 01jd70ezyx0mxxxxeyqhnh98at, Create QueryResponse for error on request, msg: 2024-11-21T09:22:14.746594Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70f00a1srhtrgyp4d7t93g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T09:22:14.746684Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWEzMjM5NjMtYTIyZDlhMTUtZTc5OThiYTQtOTkzNzFkZTg=, ActorId: [2:7439660462369480577:2454], ActorState: ExecuteState, TraceId: 01jd70f00a1srhtrgyp4d7t93g, Create QueryResponse for error on request, msg: 2024-11-21T09:22:14.751572Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 27854, MsgBus: 4751 2024-11-21T09:22:14.113796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660464987212268:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.113992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002281/r3tmp/tmpX5prPJ/pdisk_1.dat 2024-11-21T09:22:14.162625Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27854, node 1 2024-11-21T09:22:14.175691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.175705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.175706Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.175738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4751 TClient is connected to server localhost:4751 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.213951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.213984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.215095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.218447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.231323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.291649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.306012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.317593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.401945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464987213800:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.401971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.427816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.434535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.488845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.499018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.505963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.520573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.535853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464987214316:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.535884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.535888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660464987214321:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.536526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.540446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660464987214323:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 5232, MsgBus: 20453 2024-11-21T09:22:14.427802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660463125859453:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.428047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f75/r3tmp/tmpPYMJB2/pdisk_1.dat 2024-11-21T09:22:14.474555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5232, node 1 2024-11-21T09:22:14.488378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.488390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.488393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.488427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20453 TClient is connected to server localhost:20453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:14.528868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.528940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.530770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.560575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.565077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.581744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:14.598301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.606842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.707425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463125860999:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.707454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.741272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.746916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.758676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.813769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.821220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.828232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.837062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463125861516:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.837087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660463125861521:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.837093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.837693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.841749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660463125861523:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 8397, MsgBus: 9948 2024-11-21T09:22:13.727597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660458209059897:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.727849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:13.730325Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660462056716176:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.730638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:13.732045Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660462279888749:2214];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.732138Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660459137509388:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.732430Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:13.735788Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660460850192837:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:13.762501Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:13.763761Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0022a8/r3tmp/tmp2NPs4a/pdisk_1.dat 2024-11-21T09:22:13.821793Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:13.827654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.827677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.840581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8397, node 1 2024-11-21T09:22:13.846140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:13.846167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:13.846170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:13.846204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:9948 2024-11-21T09:22:13.870907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.870934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.871248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.871265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.871402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.871420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.872196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:13.872227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:13.872313Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T09:22:13.872326Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T09:22:13.872328Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:22:13.872582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.872647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.872667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:13.873260Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:22:13.873443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:13.905036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:13.913712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:13.981189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.045887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.058294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.078929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462504028853:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.078962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.103134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.164297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.176554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.189397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.201865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.214578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.229503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462504029473:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.229533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.229550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660462504029478:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.230200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:14.241242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660462504029480:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2024-11-21T09:22:14.432001Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180934429, txId: 281474976720671] shutting down 2024-11-21T09:22:14.473943Z node 2 :BS_PROXY_PUT ERROR: [924c7960459f02c8] Result# TEvPutResult {Id# [72075186224037888:1:11:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:14.473943Z node 3 :BS_PROXY_PUT ERROR: [84752176cb8ad56f] Result# TEvPutResult {Id# [72075186224037889:1:11:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:14.495977Z node 4 :BS_PROXY_PUT ERROR: [60d5d4ce53500693] Result# TEvPutResult {Id# [72075186224037911:1:10:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:14.543029Z node 5 :BS_PROXY_PUT ERROR: [6a36579d0a3db9db] Result# TEvPutResult {Id# [72075186224037890:1:9:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:14.731329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:14.732899Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:14.764076Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> KqpSysColV1::StreamSelectRange >> KqpSysColV1::StreamInnerJoinTables |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 65051, MsgBus: 32435 2024-11-21T09:22:14.623232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660465109254863:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:14.623407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f55/r3tmp/tmpWDdJiW/pdisk_1.dat 2024-11-21T09:22:14.693871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65051, node 1 2024-11-21T09:22:14.706843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:14.706855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:14.706857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:14.706895Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:32435 2024-11-21T09:22:14.724349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:14.724384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:14.725455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:14.752831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.762750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.778472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.795634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.808101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:14.940995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660465109256406:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.941032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:14.966150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.971569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:14.981800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.036674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.091429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.101435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.110138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660469404224222:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.110165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.110218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660469404224227:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.110903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:15.114190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660469404224229:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:15.326364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.338054Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439660469404224585:3539], for# user0@builtin, access# SelectRow 2024-11-21T09:22:15.338102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:22:15.340679Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmQyNmEyYWMtZmViNTRlMzktNWJhM2RmZi0zNGRjNGM2OQ==, ActorId: [1:7439660469404224565:2458], ActorState: ExecuteState, TraceId: 01jd70f0n1ch04p6hr5c679v8a, Create QueryResponse for error on request, msg: 2024-11-21T09:22:15.340785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180935337, txId: 281474976710672] shutting down 2024-11-21T09:22:15.340904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd70f0n1ch04p6hr5c679v8a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQyNmEyYWMtZmViNTRlMzktNWJhM2RmZi0zNGRjNGM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpSystemView::Join >> KqpSysColV1::SelectRowById [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 26611, MsgBus: 3971 2024-11-21T09:22:15.055404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660467090583955:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.055534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f48/r3tmp/tmpMf8cPv/pdisk_1.dat 2024-11-21T09:22:15.109443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26611, node 1 2024-11-21T09:22:15.117196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.117210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.117212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.117255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3971 TClient is connected to server localhost:3971 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:15.156879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.156911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.157969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.186046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.191593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.254165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.269353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.278860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.335346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660467090585513:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.335381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.360955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.416767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.423138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.478223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.486407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.493466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.501678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660467090586032:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.501699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660467090586037:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.501706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.502232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:15.506406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660467090586039:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |96.9%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 2886, MsgBus: 62018 2024-11-21T09:22:15.129479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660466808162567:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.129603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e93/r3tmp/tmpB9GnLO/pdisk_1.dat 2024-11-21T09:22:15.181895Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2886, node 1 2024-11-21T09:22:15.188794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.188808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.188809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.188844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62018 TClient is connected to server localhost:62018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:15.230887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.230910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.231988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:15.260998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.273523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:15.334362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.354166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.363497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.494490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660466808164121:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.494521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.518839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.525636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.535496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.542505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.549236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.556470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.565191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660466808164635:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.565231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.565254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660466808164640:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.565937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:15.569167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660466808164642:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:15.734239Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180935779, txId: 281474976715671] shutting down >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSystemView::PartitionStatsRange1 |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] |97.0%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::QueryStatsScan [GOOD] >> KqpSysColV0::SelectRowById >> KqpSystemView::PartitionStatsRanges ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 26676, MsgBus: 12301 2024-11-21T09:22:15.574431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660468067996496:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.574506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e8a/r3tmp/tmpBYWnoY/pdisk_1.dat 2024-11-21T09:22:15.615001Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26676, node 1 2024-11-21T09:22:15.628439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.628449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.628451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.628476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12301 TClient is connected to server localhost:12301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:15.675237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.675269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.676350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:15.698241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.704131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.765213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.779947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.787576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.849303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468067997833:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.849336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.877230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.883696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.891907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.899055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.906102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.912945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.921626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468067998324:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.921652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.921659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468067998329:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.922298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:15.926456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660468067998331:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpSysColV1::InnerJoinSelect >> KqpSysColV1::InnerJoinTables >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> KqpSysColV0::InnerJoinSelect >> KqpSystemView::NodesSimple >> KqpSystemView::PartitionStatsFollower >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSystemView::QueryStatsSimple |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSysColV0::SelectRowById [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::PartitionStatsRanges [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] >> KqpSysColV1::InnerJoinSelect [GOOD] >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] |97.0%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 23723, MsgBus: 27188 2024-11-21T09:22:15.868750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660469827137794:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.868997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e75/r3tmp/tmpmYlwZw/pdisk_1.dat 2024-11-21T09:22:15.925911Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23723, node 1 2024-11-21T09:22:15.937192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.937224Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.937226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.937267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27188 2024-11-21T09:22:15.969415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.969440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.970593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27188 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.984193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.995451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.009372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.024744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.033572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.188080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660474122106644:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.188125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.218015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.223309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.235467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.241885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.249332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.256540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.264080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660474122107156:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.264100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.264122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660474122107161:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.264762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.269264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660474122107163:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:16.482625Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936528, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 10264, MsgBus: 16167 2024-11-21T09:22:16.363083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660471255035633:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.363354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e59/r3tmp/tmpdot9Mi/pdisk_1.dat 2024-11-21T09:22:16.404106Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10264, node 1 2024-11-21T09:22:16.416428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.416441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.416443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.416490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16167 TClient is connected to server localhost:16167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.464313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.464336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.465419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.487244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.500139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.559697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.573030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.585849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.618972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660471255037181:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.619006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.642669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.649103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.655225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.661934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.669740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.676184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.684651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660471255037672:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.684681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.684687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660471255037677:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.685338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.689413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660471255037679:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:16.851120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936849, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 2156, MsgBus: 27746 2024-11-21T09:22:15.563901Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660470002995215:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.564173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e8e/r3tmp/tmphW4V0s/pdisk_1.dat 2024-11-21T09:22:15.604940Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2156, node 1 2024-11-21T09:22:15.618141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.618154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.618155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.618194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27746 TClient is connected to server localhost:27746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.664845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.664889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.665998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:15.687366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.692174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:15.754849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.774401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.783590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.847269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660470002996764:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.847305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.877212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.883220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.938101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.948630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.003601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.011145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.020281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660474297964577:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.020320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660474297964582:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.020323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.020993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.024563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660474297964584:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:16.195548Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936193, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 1964, MsgBus: 10614 2024-11-21T09:22:15.455352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660468038687415:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.455534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e91/r3tmp/tmpLuoMXk/pdisk_1.dat 2024-11-21T09:22:15.512530Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1964, node 1 2024-11-21T09:22:15.523856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.523872Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.523874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.523919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10614 2024-11-21T09:22:15.556804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.556832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.557923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.589532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.600003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:15.613589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.628954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.641176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.726971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468038688963:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.726994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.749673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.804655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.814863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.822008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.877168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.885107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:15.893922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468038689482:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.893949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660468038689487:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.893951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.894585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:15.898370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660468038689489:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:16.097110Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660472333657111:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd70f1by08zbntdx3e0t2q4e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RkNjAzMjAtZDJjN2ZlOTgtZWNmYTk4ZjktNDRmYjUwZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:22:16.259607Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660472333657193:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd70f1by08zbntdx3e0t2q4e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RkNjAzMjAtZDJjN2ZlOTgtZWNmYTk4ZjktNDRmYjUwZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:22:16.260548Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936143, txId: 281474976715671] shutting down 2024-11-21T09:22:16.271365Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660472333657228:2472] TxId: 281474976715675. Ctx: { TraceId: 01jd70f1j5c0g8g4q5p08stx9n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI4MDcwNTgtMjE1ODM2NmUtMmI0YTUzMzgtYTc3MTk3ZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T09:22:16.272424Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936270, txId: 281474976715674] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 30863, MsgBus: 31903 2024-11-21T09:22:15.696690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660466837083642:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.696712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e7d/r3tmp/tmpgksIo5/pdisk_1.dat 2024-11-21T09:22:15.746949Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30863, node 1 2024-11-21T09:22:15.760555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.760573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.760575Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.760611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31903 TClient is connected to server localhost:31903 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:15.797749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.797783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.798861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.806018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.814515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.877033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.896672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.907674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.970291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660466837085191:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:15.970327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.004426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.010874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.018775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.032960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.046485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.053433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.061822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660471132052986:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.061836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660471132052993:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.061849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.062484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.066288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660471132052995:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:16.262130Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936259, txId: 281474976710671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 62325, MsgBus: 18713 2024-11-21T09:22:16.708986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660472887060852:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.709168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e3e/r3tmp/tmpI73rbJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62325, node 1 2024-11-21T09:22:16.758958Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:16.762774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.762785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.762786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.762810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18713 TClient is connected to server localhost:18713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.807028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.810266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.810285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.811475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.816612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.876980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.891473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.900758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.978684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472887062419:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.978713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.004541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.010315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.019228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.026116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.033420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.040262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.048318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477182030209:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.048344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477182030214:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.048349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.048954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.053375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660477182030216:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 27590, MsgBus: 15257 2024-11-21T09:22:15.795302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660467751259046:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.795440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e72/r3tmp/tmpO5tDeT/pdisk_1.dat 2024-11-21T09:22:15.846641Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27590, node 1 2024-11-21T09:22:15.856395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.856407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.856409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.856434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15257 TClient is connected to server localhost:15257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:15.899810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.899836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.901207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.926005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.935217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.950610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:15.969225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.979624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.088414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472046227897:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.088449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.111369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.117355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.123181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.130020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.137214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.144815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.160361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472046228403:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.160384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.160441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472046228408:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.161194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.164650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660472046228410:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:16.377415Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936416, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 28654, MsgBus: 28342 2024-11-21T09:22:15.836107Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660470847617621:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.836349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e6d/r3tmp/tmpEfFMip/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28654, node 1 2024-11-21T09:22:15.891108Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:15.893231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.893242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.893244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.893274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28342 TClient is connected to server localhost:28342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:15.936951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:15.936970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:15.938103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:15.962596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:15.971681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:15.986087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.003615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.012713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.108709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660475142586468:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.108735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.136842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.143133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.198162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.206927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.214014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.221260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.229173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660475142586982:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.229198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.229240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660475142586987:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.229917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.234060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660475142586989:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:16.408024Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936451, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 31537, MsgBus: 19022 2024-11-21T09:22:16.651084Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660473046449468:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.651316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e37/r3tmp/tmpxcJTPA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31537, node 1 2024-11-21T09:22:16.718133Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:16.718612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.718621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.718622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.718646Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19022 2024-11-21T09:22:16.752107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.752134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.753254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.765958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.776941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.839539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.856161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.866701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.933039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473046451012:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.933076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.966146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.971941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.984277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.991241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.046316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.054330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.062634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477341418824:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.062667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477341418829:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.062675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.063199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.067155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660477341418831:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 1072, MsgBus: 24938 2024-11-21T09:22:16.469617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660472233104293:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.469658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e56/r3tmp/tmpPi6bxR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1072, node 1 2024-11-21T09:22:16.526631Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:16.526877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.526887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.526888Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.526919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24938 TClient is connected to server localhost:24938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.569138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.569669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.569686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T09:22:16.570758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.581326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.640867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.658945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.670023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.762788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472233105836:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.762832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.790902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.797472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.808606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.862953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.871945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.879014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.887652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472233106352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.887677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660472233106357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.887683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.888223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.891877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660472233106359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 7778, MsgBus: 29140 2024-11-21T09:22:16.628360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660473492239015:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.628389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e52/r3tmp/tmpzzhG54/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7778, node 1 2024-11-21T09:22:16.684071Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:16.686739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.686752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.686753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.686784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29140 TClient is connected to server localhost:29140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:16.728475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.728501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.729576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.733012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.741730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.802266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.821941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.830432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.904040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473492240557:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.904090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.928841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.934887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.989209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.997519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.005288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.012132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.019963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477787208369:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.019986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.019991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660477787208374:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.020557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.025059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660477787208376:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:17.221073Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180937218, txId: 281474976710671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 26620, MsgBus: 9096 2024-11-21T09:22:16.629797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660473758400182:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.629816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e4f/r3tmp/tmpud4ZDh/pdisk_1.dat 2024-11-21T09:22:16.684507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26620, node 1 2024-11-21T09:22:16.693727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.693738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.693740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.693773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9096 TClient is connected to server localhost:9096 2024-11-21T09:22:16.730663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.730698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:16.731678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.740780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.752153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:16.765900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.781321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.790877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.935022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473758401732:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.935058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.960320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.965478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.976618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.984037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.990860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.997720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.006450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478053369542:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.006479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.006485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478053369547:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.007023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.011399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660478053369549:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 1818, MsgBus: 2837 2024-11-21T09:22:16.859354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660473938100790:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.859467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e13/r3tmp/tmp2l2J8T/pdisk_1.dat 2024-11-21T09:22:16.908323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1818, node 1 2024-11-21T09:22:16.917643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.917657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.917658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.917692Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2837 TClient is connected to server localhost:2837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:16.960264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.960293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.961341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.961659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.973838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.035619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:17.051416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.061923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.162719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478233069634:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.162740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.199097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.205436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.215270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.221840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.228927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.235922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.244523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478233070138:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.244550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.244564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478233070143:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.245098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.249174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660478233070145:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpSystemView::NodesSimple [GOOD] >> KqpSysColV0::SelectRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:01.697668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:01.697691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:01.697694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:01.697698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:01.697702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:01.697705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:01.697711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:01.697775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:01.705607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:01.705626Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:01.707330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:01.707413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:01.707450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:01.709601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:01.709666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:01.709744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:01.709950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:01.710484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:01.710694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:01.710700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:01.710710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:01.710715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:01.710719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:01.710746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:01.711694Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:01.724528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:01.724588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.724638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:01.724675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:01.724680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.725304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:01.725324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:01.725365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.725373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:01.725375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:01.725378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2024-11-21T09:22:01.725711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.725721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:01.725725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:01.726039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.726049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.726054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:01.726061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:01.726579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:01.726932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:01.726975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:01.727127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:01.727147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:01.727151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:01.727197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:01.727202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:01.727233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:01.727241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:01.727588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:01.727594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T09:22:01.727617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:01.727620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:01.727667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:01.727671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:01.727678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:01.727681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:01.727684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:01.727688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:01.727690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:01.727692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:01.727700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:01.727703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:01.727706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [67:331:2323] 2024-11-21T09:22:17.843353Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T09:22:17.843367Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T09:22:17.843403Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:17.843415Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 287762810984 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:17.843419Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T09:22:17.843433Z node 67 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T09:22:17.843448Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:17.843461Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:17.843467Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:22:17.843798Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:17.843804Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:22:17.843822Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:17.843833Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:17.843841Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:17.843854Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:17.843859Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:22:17.843863Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T09:22:17.843867Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T09:22:17.843871Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T09:22:17.843918Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:22:17.843922Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:22:17.843929Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:22:17.843931Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:17.843934Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:22:17.843937Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:17.843940Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:22:17.843942Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:22:17.843949Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:22:17.843951Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:17.843954Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 1 2024-11-21T09:22:17.843956Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 
2024-11-21T09:22:17.843958Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T09:22:17.843964Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2024-11-21T09:22:17.844108Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844118Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844122Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:17.844126Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T09:22:17.844129Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:17.844363Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844373Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844377Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:17.844381Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:22:17.844384Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:17.844499Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844507Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.844511Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:17.844515Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:17.844519Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:17.844526Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2024-11-21T09:22:17.844531Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at 
schemeshard: 72057594046678944, to actorId: [67:300:2292] 2024-11-21T09:22:17.844806Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.845101Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.845118Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:17.845127Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:17.845135Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [67:331:2323] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2024-11-21T09:22:17.845226Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirExternalTable/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:17.845251Z node 67 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirExternalTable/ExternalTable" took 33us result status StatusSuccess 2024-11-21T09:22:17.845322Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirExternalTable/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "a" Type: "Int32" TypeId: 1 Id: 1 NotNull: true } Columns { Name: "b" Type: "Int32" TypeId: 1 Id: 2 NotNull: true } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 8994, MsgBus: 63825 2024-11-21T09:22:16.792965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660474790837683:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.793013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:16.795631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660474460251933:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.795786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:16.796142Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660473564249318:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.796295Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e47/r3tmp/tmpkzUyyZ/pdisk_1.dat 2024-11-21T09:22:16.844768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8994, node 1 2024-11-21T09:22:16.857778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.857794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.857796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.857838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63825 TClient is connected to server localhost:63825 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:16.893004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.893047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:16.894632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:16.920358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.920388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.920453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.920469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:16.921740Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:22:16.921751Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:22:16.921994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.922053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.922056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:22:16.934055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.998051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.015724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.028409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.140837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479085806737:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.140861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.166981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.176831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.189550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.247772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.258171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.273639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.287942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479085807418:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.287962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.287979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479085807423:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.288551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.292127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660479085807425:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725668 completed, doublechecking } 2024-11-21T09:22:17.466580Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180937464, txId: 281474976725671] shutting down 2024-11-21T09:22:17.579602Z node 3 :BS_PROXY_PUT ERROR: [87d5f46912c1e0c0] Result# TEvPutResult {Id# [72075186224037913:1:10:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:17.605154Z node 2 :BS_PROXY_PUT ERROR: [a4478fa461870477] Result# TEvPutResult {Id# [72075186224037915:1:10:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:17.796137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 29221, MsgBus: 28971 2024-11-21T09:22:17.243179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660479458721520:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:17.243322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e00/r3tmp/tmpg0dqMl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29221, node 1 2024-11-21T09:22:17.291044Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:17.295883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:17.295893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:17.295894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:17.295923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28971 TClient is connected to server localhost:28971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:17.342220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.343858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:17.343878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:17.344965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:17.351121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.412840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.426990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.439717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.509572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479458723062:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.509622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.514291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.568624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.623073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.677432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.731622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.739857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.747914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479458723586:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.747939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.747942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660479458723591:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.748474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.753175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660479458723593:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteKqpUpsert >> ReadLoad::ShouldReadIterate >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] >> ReadLoad::ShouldReadKqp >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> UpsertLoad::ShouldWriteKqpUpsert2 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> UpsertLoad::ShouldWriteDataBulkUpsert >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:02.363145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:02.363167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.363171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:02.363175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:02.363179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:02.363181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:02.363188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:02.363255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:02.370945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:02.370965Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.372678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:02.372750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T09:22:02.372785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:02.375174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:02.375251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:02.375321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.375519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.376245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.376536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.376545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.376556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:02.376563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.376569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:02.376614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:02.377732Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:02.389752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:02.389808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.389847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:02.389888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:02.389895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.390468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.390482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T09:22:02.390516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.390522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:02.390525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:02.390527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:02.390841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.390848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:02.390851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:02.391107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.391116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.391121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.391126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.391532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:02.391930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:02.391975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:02.392121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:02.392144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:02.392149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.392200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:02.392224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:02.392251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:02.392264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:02.392642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:02.392649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:02.392670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:02.392672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:02.392719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:02.392725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:02.392734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:02.392738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.392742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:02.392747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:02.392751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:02.392754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:02.392764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:02.392769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:02.392773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
69090816 2024-11-21T09:22:18.466547Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T09:22:18.466662Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:18.466683Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 287762810984 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:18.466693Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T09:22:18.466735Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:18.466750Z node 67 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T09:22:18.466776Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:18.466785Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:18.466791Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:18.466976Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.467280Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:22:18.467661Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:18.467668Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:18.467694Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:22:18.467712Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:18.467730Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:18.467735Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T09:22:18.467740Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:22:18.467744Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 3 
2024-11-21T09:22:18.467791Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:22:18.467802Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:22:18.467815Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:22:18.467819Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:18.467825Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:22:18.467831Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:22:18.467836Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:22:18.467841Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:22:18.467852Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:22:18.467857Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:18.467862Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T09:22:18.467867Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:22:18.467870Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T09:22:18.467874Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T09:22:18.467946Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.467957Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.467961Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:18.467966Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T09:22:18.467970Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:18.468032Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:18.468039Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:18.468049Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:18.468128Z node 67 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.468139Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.468143Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:18.468150Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:22:18.468154Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:18.468307Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.468322Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.468327Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:22:18.468331Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:18.468335Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:18.468345Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:22:18.469090Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.469143Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:18.469194Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:18.469399Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T09:22:18.469457Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:18.469465Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:18.469579Z node 67 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:18.469598Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:18.469603Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [67:389:2381] TestWaitNotification: OK 
eventTxId 1004 2024-11-21T09:22:18.469675Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:18.469702Z node 67 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 36us result status StatusPathDoesNotExist 2024-11-21T09:22:18.469737Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows |97.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> KqpSysColV1::StreamInnerJoinSelect >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.011933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.011955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.011959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.011969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.025019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.025041Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.026767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.026868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.026891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.029407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.029455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.030960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] 
recipient: [1:207:2209] 2024-11-21T09:21:56.036604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.050215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.050721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.050820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.050866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.050873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.051694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.051718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.051721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.052064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.052391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.052416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.052841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.053171Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.054117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.054294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.054375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.054409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.054775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.054807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.054867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.054880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.054883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.054887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.054890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.054894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.054896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.054904Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.054908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.054911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... , path id: 1 2024-11-21T09:22:19.493737Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1005, path id: 2 2024-11-21T09:22:19.493743Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.493747Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1005:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:22:19.493751Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1005 ready parts: 1/1 2024-11-21T09:22:19.493765Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1005 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T09:22:19.493859Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.493865Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.493868Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T09:22:19.493871Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 5 2024-11-21T09:22:19.493874Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:22:19.493977Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.493984Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.493986Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T09:22:19.493988Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 2 2024-11-21T09:22:19.493991Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T09:22:19.493996Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:22:19.494405Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 
cookie: 0:1005 msg type: 269090816 2024-11-21T09:22:19.494428Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72075186233409547 2024-11-21T09:22:19.494506Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.494743Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestModificationResults wait txId: 1006 2024-11-21T09:22:19.495060Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "B" } } TxId: 1006 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T09:22:19.495079Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_0/B, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495092Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72075186233409546, LocalPathId: 1], parent name: MyRoot/USER_0, child name: B, child id: [OwnerId: 72075186233409546, LocalPathId: 3], at schemeshard: 72075186233409546 2024-11-21T09:22:19.495097Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 0 2024-11-21T09:22:19.495103Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2024-11-21T09:22:19.495128Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:22:19.495133Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 1 2024-11-21T09:22:19.495420Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72075186233409546 PathId: 3, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495435Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/B 2024-11-21T09:22:19.495458Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495461Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:22:19.495476Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 3] 2024-11-21T09:22:19.495485Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495488Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 1 2024-11-21T09:22:19.495491Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 3 2024-11-21T09:22:19.495521Z node 96 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495525Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1006:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:22:19.495529Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T09:22:19.495542Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T09:22:19.495597Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.495603Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.495605Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2024-11-21T09:22:19.495608Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 6 2024-11-21T09:22:19.495610Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2024-11-21T09:22:19.495638Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.495643Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.495645Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2024-11-21T09:22:19.495647Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 2 2024-11-21T09:22:19.495649Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 2 2024-11-21T09:22:19.495653Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T09:22:19.495937Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1006 msg type: 269090816 2024-11-21T09:22:19.495959Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72075186233409547 2024-11-21T09:22:19.496134Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.496145Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 
1007 2024-11-21T09:22:19.496497Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "C" } } TxId: 1007 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T09:22:19.496518Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_0/C, operationId: 1007:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.496527Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1007:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72075186233409546 2024-11-21T09:22:19.496804Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_0/C\', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154" TxId: 1007 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T09:22:19.496817Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/C TestModificationResult got TxId: 1007, wait until txId: 1007 >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi |97.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] >> TGRpcClientLowTest::SimpleRequest >> YdbTableBulkUpsert::Simple >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> YdbYqlClient::SecurityTokenAuth >> TGRpcNewClient::SimpleYqlQuery >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.011931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.012003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.012007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.012015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.024655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.024672Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.026472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.026555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.026574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.029397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.029466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.030940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.051840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.051906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.052023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.052031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.052773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.052792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.052796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.053237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.053631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.053653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.054093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.054439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.054475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.054602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.054680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.054707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.055084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055095Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.055120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.055178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055182Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.055188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.055191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.055197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.055202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.055210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.055213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.055216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
, path id: 1 2024-11-21T09:22:19.418910Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1005, path id: 2 2024-11-21T09:22:19.418916Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.418920Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1005:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:22:19.418925Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1005 ready parts: 1/1 2024-11-21T09:22:19.418940Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1005 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T09:22:19.419038Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.419044Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.419047Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T09:22:19.419050Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 5 2024-11-21T09:22:19.419052Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:22:19.419157Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.419164Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T09:22:19.419166Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T09:22:19.419168Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 2 2024-11-21T09:22:19.419170Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T09:22:19.419176Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T09:22:19.419513Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1005 msg type: 269090816 2024-11-21T09:22:19.419536Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72075186233409547 2024-11-21T09:22:19.419599Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 
1005 2024-11-21T09:22:19.419805Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestModificationResults wait txId: 1006 2024-11-21T09:22:19.420138Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "B" } } TxId: 1006 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T09:22:19.420160Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_0/B, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420175Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72075186233409546, LocalPathId: 1], parent name: MyRoot/USER_0, child name: B, child id: [OwnerId: 72075186233409546, LocalPathId: 3], at schemeshard: 72075186233409546 2024-11-21T09:22:19.420181Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 0 2024-11-21T09:22:19.420187Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2024-11-21T09:22:19.420240Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T09:22:19.420247Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 1 2024-11-21T09:22:19.420546Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72075186233409546 PathId: 3, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420560Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/B 2024-11-21T09:22:19.420583Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420586Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T09:22:19.420601Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 3] 2024-11-21T09:22:19.420609Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420612Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 1 2024-11-21T09:22:19.420615Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 3 2024-11-21T09:22:19.420647Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420651Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1006:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T09:22:19.420655Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 
ready parts: 1/1 2024-11-21T09:22:19.420667Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T09:22:19.420724Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.420733Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.420735Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2024-11-21T09:22:19.420738Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 6 2024-11-21T09:22:19.420740Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2024-11-21T09:22:19.420770Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.420774Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.420776Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2024-11-21T09:22:19.420778Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 2 2024-11-21T09:22:19.420780Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 2 2024-11-21T09:22:19.420784Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T09:22:19.421086Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1006 msg type: 269090816 2024-11-21T09:22:19.421103Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72075186233409547 2024-11-21T09:22:19.421264Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 2024-11-21T09:22:19.421275Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2024-11-21T09:22:19.421578Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "C" } } TxId: 1007 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T09:22:19.421598Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: 
TMkDir Propose, path: /MyRoot/USER_0/C, operationId: 1007:0, at schemeshard: 72075186233409546 2024-11-21T09:22:19.421608Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1007:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72075186233409546 2024-11-21T09:22:19.421879Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_0/C\', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154" TxId: 1007 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T09:22:19.421895Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/C TestModificationResult got TxId: 1007, wait until txId: 1007 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest >> YdbYqlClient::DiscoveryLocationOverride >> TGRpcYdbTest::CreateTableBadRequest3 Test command err: 2024-11-21T09:22:19.212591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.212979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.212997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046d5/r3tmp/tmp63e8Pe/pdisk_1.dat 2024-11-21T09:22:19.306448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.322837Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.364982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.365016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.375436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.478275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.685926Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T09:22:19.685955Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T09:22:19.764231Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.078199s, errors=0 2024-11-21T09:22:19.764265Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> YdbYqlClient::TestReadTableMultiShardWholeTable >> YdbTableBulkUpsertOlap::UpsertCsvBug >> TTableProfileTests::UseDefaultProfile >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> TGRpcYdbTest::RemoveNotExistedDirectory >> TGRpcYdbTest::MakeListRemoveDirectory >> ReadLoad::ShouldReadKqpMoreThanRows >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> UpsertLoad::ShouldDropCreateTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> 
TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest
>> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD]
>> YdbTableBulkUpsert::Simple [GOOD]
Test command err: 2024-11-21T09:22:19.305591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.306144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.306166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046c7/r3tmp/tmpJAQ9yh/pdisk_1.dat 2024-11-21T09:22:19.412569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.427925Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.469812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.469844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.480275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.583107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.792747Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2024-11-21T09:22:19.792780Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2024-11-21T09:22:19.793239Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T09:22:19.793249Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.793256Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.793261Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.793266Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.793270Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.794103Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=NGI1MzM2NTYtYzJhMmFlMzAtZDM4NmE0MTctNGE4YWNiYWU= 2024-11-21T09:22:19.794120Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=MzdlNzI0ZTUtOGMxODlmMjgtNGMyMmQ1My0xMWUyMzdj 2024-11-21T09:22:19.794326Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: 
ydb://session/3?node_id=1&id=ZjZiMjljN2UtYWQ0Njg2MmYtNjYxMWM0NWYtNDBkOWY2Nw== 2024-11-21T09:22:19.794532Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=YzJhZDM0MGEtNDllMzlkOTktZGM5MTEyNTgtMWY2YWRhNDU= 2024-11-21T09:22:19.794757Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=MjBhMTlkMTUtODE0YWNhODQtOGQ5YTczZTAtMjE1MWUyODY= 2024-11-21T09:22:19.795387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.795448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.796645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:19.985309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.985342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.985350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.985357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.985364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:20.089077Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732180940.089064s, errors=0 2024-11-21T09:22:20.089142Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732180940089 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.144745Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732180940.144728s, errors=0 2024-11-21T09:22:20.144834Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732180940144 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.212651Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732180940.212636s, errors=0 2024-11-21T09:22:20.212746Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732180940212 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.246718Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732180940.246703s, errors=0 2024-11-21T09:22:20.246801Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732180940246 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.303105Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732180940.303090s, errors=0 2024-11-21T09:22:20.303177Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732180940303 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.303184Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 0.509970s, oks# 20, errors# 0 2024-11-21T09:22:20.303198Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2024-11-21T09:22:18.952968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:18.953362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:18.953379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046e4/r3tmp/tmpnjBHQj/pdisk_1.dat 2024-11-21T09:22:19.087478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.103712Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.145697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.145717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.156220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.260909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.476138Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2024-11-21T09:22:19.476162Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2024-11-21T09:22:19.476482Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T09:22:19.476488Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.476494Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.476499Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.476502Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.476507Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T09:22:19.477046Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=NjQxNjZmMWMtMThmZTc2Zi04ODg4MDNlZC02MGQ4ZDc5NA== 2024-11-21T09:22:19.477057Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=OGI5YTA5NzUtNzJlNzBhZjctMzI0MzA2ZjgtNzdiM2JkOTA= 2024-11-21T09:22:19.477179Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: 
ydb://session/3?node_id=1&id=MTk2ZGJiZjUtNzg5MTA1ZDctOWRkODdjMjQtNDA1N2Y3NjA= 2024-11-21T09:22:19.477302Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=ZGYxZWNkNmQtMzZkYTdjMmMtNGRiN2QwYTYtYmYzMTMyYzI= 2024-11-21T09:22:19.477479Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=MmZhMTdhYTctYmFmMmYwYmQtNTY5Mjc2NjItZDY2NGExNWM= 2024-11-21T09:22:19.477892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.477927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.479329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:19.665805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.665829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.665834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.665839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.665843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:19.835845Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732180939.835830s, errors=0 2024-11-21T09:22:19.835927Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732180939835 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:19.892732Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732180939.892714s, errors=0 2024-11-21T09:22:19.892825Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732180939892 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:19.949448Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732180939.949432s, errors=0 2024-11-21T09:22:19.949528Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732180939949 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.018634Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732180940.018620s, errors=0 2024-11-21T09:22:20.018720Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732180940018 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.029959Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732180940.029946s, errors=0 2024-11-21T09:22:20.030009Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732180940029 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:20.030015Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 0.553548s, oks# 20, errors# 0 2024-11-21T09:22:20.030029Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 >> YdbYqlClient::QueryLimits >> TGRpcYdbTest::CreateTableWithIndex >> TGRpcClientLowTest::SimpleRequestDummyService >> TGRpcNewClient::TestAuth >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> YdbTableBulkUpsert::Types >> YdbYqlClient::SecurityTokenAuth [GOOD] >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TTableProfileTests::UseDefaultProfile [GOOD] >> TGRpcYdbTest::GetOperationBadRequest >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> YdbMonitoring::SelfCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2024-11-21T09:22:19.294349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.294701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.294716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046cc/r3tmp/tmp0vYST1/pdisk_1.dat 2024-11-21T09:22:19.386707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.403025Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.445033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.445067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.455502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.558922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.768120Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:19.768157Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T09:22:19.834481Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.066272s, errors=0 2024-11-21T09:22:19.834524Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T09:22:20.299221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:20.299251Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:20.299271Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046cc/r3tmp/tmp3hjbHS/pdisk_1.dat 2024-11-21T09:22:20.372764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.386861Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.428560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.428586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.438958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.542094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.747134Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:20.747163Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T09:22:20.814503Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.067295s, errors=0 2024-11-21T09:22:20.814538Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2024-11-21T09:22:19.262689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.263146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.263174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046cf/r3tmp/tmpUmiMl1/pdisk_1.dat 2024-11-21T09:22:19.357124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.373399Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.415533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.415562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.425987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.529626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.740262Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:19.740294Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T09:22:19.802391Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.062051s, errors=0 2024-11-21T09:22:19.802421Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T09:22:20.242523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:20.242555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:20.242573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046cf/r3tmp/tmpw6S08D/pdisk_1.dat 2024-11-21T09:22:20.317655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.331307Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.373132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.373157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.383702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.486944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.692019Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:20.692057Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T09:22:20.755021Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.062908s, errors=0 2024-11-21T09:22:20.755056Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> YdbTableBulkUpsertOlap::UpsertCSV >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> TTableProfileTests::UseTableProfilePreset >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcYdbTest::SdkUuid >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> YdbYqlClient::QueryLimits [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD] >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> YdbTableBulkUpsert::Types [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> TGRpcNewClient::YqlQueryWithParams >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcYdbTest::CreateYqlSession >> YdbYqlClient::QueryStats >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TGRpcYdbTest::ExplainQuery >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> YdbMonitoring::SelfCheck [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> YdbMonitoring::SelfCheckWithNodesDying >> TGRpcYdbTest::SdkUuid [GOOD] >> YdbS3Internal::TestS3Listing >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::ReadTable >> TGRpcYdbTest::CreateDeleteYqlSession >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> YdbYqlClient::TestMultipleModifications >> YdbTableBulkUpsert::NotNulls >> TTableProfileTests::OverwriteCompactionPolicy >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2024-11-21T09:22:20.446384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:20.446773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:20.446791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046b0/r3tmp/tmp9gb7DN/pdisk_1.dat 2024-11-21T09:22:20.541660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.557389Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.599607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.599648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.610238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.713971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.930553Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2024-11-21T09:22:20.930595Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2024-11-21T09:22:20.931021Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T09:22:20.931033Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T09:22:20.931042Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T09:22:20.931047Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T09:22:20.931051Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T09:22:20.931056Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T09:22:20.931917Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=MTAzNTAyNWQtNWFmMWYwZDMtMThmMWUwMWQtZTMzZDM5ZA== 2024-11-21T09:22:20.931933Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=Y2VjN2QzMzMtZDA1ZDhlMjgtZTE4YmZmM2EtYTA2NGE2NDg= 2024-11-21T09:22:20.932068Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: ydb://session/3?node_id=1&id=YWJlZDFlN2QtZjhkODRiOGItMmEwMTFhNWQtMTQxYjZkMjU= 2024-11-21T09:22:20.932188Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=NGNiMTAyYTktYzE3MzJmOGEtN2IyNGZjODUtYWRjN2M2ZmY= 2024-11-21T09:22:20.932354Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=YTg2MmJiNDctNzBiNThiNjQtMzI1MjM3MGMtMzIzZjdmODA= 2024-11-21T09:22:20.932789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.932825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.933598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:21.121264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.121294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.121300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.121304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.121309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.229274Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732180941.229257s, errors=0 2024-11-21T09:22:21.229357Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732180941229 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:21.286311Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732180941.286294s, errors=0 2024-11-21T09:22:21.286389Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732180941286 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:21.356403Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732180941.356389s, errors=0 2024-11-21T09:22:21.356500Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732180941356 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:21.356506Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732180941.356505s, errors=0 2024-11-21T09:22:21.356515Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732180941356 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:21.412589Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732180941.412577s, errors=0 2024-11-21T09:22:21.412652Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732180941412 OperationsOK: 4 OperationsError: 0 } 2024-11-21T09:22:21.412658Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 0.481671s, oks# 20, errors# 0 2024-11-21T09:22:21.412690Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T09:20:57.904245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T09:20:57.904274Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T09:20:57.904303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T09:20:57.904309Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T09:20:57.904316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T09:20:57.904320Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2024-11-21T09:20:57.904349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T09:20:57.904353Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2024-11-21T09:20:57.904388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:57.904445Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T09:20:57.904451Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2024-11-21T09:20:57.904486Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:57.904508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T09:20:57.904512Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2024-11-21T09:20:57.904517Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:57.904535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T09:20:57.904538Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2024-11-21T09:20:57.904543Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:57.904555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2024-11-21T09:20:57.904564Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T09:20:57.904569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2024-11-21T09:20:57.904574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T09:20:57.904578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2024-11-21T09:20:57.904583Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T09:20:57.904594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:36:2067] 2024-11-21T09:20:57.904610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:37:2067] 2024-11-21T09:20:57.904620Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:57.904626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: /root/tenant Version: 0 }: sender# [1:38:2067] 2024-11-21T09:20:57.904633Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/tenant] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T09:20:57.904688Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 103 2024-11-21T09:20:57.904696Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T09:20:57.904717Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T09:20:57.904779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2024-11-21T09:20:57.904788Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T09:20:57.904795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:36:2067] 2024-11-21T09:20:57.904803Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2024-11-21T09:20:58.304442Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T09:20:58.304460Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T09:20:58.304473Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T09:20:58.304476Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T09:20:58.304484Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T09:20:58.304488Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T09:20:58.304513Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] 
Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T09:20:58.304517Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T09:20:58.304528Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:20:58.304578Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:39:2067] 2024-11-21T09:20:58.304583Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2024-11-21T09:20:58.304606Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:58.304621Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:40:2067] 2024-11-21T09:20:58.304624Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2024-11-21T09:20:58.304629Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:58.304643Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2067] 2024-11-21T09:20:58.304645Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2024-11-21T09:20:58.304649Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:20:58.304662Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2024-11-21T09:20:58.304670Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:39:2067] 2024-11-21T09:20:58.304676Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2024-11-21T09:20:58.304680Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:40:2067] 2024-11-21T09:20:58.304685Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2024-11-21T09:20:58.304689Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2067] 2024-11-21T09:20:58.304697Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:36:2067] 2024-11-21T09:20:58.304713Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:37:2067] 2024-11-21T09:20:58.304722Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:35:2067][/root/tenant] Set up state: owner# [3:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:20:58.304729Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/ ... 
omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2024-11-21T09:22:19.279786Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T09:22:19.279813Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T09:22:19.279833Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T09:22:19.279838Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T09:22:19.279845Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T09:22:19.279849Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T09:22:19.279877Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T09:22:19.279881Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T09:22:19.279896Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:22:19.279955Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:39:2067] 2024-11-21T09:22:19.279962Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.279985Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:22:19.280008Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:40:2067] 2024-11-21T09:22:19.280012Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.280017Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:22:19.280031Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2067] 2024-11-21T09:22:19.280034Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.280039Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: 
true 2024-11-21T09:22:19.280052Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2024-11-21T09:22:19.280062Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:39:2067] 2024-11-21T09:22:19.280069Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2024-11-21T09:22:19.280074Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:40:2067] 2024-11-21T09:22:19.280082Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2024-11-21T09:22:19.280088Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2067] 2024-11-21T09:22:19.280099Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:36:2067] 2024-11-21T09:22:19.280117Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:37:2067] 2024-11-21T09:22:19.280128Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:35:2067][/Root/Tenant/table_inside] Set up state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:22:19.280136Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2067] 2024-11-21T09:22:19.280144Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2024-11-21T09:22:19.686548Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T09:22:19.686573Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T09:22:19.686590Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T09:22:19.686594Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T09:22:19.686598Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] 
Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T09:22:19.686600Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T09:22:19.686620Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T09:22:19.686622Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T09:22:19.686631Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T09:22:19.686671Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:39:2067] 2024-11-21T09:22:19.686675Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.686692Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:22:19.686705Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:40:2067] 2024-11-21T09:22:19.686707Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.686710Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:22:19.686719Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2067] 2024-11-21T09:22:19.686721Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T09:22:19.686723Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T09:22:19.686732Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2024-11-21T09:22:19.686738Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:39:2067] 2024-11-21T09:22:19.686743Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2024-11-21T09:22:19.686746Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:40:2067] 2024-11-21T09:22:19.686752Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2024-11-21T09:22:19.686755Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2067] 2024-11-21T09:22:19.686762Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:36:2067] 2024-11-21T09:22:19.686775Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:37:2067] 2024-11-21T09:22:19.686781Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:35:2067][/Root/Tenant/table_inside] Set up state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T09:22:19.686788Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2067] 2024-11-21T09:22:19.686793Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2024-11-21T09:22:19.223374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.223961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.224003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046df/r3tmp/tmpktgN3d/pdisk_1.dat 2024-11-21T09:22:19.320937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.336276Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.378113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.378138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.388511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.491420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.699114Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2024-11-21T09:22:19.699140Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T09:22:19.771927Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.072745s, errors=0 2024-11-21T09:22:19.771953Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T09:22:20.231135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:20.231179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:20.231212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046df/r3tmp/tmpzacOSc/pdisk_1.dat 2024-11-21T09:22:20.304733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.318018Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.359919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.359953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.370461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.473592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.677509Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T09:22:20.677535Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T09:22:20.739732Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.062151s, errors=0 2024-11-21T09:22:20.739761Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 63410, MsgBus: 7928 2024-11-21T09:22:19.934771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660487253573565:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:19.934791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001df1/r3tmp/tmpS9OGNg/pdisk_1.dat 2024-11-21T09:22:19.992582Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63410, node 1 2024-11-21T09:22:20.003471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.003486Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.003488Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2024-11-21T09:22:20.003524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7928 2024-11-21T09:22:20.035367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.035394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.036571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:20.068976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.072351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.134200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.150414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.163005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.186179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660491548542405:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.186201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.206761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.261483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.267004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.273478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.280788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.288575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.296163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660491548542901:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.296182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660491548542906:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.296183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.296672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:20.301054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660491548542908:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:20.515367Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180940560, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2024-11-21T09:22:18.972467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:18.972835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:18.972851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f5/r3tmp/tmp6D0WEs/pdisk_1.dat 2024-11-21T09:22:19.087322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.103606Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.145654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.145686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.156221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.268719Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2024-11-21T09:22:19.370946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:611:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.370976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.398684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.615135Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2024-11-21T09:22:19.615380Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T09:22:19.636336Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor finished in 0.020904s, errors=0 2024-11-21T09:22:19.636483Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:19.636516Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T09:22:19.688380Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor finished in 0.051783s, errors=0 2024-11-21T09:22:19.688420Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:718:2595] with tag# 3 2024-11-21T09:22:20.136614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:20.136646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:20.136663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046f5/r3tmp/tmpXYY8LE/pdisk_1.dat 2024-11-21T09:22:20.210919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.224794Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.266554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.266586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.277046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.379949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.584041Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2024-11-21T09:22:20.584066Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2024-11-21T09:22:20.962123Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.378005s, errors=0 2024-11-21T09:22:20.962176Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 2024-11-21T09:22:20.963484Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2024-11-21T09:22:20.965956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:742:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:20.965984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.136825Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2024-11-21T09:22:21.138444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:808:2672], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.138480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.139872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.173351Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T09:22:21.351985Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2024-11-21T09:22:21.352043Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T09:22:21.362555Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor finished in 0.010463s, errors=0 2024-11-21T09:22:21.362649Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T09:22:21.362666Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T09:22:21.414632Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor finished in 0.051899s, errors=0 2024-11-21T09:22:21.414684Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:901:2746] with tag# 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2024-11-21T09:22:18.952948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:18.953344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:18.953362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046fb/r3tmp/tmp3RpqCv/pdisk_1.dat 2024-11-21T09:22:19.086349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.103752Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.145657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.145687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.156220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.260905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.476682Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2024-11-21T09:22:19.476893Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T09:22:19.503630Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor finished in 0.026692s, errors=0 2024-11-21T09:22:19.503757Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2024-11-21T09:22:19.503773Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:710:2594] with id# {Tag: 0, parent: [1:701:2585], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2024-11-21T09:22:19.504014Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T09:22:19.504906Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:713:2597] 2024-11-21T09:22:19.504944Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Bootstrap called, sample# 0 2024-11-21T09:22:19.504950Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T09:22:19.505096Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2024-11-21T09:22:19.506073Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} finished in 0.000968s, read# 1000 2024-11-21T09:22:19.506151Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:713:2597] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T09:22:19.506165Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:716:2600] 2024-11-21T09:22:19.506170Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Bootstrap called, sample# 0 2024-11-21T09:22:19.506172Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Connect to# 72075186224037888 called 2024-11-21T09:22:19.506203Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:19.514305Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} finished in 0.008093s, read# 1000 2024-11-21T09:22:19.514335Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:716:2600] with chunkSize# 1 finished: 0 { DurationMs: 8 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T09:22:19.514348Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:719:2603] 2024-11-21T09:22:19.514352Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Bootstrap called, sample# 0 2024-11-21T09:22:19.514355Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Connect to# 72075186224037888 called 2024-11-21T09:22:19.514389Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:19.515699Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} finished in 0.001305s, read# 1000 2024-11-21T09:22:19.515718Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:719:2603] with chunkSize# 10 finished: 0 { DurationMs: 1 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T09:22:19.515727Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:722:2606] 2024-11-21T09:22:19.515731Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Bootstrap called, sample# 1000 2024-11-21T09:22:19.515734Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Connect to# 72075186224037888 called 2024-11-21T09:22:19.515760Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:19.516138Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} finished in 0.000295s, sampled# 1000, iter finished# 1, oks# 1000 2024-11-21T09:22:19.516155Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} received keyCount# 1000 2024-11-21T09:22:19.516186Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} started read actor with id# [1:725:2609] 2024-11-21T09:22:19.516192Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:710:2594], subTag: 5} Bootstrap called, will read keys# 1000 2024-11-21T09:22:19.530396Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} received point times# 1000, Inflight left# 0 2024-11-21T09:22:19.530449Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 
{ DurationMs: 14 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2024-11-21T09:22:19.530465Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} finished in 0.026677s with report: { DurationMs: 0 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 8 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 1 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 14 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2024-11-21T09:22:19.530496Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:710:2594] with tag# 3 2024-11-21T09:22:19.905614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.905647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:19.905665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046fb/r3tmp/tmpbhljfK/pdisk_1.dat 2024-11-21T09:22:19.981420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.994883Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:20.036906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.036936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.047440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.151082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.357766Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2024-11-21T09:22:20.357815Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T09:22:20.378503Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 1} TUpsertActor finished in 0.020639s, errors=0 2024-11-21T09:22:20.378640Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2024-11-21T09:22:20.378662Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:709:2593] with id# {Tag: 0, parent: [2:700:2584], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2024-11-21T09:22:20.378984Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T09:22:20.379009Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:712:2596] 2024-11-21T09:22:20.379021Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Bootstrap called, sample# 0 2024-11-21T09:22:20.379026Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T09:22:20.379084Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2024-11-21T09:22:20.379224Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} finished in 0.000132s, read# 10 2024-11-21T09:22:20.379253Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:712:2596] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2024-11-21T09:22:20.379266Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:715:2599] 2024-11-21T09:22:20.379271Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Bootstrap called, sample# 0 2024-11-21T09:22:20.379275Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Connect to# 72075186224037888 called 2024-11-21T09:22:20.379313Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:20.379506Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} finished in 0.000188s, read# 10 2024-11-21T09:22:20.379522Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:715:2599] with chunkSize# 1 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2024-11-21T09:22:20.379532Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:718:2602] 2024-11-21T09:22:20.379537Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Bootstrap called, sample# 0 2024-11-21T09:22:20.379541Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Connect to# 72075186224037888 called 2024-11-21T09:22:20.379577Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:20.379642Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} finished in 0.000061s, read# 10 2024-11-21T09:22:20.379654Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:718:2602] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2024-11-21T09:22:20.379666Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:721:2605] 2024-11-21T09:22:20.379671Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Bootstrap called, sample# 10 2024-11-21T09:22:20.379675Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Connect to# 72075186224037888 called 2024-11-21T09:22:20.379706Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Handle TEvClientConnected called, Status# OK 2024-11-21T09:22:20.379756Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} finished in 0.000035s, sampled# 10, iter finished# 1, oks# 10 2024-11-21T09:22:20.379768Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} received keyCount# 10 2024-11-21T09:22:20.379805Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} started read actor with id# [2:724:2608] 2024-11-21T09:22:20.379811Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:709:2593], subTag: 5} Bootstrap called, will read keys# 10 2024-11-21T09:22:20.401350Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} received point times# 1000, Inflight left# 0 2024-11-21T09:22:20.401428Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 21 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2024-11-21T09:22:20.401456Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} finished in 0.022769s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 21 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2024-11-21T09:22:20.401477Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:709:2593] with tag# 3 >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2024-11-21T09:22:19.193459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:19.193840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:19.193864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0046da/r3tmp/tmpAISfSX/pdisk_1.dat 2024-11-21T09:22:19.287706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.304745Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:19.347108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:19.347144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:19.357594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:19.460567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:19.667797Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2024-11-21T09:22:19.667975Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T09:22:19.689054Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor finished in 0.021045s, errors=0 2024-11-21T09:22:19.689123Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2024-11-21T09:22:19.689134Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2024-11-21T09:22:19.689320Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T09:22:19.689341Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started fullscan actor# [1:713:2597] 2024-11-21T09:22:19.689347Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Bootstrap called, sample# 100 2024-11-21T09:22:19.689349Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T09:22:19.689474Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Handle TEvClientConnected called, Status# OK 
2024-11-21T09:22:19.689622Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} finished in 0.000135s, sampled# 100, iter finished# 1, oks# 100 2024-11-21T09:22:19.689636Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} received keyCount# 100 2024-11-21T09:22:19.689656Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started# 10 actors each with inflight# 1 2024-11-21T09:22:19.689661Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} Bootstrap called 2024-11-21T09:22:19.689664Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689670Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} Bootstrap called 2024-11-21T09:22:19.689672Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689675Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} Bootstrap called 2024-11-21T09:22:19.689677Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689679Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} Bootstrap called 2024-11-21T09:22:19.689681Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689684Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} Bootstrap called 2024-11-21T09:22:19.689686Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689689Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} Bootstrap called 2024-11-21T09:22:19.689693Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689696Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} Bootstrap called 2024-11-21T09:22:19.689698Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689701Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} Bootstrap called 2024-11-21T09:22:19.689703Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689706Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} Bootstrap called 2024-11-21T09:22:19.689708Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.689710Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} Bootstrap called 2024-11-21T09:22:19.689712Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T09:22:19.690141Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} session: ydb://session/3?node_id=1&id=YjMwN2FkYjAtNTg1OTA2NWYtZTYwYzliNS0zYjFiOWNhMw== 2024-11-21T09:22:19.690218Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} session: ydb://session/3?node_id=1&id=YWRlZGIyYWEtOGMzZWJiZjctZTZjZGNhMTMtZjY4NzczMjI= 2024-11-21T09:22:19.690384Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} session: ydb://session/3?node_id=1&id=NzUwOWRkZjItM2U5YzY2YWYtYzFjNTM3NWUtNjFiYmI3MWI= 2024-11-21T09:22:19.690511Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} session: ydb://session/3?node_id=1&id=ZWE3YmY0MjEtYWRkNWU1ZC1hZTFmZDczNy1lNzg3OWQ0ZA== 2024-11-21T09:22:19.690621Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} session: ydb://session/3?node_id=1&id=MTcwMWVmNzItYTBmY2Y3MzktNDEyYTFlZS0zNWFjNjEzYQ== 2024-11-21T09:22:19.690737Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} session: ydb://session/3?node_id=1&id=YTBmNmQ5NDItYTNhMTg5LTk3MTAwMWM5LWNhZTUyZjUz 2024-11-21T09:22:19.690930Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} session: ydb://session/3?node_id=1&id=NzRjNDE4MGYtYTQ2MTk3M2MtMzM5NjNjYTEtNDVkOTFhMjI= 2024-11-21T09:22:19.690937Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} session: ydb://session/3?node_id=1&id=NWVmOTUxODItY2NjMzY2NjctNDE0OWEwZjAtMmJmNWNlMDE= 2024-11-21T09:22:19.691130Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} session: ydb://session/3?node_id=1&id=Y2ExZTM3MzctMzJkOTA3ZDgtOTk5M2U5OWYtOWRhN2QwNzI= 2024-11-21T09:22:19.691137Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} session: ydb://session/3?node_id=1&id=MzA1NjIwYzYtYTY3MzA5YzEtNjIzY2U1NTAtZWZmMmU5OTg= 2024-11-21T09:22:19.691717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:738:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:775:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:19.691769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: R ... ZGYzMDU5ZC00ZTQ1ZTY2ZS1mOTQzNTM0Yi04MWNkNzBiMA== 2024-11-21T09:22:21.787024Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} session: ydb://session/3?node_id=2&id=YjdhZmIwZTEtOTE1Yjg5MDItNzVjMzM3N2YtMTQ4NWYyOA== 2024-11-21T09:22:21.787375Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} session: ydb://session/3?node_id=2&id=NWRlYTdhNTktMmExMjg0N2UtZjY5M2ExY2EtYWRhZjI2NTA= 2024-11-21T09:22:21.787394Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} session: ydb://session/3?node_id=2&id=YjkzNjFhMGUtNmM3MmIyNjItMTQ4NDgxMjUtMjJhYzVjMDU= 2024-11-21T09:22:21.787630Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} session: ydb://session/3?node_id=2&id=YTlhZGI1MzItOTMyMjMyODEtMzRlZTU2NWItZjhiM2QxOWY= 2024-11-21T09:22:21.787639Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} session: ydb://session/3?node_id=2&id=Y2YzZTBiNTgtZGIyZTAyN2ItMzNiNDhkZWQtMjQ5ZmYwYzk= 2024-11-21T09:22:21.787747Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} session: ydb://session/3?node_id=2&id=NzBlYjJhNGMtYmM1ZDIwZmQtZTAzY2UzNzQtZDg2MDZkYzM= 2024-11-21T09:22:21.787875Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} session: ydb://session/3?node_id=2&id=ODhkNWI5OWMtYWVmMzE5ZDMtYmUzNjRmZDQtNDE1ZTA3Yzg= 2024-11-21T09:22:21.788077Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} session: ydb://session/3?node_id=2&id=OGIwMmI2NWQtYTU2MGMzN2EtODUwZDJjZmQtYzIyY2Q0YTM= 2024-11-21T09:22:21.788085Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} session: ydb://session/3?node_id=2&id=Y2JmMDk0ZTYtY2JlYTEzZTctODUzNGM5NGYtN2YwM2VkNWY= 2024-11-21T09:22:21.788740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:764:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:765:2643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:766:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:770:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.788950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2668], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.789027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:795:2673], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.789046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.789114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:804:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:21.789808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:21.958107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:784:2662], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:785:2663], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:786:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:787:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:788:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:789:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:794:2672], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:803:2681], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:21.958217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:822:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:22.052063Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} finished in 0.264299s, errors=0 2024-11-21T09:22:22.052127Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 8 { Tag: 8 DurationMs: 264 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.114682Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 2} finished in 0.327683s, errors=0 2024-11-21T09:22:22.114756Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 2 { Tag: 2 DurationMs: 327 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.191530Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} finished in 0.403882s, errors=0 2024-11-21T09:22:22.191595Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 6 { Tag: 6 DurationMs: 403 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.271709Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} finished in 0.483604s, errors=0 2024-11-21T09:22:22.271769Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 10 { Tag: 10 DurationMs: 483 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.374673Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} finished in 0.586572s, errors=0 2024-11-21T09:22:22.374758Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 11 { Tag: 11 DurationMs: 586 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.485262Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} finished in 0.697865s, errors=0 2024-11-21T09:22:22.485343Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 4 { Tag: 4 DurationMs: 697 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.598520Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} finished in 0.810628s, errors=0 2024-11-21T09:22:22.598591Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 9 { Tag: 9 DurationMs: 810 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.715353Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} finished in 0.928314s, errors=0 2024-11-21T09:22:22.715428Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 3 { Tag: 3 DurationMs: 928 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.850171Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} finished in 1.062759s, errors=0 2024-11-21T09:22:22.850251Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1062 OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.983786Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} finished in 1.196132s, errors=0 2024-11-21T09:22:22.983877Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1196 
OperationsOK: 100 OperationsError: 0 } 2024-11-21T09:22:22.983886Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished in 1.197537s, oks# 1000, errors# 0 2024-11-21T09:22:22.983942Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:709:2593] with tag# 3 |97.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> TGRpcClientLowTest::MultipleSimpleRequests |97.0%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> TGRpcYdbTest::ExplainQuery [GOOD] >> TGRpcYdbTest::OperationCancelAfter >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableOptions >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::ReadTablePg >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestReadTableMultiShard >> YdbYqlClient::TestDecimal >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> YdbYqlClient::BuildInfo >> TGRpcNewCoordinationClient::SessionMethods >> ClientStatsCollector::PrepareQuery >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> YdbOlapStore::ManyTables >> TDatabaseQuotas::DisableWritesToDatabase >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> YdbTableBulkUpsert::Timeout [GOOD] >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> YdbYqlClient::RenameTables [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> 
TTableProfileTests::OverwriteExecutionPolicy >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Timeout [GOOD] Test command err: 2024-11-21T09:22:20.706928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660489977025165:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.706947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004170/r3tmp/tmpVMIFpD/pdisk_1.dat 2024-11-21T09:22:20.776307Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21438, node 1 2024-11-21T09:22:20.808356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.808380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.810988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.828698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.828710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.828712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.828740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:20.904509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.905606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.906298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.906339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.906345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:20.906783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.906827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.906830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.907295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.908398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.908412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.908484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.909002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.909054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.909060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.909070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.909429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.909440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.909442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.909454Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:21.010135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.010377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:21.010896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.010907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T09:22:21.011732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:21.012011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:21.012038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.012041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.012045Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:21.012078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.012088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.012090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:21.014349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply 
TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.014748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 2814749 ... 
roposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037905 CpuTimeUsec: 146 } } 2024-11-21T09:22:24.176330Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 185 } } 2024-11-21T09:22:24.176355Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037901 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037901 CpuTimeUsec: 134 } } 2024-11-21T09:22:24.176378Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 131 } } 2024-11-21T09:22:24.176401Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 169 } } 2024-11-21T09:22:24.176424Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037919 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037919 CpuTimeUsec: 162 } } 2024-11-21T09:22:24.176448Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037899 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037899 CpuTimeUsec: 160 } } 2024-11-21T09:22:24.176471Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037911 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037911 CpuTimeUsec: 143 } } 2024-11-21T09:22:24.176490Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037908 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037908 CpuTimeUsec: 157 } } 2024-11-21T09:22:24.176967Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177013Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts 
operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177033Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177053Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177087Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037910 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037910 CpuTimeUsec: 123 } } 2024-11-21T09:22:24.177128Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177145Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177159Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177175Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177191Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177208Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177224Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177243Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177260Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177276Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177300Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177328Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037916 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037916 CpuTimeUsec: 114 } } 2024-11-21T09:22:24.177342Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177358Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts 
operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177374Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177391Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177407Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177423Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177434Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177457Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177473Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177487Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177500Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177511Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177522Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177548Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037918 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037918 CpuTimeUsec: 133 } } 2024-11-21T09:22:24.177716Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.177755Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.178201Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.178541Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037903 Status: COMPLETE TxId: 281474976715658 Step: 1732180944214 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037903 CpuTimeUsec: 139 } } 2024-11-21T09:22:24.178848Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts 
operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:24.178860Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.178874Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:24.179427Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:24.179446Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:24.179468Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 1 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 2 usec
: Error: Deadline exceeded 4 usec
: Error: Deadline exceeded 8 usec
: Error: Deadline exceeded 16 usec
: Error: Deadline exceeded 32 usec
: Error: Deadline exceeded 64 usec
: Error: Deadline exceeded 128 usec
: Error: Deadline exceeded 256 usec
: Error: Deadline exceeded 512 usec
: Error: Deadline exceeded 1024 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 2048 usec >> TGRpcYdbTest::ReadTablePg [GOOD] >> TGRpcYdbTest::OperationTimeout >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbYqlClient::AlterTableAddIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:149:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:149:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:149:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:149:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:148:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:150:2171] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:154:2057] recipient: [15:150:2171] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:153:2172] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:223:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:153:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:156:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:157:2057] recipient: [16:155:2176] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:159:2057] recipient: [16:155:2176] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:158:2177] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:228:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:153:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:156:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:157:2057] recipient: [17:155:2176] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:159:2057] recipient: [17:155:2176] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:158:2177] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:228:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:154:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:157:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:158:2057] recipient: [18:156:2176] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:160:2057] recipient: [18:156:2176] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:159:2177] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:229:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbS3Internal::BadRequests >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot >> TTableProfileTests::DescribeTableOptions [GOOD] >> KqpSystemView::QueryStatsSimple [GOOD] >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestDecimal1 >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2024-11-21T09:22:20.706519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660491018906934:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.706554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004176/r3tmp/tmp1J3ePt/pdisk_1.dat 2024-11-21T09:22:20.775793Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23523, node 1 2024-11-21T09:22:20.805281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.805305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.806693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.828415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.828430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.828433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.828465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:20.904570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.905681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.908538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.908586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.908588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:20.909051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 2024-11-21T09:22:20.909499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.910815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940959, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.910829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.910924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.911391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.911438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.911452Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.911466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.911477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.911487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.911822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.911835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.911837Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.911848Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:21.548429Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660494912751400:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.548595Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004176/r3tmp/tmpsAusxL/pdisk_1.dat 2024-11-21T09:22:21.558817Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65184, node 4 2024-11-21T09:22:21.576128Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.576143Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.576145Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.576220Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:21.648525Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.648562Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.650152Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.650521Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.650626Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.650635Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.650976Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.651017Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.651026Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:21.651282Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.651290Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.651516Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.651580Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.652368Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941701, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.652382Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.652459Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:21.652956Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.653005Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.653021Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:21.653037Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:21.653053Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:21.653064Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:21.653251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7205759404 ... 
" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:22.951537Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.951564Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.953046Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:22.955201Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.955301Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:22.955311Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.955668Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:22.955710Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:22.955717Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:22.956093Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:22.956108Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:22.956191Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:22.956543Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.957525Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943003, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:22.957539Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:22.957610Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:22.958004Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:22.958040Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:22.958049Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:22.958061Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:22.958068Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:22.958077Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:22.958185Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:22.958205Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:22.958208Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:22.958217Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2024-11-21T09:22:23.114237Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:23.116023Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:23.117877Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {SUCCESS, 0} 2024-11-21T09:22:23.120243Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", 
token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:23.121824Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:23.705038Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660503883091963:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.705080Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004176/r3tmp/tmpM7obr8/pdisk_1.dat 2024-11-21T09:22:23.716134Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7010, node 13 2024-11-21T09:22:23.734349Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.734363Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.734365Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.734400Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:23.805617Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.805652Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.807154Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.808107Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.808185Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.808192Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.808517Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.808549Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.808556Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:23.808829Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.808836Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:23.809094Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.809143Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.809841Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943857, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.809852Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:23.809896Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:23.810251Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.810288Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.810295Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:23.810304Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:23.810314Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:23.810322Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:23.810462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:23.810472Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:23.810475Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.810484Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2024-11-21T09:22:20.713074Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660490752639239:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.713657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004174/r3tmp/tmpF9HNtW/pdisk_1.dat 2024-11-21T09:22:20.777239Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14829, node 1 2024-11-21T09:22:20.812925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.812948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.814341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.828551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.828565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.828566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.828595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:20.906913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.908089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.908108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.908720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.908765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.908773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:22:20.909162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:20.909239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.909583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.910620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940959, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.910643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:20.910716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:20.911202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.911240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.911256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:20.911270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:20.911281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:20.911290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:20.911694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:20.911705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:20.911708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.911719Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:21.555052Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660496257059011:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.555088Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004174/r3tmp/tmp16Aty4/pdisk_1.dat 2024-11-21T09:22:21.567758Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18855, node 4 2024-11-21T09:22:21.582144Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.582155Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.582157Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.582188Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62005 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:21.655165Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.655188Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.656675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.657045Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.657122Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.657131Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.657475Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.657519Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.657527Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:21.657818Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.657827Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.657959Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.658077Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.658696Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941708, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.658705Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.658744Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:21.659052Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.659089Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.659101Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:21.659109Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:21.659122Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:21.659132Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:21.659248Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594 ... 
221043Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.221057Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.221062Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:24.221104Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.221113Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.221114Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.221130Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.221138Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.221140Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:24.221154Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.221163Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.221165Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:24.221181Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.221190Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.221191Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T09:22:24.222011Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944270, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222027Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180944270, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222053Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:24.222077Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, 
step: 1732180944270, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222088Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T09:22:24.222096Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180944270, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222105Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T09:22:24.222113Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732180944270 2024-11-21T09:22:24.222117Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T09:22:24.222563Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222653Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.222671Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T09:22:24.222691Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T09:22:24.222726Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T09:22:24.222747Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T09:22:24.222759Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:24.222769Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T09:22:24.222779Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T09:22:24.222787Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T09:22:24.222797Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:24.222807Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T09:22:24.222818Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T09:22:24.222821Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T09:22:24.222825Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T09:22:24.223193Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.223214Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.223218Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:24.223262Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.223271Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.223272Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T09:22:24.223284Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.223286Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.223287Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:24.223297Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.223305Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.223306Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:24.223318Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.223320Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.223321Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:22:24.223324Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:24.223907Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660506365230830:2298], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:24.293585Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:24.293635Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:24.294975Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:24.295452Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OTg5ZjRiZWMtODNhY2MxZjItYzZkY2MzNWYtYmM2ZDE1Mjk=, ActorId: [13:7439660506365230798:2292], ActorState: ExecuteState, TraceId: 01jd70f9at5macav6y629j3pqd, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T09:22:24.296499Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OTg5ZjRiZWMtODNhY2MxZjItYzZkY2MzNWYtYmM2ZDE1Mjk=, ActorId: [13:7439660506365230798:2292], ActorState: ExecuteState, TraceId: 01jd70f9d88hqa4erszgadbbah, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T09:22:24.297241Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OTg5ZjRiZWMtODNhY2MxZjItYzZkY2MzNWYtYmM2ZDE1Mjk=, ActorId: [13:7439660506365230798:2292], ActorState: ExecuteState, TraceId: 01jd70f9d9bfp4s9ydk49nq3w7, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2024-11-21T09:22:20.707004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660488875729118:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.707056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004173/r3tmp/tmpJvcykg/pdisk_1.dat 2024-11-21T09:22:20.778861Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6914, node 1 2024-11-21T09:22:20.807187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.807221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.808179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.827558Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.827570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.827573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.827610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:22:20.900436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.901496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.902164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.902168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T09:22:20.902625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:20.903197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:20.904886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:20.905274Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:20.905348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:20.905359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:20.905371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:20.905772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:20.905800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:20.905809Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.905822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:21.549867Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660494869391439:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.550141Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004173/r3tmp/tmpOAcxsL/pdisk_1.dat 2024-11-21T09:22:21.564682Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12552, node 4 2024-11-21T09:22:21.573073Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.573085Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.573087Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.573150Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.650167Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.650197Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.651611Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.652762Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.652861Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.652884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.653222Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.653271Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.653279Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:21.653625Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.653635Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.653717Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.653950Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.654577Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941701, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.654587Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.654630Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:21.654903Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.654937Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.654950Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:21.654961Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:21.654972Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:21.654980Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:21.655046Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
KeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T09:22:24.326538Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486355:3517] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T09:22:24.326545Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486355:3517] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2024-11-21T09:22:24.326596Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486355:3517] txid# 281474976715672 HANDLE EvClientConnected 2024-11-21T09:22:24.326642Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.326678Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.327253Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2024-11-21T09:22:24.327281Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486355:3517] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2024-11-21T09:22:24.327286Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486355:3517] txid# 281474976715672 SEND to# [10:7439660508405486354:2350] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2024-11-21T09:22:24.327533Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.327554Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.327556Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.327565Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.330869Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944375, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.331629Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2024-11-21T09:22:24.332573Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.332597Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.332600Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.332611Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.333156Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2024-11-21T09:22:24.336315Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2024-11-21T09:22:24.336457Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T09:22:24.336928Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jd70f9eg02scejt67mxz00rd, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# 
AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:45628, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:24.336991Z node 10 :TX_PROXY DEBUG: actor# [10:7439660504110517241:2135] Handle TEvProposeTransaction 2024-11-21T09:22:24.337001Z node 10 :TX_PROXY DEBUG: actor# [10:7439660504110517241:2135] TxId# 281474976715673 ProcessProposeTransaction 2024-11-21T09:22:24.337008Z node 10 :TX_PROXY DEBUG: actor# [10:7439660504110517241:2135] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [10:7439660508405486448:3604] 2024-11-21T09:22:24.337651Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:45628" 2024-11-21T09:22:24.337676Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2024-11-21T09:22:24.337764Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T09:22:24.337795Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T09:22:24.337806Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T09:22:24.337837Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 HANDLE EvClientConnected 2024-11-21T09:22:24.337892Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.337926Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.338408Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2024-11-21T09:22:24.338452Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2024-11-21T09:22:24.338466Z node 10 :TX_PROXY DEBUG: Actor# [10:7439660508405486448:3604] txid# 281474976715673 SEND to# [10:7439660508405486447:2354] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2024-11-21T09:22:24.338779Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.338788Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.338811Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.338814Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.341657Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944389, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T09:22:24.342894Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2024-11-21T09:22:24.343891Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.343913Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.343918Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:24.343926Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:24.344455Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2024-11-21T09:22:24.346332Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6ea800] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346397Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6ead00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346426Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d4000] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346460Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d6300] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346488Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6e5800] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346514Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6dc700] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346537Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d7200] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346574Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6e0800] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346798Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d0400] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346799Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d4f00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346831Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6ea300] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346848Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6f2500] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346858Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6cdc00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346877Z node 10 :GRPC_SERVER DEBUG: [0x578c3f681e00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346886Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d4a00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346906Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d9a00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346912Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6db800] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346930Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6e0300] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346936Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d1d00] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 
2024-11-21T09:22:24.346947Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d9500] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346958Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6c2d00] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346965Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6d0e00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346982Z node 10 :GRPC_SERVER DEBUG: [0x578c3f6c3200] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.346982Z node 10 :GRPC_SERVER DEBUG: [0x578c3ec83700] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.351446Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2024-11-21T09:22:24.351773Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2024-11-21T09:22:20.707300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660490930596822:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.708318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004182/r3tmp/tmpmffu0V/pdisk_1.dat 2024-11-21T09:22:20.774299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9071, node 1 2024-11-21T09:22:20.805076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.805109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.806660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.827586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.827599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.827602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.827644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:20.901648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.902717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.903340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.903392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.903395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:20.903809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.903868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.903876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.904319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.905320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.906667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.906704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.906719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.906733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.906744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.906754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.907164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.907185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.907189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.907201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:20.917814Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jd70f63mf4zfhnj9764fhmh4, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49726, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998404s 2024-11-21T09:22:20.923080Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jd70f63s9rfaghcvcpc3nwen, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49726, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:21.013642Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jd70f66naxj88df9gvhhgjmq, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49726, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), 
timeout# undef 2024-11-21T09:22:21.013777Z node 1 :TX_PROXY DEBUG: actor# [1:7439660490930596898:2137] Handle TEvProposeTransaction 2024-11-21T09:22:21.013790Z node 1 :TX_PROXY DEBUG: actor# [1:7439660490930596898:2137] TxId# 281474976715658 ProcessProposeTransaction 2024-11-21T09:22:21.013809Z node 1 :TX_PROXY DEBUG: actor# [1:7439660490930596898:2137] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7439660495225564889:2587] 2024-11-21T09:22:21.021128Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:49726" 2024-11-21T09:22:21.021248Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T09:22:21.021271Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T09:22:21.021323Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T09:22:21.021351Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T09:22:21.021361Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2024-11-21T09:22:21.021396Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 HANDLE EvClientConnected 2024-11-21T09:22:21.021471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.021610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:21.021809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.021818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:21.022338Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2024-11-21T09:22:21.022345Z node 1 :TX_PROXY DEBUG: Actor# [1:7439660495225564889:2587] txid# 281474976715658 SEND to# [1:7439660495225564888:2296] 
Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2024-11-21T09:22:21.022360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:21.022541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022561Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:21.022575Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:21.022585Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:21.022588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.022590Z node 1 :FLAT ... node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T09:22:24.382292Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2024-11-21T09:22:24.382293Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T09:22:24.382294Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T09:22:24.382295Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T09:22:24.382296Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T09:22:24.382299Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T09:22:24.382535Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7439660508453430232:2083], Recipient [10:7439660508453429323:2302]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T09:22:24.382555Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T09:22:24.382565Z node 10 :READ_TABLE_API DEBUG: [10:7439660508453430213:2360] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2024-11-21T09:22:24.382570Z node 10 :READ_TABLE_API DEBUG: [10:7439660508453430213:2360] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2024-11-21T09:22:24.382627Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2024-11-21T09:22:24.382688Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2024-11-21T09:22:24.382700Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2024-11-21T09:22:24.382704Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2024-11-21T09:22:24.382718Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2024-11-21T09:22:24.382721Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 
281474976715681, at: 72075186224037897 2024-11-21T09:22:24.382733Z node 10 :READ_TABLE_API DEBUG: [10:7439660508453430213:2360] got stream part, size: 75, RU required: 128 rate limiter absent 2024-11-21T09:22:24.382734Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7439660508453429323:2302], Recipient [10:7439660508453429323:2302]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:24.382737Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:24.382745Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2024-11-21T09:22:24.382751Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2024-11-21T09:22:24.382759Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2024-11-21T09:22:24.382770Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T09:22:24.382777Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2024-11-21T09:22:24.382784Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T09:22:24.382791Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2024-11-21T09:22:24.382794Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2024-11-21T09:22:24.382797Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2024-11-21T09:22:24.382804Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayComplete 2024-11-21T09:22:24.382810Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2024-11-21T09:22:24.382812Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2024-11-21T09:22:24.382815Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2024-11-21T09:22:24.382826Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T09:22:24.382829Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2024-11-21T09:22:24.382832Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2024-11-21T09:22:24.382834Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:24.382837Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T09:22:24.382839Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T09:22:24.382841Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T09:22:24.382853Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T09:22:24.382856Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit 
FinishPropose 2024-11-21T09:22:24.382862Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T09:22:24.382876Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2024-11-21T09:22:24.382878Z node 10 :READ_TABLE_API DEBUG: [10:7439660508453430213:2360] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T09:22:24.382894Z node 10 :READ_TABLE_API NOTICE: [10:7439660508453430213:2360] Finish grpc stream, status: 400000 2024-11-21T09:22:24.383156Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7439660508453430215:2360], Recipient [10:7439660508453429323:2302]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2024-11-21T09:22:24.383176Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2024-11-21T09:22:24.383178Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2024-11-21T09:22:24.383184Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2024-11-21T09:22:24.383197Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7439660508453430215:2360], Recipient [10:7439660508453429323:2302]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2024-11-21T09:22:24.383204Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2024-11-21T09:22:24.383215Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660508453430215:2360], Recipient [10:7439660508453429323:2302]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180944424 TxId: 281474976715680 2024-11-21T09:22:24.383768Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jd70f9fz3d7karttdbc4dm1v, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# unknown, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.009441s 2024-11-21T09:22:24.383931Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d6d00] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.383972Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6dcc00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384007Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6dd600] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384038Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d5e00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384067Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6df900] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384095Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6e3f00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384132Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6c2d00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384158Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d9500] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384186Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d9f00] received request Name# 
FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384244Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6dc200] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384276Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6ed500] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384285Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d8600] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384320Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6e9400] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384326Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6bea00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384361Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6da400] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384363Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6e6700] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384395Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6c2800] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384398Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6dfe00] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384427Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6df400] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384433Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6d1d00] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384462Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6e6200] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384469Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6de000] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384501Z node 10 :GRPC_SERVER DEBUG: [0x44f47ec82100] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.384501Z node 10 :GRPC_SERVER DEBUG: [0x44f47f6c3200] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TGRpcYdbTest::KeepAlive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2024-11-21T09:22:20.934717Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660491262973077:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.934978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004181/r3tmp/tmpmbX1km/pdisk_1.dat 2024-11-21T09:22:20.995226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23982, node 1 2024-11-21T09:22:21.013192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.013206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.013208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.013251Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:21.035441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.035471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.037042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.066017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.067193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.067218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.067754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.067806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.067814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:21.068175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.068183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.068263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.068523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.069295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941113, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.069306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.069363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:21.069796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.069830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.069847Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:21.069857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:21.069863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:21.069880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:21.070284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:21.070299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:21.070302Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:21.070313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1732188140986562 Nodes { NodeId: 1024 Host: "localhost" Port: 3894 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1732188140986562 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 
2024-11-21T09:22:21.789965Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660494616480496:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.790108Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004181/r3tmp/tmpyD2DZd/pdisk_1.dat 2024-11-21T09:22:21.803482Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15947, node 4 2024-11-21T09:22:21.823306Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.823319Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.823321Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.823371Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.889934Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.889960Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.891415Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.893592Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.893688Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.893696Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.894076Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.894131Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.894138Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:21.894450Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.894463Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.894581Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.894792Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.895522Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941939, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.895543Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.895589Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> ... X_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944151, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.104969Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:24.105007Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:24.105390Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.105435Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.105449Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:24.105461Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:24.105469Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:24.105484Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:24.105597Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.105613Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.105616Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:24.105647Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.105666Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.105668Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.105673Z node 13 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:24.106461Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.106592Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.106603Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.106961Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:24.106994Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T09:22:24.107276Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:24.108616Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:24.108686Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:24.108697Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T09:22:24.108997Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.607448Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439660505613614143:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.607516Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:24.609448Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.609477Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.611010Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T09:22:24.611281Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61981 2024-11-21T09:22:24.628200Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: 
"partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" 
EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T09:22:24.638532Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T09:22:24.638656Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:24.859469Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:24.859635Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439660505613614476:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:24.928673Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439660505613614476:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TGRpcNewCoordinationClient::BasicMethods [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 1661, MsgBus: 15045 2024-11-21T09:22:17.125649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660476127927947:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:17.125892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:17.128399Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660476079364603:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:17.128608Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e0c/r3tmp/tmp64I7bd/pdisk_1.dat 2024-11-21T09:22:17.149650Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660477821151381:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:17.150484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 1661, node 1 2024-11-21T09:22:17.179420Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:17.183323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:17.183337Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:17.183339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:17.183373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15045 TClient is connected to server localhost:15045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:17.225800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.226003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:17.226025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:17.227314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:22:17.237604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.249337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:17.249361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:17.249390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:17.249396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:17.250316Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:22:17.250328Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:22:17.250555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:17.250608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:17.301033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.362406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.372660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.410998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660476127929750:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.411022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.431334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.489360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.503161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.560581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.573068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.587449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.602871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660476127930391:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.602900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660476127930396:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.602901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.603453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:17.606775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660476127930398:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:17.809667Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180937807, txId: 281474976710673] shutting down 2024-11-21T09:22:17.892792Z node 2 :BS_PROXY_PUT ERROR: [a1b06ec6ca1bbf99] Result# TEvPutResult {Id# [72075186224037913:1:10:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:17.920518Z node 3 :BS_PROXY_PUT ERROR: [979bf87dfa7f3178] Result# TEvPutResult {Id# [72075186224037915:1:10:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:18.150779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; Trying to start YDB, gRPC: 6947, MsgBus: 63377 2024-11-21T09:22:18.415047Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660482207266254:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:18.415256Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:18.415868Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660480092048765:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:18.415880Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:18.416445Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660483704950993:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:18.416628Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e0c/r3tmp/tmp9Emp9B/pdisk_1.dat 2024-11-21T09:22:18.430610Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6947, node 4 2024-11-21T09:22:18.439169Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:18.439188Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:18.439191Z node 4 :NET_CLASSIFIER WARN: failed to initialize from fi ... have access permissions } 2024-11-21T09:22:22.853785Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:22.854463Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:22.865514Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660497824131052:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2024-11-21T09:22:23.088108Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180943086, txId: 281474976720673] shutting down 2024-11-21T09:22:23.180303Z node 14 :BS_PROXY_PUT ERROR: [a050328835883785] Result# TEvPutResult {Id# [72075186224037901:1:9:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:23.180320Z node 15 :BS_PROXY_PUT ERROR: [c933d7ad6fd10ad6] Result# TEvPutResult {Id# [72075186224037905:1:9:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:23.419858Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; Trying to start YDB, gRPC: 21683, MsgBus: 8482 2024-11-21T09:22:23.763972Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439660503299069667:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.764042Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:23.764619Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7439660503666934769:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.764648Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e0c/r3tmp/tmpYHC5jR/pdisk_1.dat 2024-11-21T09:22:23.773610Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:23.776961Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21683, node 16 2024-11-21T09:22:23.790063Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.790076Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.790078Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.790125Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8482 TClient is connected to server localhost:8482 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:23.864702Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.864747Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.866721Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.866961Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.866987Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.867039Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.867058Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.867863Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.867924Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2024-11-21T09:22:23.867936Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2024-11-21T09:22:23.868084Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.868132Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:22:23.881282Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:23.945139Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:23.964648Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:23.978679Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:24.189628Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439660507594038754:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.189659Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.195901Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.208297Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.271207Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.283985Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.298942Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.319723Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.336597Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439660507594039434:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.336631Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.336657Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439660507594039439:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.337536Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:24.343750Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439660507594039441:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:24.579194Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180944577, txId: 281474976715673] shutting down 2024-11-21T09:22:24.765375Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:24.769522Z node 17 :BS_PROXY_PUT ERROR: [4759a68d2968a742] Result# TEvPutResult {Id# [72075186224037912:1:11:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T09:22:24.774005Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:24.777155Z node 18 :BS_PROXY_PUT ERROR: [764da08c05e23cbe] Result# TEvPutResult {Id# [72075186224037889:1:13:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2024-11-21T09:22:20.707905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660488589838436:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.707940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004175/r3tmp/tmp08wWdN/pdisk_1.dat 2024-11-21T09:22:20.777413Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1293, node 1 2024-11-21T09:22:20.808346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.808373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.810053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.827596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.827613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.827614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.827659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8180 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:20.900471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.901521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.902229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.902233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:20.902601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.903238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.904882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.905389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905448Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.905464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.905476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.905489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.905941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.905947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.905950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.905964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:21.561531Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660495175082299:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.561820Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004175/r3tmp/tmpg08xQ4/pdisk_1.dat 2024-11-21T09:22:21.573492Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10209, node 4 2024-11-21T09:22:21.591225Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.591236Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T09:22:21.591237Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.591272Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.661886Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.661912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.663404Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.663511Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.663630Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.663642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.664039Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.664083Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.664089Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:21.664448Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.664457Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:21.664550Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.664736Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.665564Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941715, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.665575Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:21.665647Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:21.665948Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.665996Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.666010Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:21.666027Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:21.666041Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:21.666057Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:21.666170Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7205759404 ... 
AT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T09:22:24.642663Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.642673Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.642675Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:24.642692Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.642701Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.642703Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:22:24.642718Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.642727Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.642729Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2024-11-21T09:22:24.642743Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.642752Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.642754Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2024-11-21T09:22:24.643460Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944690, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.643475Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180944690, at schemeshard: 72057594046644480 2024-11-21T09:22:24.643498Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T09:22:24.643517Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180944690, at schemeshard: 72057594046644480 2024-11-21T09:22:24.643531Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2024-11-21T09:22:24.643545Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180944690, at schemeshard: 72057594046644480 2024-11-21T09:22:24.643558Z 
node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2024-11-21T09:22:24.643571Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1732180944690 2024-11-21T09:22:24.643583Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2024-11-21T09:22:24.643903Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.643984Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.644001Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T09:22:24.644009Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2024-11-21T09:22:24.644033Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2024-11-21T09:22:24.644037Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2024-11-21T09:22:24.644048Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:3 ProgressState 2024-11-21T09:22:24.644056Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2024-11-21T09:22:24.644067Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2024-11-21T09:22:24.644075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2024-11-21T09:22:24.644079Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T09:22:24.644085Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T09:22:24.644088Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T09:22:24.644090Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2024-11-21T09:22:24.644094Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2024-11-21T09:22:24.644392Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.644406Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.644409Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T09:22:24.644450Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.644461Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.644463Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 
2024-11-21T09:22:24.644478Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.644480Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.644481Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:24.644495Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.644503Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.644504Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T09:22:24.644518Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:24.644525Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:24.644527Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T09:22:24.644532Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T09:22:24.644995Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660508709763912:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:22:24.732552Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:24.732595Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:24.733136Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:24.741348Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70f9r09rk0yth4kshk7se9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2UyZWM4ODMtNDUzYmVmM2EtZDQ3YWY5NjUtNTg1ODY3MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:24.742028Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70f9r09rk0yth4kshk7se9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2UyZWM4ODMtNDUzYmVmM2EtZDQ3YWY5NjUtNTg1ODY3MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:24.752148Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70f9v63ts9jx088hymjxnv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2UyZWM4ODMtNDUzYmVmM2EtZDQ3YWY5NjUtNTg1ODY3MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:24.752960Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70f9v63ts9jx088hymjxnv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2UyZWM4ODMtNDUzYmVmM2EtZDQ3YWY5NjUtNTg1ODY3MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2024-11-21T09:22:21.449392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660496249628607:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.449457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00415b/r3tmp/tmppGhmaP/pdisk_1.dat 2024-11-21T09:22:21.497884Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27390, node 1 2024-11-21T09:22:21.511175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.511190Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.511191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.511437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.532919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.533597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.533618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.534158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.534246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.534254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:21.534525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.534808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.534818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:21.535147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.535948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941582, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.535959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:21.536013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:21.536346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.536392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.536408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:21.536427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:21.536441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:21.536455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:21.536795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:21.536810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:21.536814Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:21.536823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:21.542832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.542878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.543437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2024-11-21T09:22:21.543475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.543510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.543523Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.543554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:21.543661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:21.543679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:21.543689Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:21.543748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:21.543755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:21.543756Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:21.546996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941596, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.547010Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180941596, at schemeshard: 72057594046644480 2024-11-21T09:22:21.547033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T09:22:21.547375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.547424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.547438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T09:22:21.547453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T09:22:21.547460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T09:22:21.547475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T09:22:21.547575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:21.547587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:21.547590Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:21.547611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2024-11-21T09:22:21.547612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:21.547613Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:21.547617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T09:22:21.549685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.549710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.550371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /Root/TheDirectory, pathId: 0, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.550406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.550414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.550882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: DROP DIRECTORY, path: /Root/TheDirectory 2024-11-21T09:22:21.550917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPub ... se file: (empty maybe) 2024-11-21T09:22:23.827151Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.827152Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.827189Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:23.895302Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.895350Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.896886Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.897951Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.898051Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.898064Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.898451Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.898492Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.898502Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:23.898810Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.898819Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:23.899007Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.899082Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.899775Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943948, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.899789Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:23.899846Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:23.900272Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.900307Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.900322Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:23.900336Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:23.900350Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:23.900362Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:23.900487Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:23.900502Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:23.900505Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.900517Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1
: Error: Operation cancelled. 2024-11-21T09:22:24.658389Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660508212673499:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.658409Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00415b/r3tmp/tmpO7t9Ig/pdisk_1.dat 2024-11-21T09:22:24.670107Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20369, node 13 2024-11-21T09:22:24.687934Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.687946Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.687948Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.687980Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:24.758949Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.758977Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.760469Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:24.761883Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.761977Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.761988Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.762350Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.762390Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.762398Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:24.762666Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.762674Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:24.762781Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.762956Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.763605Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944809, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.763615Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:24.763664Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:24.763990Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.764022Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.764036Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:24.764046Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:24.764051Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:24.764060Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:24.764187Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:24.764223Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:24.764232Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.764242Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2024-11-21T09:22:20.706532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660488105091161:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.706553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00417f/r3tmp/tmpwXj2vJ/pdisk_1.dat 2024-11-21T09:22:20.774359Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10102, node 1 2024-11-21T09:22:20.806420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.806457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.807815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:20.827569Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.827582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.827583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.827616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:22:20.900458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.901445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.902130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.902133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:20.902606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.903053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 
72057594046644480 2024-11-21T09:22:20.905112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.905191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.905718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905764Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.905772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.905778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.905786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.906219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.906252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.906255Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.906271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:21.010104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/FooTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.010222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:21.010458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.010488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/FooTable 2024-11-21T09:22:21.011133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:21.011332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:21.011365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.011372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.011376Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:21.011415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.011421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.011422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:21.012915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.012935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:21.013271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:21.021057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:21.021068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:21.021085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:21.021384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:21.022005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941071, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180941071 2024-11-21T09:22:21.022051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:21.022366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.022446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:21.022741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.022755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.022758Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:21.022790Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.022799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.022800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 2 ... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00417f/r3tmp/tmpRbc1nE/pdisk_1.dat 2024-11-21T09:22:24.724155Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:24.733227Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7534, node 13 2024-11-21T09:22:24.746750Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.746761Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.746763Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.746799Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:24.820518Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.820552Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.822282Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:24.823502Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.823600Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.823612Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.823978Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.824024Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.824026Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:24.824330Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.824341Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:24.824540Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.824647Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.825342Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944872, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.825354Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:24.825408Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:24.825743Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.825792Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.825808Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:24.825818Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:24.825836Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:24.825848Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:24.825935Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:24.825957Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:24.825961Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.825972Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:24.834952Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.834988Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:24.835077Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.835088Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.835725Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T09:22:24.835777Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.835823Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.835843Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T09:22:24.835913Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:24.835984Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.835995Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.835999Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:24.836035Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.836042Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.836043Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:24.837515Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:24.837534Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:24.889466Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976715658:0 3 -> 128 2024-11-21T09:22:24.889882Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.890615Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944935, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.890631Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T09:22:24.890656Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:24.891000Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.891047Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.891061Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:24.891069Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:24.891080Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:24.891123Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:24.891221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.891233Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.891238Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:24.891270Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.891279Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.891280Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.891284Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> TGRpcYdbTest::OperationTimeout [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbS3Internal::BadRequests [GOOD] >> YdbScripting::BasicV0 ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/ydb/ut/unittest >> TGRpcYdbTest::OperationTimeout [GOOD] Test command err: 2024-11-21T09:22:21.321582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660495813689850:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.321787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00416f/r3tmp/tmpUfq0Je/pdisk_1.dat 2024-11-21T09:22:21.371796Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3160, node 1 2024-11-21T09:22:21.385511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.385522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.385524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.385556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:21.406910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.407624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.407636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.408087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:21.408130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:21.408138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:21.408500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:21.408543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.408550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:21.408983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.409816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941456, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.409826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:21.409883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:21.410199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.410239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.410254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:21.410270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:21.410282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:21.410292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:21.410558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:21.410573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:21.410575Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:21.410584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:21.421874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.421897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.423293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.967687Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660494677071352:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.967721Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/00416f/r3tmp/tmpp037cA/pdisk_1.dat 2024-11-21T09:22:21.982929Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29393, node 4 2024-11-21T09:22:21.999855Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:21.999865Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:21.999867Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:21.999917Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:22.068006Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.068034Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.069529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:22.070895Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.070977Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:22.070985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.071260Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:22.071288Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:22.071294Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:22.071624Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:22.071635Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:22.071717Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:22.071921Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.072734Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180942121, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:22.072744Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:22.072782Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:22.073114Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:22.073146Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:22.073157Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:22.073165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:22.073174Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:22.073181Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:22.073312Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
ota request to queue ShardId: 0, TxId: 281474976715664 2024-11-21T09:22:24.516614Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115750:2314] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2024-11-21T09:22:24.516756Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115750:2314] got stream part, size: 84, RU required: 128 rate limiter absent 2024-11-21T09:22:24.516865Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115750:2314] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T09:22:24.516961Z node 10 :READ_TABLE_API NOTICE: [10:7439660507581115750:2314] Finish grpc stream, status: 400000 2024-11-21T09:22:24.517496Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd70f9m56htmhdmzwgvjkm8c, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33970, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:24.523519Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115772:2316] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2024-11-21T09:22:24.523534Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115772:2316] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2024-11-21T09:22:24.523697Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115772:2316] got stream part, size: 210, RU required: 128 rate limiter absent 2024-11-21T09:22:24.523848Z node 10 :READ_TABLE_API DEBUG: [10:7439660507581115772:2316] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T09:22:24.524061Z node 10 :READ_TABLE_API NOTICE: [10:7439660507581115772:2316] Finish grpc stream, status: 400000 2024-11-21T09:22:24.524654Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6db800] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524666Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6dae00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524724Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d5e00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524725Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d1800] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524760Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d9000] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524776Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d3b00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524796Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6cd700] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524812Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6dd600] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524832Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e9e00] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524847Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e7b00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524879Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6de000] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524895Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d1300] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524914Z 
node 10 :GRPC_SERVER DEBUG: [0x447fbf6cfa00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524925Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6b7e00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524952Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e8500] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524956Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e8a00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524982Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e9400] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.524988Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e8f00] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525011Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e9900] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525018Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e1c00] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525045Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6e1700] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525046Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6cf500] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525072Z node 10 :GRPC_SERVER DEBUG: [0x447fbec8b000] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:24.525077Z node 10 :GRPC_SERVER DEBUG: [0x447fbf6d0e00] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:25.135422Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660512719409246:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:25.135586Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00416f/r3tmp/tmpS152C1/pdisk_1.dat 2024-11-21T09:22:25.151295Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29027, node 13 2024-11-21T09:22:25.161544Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:25.161559Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:25.161561Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:25.161601Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:25.235753Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:25.235786Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:25.237270Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:25.238664Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.238765Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.238777Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.239275Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:25.239322Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:25.239330Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:25.239723Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.239734Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:25.239799Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:25.240027Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.240949Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945285, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:25.240963Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:25.241027Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:25.241501Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.241550Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.241567Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:25.241586Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:25.241599Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:25.241616Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:25.241772Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.241792Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:25.241801Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:25.241814Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1
: Error: Operation timeout. >> YdbYqlClient::TestYqlWrongTable >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> TGRpcYdbTest::ExecuteQueryImplicitSession >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> YdbTableBulkUpsert::ValidRetry >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> YdbQueryService::TestCreateAndAttachSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: 2024-11-21T09:22:20.728055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660492010863297:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.728256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004180/r3tmp/tmpEzAPHW/pdisk_1.dat 2024-11-21T09:22:20.783642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24570, node 1 2024-11-21T09:22:20.827819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.827843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.827844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.827875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:20.828433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.828463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.829926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:20.900440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.901234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.902044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.902054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:20.902520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.903126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:20.904345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.904919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.905326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.905372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.905385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.905396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.905786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.905804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.905807Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.905817Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:27032 2024-11-21T09:22:20.931993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.932120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.932131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.932650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2024-11-21T09:22:20.934898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940980, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.935486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:20.935508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:20.935741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:20.936660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.936791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.936803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.937321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2024-11-21T09:22:21.437915Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660494182094915:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.438155Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:21.439542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:21.439566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:21.440782Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:22:21.441014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:21.453373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.453661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.453672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.453711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T09:22:21.453732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.453735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T09:22:21.453817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.454400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2024-11-21T09:22:21.768039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941813, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.768672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:21.768736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T09:22:21.768908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at sche ... 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:25.179300Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:25.179345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:25.180900Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:25.182057Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.182161Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.182174Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.182629Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:25.182676Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:25.182688Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:25.183112Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.183124Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:25.183261Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:25.183486Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.184324Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945229, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:25.184338Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:25.184402Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:25.184768Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.184809Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.184817Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:25.184829Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:25.184838Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:25.184849Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:25.185200Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.185216Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:25.185220Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:25.185232Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:25.389807Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.389976Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:25.390158Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.390170Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.390960Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T09:22:25.391022Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.391083Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.391106Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:25.391173Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:25.391282Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.391302Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.391306Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:25.391362Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.391369Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.391371Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:25.392987Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:25.393015Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:25.393452Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:25.446013Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:25.446042Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:25.446068Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:25.446484Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:25.447246Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945495, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:25.447260Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180945495 2024-11-21T09:22:25.447287Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:25.447782Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.447862Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T09:22:25.447884Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:25.448047Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.448061Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.448065Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:25.448164Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.448176Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.448178Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:25.448828Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180945495 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 425 } } 2024-11-21T09:22:25.448903Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:25.448917Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.448921Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:25.449223Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:25.449243Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:25.449254Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndex [GOOD] Test command err: 2024-11-21T09:22:20.783659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660491839336038:2151];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.783794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004172/r3tmp/tmpicXwVD/pdisk_1.dat 2024-11-21T09:22:20.837751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14190, node 1 2024-11-21T09:22:20.857699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T09:22:20.857709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.857712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:20.857745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:20.883746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.883782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.885445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:20.915811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.917174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.917198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.917929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.917990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.918002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:20.918510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.918523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:20.918616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.918947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.920005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940966, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.920018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:20.920089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:20.920543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.920597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.920613Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:20.920632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:20.920645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:20.920664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:20.921086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:20.921109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:20.921118Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.921142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:21.131097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/OlapStore, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:21.131143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.131151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/OlapStore/OlapTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.131263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 1 -> 2 2024-11-21T09:22:21.131434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:21.131443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.132247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/OlapStore/OlapTable 2024-11-21T09:22:21.132314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:21.132369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:21.132390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:1 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:21.132436Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:21.132459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:21.132644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.132659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.132663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:21.132711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.132718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.132720Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:21.132734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:21.132741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:21.132742Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T09:22:21.134268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:21.134297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 2 -> 3 2024-11-21T09:22:21.134741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:1 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:21.186621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:1 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:21.186636Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:21.186666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 3 -> 128 2024-11-21T09:22:21.187089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:1 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:21.187786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180941232, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:21.187802Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180941232, at schemeshard: 72057594046644480 2024-11-21T09:22:21.187830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:21.187861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:1 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180941232 2024-11-21T09:22:21.187885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 129 2024-11-21T09:22:21.188259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoar ... rd: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.655559Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 4 2024-11-21T09:22:25.657496Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TConfigureParts operationId#281474976715668:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:25.657507Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:25.657528Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 3 -> 128 2024-11-21T09:22:25.657600Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715668:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:25.657607Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:25.657613Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 4 -> 128 2024-11-21T09:22:25.657963Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TPropose operationId#281474976715668:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658011Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715668:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658824Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945705, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658836Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TPropose operationId#281474976715668:0 HandleReply TEvOperationPlan, step: 1732180945705, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658854Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 128 -> 129 2024-11-21T09:22:25.658871Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TPropose, operationId: 281474976715668:1 HandleReply TEvOperationPlan, step: 1732180945705, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658884Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 128 -> 136 2024-11-21T09:22:25.658893Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
TDropTable TPropose operationId#281474976715668:2 HandleReply TEvOperationPlan, step: 1732180945705, at schemeshard: 72057594046644480 2024-11-21T09:22:25.658904Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 128 -> 136 2024-11-21T09:22:25.659332Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.659399Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.659428Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TWaitRenamedPathPublication operationId: 281474976715668:1 ProgressState, operation type: TxDropTableIndex, at tablet72057594046644480 2024-11-21T09:22:25.659437Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 136 -> 137 2024-11-21T09:22:25.659470Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:25.659511Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715668:2 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T09:22:25.659519Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715668:2 ProgressState, no renaming has been detected for this operation 2024-11-21T09:22:25.659520Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 136 -> 137 2024-11-21T09:22:25.659585Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T09:22:25.659597Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.659600Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T09:22:25.660190Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715668 Step: 1732180945705 OrderId: 281474976715668 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 237 } } 2024-11-21T09:22:25.660251Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TDeletePathBarrier operationId: 281474976715668:1 ProgressState, operation type: TxDropTableIndex, at tablet72057594046644480 2024-11-21T09:22:25.660275Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T09:22:25.660308Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715668 Step: 1732180945705 OrderId: 281474976715668 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 199 } } 2024-11-21T09:22:25.660327Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:25.660339Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard 
schema changes has been received, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.660342Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 129 -> 240 2024-11-21T09:22:25.660376Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046644480 2024-11-21T09:22:25.660712Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2024-11-21T09:22:25.660728Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 1/3 2024-11-21T09:22:25.660732Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 2 2024-11-21T09:22:25.660749Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TDeletePathBarrier operationId: 281474976715668:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: RenamePathBarrier }, at tablet72057594046644480 2024-11-21T09:22:25.660800Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 137 -> 240 2024-11-21T09:22:25.660820Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: RenamePathBarrier }, at tablet72057594046644480 2024-11-21T09:22:25.660832Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 137 -> 129 2024-11-21T09:22:25.661224Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.661293Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.661311Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:1 ProgressState 2024-11-21T09:22:25.661325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 2/3 2024-11-21T09:22:25.661358Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:2 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:25.661371Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:2, at schemeshard: 72057594046644480 2024-11-21T09:22:25.661381Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 129 -> 240 2024-11-21T09:22:25.661408Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T09:22:25.661417Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.661420Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 18446744073709551615 2024-11-21T09:22:25.661545Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T09:22:25.661560Z node 10 :FLAT_TX_SCHEMESHARD 
INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.661563Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 8 2024-11-21T09:22:25.661600Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T09:22:25.661609Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.661617Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T09:22:25.661619Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T09:22:25.661622Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 18446744073709551615 2024-11-21T09:22:25.661981Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715668:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.662066Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:2 progress is 3/3 2024-11-21T09:22:25.662084Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2024-11-21T09:22:25.662105Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2024-11-21T09:22:25.662116Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2024-11-21T09:22:25.664073Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:25.665529Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2024-11-21T09:22:25.665910Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> YdbLogStore::LogStore >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::DataValidation >> YdbYqlClient::TestColumnOrder >> YdbYqlClient::CreateTableWithPartitionAtKeys >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is 
[0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:141:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:144:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:145:2057] recipient: [13:143:2166] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:147:2057] recipient: [13:143:2166] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:146:2167] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:216:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:141:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:144:2057] recipient: [14:143:2166] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:145:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:147:2057] recipient: [14:143:2166] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:146:2167] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:216:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:142:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:145:2057] recipient: [15:144:2166] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:146:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:147:2167] sender: [15:148:2057] recipient: [15:144:2166] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:147:2167] Leader for TabletID 72057594037927937 is [15:147:2167] sender: [15:217:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:147:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:150:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:151:2057] recipient: [16:149:2171] Leader for TabletID 72057594037927937 is [16:152:2172] sender: [16:153:2057] recipient: [16:149:2171] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:152:2172] Leader for TabletID 72057594037927937 is [16:152:2172] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:147:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:150:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:151:2057] recipient: [17:149:2171] Leader for TabletID 72057594037927937 is [17:152:2172] sender: [17:153:2057] recipient: [17:149:2171] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:152:2172] Leader for TabletID 72057594037927937 is [17:152:2172] sender: [17:222:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:148:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:150:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:152:2057] recipient: [18:151:2171] Leader for TabletID 72057594037927937 is [18:153:2172] sender: [18:154:2057] recipient: [18:151:2171] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:153:2172] Leader for TabletID 72057594037927937 is [18:153:2172] sender: [18:201:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:153:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:154:2057] recipient: [19:152:2173] Leader for TabletID 72057594037927937 is [19:155:2174] sender: [19:156:2057] recipient: [19:152:2173] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:155:2174] Leader for TabletID 72057594037927937 is [19:155:2174] sender: [19:225:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:153:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:154:2057] recipient: [20:152:2173] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:156:2057] recipient: [20:152:2173] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:155:2174] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:225:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:154:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:155:2057] recipient: [21:153:2173] Leader for TabletID 72057594037927937 is [21:156:2174] sender: [21:157:2057] recipient: [21:153:2173] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:156:2174] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] >> YdbScripting::MultiResults >> KqpSystemView::Join [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 >> YdbYqlClient::TestYqlIssues >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestAttachTwice >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> YdbYqlClient::TestReadTableBatchLimits [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 61496, MsgBus: 5963 2024-11-21T09:22:15.941482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660469224803527:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:15.941501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e60/r3tmp/tmpgVkWCt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61496, node 1 2024-11-21T09:22:15.997138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:15.998703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:15.998715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:15.998716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:15.998745Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5963 TClient is connected to server localhost:5963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:16.042328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:16.042363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:16.043390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:16.043554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.054555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.069111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.084044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.096424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:16.222085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473519772363:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.222112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.259810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.266576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.276940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.283841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.338321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.347011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:16.355789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473519772878:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.355815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660473519772883:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.355819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:16.356509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:16.360340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660473519772885:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T09:22:16.534785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180936532, txId: 281474976710671] shutting down waiting... 2024-11-21T09:22:17.551155Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180937549, txId: 281474976710673] shutting down waiting... 2024-11-21T09:22:18.574004Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180938571, txId: 281474976710675] shutting down waiting... 2024-11-21T09:22:19.591067Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180939589, txId: 281474976710677] shutting down waiting... 2024-11-21T09:22:20.609714Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180940608, txId: 281474976710679] shutting down 2024-11-21T09:22:20.942080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439660469224803527:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.942130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2024-11-21T09:22:21.626657Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180941625, txId: 281474976710681] shutting down waiting... 2024-11-21T09:22:22.649538Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180942647, txId: 281474976710683] shutting down waiting... 2024-11-21T09:22:23.667824Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180943665, txId: 281474976710685] shutting down waiting... 2024-11-21T09:22:24.685993Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180944684, txId: 281474976710687] shutting down waiting... 2024-11-21T09:22:25.705677Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180945704, txId: 281474976710689] shutting down waiting... 
2024-11-21T09:22:26.728340Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180946726, txId: 281474976710691] shutting down 2024-11-21T09:22:26.772941Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180946770, txId: 281474976710693] shutting down >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Uint8 >> YdbTableBulkUpsert::DataValidation [GOOD] >> TYqlDateTimeTests::SimpleUpsertSelect >> KqpSystemView::PartitionStatsFollower [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> TGRpcClientLowTest::GrpcRequestProxy >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestBusySession >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TTableProfileTests::WrongTableProfile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 9876, MsgBus: 17362 2024-11-21T09:22:16.906914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660473793451219:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:16.907244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001e19/r3tmp/tmpGgcfK7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9876, node 1 2024-11-21T09:22:16.964015Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:16.964190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:16.964203Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:16.964230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:16.964272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17362 TClient is connected to server localhost:17362 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:17.007861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:17.007886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:17.009002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:17.033976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:17.175601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660478088419127:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.175635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:17.198628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439660478088419152:2292], Recipient [1:7439660473793451627:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:17.198648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:17.198651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T09:22:17.198661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439660478088419148:2289], Recipient [1:7439660473793451627:2191]: {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2024-11-21T09:22:17.198663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T09:22:17.206637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:22:17.206718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.206744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976715658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2024-11-21T09:22:17.206834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T09:22:17.206848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T09:22:17.206852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T09:22:17.206874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T09:22:17.206882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:17.206902Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 
2024-11-21T09:22:17.207055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:17.207064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207073Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:17.207079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:22:17.207085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T09:22:17.207205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2024-11-21T09:22:17.207236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:22:17.207239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715658:0 2024-11-21T09:22:17.207275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7439660473793451627:2191], Recipient [1:7439660473793451627:2191]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:17.207283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:22:17.207288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:22:17.207310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T09:22:17.207326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660473793451702:2234], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2024-11-21T09:22:17.207336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660473793451702:2234], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2024-11-21T09:22:17.207345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:7439660478088419152:2292], Recipient [1:7439660473793451627:2191]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:17.207368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:17.207375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:17.207383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:17.207454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T09:22:17.207463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:17.207487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439660478088419156:2295], Recipient [1:7439660473793451627:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:17.207494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:17.207495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T09:22:17.207503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2711249 ... 6644480 2024-11-21T09:22:27.214400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7439660478088419171:2304], Recipient [1:7439660473793451627:2191]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 768 RowCount: 4 IndexSize: 0 InMemSize: 768 LastAccessTime: 1732180937319 LastUpdateTime: 1732180937306 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82136 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1732180937209 TableOwnerId: 72057594046644480 FollowerId: 0 2024-11-21T09:22:27.214415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T09:22:27.214430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 768 rowCount 4 cpuUsage 0 2024-11-21T09:22:27.214453Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: DataSize: 768 RowCount: 4 IndexSize: 0 InMemSize: 768 LastAccessTime: 1732180937319 LastUpdateTime: 1732180937306 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T09:22:27.214471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will 
delay TTxStoreTableStats on# 0.099995s, queue# 1 2024-11-21T09:22:27.217541Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7439660478088419223:2306]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217541Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7439660478088419226:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217548Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217556Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T09:22:27.217570Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217577Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T09:22:27.217608Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 1, TableInfos size = 1 2024-11-21T09:22:27.217608Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7439660478088419225:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217611Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2024-11-21T09:22:27.217613Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T09:22:27.217622Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 2 2024-11-21T09:22:27.217756Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7439660521038092487:2378], Recipient [1:7439660478088419223:2306]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T09:22:27.217759Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T09:22:27.217766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439660521038092488:2503], Recipient [1:7439660473793451627:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:27.217772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:27.217775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T09:22:27.217837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7439660478088419223:2306], Recipient [1:7439660473793451627:2191]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1732180937216 TableOwnerId: 72057594046644480 FollowerId: 1 2024-11-21T09:22:27.217856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T09:22:27.217870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 
rowCount 0 cpuUsage 0 2024-11-21T09:22:27.217883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T09:22:27.314867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [1:7439660473793451627:2191]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T09:22:27.314888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T09:22:27.314892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T09:22:27.314914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2024-11-21T09:22:27.314919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2024-11-21T09:22:27.314946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 768 row count 4 2024-11-21T09:22:27.314974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2024-11-21T09:22:27.314986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 768 2024-11-21T09:22:27.314990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2024-11-21T09:22:27.315019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2024-11-21T09:22:27.315054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0 2024-11-21T09:22:27.315058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=1, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2024-11-21T09:22:27.315060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=1, pathId 2: RowCount 0, DataSize 0 2024-11-21T09:22:27.315062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 1 2024-11-21T09:22:27.315082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:22:27.315241Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 768 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1732180937209 AccessTime: 1732180937319 UpdateTime: 1732180937306 InFlightTxCount: 0 RowUpdates: 4 
RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 2024-11-21T09:22:27.315269Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 1 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1732180937216 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 1 2024-11-21T09:22:27.315324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [1:7439660473793451627:2191]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T09:22:27.315332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T09:22:27.315334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 ... SELECT from partition_stats, attempt 2 2024-11-21T09:22:27.415636Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660521038092502:2384], owner: [1:7439660521038092498:2382], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:27.415782Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660521038092502:2384], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:27.415894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7439660521038092502:2384], Recipient [1:7439660473793451627:2191]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2024-11-21T09:22:27.415904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2024-11-21T09:22:27.415940Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660521038092502:2384], row count: 2, finished: 1 2024-11-21T09:22:27.415955Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660521038092502:2384], owner: [1:7439660521038092498:2382], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:27.416376Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7439660473793451227:2058], database# /Root, query hash# 14960494650040056739, cpu time# 26043 >> YdbYqlClient::TestTzTypesFullStack >> YdbScripting::MultiResults [GOOD] >> YdbScripting::Params ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD] Test command err: 2024-11-21T09:22:23.143935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660501772862553:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.143953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00413f/r3tmp/tmp1fhyAC/pdisk_1.dat 
2024-11-21T09:22:23.195818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9923, node 1 2024-11-21T09:22:23.214233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.214247Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.214249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.214278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:23.244356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.244394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.245869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.272765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.273554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.273575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.274012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.274072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.274080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:23.274511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.274526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:23.274596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.274856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.275916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943325, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.275932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:23.276016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:23.276495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.276542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.276559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:23.276568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:23.276581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:23.276590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:23.277070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:23.277091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:23.277096Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.277114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:23.452165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660501772863497:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:23.452496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:23.455507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660501772863536:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:23.457016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:23.458578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.458681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 1 -> 2 2024-11-21T09:22:23.458791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.458805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.458901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.458922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.458975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.458979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715662:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715664:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusMultipleModifications, reason: Check 
failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/Test', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479, at schemeshard: 72057594046644480 2024-11-21T09:22:23.459083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715665:0, at schemeshard: 72057594046644480 20 ... anActor 2024-11-21T09:22:26.980119Z node 10 :READ_TABLE_API DEBUG: [10:7439660514869128782:2357] Adding quota request to queue ShardId: 0, TxId: 281474976715678 2024-11-21T09:22:26.980126Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2024-11-21T09:22:26.980139Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2024-11-21T09:22:26.980163Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2024-11-21T09:22:26.980165Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715679, at: 72075186224037897 2024-11-21T09:22:26.980172Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7439660514869127232:2299], Recipient [10:7439660514869127232:2299]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:26.980174Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:26.980178Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2024-11-21T09:22:26.980181Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2024-11-21T09:22:26.980184Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715679] at 72075186224037897 for ReadTableScan 2024-11-21T09:22:26.980187Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit ReadTableScan 2024-11-21T09:22:26.980199Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715679] at 72075186224037897 error: , IsFatalError: 0 2024-11-21T09:22:26.980202Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2024-11-21T09:22:26.980221Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit ReadTableScan 2024-11-21T09:22:26.980223Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit FinishPropose 2024-11-21T09:22:26.980225Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit FinishPropose 2024-11-21T09:22:26.980229Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is DelayComplete 2024-11-21T09:22:26.980230Z node 10 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715679] at 72075186224037897 executing on unit FinishPropose 2024-11-21T09:22:26.980231Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit CompletedOperations 2024-11-21T09:22:26.980233Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit CompletedOperations 2024-11-21T09:22:26.980238Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2024-11-21T09:22:26.980239Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit CompletedOperations 2024-11-21T09:22:26.980241Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715679] at 72075186224037897 has finished 2024-11-21T09:22:26.980242Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:26.980243Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T09:22:26.980244Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T09:22:26.980245Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T09:22:26.980251Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T09:22:26.980253Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715679] at 72075186224037897 on unit FinishPropose 2024-11-21T09:22:26.980255Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715679 at tablet 72075186224037897 send to client, exec latency: 5 ms, propose latency: 6 ms, status: COMPLETE 2024-11-21T09:22:26.980268Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2024-11-21T09:22:26.980390Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127239:2302]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980403Z node 10 :READ_TABLE_API NOTICE: [10:7439660514869128782:2357] Finish grpc stream, status: 400000 2024-11-21T09:22:26.980422Z node 10 :READ_TABLE_API DEBUG: [10:7439660514869128782:2357] Send zero quota to Shard 0, TxId 281474976715678 2024-11-21T09:22:26.980425Z node 10 :READ_TABLE_API DEBUG: [10:7439660514869128782:2357] Send zero quota to Shard 0, TxId 281474976715678 2024-11-21T09:22:26.980426Z node 10 :READ_TABLE_API DEBUG: [10:7439660514869128782:2357] Send zero quota to Shard 0, TxId 281474976715678 2024-11-21T09:22:26.980428Z node 10 :READ_TABLE_API DEBUG: [10:7439660514869128782:2357] Send zero quota to Shard 0, TxId 281474976715678 2024-11-21T09:22:26.980435Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127247:2304]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980458Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127289:2307]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980459Z node 10 :TX_DATASHARD TRACE: StateWork, 
received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127242:2303]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980481Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127232:2299]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980500Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127265:2305]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980501Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127238:2301]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980528Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127233:2300]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980531Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127268:2306]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.980552Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439660514869128783:2357], Recipient [10:7439660514869127231:2298]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732180947021 TxId: 281474976715678 2024-11-21T09:22:26.991700Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6f2000] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991787Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6d4000] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991829Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6ee400] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991859Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6c3c00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991874Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6e0800] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991910Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6f1b00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991914Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6d8b00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991940Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6c3200] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991950Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6f0700] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.991973Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6d7200] received request Name# DrainNode ok# 
false data# peer# current inflight# 0 2024-11-21T09:22:26.991984Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6d3b00] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992004Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6dae00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992016Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6f2500] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992041Z node 10 :GRPC_SERVER DEBUG: [0x5724eb7ca200] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992052Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6ecb00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992063Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6ce600] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992085Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6df400] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992093Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6ec600] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992114Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6f1100] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992132Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6ed000] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992138Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6e4900] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992161Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6dea00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992162Z node 10 :GRPC_SERVER DEBUG: [0x5724fec8c080] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:26.992186Z node 10 :GRPC_SERVER DEBUG: [0x5724ff6e5300] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DataValidation [GOOD] Test command err: 2024-11-21T09:22:23.236482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660501852335849:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.236531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00413b/r3tmp/tmp1gYsvJ/pdisk_1.dat 2024-11-21T09:22:23.297470Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6373, node 1 2024-11-21T09:22:23.316076Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.316092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.316094Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.316133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19106 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:23.336698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.336730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.338307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:23.339365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.340474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.340488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.341038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.341103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.341115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:23.341484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.341496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:23.341538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.341871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.342827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943388, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.342840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:23.342910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:23.343336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.343376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.343391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:23.343406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:23.343418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:23.343428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:23.343848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:23.343862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:23.343866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.343879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:23.518541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNotNullColumns, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.518663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:23.518796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.518808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.519407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNotNullColumns 
2024-11-21T09:22:23.519452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.519490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.519507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:23.519560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:23.519675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.519683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.519685Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:23.519711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.519718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.519719Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:23.521196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.521214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:23.521556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:23.573576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:23.573587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:23.573603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T09:22:23.573922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:23.574504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943619, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.574513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180943619 2024-11-21T09:22:23.574532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:22:23.574912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.574995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2024-11-21T09:22:23.575014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:23.575213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.575227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.575230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:23.575269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.575277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.575278Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759 ... 2:26.733964Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.735387Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.735486Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.735496Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.735888Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.735951Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.735960Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.736316Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.736325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.736401Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.736673Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.737790Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946783, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.737803Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.737871Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.738348Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.738403Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.738418Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.738442Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.738452Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.738464Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.738639Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:26.738668Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.738678Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.738692Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:26.935426Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestInvalidData, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.935638Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:26.935844Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.935858Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936516Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: 
/Root/TestInvalidData 2024-11-21T09:22:26.936579Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936632Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936656Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:26.936737Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936838Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:26.936849Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:26.936854Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:26.936905Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:26.936913Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:26.936914Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:26.938319Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.938345Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:26.938704Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:26.990762Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:26.990776Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:26.990802Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:26.991312Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:26.992239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947035, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.992255Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947035 2024-11-21T09:22:26.992286Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:26.992734Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.992838Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046644480 2024-11-21T09:22:26.992849Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:26.993110Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:26.993118Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:26.993123Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:26.993189Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:26.993196Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:26.993200Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:26.994208Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180947035 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 460 } } 2024-11-21T09:22:26.994254Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:26.994266Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.994272Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:26.994570Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:26.994590Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:26.994601Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:4542 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid DyNumber string representation >> YdbQueryService::TestAttachTwice [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbScripting::BasicV1 [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:158:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:159:2057] recipient: [7:157:2178] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:161:2057] recipient: [7:157:2178] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:160:2179] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:230:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:149:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:149:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:149:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:149:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:149:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:153:2057] recipient: [15:151:2172] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:155:2057] recipient: [15:151:2172] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:154:2173] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:224:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:154:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:157:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:158:2057] recipient: [16:156:2177] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:160:2057] recipient: [16:156:2177] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:159:2178] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:229:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:154:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:156:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:158:2057] recipient: [17:157:2177] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:160:2057] recipient: [17:157:2177] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:159:2178] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:229:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:156:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:159:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:160:2057] recipient: [18:158:2178] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:162:2057] recipient: [18:158:2178] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:161:2179] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:231:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:161:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:164:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:165:2057] recipient: [19:163:2183] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:167:2057] recipient: [19:163:2183] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! 
new actor is[19:166:2184] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:236:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:161:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:164:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:165:2057] recipient: [20:163:2183] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:167:2057] recipient: [20:163:2183] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:166:2184] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:236:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:164:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:167:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:168:2057] recipient: [21:166:2185] Leader for TabletID 72057594037927937 is [21:169:2186] sender: [21:170:2057] recipient: [21:166:2185] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:169:2186] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed |97.1%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> YdbImport::EmptyData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00413a/r3tmp/tmp0yneYt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14269, node 1 TClient is connected to server localhost:32468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T09:22:25.165726Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660511279009477:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:25.165950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00413a/r3tmp/tmpoBxI4h/pdisk_1.dat 2024-11-21T09:22:25.181564Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20690, node 4 2024-11-21T09:22:25.198589Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:25.198603Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:25.198605Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:25.198646Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17493 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:25.265850Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:25.265881Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:25.267355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:25.268858Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.268958Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.268966Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.269313Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:25.269360Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:25.269366Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:25.269654Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.269664Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:25.269792Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:25.269938Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.270613Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945320, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:25.270624Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:25.270692Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:25.271102Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.271134Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.271145Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:25.271156Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:25.271168Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:25.271183Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:25.271300Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.271317Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:25.271320Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:25.271330Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:25.495352Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439660511279010394:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:25.495385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:25.527846Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.528027Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:25.528265Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.528281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.529176Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:25.529244Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.529304Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.529334Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:25.529399Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:25.529563Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.529584Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.529589Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:25.529648Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:25.529657Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:25.529660Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:25.531436Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:25.531475Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:25.532017Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:25.584395Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:25.584409Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:25.584436Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:22:25.585024Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:25.586066Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180945635, transac ... node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.606471Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.606582Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.606593Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.607234Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.607312Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.607331Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.607759Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.607831Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.607842Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.608314Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.609331Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947658, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.609343Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.609411Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.609824Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.609866Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.609882Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.609896Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.609909Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.609927Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.610089Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:27.610110Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.610114Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.610126Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.730120Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.730230Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:27.730374Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.730386Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.732568Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T09:22:27.732641Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.732699Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.732714Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.733307Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.733316Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.733320Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.733370Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.733373Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.733375Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.733407Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.738145Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.738180Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:27.738661Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.790997Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.791011Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.791037Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:27.791456Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.792360Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947840, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.792374Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947840 2024-11-21T09:22:27.792403Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:27.792772Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.792859Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T09:22:27.792890Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.793114Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.793129Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.793132Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:27.793169Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.793178Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.793180Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:27.794608Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180947840 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 348 } } 2024-11-21T09:22:27.794653Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:27.794668Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.794680Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:27.794950Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:27.794967Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:27.794977Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbScripting::BasicV1 [GOOD] Test command err: 2024-11-21T09:22:23.118714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660503065394407:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.118727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004159/r3tmp/tmpVszf7J/pdisk_1.dat 2024-11-21T09:22:23.163450Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19002, node 1 2024-11-21T09:22:23.183709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.183720Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.183722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.183758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:23.212779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.213687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.213707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.214518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.214580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.214590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:23.214954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.214961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:23.215008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.215278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.216179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943262, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.216194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:23.216295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:23.216773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.216833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.216849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:23.216866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:23.216895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:23.216930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:23.217388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:23.217409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:23.217419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.217447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:23.218870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.218892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.220271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.398603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ListingObjects, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.398848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:23.399326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.399338Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.400079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ListingObjects 2024-11-21T09:22:23.400134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.400181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.400202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:23.400368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:23.400447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.400457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.400460Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:23.400487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:23.400495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:23.400497Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:23.402775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.402980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:23.403252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreat ... is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:27.707437Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.707446Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.707465Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T09:22:27.707502Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.707509Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.707511Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:27.707527Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.707534Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.707536Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:27.707550Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.707557Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.707558Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:22:27.707572Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.707574Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.707576Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T09:22:27.714518Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947763, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.714543Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947763, at schemeshard: 72057594046644480 2024-11-21T09:22:27.714596Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T09:22:27.714637Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947763, at schemeshard: 72057594046644480 2024-11-21T09:22:27.714656Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T09:22:27.714672Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947763, at schemeshard: 72057594046644480 2024-11-21T09:22:27.714682Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T09:22:27.714700Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732180947763 2024-11-21T09:22:27.714714Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T09:22:27.715856Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.716012Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.716036Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T09:22:27.716052Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T09:22:27.716087Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T09:22:27.716103Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T09:22:27.716122Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:22:27.716132Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 
2024-11-21T09:22:27.716145Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T09:22:27.716159Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:22:27.716171Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:27.716182Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:22:27.716186Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:22:27.716188Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:22:27.716193Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:22:27.716490Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.716507Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.716511Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:27.716572Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.716583Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.716584Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:27.716609Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.716613Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.716615Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:27.716633Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.716640Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.716642Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:27.716656Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 
281474976715659 2024-11-21T09:22:27.716663Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.716665Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:22:27.716671Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:22:27.719604Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660520871533849:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:27.774587Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:27.774634Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:27.775413Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:27.785535Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70fcg7a7vdavbxtwmtt3kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjI4ZDIxMTgtNTIwZTNhZS1jYzgxNjY3Yy0yOTA2OTYyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:27.800703Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fctcaq8asf7d4egbx4r2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjJhMzJjZi0yNDU1NWE1MC04NmJlNjlhLTNhMTFiMzlk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:27.801838Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180947847, txId: 281474976715662] shutting down >> YdbYqlClient::TestDecimalFullStack [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::DateKey >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] Test command err: 2024-11-21T09:22:24.313153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660505442655901:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.313208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004138/r3tmp/tmpFVoyTi/pdisk_1.dat 2024-11-21T09:22:24.375799Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3411, node 1 2024-11-21T09:22:24.401646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.401661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.401663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.401702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:24.413443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.413472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.415015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:24.428493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.429337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.429352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.429843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.429879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.429885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:22:24.430238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.430249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:24.430305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.430559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.431276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944480, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.431286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:24.431335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:24.431624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.431657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.431668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:24.431675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:24.431681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:24.431688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:24.432016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:24.432045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:24.432054Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.432073Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:24.441523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.441570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:24.441693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.441703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.442281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T09:22:24.442337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.442395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.442422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T09:22:24.442480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:24.442583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:24.442593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:24.442597Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:24.442624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:24.442631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:24.442632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:24.444484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:24.444504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:24.452573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T09:22:24.452933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.453610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944501, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.453615Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T09:22:24.453633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T09:22:24.453969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.454024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.454033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T09:22:24.454045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T09:22:24.454054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T09:22:24.454077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T09:22:24.454174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:24.454180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:24.454184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:24.454214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:24.454217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:24.454219Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.454224Z node 1 :FLAT_T ... nished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:27.840759Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.840794Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.842339Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.851189Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.851307Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.851331Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.851734Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.851784Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.851791Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:27.852098Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.852105Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 2024-11-21T09:22:27.852458Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.852576Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853332Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947896, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853348Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.853409Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.853800Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853838Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853848Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.853865Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.853879Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.853890Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.854026Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:27.854049Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.854053Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.854066Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.866506Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TheTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.866624Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:27.866755Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.866773Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.867368Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TheTable 2024-11-21T09:22:27.867407Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.867452Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.867467Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.867509Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.867582Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.867599Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.867609Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.867675Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.867684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.867686Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.869099Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.869124Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:27.869463Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.921529Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.921541Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.921563Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:27.921971Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.922637Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947966, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.922649Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947966 2024-11-21T09:22:27.922674Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:27.922985Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.923049Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.923079Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.923258Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.923269Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.923272Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:27.923303Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.923312Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.923314Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:27.928192Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180947966 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 737 } } 2024-11-21T09:22:27.928285Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:27.928297Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 
2024-11-21T09:22:27.928302Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:27.928699Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:27.928723Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:27.928733Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDecimalFullStack [GOOD] Test command err: 2024-11-21T09:22:24.289980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660509455763864:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.290122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004139/r3tmp/tmplVRAce/pdisk_1.dat 2024-11-21T09:22:24.340678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26496, node 1 2024-11-21T09:22:24.362675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.362688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.362691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.362721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:24.388582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.388620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.390085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:24.417871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.418841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.418856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.419282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.419345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.419354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:24.419669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.419677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:24.419735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.419950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.420881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944466, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.420894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:24.420962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:24.421310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.421343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.421356Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:24.421370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:24.421382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:24.421397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:24.421731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:24.421744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:24.421747Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.421758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:24.634076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660509455764793:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.634098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660509455764785:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.634121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.634546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.634603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.634611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.634620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.634626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:24.634632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.634638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:24.634680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T09:22:24.634734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.634742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:24.635374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635440Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:24.635466Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635479Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635486Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:24.635714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2024-11-21T09:22:24.635723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.635726Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:24.635754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.635760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.635761Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.635770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.635776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.635777Z node 1 :FLA ... 819596Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.819597Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:27.819610Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.819612Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.819614Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:22:27.819626Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.819628Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.819630Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T09:22:27.820398Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947868, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.820404Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947868, at schemeshard: 72057594046644480 2024-11-21T09:22:27.820433Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T09:22:27.820449Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947868, at schemeshard: 72057594046644480 
2024-11-21T09:22:27.820457Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T09:22:27.820464Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180947868, at schemeshard: 72057594046644480 2024-11-21T09:22:27.820471Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T09:22:27.820479Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732180947868 2024-11-21T09:22:27.820484Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T09:22:27.820830Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.820920Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.820930Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T09:22:27.820938Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T09:22:27.820969Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T09:22:27.820973Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T09:22:27.820982Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:22:27.820986Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T09:22:27.820995Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T09:22:27.820998Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:22:27.821002Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:27.821009Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:22:27.821012Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:22:27.821015Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:22:27.821019Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:22:27.821239Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.821246Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.821249Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:27.821281Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 
2024-11-21T09:22:27.821283Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.821285Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:27.821298Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.821301Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.821302Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:27.821316Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.821318Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.821319Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:27.821332Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:27.821334Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:27.821336Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:22:27.821340Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:22:27.821870Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660521525878692:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:27.899657Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:27.899691Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:27.900303Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:27.916744Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70fcv9d6nsxd7x57zqccdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:27.932471Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70fcyh9j7w2kyqfh3c2c40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:27.946480Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fcyzf4xw46w3d1kj2yd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:27.965164Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70fczg8qyxjckgqrry90m7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:27.979584Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70fczzfy4hvf0qq5ax2nwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:28.058532Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70fd0efa6b5t5afnase2fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:28.060655Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd70fd0efa6b5t5afnase2fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzUxNWE0OTItY2IzODc4NjctMjBmZjJlMzctZjViNGJmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteCachingPolicy >> YdbOlapStore::LogLast50 >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery >> YdbOlapStore::LogLast50ByResource ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::AlterForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.011959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.011978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.011981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.011991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.026713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.026733Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.028938Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.029047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.029070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.031652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.031720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.031830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.032001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.054173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.054223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.054303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.054308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2024-11-21T09:21:56.054885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.054912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.054926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.054929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.055247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.055505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.055518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.056317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.056383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.056637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.056787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.056800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:21:56.056843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.056861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.057350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.057399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.057485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.057508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.057514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.057531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.057543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.057560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.057568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.057575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:22:28.303033Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303082Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 7 2024-11-21T09:22:28.303128Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303144Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T09:22:28.303158Z node 134 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T09:22:28.303277Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303295Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T09:22:28.303317Z node 134 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2024-11-21T09:22:28.303329Z node 134 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T09:22:28.303379Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303394Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:22:28.303519Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303533Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:28.303562Z node 134 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T09:22:28.303594Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:22:28.303622Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303636Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:28.303669Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303673Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: 
PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:28.303695Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:22:28.303737Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:28.303741Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:28.303747Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:28.304374Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T09:22:28.304398Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:22:28.304421Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T09:22:28.304472Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T09:22:28.304515Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:22:28.304828Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T09:22:28.304852Z node 134 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:28.304864Z node 134 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T09:22:28.304934Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:28.304939Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T09:22:28.304949Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:28.304951Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:28.304994Z node 134 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:28.305008Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:28.305011Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [134:378:2369] 2024-11-21T09:22:28.305027Z node 134 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:28.305033Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:28.305035Z node 134 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [134:378:2369] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 
wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted 2024-11-21T09:22:28.305075Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:22:28.305094Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T09:22:28.305099Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T09:22:28.305104Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T09:22:28.305109Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T09:22:28.305114Z node 134 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 2024-11-21T09:22:28.305169Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:28.305190Z node 134 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 30us result status StatusPathDoesNotExist 2024-11-21T09:22:28.305227Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:22:28.305259Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:28.305273Z node 134 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 15us result status StatusSuccess 2024-11-21T09:22:28.305324Z node 134 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Waiting until shard idx 72057594046678944:6 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 Deleted shard idx 72057594046678944:6 >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> TTableProfileTests::WrongTableProfile [GOOD] >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TGRpcYdbTest::SdkUuidViaParams >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::BiStreamPing >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::WrongTableProfile [GOOD] Test command err: 2024-11-21T09:22:20.767558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660491018721257:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:20.768244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004177/r3tmp/tmpe63QSm/pdisk_1.dat 2024-11-21T09:22:20.831614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61593, node 1 2024-11-21T09:22:20.847655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:20.847671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:20.847674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T09:22:20.847712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:20.867276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:20.867301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:20.868855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:20.900464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.901549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.901565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:20.902041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.902050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:20.902416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.902427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:20.902770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.903805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.904736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:20.904812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:20.905218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.905277Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:20.905289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:20.905303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:20.905314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:20.905768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:20.905787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:20.905791Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:20.905804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:5613 waiting... 
2024-11-21T09:22:20.932996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.933096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:20.933101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.933524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:20.933546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.933581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.933591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T09:22:20.933594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:20.933979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:20.934004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:20.934010Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:20.934053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:20.934074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:20.934076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:20.934099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:20.934338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:20.934349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T09:22:20.934715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:20.935482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180940980, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:20.935494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T09:22:20.935553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976710658:0 128 -> 240 2024-11-21T09:22:20.935925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:20.935963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:20.935977Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T09:22:20.935990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T09:22:20.936000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T09:22:20.936008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T09:22:20.936150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:20.936165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:20.936168Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:20.936194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:20.936202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:20.936221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:20.936226Z node 1 :FLAT_TX_SCH ... 
Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:28.129308Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.129975Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948176, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.129986Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:28.130045Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:28.130371Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.130421Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.130437Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:28.130461Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:28.130473Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:28.130487Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:28.130623Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.130637Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.130641Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:28.130671Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.130679Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.130681Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:28.130686Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:28.131692Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.131820Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.131831Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.132270Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: 
/Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:28.132312Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T09:22:28.132682Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:28.134299Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:28.134363Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:28.134374Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T09:22:28.134692Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.633503Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439660523684371315:2113];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:28.633606Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:28.636025Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.636057Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.637117Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T09:22:28.637407Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8933 2024-11-21T09:22:28.656277Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: 
"ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T09:22:28.680731Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T09:22:28.680862Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:28.926889Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:28.927070Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439660523684371580:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> YdbImport::EmptyData [GOOD] >> TYqlDateTimeTests::TimestampKey >> TGRpcNewCoordinationClient::CheckUnauthorized >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] Test command err: 2024-11-21T09:22:24.336484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660509118219099:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.336532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004136/r3tmp/tmpBg38Pp/pdisk_1.dat 2024-11-21T09:22:24.405136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9564, node 1 2024-11-21T09:22:24.418686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.418700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.418701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.418741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20312 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:24.436417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.436443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:24.437817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:24.466307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.467440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.467466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.467957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.468012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.468019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:24.468466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.468516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.468524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:24.468937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.469934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944515, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.469944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:24.470015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:24.470448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.470479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.470486Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:24.470495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:24.470507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:24.470516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:24.470906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:24.470922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:24.470926Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.470966Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:24.478978Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:45854 Call 2024-11-21T09:22:24.480842Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:45854 2024-11-21T09:22:24.613421Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:45854 Call Call 2024-11-21T09:22:24.617937Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS INFO: User has no permission to perform query on this database, database: /Root, user: test_user@builtin, from ip: ipv6:[::1]:45854 2024-11-21T09:22:24.621135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.621209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.621219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.621235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:24.621275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:24.621283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T09:22:24.621847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +(ConnDB):test_user@builtin:- 2024-11-21T09:22:24.621892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.621959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.622130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.622142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.622146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:24.622164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T09:22:25.173059Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660511446606885:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:25.173212Z node 4 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004136/r3tmp/tmpAQpUUs/pdisk_1.dat 2024-11-21T09:22:25.185030Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65407, node 4 2024-11-21T09:22:25.196962Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:25.196981Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:25.196983Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:25.197037Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:25.273484Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:25.273518Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconn ... maybe) 2024-11-21T09:22:26.907711Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:26.977944Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.977981Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.979478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.981060Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.981187Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.981199Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.981633Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.981679Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.981687Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:26.982043Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.982055Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.982227Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.982472Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.983351Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947028, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.983364Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.983431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.983806Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.983860Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.983875Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.983893Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.983907Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.983924Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.984333Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:26.984354Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.984358Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.984371Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:21133 2024-11-21T09:22:27.012525Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.012641Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.012651Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.013179Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2024-11-21T09:22:27.015291Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947063, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.015839Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:27.015855Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:27.016016Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:27.016979Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.017098Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.017110Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.017487Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... waiting... 
2024-11-21T09:22:28.018646Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7439660522434531952:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:28.018816Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:28.020814Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.020842Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.021994Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2024-11-21T09:22:28.022274Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:28.032498Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.032839Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.032854Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.032920Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T09:22:28.032957Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.032969Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T09:22:28.033096Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.033676Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2024-11-21T09:22:28.827668Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948876, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.828138Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:28.828170Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 2 2024-11-21T09:22:28.828381Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 2 2024-11-21T09:22:28.885667Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 1732180948930, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.892124Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T09:22:28.892166Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T09:22:28.892171Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T09:22:28.896993Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T09:22:28.897146Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError >> TTableProfileTests::DescribeTableWithPartitioningPolicy >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> YdbYqlClient::DeleteTableWithDeletedIndex >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2024-11-21T09:22:24.355444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660507764480292:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.355717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004137/r3tmp/tmpAM8mHc/pdisk_1.dat 2024-11-21T09:22:24.409653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29736, node 1 2024-11-21T09:22:24.429818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.429830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.429833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.429868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24923 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:24.455724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.455756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.457191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:24.489243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.490253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.490270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.490728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.490771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.490778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:24.491151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.491161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:24.491454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:24.491713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.492162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944536, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.492173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:24.492245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:24.492681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.492715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.492726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:24.492733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:24.492742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:24.492750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:24.493279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:24.493312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:24.493318Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.493339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:24.641453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660507764481219:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.641482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660507764481224:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.641487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:24.641827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.641866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.641874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.641884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.641891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:24.641901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.641908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:24.641963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T09:22:24.642013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.642022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:24.642586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:24.642708Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642725Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642739Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:24.642888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2024-11-21T09:22:24.642897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.642900Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:24.642984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.642999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.643001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:24.643044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:24.643051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:24.643053Z node 1 :FLA ... :22:29.071402Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.071428Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:29.071504Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.071525Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.071536Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.071834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.071857Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.071862Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.071911Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.071920Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.071922Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:29.071938Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.071947Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.071948Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:29.071989Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.072003Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.072006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.072007Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:29.072026Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.072035Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.072037Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T09:22:29.072896Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949121, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.072909Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949121, at schemeshard: 72057594046644480 2024-11-21T09:22:29.072934Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:29.072950Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949121, at schemeshard: 72057594046644480 2024-11-21T09:22:29.072961Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T09:22:29.072970Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949121, at schemeshard: 72057594046644480 2024-11-21T09:22:29.072980Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T09:22:29.072989Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732180949121 2024-11-21T09:22:29.072993Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T09:22:29.073349Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.073430Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.073440Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T09:22:29.073451Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#281474976715658:3 progress is 1/4 2024-11-21T09:22:29.073478Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T09:22:29.073482Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T09:22:29.073492Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:29.073498Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T09:22:29.073507Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T09:22:29.073512Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T09:22:29.073517Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:29.073524Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T09:22:29.073526Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T09:22:29.073529Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T09:22:29.073533Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T09:22:29.074192Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.074199Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.074202Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:29.074239Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.074241Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.074243Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T09:22:29.074257Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.074259Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.074261Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:29.074273Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.074275Z 
node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.074277Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:29.074290Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.074292Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.074293Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:22:29.074298Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:29.074854Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660529268316188:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:29.141461Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:29.141509Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:29.142160Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin
>> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD]
>> YdbYqlClient::TestYqlLongSessionMultipleErrors
>> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD]
>> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback
>> YdbTableBulkUpsert::ZeroRows [GOOD]
>> YdbTableBulkUpsertOlap::ParquetImportBug
>> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD]
>> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession
>> TGRpcYdbTest::SdkUuidViaParams [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts
>> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD]
>> YdbYqlClient::CreateTableWithMESettings [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
Test command err: 2024-11-21T09:22:26.370745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660516332221886:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.370777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004132/r3tmp/tmpYGSGG0/pdisk_1.dat 2024-11-21T09:22:26.423954Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14682, node 1 2024-11-21T09:22:26.447600Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.447612Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.447613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.447644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14583 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.469028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.469786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.469797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.470320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.470358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.470365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.470763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.470908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.470923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.472097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.472104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:26.472569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.472703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.473758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946517, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.473762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:26.473823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:26.475468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.475502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.475511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:26.475519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:26.475525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:26.475537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:26.475869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:26.475879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:26.475881Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.475891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:26.704453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660516332222802:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:26.704475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:26.739355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.739484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Json, at schemeshard: 72057594046644480 2024-11-21T09:22:26.740158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Json, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:26.743678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660516332222833:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:26.743702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:26.746186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.746300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Yson, at schemeshard: 72057594046644480 2024-11-21T09:22:26.747140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Yson, operation: CREATE TABLE, path: /Root/Test test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004132/r3tmp/tmprp7R1G/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25768, node 4 TClient is connected to server localhost:63290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004132/r3tmp/tmpgjD3CD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22785, node 7 TClient is connected to server localhost:27821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:22:29.120442Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439660527830224130:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.120639Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004132/r3tmp/tmpTwKyLc/pdisk_1.dat 2024-11-21T09:22:29.132001Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27546, node 10 2024-1 ... :22:29.425991Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.426020Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:29.426065Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.426081Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.426100Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.426128Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.426450Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.426465Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.426470Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.426515Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.426525Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.426526Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:29.426540Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.426543Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.426545Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:29.426558Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.426562Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.426563Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:29.426577Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.426579Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.426581Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T09:22:29.427474Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949471, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.427489Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949471, at schemeshard: 72057594046644480 2024-11-21T09:22:29.427517Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:29.427541Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949471, at schemeshard: 72057594046644480 2024-11-21T09:22:29.427549Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T09:22:29.427558Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180949471, at schemeshard: 72057594046644480 2024-11-21T09:22:29.427566Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T09:22:29.427577Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732180949471 2024-11-21T09:22:29.427583Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T09:22:29.428070Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.428221Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.428255Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T09:22:29.428271Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T09:22:29.428315Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T09:22:29.428324Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T09:22:29.428335Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:29.428343Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T09:22:29.428353Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 
ProgressState 2024-11-21T09:22:29.428362Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T09:22:29.428367Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:29.428377Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T09:22:29.428382Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T09:22:29.428385Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T09:22:29.428390Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T09:22:29.428737Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.428754Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.428759Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:29.428801Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.428811Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.428813Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T09:22:29.428826Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.428829Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.428830Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:29.428842Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.428851Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.428852Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:29.428876Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.428885Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.428886Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:22:29.428891Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:29.429447Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660527830225065:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:29.495124Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:29.495168Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:29.495844Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin
>> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD]
>> TGRpcNewCoordinationClient::CreateDropDescribe
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD]
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword
>> YdbTableBulkUpsert::Nulls
>> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD]
>> TTableProfileTests::OverwriteCachingPolicy [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyType
>> TYqlDateTimeTests::DatetimeKey [GOOD]
>> TYqlDateTimeTests::IntervalKey
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD]
Test command err: 2024-11-21T09:22:26.444365Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660514328646113:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.444528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004127/r3tmp/tmpFP6jXE/pdisk_1.dat 2024-11-21T09:22:26.492074Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22043, node 1 2024-11-21T09:22:26.518649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.518663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.518665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.518712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13001 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:26.540169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.540199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.543282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.547752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.548862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.548894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.549419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.549467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.549475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.549826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.549880Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.549889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:26.550228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.551132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946594, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.551150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:26.551245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:26.551590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.551625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.551636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:26.551646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:26.551656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:26.551667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:26.552062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:26.552086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:26.552090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.552105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:27.281469Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660520522538624:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.281687Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004127/r3tmp/tmp662H8D/pdisk_1.dat 2024-11-21T09:22:27.292839Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27111, node 4 2024-11-21T09:22:27.312609Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.312620Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T09:22:27.312621Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.312674Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.381766Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.381793Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.383140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.383736Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.383825Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.383836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.384297Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.384337Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.384348Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.384649Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.384660Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.384739Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.384993Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.385706Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947434, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.385718Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.385774Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.386171Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.386209Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.386221Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.386229Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.386233Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.386245Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.386397Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, ... 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:28.892227Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.892255Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.892960Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.893054Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.893063Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.893455Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:28.893515Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:28.893527Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:28.893634Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T09:22:28.894019Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.894033Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:28.894106Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:28.894480Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.895464Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948939, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.895479Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:28.895565Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:28.896078Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.896128Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.896144Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:28.896161Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:28.896175Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:28.896192Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:28.896388Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T09:22:28.896401Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:28.896405Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:28.896417Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:29.036096Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.036147Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.036285Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 2024-11-21T09:22:29.652400Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660526779638841:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.652479Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004127/r3tmp/tmpvclXTB/pdisk_1.dat 2024-11-21T09:22:29.664903Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16723, node 13 2024-11-21T09:22:29.682945Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.682972Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:29.682974Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.683011Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:29.752862Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.752909Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:29.754334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:29.756080Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.756180Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.756227Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.756676Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.756728Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.756737Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:29.757136Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.757147Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:29.757291Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.757556Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.758409Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949807, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.758421Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:29.758487Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:29.758911Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.758954Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.758969Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:29.758982Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:29.758994Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:29.759010Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:29.759200Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.759243Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.759254Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.759268Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:29.842114Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.842164Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.842302Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2024-11-21T09:22:26.803536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660514962955478:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.803926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004120/r3tmp/tmpibt6Ut/pdisk_1.dat 2024-11-21T09:22:26.854847Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2312, node 1 2024-11-21T09:22:26.873397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.873413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.873415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.873458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.903726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.903751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.905183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.933196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.934236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.934252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.934718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.934763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.934770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.935083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.935090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.935221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.935411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946979, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.936587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.936665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.937128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.937166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.937180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.937193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.937208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.937220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.937642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:26.937659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.937662Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.937674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.106433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.106591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:27.106806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.106816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.107445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:27.107493Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.107534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.107554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.107747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.107760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.107764Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.107794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.107797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.107800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.107838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.109383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.109457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.109498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.109524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:27.110059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.117417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.117429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.117765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.117770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.164168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.164183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.164193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 
2024-11-21T09:22:27.164696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.165474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947210, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.165488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947210 2024-11-21T09:22:27.165513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:27.165873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.165952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.165974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.166192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046 ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.871301Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.871339Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:29.872833Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:29.873848Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.873965Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.873976Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.874399Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.874448Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.874458Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:29.874823Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.874835Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:29.874985Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.875220Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.875986Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949919, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.875996Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:29.876059Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:29.876434Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.876484Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.876499Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:29.876517Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:29.876530Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:29.876547Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:29.876716Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.876736Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.876744Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.876759Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.062441Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.062615Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:30.062802Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.062813Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.063633Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T09:22:30.063695Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.063752Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.063782Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.063850Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.064030Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.064041Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.064046Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.064080Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.064088Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.064089Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:30.066090Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.066132Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:30.066500Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:30.118520Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:30.118539Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:30.118567Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:30.119046Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.119968Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950164, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.119987Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180950164 2024-11-21T09:22:30.120025Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:30.120519Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.120619Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T09:22:30.120638Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.120955Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.120966Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.120970Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:30.121009Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.121017Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.121019Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:30.121414Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180950164 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 392 } } 2024-11-21T09:22:30.121490Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:30.121506Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.121512Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:30.122234Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:30.122253Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:30.122267Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> TNodeBrokerTest::TestRandomActions [GOOD] >> YdbImport::Simple >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] Test command err: 2024-11-21T09:22:26.585735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660514244744772:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.585963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/004124/r3tmp/tmpUdHS0P/pdisk_1.dat 2024-11-21T09:22:26.639357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4140, node 1 2024-11-21T09:22:26.659698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.659721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.659722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.659753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.686512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.686549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.687949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.715609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.716701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.716726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.717357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.717441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.717454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.717922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.718049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.718058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.718458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.719534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946769, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.719548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.719625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.720111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.720168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.720185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.720196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.720226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.720245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.720689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:26.720707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.720711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.720722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:26.880283Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2024-11-21T09:22:27.430838Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660521163885121:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.431066Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004124/r3tmp/tmpmLmEeW/pdisk_1.dat 2024-11-21T09:22:27.447514Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29462, node 4 2024-11-21T09:22:27.470604Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T09:22:27.470620Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.470622Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.470652Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.531220Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.531259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.532700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.535203Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.535295Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.535304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.535635Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.535677Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.535680Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.535929Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.535936Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.536111Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.536177Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.536775Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947581, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.536786Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.536841Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.537233Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.537272Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.537285Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.537301Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.537313Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.537323Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:2 ... 
msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.249438Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.249441Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.249452Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.006564Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660532427225091:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.006598Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004124/r3tmp/tmpy46mUg/pdisk_1.dat 2024-11-21T09:22:30.019048Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4174, node 13 2024-11-21T09:22:30.038243Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.038259Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.038262Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.038313Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:30.107123Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.107164Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.108646Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.109311Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.109443Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.109463Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.109971Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.110023Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.110033Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:30.110403Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.110420Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.110878Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.111863Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950157, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.111877Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.111961Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.112309Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.112423Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.112464Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.112476Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.112495Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.112507Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.112525Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.112651Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:30.112673Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.112681Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.112695Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.344536Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T09:22:30.344895Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T09:22:30.345183Z node 13 :KQP_PROXY DEBUG: TraceId: "01jd70ff39ekhr3kc1ny3r40d1", Request has 18445011892759.206440s seconds to be completed 2024-11-21T09:22:30.345567Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ== 2024-11-21T09:22:30.345587Z node 13 :KQP_PROXY DEBUG: TraceId: "01jd70ff39ekhr3kc1ny3r40d1", Created new session, sessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, workerId: [13:7439660532427225994:2292], database: , longSession: 1, local sessions count: 1 2024-11-21T09:22:30.345596Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T09:22:30.345621Z node 13 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jd70ff39ekhr3kc1ny3r40d1 2024-11-21T09:22:30.345637Z node 13 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T09:22:30.345642Z node 13 :KQP_PROXY DEBUG: Updated table service config. 
2024-11-21T09:22:30.345648Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T09:22:30.345658Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T09:22:30.345676Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T09:22:30.345691Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T09:22:30.345703Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T09:22:30.345708Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T09:22:30.345716Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T09:22:30.345754Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: unknown state, session actor bootstrapped 2024-11-21T09:22:30.348153Z node 13 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, rpc ctrl: [13:7439660532427226010:2293], sameNode: 1, trace_id: 2024-11-21T09:22:30.348169Z node 13 :KQP_PROXY TRACE: Attach local session: [13:7439660532427225994:2292] to rpc: [13:7439660532427226010:2293] on same node 2024-11-21T09:22:30.349951Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T09:22:30.349966Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T09:22:30.349970Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T09:22:30.349973Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T09:22:30.349990Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, ActorId: [13:7439660532427225994:2292], ActorState: unknown state, Session actor destroyed 2024-11-21T09:22:30.350049Z node 13 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ==, workerId: [13:7439660532427225994:2292], local sessions count: 0 2024-11-21T09:22:30.351462Z node 13 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [13:7439660532427226013:2295], trace_id: 2024-11-21T09:22:30.351509Z node 13 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=13&id=NDI4ZGNmN2EtYmJmNzVjNi1jNjgzZDE3Mi0xMmM3YzEwYQ== 2024-11-21T09:22:30.351538Z node 13 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: 
[13:7439660532427226013:2295], selfId: [13:7439660532427225308:2256], source: [13:7439660532427225308:2256] >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2024-11-21T09:22:26.761317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660516700841786:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.761358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004122/r3tmp/tmpUUvH9h/pdisk_1.dat 2024-11-21T09:22:26.829592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61773, node 1 2024-11-21T09:22:26.841906Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.841926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.841928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.841976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:26.861789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.861831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.863302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:26.864175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.864991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.865003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.865515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.865565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.865573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:22:26.865977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.865986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:26.866025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.866322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.867137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946916, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.867148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:26.867214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:26.867585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.867630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.867645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:26.867658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:26.867671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:26.867680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:26.868050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:26.868088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:26.868100Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.868133Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:27.061991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660520995810004:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.062018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.083258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.083368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:27.083506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.083529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.084065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:27.084102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.084138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.084153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.084342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.084365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.084372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.084375Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.084418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.084428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.084429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.085694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.085715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:27.086706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.094971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.094982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.095011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T09:22:27.095336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.096008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947140, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.096020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947140 2024-11-21T09:22:27.096042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:22:27.096417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.096474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.096487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.096826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.096842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.096845Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:30.036139Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.036169Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.037645Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.039318Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.039447Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.039461Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.040062Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.040117Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.040126Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:30.040677Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.041638Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.041650Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.042108Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.042976Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950087, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.042989Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.043067Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.043425Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.043470Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.043481Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.043499Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.043508Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.043525Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.043653Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:30.043675Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.043680Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.043692Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.267350Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.267501Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:30.267649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.267664Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.268425Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:30.268482Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.268527Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.268546Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.268714Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.268762Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.268773Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.268778Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.268857Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.268877Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.268880Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:30.270372Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.270401Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:30.270913Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:30.322737Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:30.322751Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:30.322772Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:30.323199Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.323882Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950367, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.323897Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180950367 2024-11-21T09:22:30.323919Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:30.324373Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.324450Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.324468Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.324771Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.324788Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.324791Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:30.324827Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.324828Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.324829Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:30.325109Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180950367 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 303 } } 2024-11-21T09:22:30.325206Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:30.325218Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 
2024-11-21T09:22:30.325227Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:30.325458Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:30.325475Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:30.325483Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2024-11-21T09:21:41.209867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:41.209887Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:21:41.214693Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:21:41.215126Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:21:41.215209Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.215215Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.215226Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T09:21:41.215370Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:21:41.216025Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T09:21:41.216037Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.216042Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.216045Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.216100Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T09:21:41.216145Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T09:21:41.216162Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:41.216169Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:41.248549Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T09:21:41.248587Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T09:21:41.248593Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T09:21:41.248602Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.258934Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:565:2205], Recipient [1:529:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.259316Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:518:2178], Recipient [1:529:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T09:21:41.259326Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T09:21:41.259339Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T09:21:41.259412Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:567:2207], Recipient [1:529:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.259448Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:518:2178], Recipient [1:529:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:41.259452Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.259461Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:41.260148Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.260171Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:41.260190Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:568:2184], Recipient [1:529:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.260195Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.260200Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.260203Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.260239Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.260243Z node 1 :NODE_BROKER DEBUG: Registration request from host8:7 (not fixed) tenant: dc-1 2024-11-21T09:21:41.260327Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host8:7 to database resolvehost=host8 address=host8 dc=7 location=DC=7/M=7/R=7/U=7/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T09:21:41.260368Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T09:21:41.271355Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.271382Z node 1 :NODE_BROKER DEBUG: Added node #1024 host8:7 2024-11-21T09:21:41.271391Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T09:21:41.271395Z node 1 :NODE_BROKER DEBUG: Add node #1024 host8:7 to epoch cache 2024-11-21T09:21:41.271468Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T09:21:41.271477Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.271617Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:580:2213], Recipient [1:529:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.271652Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:518:2178], Recipient [1:529:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1 ResolveHost: "host2" Address: "host2" Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:21:41.271658Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:21:41.271667Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1 ResolveHost: "host2" Address: "host2" Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:21:41.271758Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:21:41.271771Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1 ResolveHost: "host2" Address: "host2" Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:21:41.271785Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:581:2184], Recipient [1:529:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 
2024-11-21T09:21:41.271792Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:21:41.271797Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:21:41.271800Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:21:41.271814Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:21:41.271817Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1 (not fixed) tenant: dc-1 2024-11-21T09:21:41.271844Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1 to database resolvehost=host2 address=host2 dc=1 location=DC=1/M=1/R=1/U=1/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T09:21:41.271882Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T09:21:41.282848Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:21:41.282876Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1 2024-11-21T09:21:41.282883Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T09:21:41.282887Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1 to epoch cache 2024-11-21T09:21:41.282944Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1 ResolveHost: "host2" Address: "host2" Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } Expire: 7200024000 Name: "slot-1" } 2024-11-21T09:21:41.282951Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:21:41.283076Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:586:2218], Recipient [1:529:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:21:41.283102Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:518:2178], Recipient [1:529:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2024-11-21T09:21:41.283108Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:21:41.283113Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1026 2024-11-21T09:21:41.283137Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:21:41.283140Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) is now active 2024-11-21T09:21:41.283143Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) enqueue tx 2024-11-21T09:21:41.283147Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) starts new tx 2024-11-21T09:21:41.283159Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1026 2024-11-21T09:21:41.283164Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2024-11-21T09:21:41.283179Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:21:41.283190Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: "Unknown node" } NodeId: 1026 } 2024-11-21T09:21:41.283194Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) completed tx 2024-11-21T09:21:41.283198Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) unlink from parent 2024-11-21T09:21:41.283201Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1026 2024-11-21T09:21:41.283204Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:21:41.283315Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:522:2180], 
Recipient [1:529:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T09:21:41.283341Z nod ... { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:22:29.406659Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host10" Port: 9 ResolveHost: "host10" Address: "host10" Location { DataCenter: "9" Module: "9" Rack: "9" Unit: "9" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:22:29.406669Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:22497:17756], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:22:29.406671Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:22:29.406674Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:22:29.406676Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:22:29.406682Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:22:29.406684Z node 1 :NODE_BROKER DEBUG: Registration request from host10:9 (not fixed) tenant: dc-1 2024-11-21T09:22:29.406703Z node 1 :NODE_BROKER DEBUG: Adding node #1029 host10:9 to database resolvehost=host10 address=host10 dc=9 location=DC=9/M=9/R=9/U=9/ lease=1 expire=Thu, 08 Jan 1970 16:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=5 authorizedbycertificate=false 2024-11-21T09:22:29.406736Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=571 2024-11-21T09:22:29.417889Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:22:29.417912Z node 1 :NODE_BROKER DEBUG: Added node #1029 host10:9 2024-11-21T09:22:29.417919Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 570 to 571 2024-11-21T09:22:29.417922Z node 1 :NODE_BROKER DEBUG: Add node #1029 host10:9 to epoch cache 2024-11-21T09:22:29.417983Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1029 Host: "host10" Port: 9 ResolveHost: "host10" Address: "host10" Location { DataCenter: "9" Module: "9" Rack: "9" Unit: "9" } Expire: 662400024000 Name: "slot-5" } 2024-11-21T09:22:29.417990Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:22:29.418577Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22502:17873], Recipient [1:22345:17756]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:29.418608Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:518:2178], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1029 } 2024-11-21T09:22:29.418613Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:22:29.418618Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1029 2024-11-21T09:22:29.418622Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:22:29.418624Z node 1 :NODE_BROKER TRACE: TTxProcessor(1029) is now active 2024-11-21T09:22:29.418627Z node 1 :NODE_BROKER TRACE: TTxProcessor(1029) enqueue tx 2024-11-21T09:22:29.418630Z node 1 :NODE_BROKER TRACE: TTxProcessor(1029) starts new tx 2024-11-21T09:22:29.418641Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1029 2024-11-21T09:22:29.418647Z node 1 :NODE_BROKER DEBUG: Update node #1029 host10:9 lease 
in database lease=2 expire=1970-01-08T16:00:00.024000Z 2024-11-21T09:22:29.429865Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:22:29.429950Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1029 Expire: 662400024000 Epoch { Id: 183 Version: 571 Start: 655200024000 End: 658800024000 NextEnd: 662400024000 } } 2024-11-21T09:22:29.429972Z node 1 :NODE_BROKER DEBUG: Extended lease of #1029 host10:9 up to Thu, 08 Jan 1970 16:00:00 UTC (lease 2) 2024-11-21T09:22:29.429979Z node 1 :NODE_BROKER TRACE: TTxProcessor(1029) completed tx 2024-11-21T09:22:29.429983Z node 1 :NODE_BROKER TRACE: TTxProcessor(1029) unlink from parent 2024-11-21T09:22:29.429986Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1029 2024-11-21T09:22:29.429990Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:22:29.430997Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22506:17877], Recipient [1:22345:17756]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:29.431037Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:518:2178], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host11" Port: 10 ResolveHost: "host11" Address: "host11" Location { DataCenter: "10" Module: "10" Rack: "10" Unit: "10" } FixedNodeId: false Path: "dc-1" } 2024-11-21T09:22:29.431044Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T09:22:29.431054Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host11" Port: 10 ResolveHost: "host11" Address: "host11" Location { DataCenter: "10" Module: "10" Rack: "10" Unit: "10" } FixedNodeId: false Path: "dc-1" 2024-11-21T09:22:29.431160Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T09:22:29.431174Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host11" Port: 10 ResolveHost: "host11" Address: "host11" Location { DataCenter: "10" Module: "10" Rack: "10" Unit: "10" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T09:22:29.431187Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:22507:17756], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:22:29.431191Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T09:22:29.431196Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T09:22:29.431200Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T09:22:29.431215Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T09:22:29.431219Z node 1 :NODE_BROKER DEBUG: Registration request from host11:10 (not fixed) tenant: dc-1 2024-11-21T09:22:29.431246Z node 1 :NODE_BROKER DEBUG: 
Adding node #1030 host11:10 to database resolvehost=host11 address=host11 dc=10 location=DC=10/M=10/R=10/U=10/ lease=1 expire=Thu, 08 Jan 1970 16:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=6 authorizedbycertificate=false 2024-11-21T09:22:29.431287Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=572 2024-11-21T09:22:29.442545Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T09:22:29.442573Z node 1 :NODE_BROKER DEBUG: Added node #1030 host11:10 2024-11-21T09:22:29.442584Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 571 to 572 2024-11-21T09:22:29.442589Z node 1 :NODE_BROKER DEBUG: Add node #1030 host11:10 to epoch cache 2024-11-21T09:22:29.442658Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1030 Host: "host11" Port: 10 ResolveHost: "host11" Address: "host11" Location { DataCenter: "10" Module: "10" Rack: "10" Unit: "10" } Expire: 662400024000 Name: "slot-6" } 2024-11-21T09:22:29.442667Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T09:22:29.443611Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22512:17882], Recipient [1:22345:17756]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:29.443638Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:518:2178], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 0 } 2024-11-21T09:22:29.443644Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:22:29.443650Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 0 2024-11-21T09:22:29.443655Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:22:29.443659Z node 1 :NODE_BROKER TRACE: TTxProcessor(0) is now active 2024-11-21T09:22:29.443662Z node 1 :NODE_BROKER TRACE: TTxProcessor(0) enqueue tx 2024-11-21T09:22:29.443666Z node 1 :NODE_BROKER TRACE: TTxProcessor(0) starts new tx 2024-11-21T09:22:29.443681Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #0 2024-11-21T09:22:29.443687Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2024-11-21T09:22:29.443704Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:22:29.443717Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: "Unknown node" } NodeId: 0 } 2024-11-21T09:22:29.443722Z node 1 :NODE_BROKER TRACE: TTxProcessor(0) completed tx 2024-11-21T09:22:29.443726Z node 1 :NODE_BROKER TRACE: TTxProcessor(0) unlink from parent 2024-11-21T09:22:29.443729Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 0 2024-11-21T09:22:29.443733Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T09:22:29.444316Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22514:17884], Recipient [1:22345:17756]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:29.444336Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:518:2178], Recipient [1:22345:17756]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1028 } 2024-11-21T09:22:29.444340Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T09:22:29.444344Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1028 2024-11-21T09:22:29.444348Z node 1 
:NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T09:22:29.444351Z node 1 :NODE_BROKER TRACE: TTxProcessor(1028) is now active 2024-11-21T09:22:29.444354Z node 1 :NODE_BROKER TRACE: TTxProcessor(1028) enqueue tx 2024-11-21T09:22:29.444358Z node 1 :NODE_BROKER TRACE: TTxProcessor(1028) starts new tx 2024-11-21T09:22:29.444363Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1028 2024-11-21T09:22:29.444369Z node 1 :NODE_BROKER DEBUG: Update node #1028 host5:4 lease in database lease=5 expire=1970-01-08T16:00:00.024000Z 2024-11-21T09:22:29.455743Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T09:22:29.455812Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1028 Expire: 662400024000 Epoch { Id: 183 Version: 572 Start: 655200024000 End: 658800024000 NextEnd: 662400024000 } } 2024-11-21T09:22:29.455837Z node 1 :NODE_BROKER DEBUG: Extended lease of #1028 host5:4 up to Thu, 08 Jan 1970 16:00:00 UTC (lease 5) 2024-11-21T09:22:29.455846Z node 1 :NODE_BROKER TRACE: TTxProcessor(1028) completed tx 2024-11-21T09:22:29.455850Z node 1 :NODE_BROKER TRACE: TTxProcessor(1028) unlink from parent 2024-11-21T09:22:29.455854Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1028 2024-11-21T09:22:29.455858Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2024-11-21T09:22:26.941958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660516386290784:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.942037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411e/r3tmp/tmpob1jUt/pdisk_1.dat 2024-11-21T09:22:27.005779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11109, node 1 2024-11-21T09:22:27.024718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.024732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.024734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.024784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25896 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:27.042111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.042143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.043796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.078505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.079438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.079459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.079898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.079939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.079946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.080343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.080356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.080426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.080712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.081416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947126, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.081425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.081486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.081919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.081961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.081975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.081990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.082002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.082018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.082405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:27.082422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.082426Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.082438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.091748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.091796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:27.091874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.091885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.092392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T09:22:27.092428Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.092464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.092479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T09:22:27.092522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.092586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.092595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.092598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.092625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.092634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.092636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.093966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.093988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:27.145900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:27.146354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.147085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947196, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.147097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T09:22:27.147120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:27.147478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.147528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.147544Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:27.147555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:27.147568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:27.147590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 
2024-11-21T09:22:27.147725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.147738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.147741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:27.147784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.147792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.147794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:27.147801Z node 1 :FLAT ... 024-11-21T09:22:30.392687Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411e/r3tmp/tmppTTlVS/pdisk_1.dat 2024-11-21T09:22:30.410489Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11127, node 13 2024-11-21T09:22:30.432165Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.432181Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.432184Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.432263Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:30.493022Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.493062Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.494441Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.495987Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.496090Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.496099Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.496473Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.496531Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.496542Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:30.496953Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.496964Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.497002Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.497331Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.498056Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950542, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.498070Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.498136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.498455Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.498508Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.498523Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.498542Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.498555Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.498572Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.498718Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:30.498730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.498734Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.498746Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.511361Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.511413Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:30.511529Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.511543Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.512067Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T09:22:30.512119Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.512182Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.512233Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T09:22:30.512290Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.512411Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.512422Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.512427Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.512481Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.512484Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.512485Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:30.514302Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.514342Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:30.566734Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976715658:0 3 -> 128
2024-11-21T09:22:30.567312Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480
2024-11-21T09:22:30.568134Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950612, transactions count in step: 1, at schemeshard: 72057594046644480
2024-11-21T09:22:30.568148Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480
2024-11-21T09:22:30.568172Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240
2024-11-21T09:22:30.568578Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480
2024-11-21T09:22:30.568648Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480
2024-11-21T09:22:30.568664Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState
2024-11-21T09:22:30.568676Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1
2024-11-21T09:22:30.568686Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0
2024-11-21T09:22:30.568714Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1
2024-11-21T09:22:30.568881Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658
2024-11-21T09:22:30.568894Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658
2024-11-21T09:22:30.568897Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5
2024-11-21T09:22:30.568918Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658
2024-11-21T09:22:30.568924Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658
2024-11-21T09:22:30.568925Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3
2024-11-21T09:22:30.568929Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1
>> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD]
>> TGRpcNewCoordinationClient::CreateAlter
>> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD]
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD]
>> TGRpcClientLowTest::ChangeAcl [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin
>> YdbYqlClient::TestReadTableOneBatch
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test
command err: 2024-11-21T09:22:27.433225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660521212097058:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.433261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411a/r3tmp/tmpzAs9qc/pdisk_1.dat 2024-11-21T09:22:27.486946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21894, node 1 2024-11-21T09:22:27.504684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.504699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.504701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.504736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.533030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.533074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.534474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.561958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.563058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.563076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.563565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.563612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.563624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.564025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.564036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.564096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.564335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.565242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947609, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.565255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.565323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.565744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.565787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.565801Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.565817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.565830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.565839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.566193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:27.566203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.566207Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.566216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.724706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660521212097767:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.724743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.759314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.759506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:27.759697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.759711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.760445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:27.760504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.760560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.760585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.760672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.760812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.760830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.760835Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.760893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.760901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.760903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.762652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.762676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:27.763113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.822184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.822197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.822218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:22:27.822670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.823433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947868, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.823447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947868 2024-11-21T09:22:27.823470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:27.823829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.823906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.823930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.824066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:27.824079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:27.824082Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: ... node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.734261Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.734262Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:30.734278Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.734285Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.734287Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:30.734302Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.734310Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.734311Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:22:30.734325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.734332Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.734334Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T09:22:30.737957Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950787, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.737973Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950787, at schemeshard: 72057594046644480 2024-11-21T09:22:30.738001Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T09:22:30.738028Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950787, at schemeshard: 72057594046644480 2024-11-21T09:22:30.738058Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T09:22:30.738069Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950787, at schemeshard: 72057594046644480 2024-11-21T09:22:30.738079Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T09:22:30.738095Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732180950787 2024-11-21T09:22:30.738107Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T09:22:30.738515Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.738620Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.738638Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T09:22:30.738657Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T09:22:30.738693Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T09:22:30.738705Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T09:22:30.738717Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:22:30.738728Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T09:22:30.738739Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T09:22:30.738749Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:22:30.738755Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:30.738768Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:22:30.738788Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:22:30.738791Z 
node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:22:30.738796Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:22:30.739042Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.739058Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.739063Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:30.739106Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.739114Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.739116Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:30.739130Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.739139Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.739141Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:30.739157Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.739165Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.739167Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:30.739182Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:30.739194Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:30.739196Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:22:30.739201Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:22:30.739657Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660535157722214:2310], 
DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:30.826024Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:30.826068Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:30.826790Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:30.829139Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439660535157722309:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:22:30.829217Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGMyOWZhOTYtMTVlZDlhMzUtYTM0ZmU1ZTktNjZiNTgxNTg=, ActorId: [10:7439660535157722039:2295], ActorState: ExecuteState, TraceId: 01jd70ffpc4s60h4kjtf28vs75, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T09:22:30.832043Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439660535157722327:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T09:22:30.832106Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGMyOWZhOTYtMTVlZDlhMzUtYTM0ZmU1ZTktNjZiNTgxNTg=, ActorId: [10:7439660535157722039:2295], ActorState: ExecuteState, TraceId: 01jd70ffse3afwf8jvk4a0hyyx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2024-11-21T09:22:26.438269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660515461242785:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.438626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004126/r3tmp/tmp2D6hNy/pdisk_1.dat 2024-11-21T09:22:26.500718Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24407, node 1 2024-11-21T09:22:26.524383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.524398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.524400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.524439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:26.539128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.539165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:25171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:26.540597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:26.550314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.550993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.551005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.551621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.551669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.551676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:22:26.552039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.552129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.552132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:26.552496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.553313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946601, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.553325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:26.553424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:26.553759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.553799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.553813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:26.553829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:26.553841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:26.553852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:26.554300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:26.554322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:26.554327Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.554340Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:26.758311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.758532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:26.758901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.758912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.759502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T09:22:26.759559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.759606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.759624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:26.759815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:26.759857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:26.759865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:26.759869Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:26.759896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:26.759903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:26.759904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:26.762167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply 
TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:26.762717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 2814749 ... 
46644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:22:30.758189Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.758197Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.758198Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2024-11-21T09:22:30.758216Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.758226Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.758228Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2024-11-21T09:22:30.766341Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950815, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.766363Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950815, at schemeshard: 72057594046644480 2024-11-21T09:22:30.766416Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T09:22:30.766448Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950815, at schemeshard: 72057594046644480 2024-11-21T09:22:30.766463Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2024-11-21T09:22:30.766478Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950815, at schemeshard: 72057594046644480 2024-11-21T09:22:30.766491Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2024-11-21T09:22:30.766503Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1732180950815 2024-11-21T09:22:30.766516Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2024-11-21T09:22:30.767038Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.767143Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.767161Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T09:22:30.767178Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2024-11-21T09:22:30.767210Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2024-11-21T09:22:30.767221Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2024-11-21T09:22:30.767231Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046644480] TDone opId# 281474976715660:3 ProgressState 2024-11-21T09:22:30.767241Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2024-11-21T09:22:30.767251Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2024-11-21T09:22:30.767265Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2024-11-21T09:22:30.767270Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T09:22:30.767281Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T09:22:30.767286Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T09:22:30.767288Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2024-11-21T09:22:30.767292Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2024-11-21T09:22:30.767707Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.767722Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.767725Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:30.767762Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.767770Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.767771Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:30.767787Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.767790Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.767791Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:30.767804Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.767808Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.767811Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T09:22:30.767824Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:30.767827Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:30.767829Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T09:22:30.767834Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T09:22:30.768439Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660534699105969:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:22:30.839162Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:30.839206Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:30.839760Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:30.856567Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70ffq221h7pp0hy84bn23q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjA1Njc0YTQtMTAzMDIzZjYtNjA5MjZmMDgtYjg0ODUzMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:30.862722Z node 10 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 1 version: {1732180950822:max} readable: {1732180950899:max} at tablet 72075186224037888 2024-11-21T09:22:30.862772Z node 10 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 1 at tablet 72075186224037888 2024-11-21T09:22:30.862883Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439660534699105747:2298];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732180950822:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T09:22:30.867578Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439660534699105747:2298];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732180950822:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[date;dateTimeS;dateTimeU;id;stringToString;timestamp;utf8ToString;];};]; 2024-11-21T09:22:30.868470Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T09:22:30.868864Z node 10 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=;
2024-11-21T09:22:30.869590Z node 10 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888
2024-11-21T09:22:30.872520Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180950822, txId: 18446744073709551615] shutting down
>> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD]
>> YdbOlapStore::LogLast50 [GOOD]
>> YdbOlapStore::LogGrepNonExisting
>> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD]
>> TTableProfileTests::ExplicitPartitionsSimple
>> YdbYqlClient::TestDescribeDirectory
>> TYqlDateTimeTests::IntervalKey [GOOD]
>> TYqlDateTimeTests::SimpleOperations
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] Test command err:
2024-11-21T09:22:26.469606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660516192330044:2053];send_to=[0:7307199536658146131:7762515];
2024-11-21T09:22:26.469804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004125/r3tmp/tmp3CikTM/pdisk_1.dat
2024-11-21T09:22:26.523151Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23635, node 1
2024-11-21T09:22:26.538918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2024-11-21T09:22:26.538930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2024-11-21T09:22:26.538931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2024-11-21T09:22:26.538956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success.
2024-11-21T09:22:26.569685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.569712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.571205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:26.600709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.601793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.601806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.602264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.602306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.602314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:26.602617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.602630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.602721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.602937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.603749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946650, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.603763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.603838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.604178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.604233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.604249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.604266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.604278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.604296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.604644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:26.604660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.604664Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.604677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:27.314938Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660518461363232:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.314986Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004125/r3tmp/tmpjID0hX/pdisk_1.dat 2024-11-21T09:22:27.329264Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11323, node 4 2024-11-21T09:22:27.348838Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.348853Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.348855Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.348909Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:27.415559Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.415597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.417257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.417676Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.417776Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.417791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.418301Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.418353Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.418362Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:27.418713Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.418721Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.418941Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.419054Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.419860Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947469, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.419874Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.419931Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.420308Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.420351Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.420366Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.420376Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.420384Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.420398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.420566Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7205759404 ... 
:22:30.789278Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.789296Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:30.789332Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.789343Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.789354Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.789622Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.789629Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.789632Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.789670Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.789673Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.789676Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:30.789691Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.789693Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.789694Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:22:30.789708Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.789714Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.789733Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.789735Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:30.789749Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.789750Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.789752Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T09:22:30.790435Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950836, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.790448Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950836, at schemeshard: 72057594046644480 2024-11-21T09:22:30.790469Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:30.790495Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950836, at schemeshard: 72057594046644480 2024-11-21T09:22:30.790503Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T09:22:30.790509Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180950836, at schemeshard: 72057594046644480 2024-11-21T09:22:30.790526Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T09:22:30.790534Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732180950836 2024-11-21T09:22:30.790540Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T09:22:30.790944Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.791044Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.791060Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T09:22:30.791075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T09:22:30.791107Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T09:22:30.791118Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T09:22:30.791132Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:30.791143Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T09:22:30.791158Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T09:22:30.791168Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T09:22:30.791173Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:30.791184Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T09:22:30.791193Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T09:22:30.791195Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715658:3 2024-11-21T09:22:30.791199Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T09:22:30.791405Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.791416Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.791418Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:30.791447Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.791458Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.791459Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T09:22:30.791474Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.791477Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.791478Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:30.791491Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.791493Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.791495Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:30.791507Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.791513Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.791515Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:22:30.791519Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:30.791893Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660534091836160:2298], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:30.861073Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:30.861111Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:30.861659Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2024-11-21T09:22:27.749972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660521129109414:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.750116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004117/r3tmp/tmpss4B6Q/pdisk_1.dat 2024-11-21T09:22:27.817559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15165, node 1 2024-11-21T09:22:27.838133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.838144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.838146Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.838179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:27.848706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.848732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.850288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.893441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.894250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.894265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.894804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.894839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.894846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.895182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.895189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.895402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.895490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.896537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947945, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.896552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.896645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.897094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.897154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.897170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.897185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.897198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.897215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.897613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:27.897628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.897632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.897644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2024-11-21T09:22:28.051557Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:28.053669Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T09:22:28.584089Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660523050316376:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:28.584150Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/jptk/004117/r3tmp/tmpmMHaMc/pdisk_1.dat 2024-11-21T09:22:28.596960Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13319, node 4 2024-11-21T09:22:28.615207Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:28.615220Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:28.615222Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:28.615258Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:28.684278Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.684304Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.685930Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:28.686898Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.687005Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.687017Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.687522Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:28.687592Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:28.687602Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:28.687999Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.688007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:28.688123Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:28.688350Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.689211Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948736, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.689223Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:28.689287Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:28.689633Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.689681Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.689696Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:28.689707Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is ... UNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:31.054828Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.054858Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.056383Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.057842Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.057939Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.057950Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.058320Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.058362Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.058370Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:31.058686Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.058698Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.058853Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.059061Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.059813Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951109, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.059825Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.059886Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.060240Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.060285Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.060301Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.060312Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.060325Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.060339Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.060484Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:31.060498Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.060502Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.060513Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:31.069540Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.069593Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.070221Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2024-11-21T09:22:31.070269Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.070313Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.070331Z node 13 :FLAT_TX_SCHEMESHARD INFO: 
MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.070381Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:31.070470Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.070486Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.070490Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:31.070537Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.070546Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.070548Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:31.073572Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951123, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.073584Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180951123, at schemeshard: 72057594046644480 2024-11-21T09:22:31.073608Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:31.073955Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.073997Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.074014Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:31.074024Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:31.074031Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:31.074045Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:31.074123Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.074139Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.074142Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:31.074178Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 
281474976715658 2024-11-21T09:22:31.074187Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.074188Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:31.074195Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:31.077425Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/TheDirectory, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.077480Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.077493Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.077506Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T09:22:31.077523Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:31.077526Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 2, subscribers: 0 2024-11-21T09:22:31.078005Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/TheDirectory, set owner:qqq, add access: +R:qqq, add access: -():qqq:- 2024-11-21T09:22:31.078055Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.078117Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.078276Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:31.078293Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:31.078296Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T09:22:31.078342Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:31.078351Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:31.078352Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T09:22:31.078357Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 >> YdbYqlClient::TestReadWrongTable [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> 
YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> YdbYqlClient::RetryOperationTemplate >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbImport::Simple [GOOD] >> YdbOlapStore::LogNonExistingRequest >> YdbIndexTable::AlterIndexImplBySuperUser >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts |97.1%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2024-11-21T09:22:27.056564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660521751522548:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.056776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00411b/r3tmp/tmpLPx8zk/pdisk_1.dat 2024-11-21T09:22:27.103102Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1372, node 1 2024-11-21T09:22:27.123473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.123485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.123487Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.123527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:27.147026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.148012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.148034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.148718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.148798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.148812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T09:22:27.149211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.149289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.149297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:27.149638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.150538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947196, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.150549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:27.150622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:27.151047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.151092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.151109Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:27.151118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:27.151131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:27.151142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:27.151559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:27.151581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:27.151586Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.151598Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:27.156949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.156971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.158398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.325679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660521751523458:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.325702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:27.367303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.367501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:27.367649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.367663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.368471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestTable 2024-11-21T09:22:27.368538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.368595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.368623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:27.368698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:27.368864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.368896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.368902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:27.368947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.368954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.368956Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:27.370714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:27.370740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:27.371192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:27.423610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:27.423623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:27.423648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T09:22:27.424138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.424984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947469, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.425001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180947469 2024-11-21T09:22:27.425030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:22:27.425431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.425520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.425542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:27.425865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:27.425878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:27.425883Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, ... 1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.541114Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.541154Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.541183Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.541278Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.541294Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.541298Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.541348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.541364Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.541369Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:30.541383Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 
1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.541389Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.541391Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T09:22:30.541402Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.541407Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.541408Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T09:22:30.542955Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.542975Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:30.543020Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.543027Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 2 -> 3 2024-11-21T09:22:30.543499Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:30.543610Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:30.548162Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:30.548172Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:30.548187Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 3 -> 128 2024-11-21T09:22:30.548587Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:2 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.548628Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:30.548634Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:30.548642Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:30.548953Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.549625Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950598, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.549638Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180950598 2024-11-21T09:22:30.549665Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 
2024-11-21T09:22:30.549679Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose operationId#281474976715658:1 HandleReply TEvOperationPlan, step: 1732180950598, at schemeshard: 72057594046644480 2024-11-21T09:22:30.549708Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T09:22:30.549728Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:2 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180950598 2024-11-21T09:22:30.549733Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 129 2024-11-21T09:22:30.550270Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.550393Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.550410Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T09:22:30.550421Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 1/3 2024-11-21T09:22:30.550449Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.550475Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:30.551231Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.551251Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.551255Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:30.551297Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.551305Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.551306Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:30.551321Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.551325Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.551326Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T09:22:30.551340Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.551348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation 
in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.551349Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:22:30.552143Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180950598 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 214 } } 2024-11-21T09:22:30.552182Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732180950598 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 223 } } 2024-11-21T09:22:30.552309Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:30.552322Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.552326Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:30.552360Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:30.552366Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:30.552367Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 129 -> 240 2024-11-21T09:22:30.552781Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:30.552796Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 2/3 2024-11-21T09:22:30.552823Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T09:22:30.552833Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 3/3 2024-11-21T09:22:30.552841Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:30.552861Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T09:22:30.552864Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table |97.1%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2024-11-21T09:22:28.052561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660524768524688:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:28.052579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004116/r3tmp/tmpf1RjYQ/pdisk_1.dat 2024-11-21T09:22:28.098768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14649, node 1 2024-11-21T09:22:28.116494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:28.116510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:28.116512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:28.116554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:28.152837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.152863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.154437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:28.177914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.178942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.178960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.179601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:28.179656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:28.179665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:28.180277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.180287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:28.180339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:28.180717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.181790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948225, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.181802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:28.181901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:28.182408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.182453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.182470Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:28.182481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:28.182490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:28.182511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:28.182987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:28.183012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:28.183016Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:28.183037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:28.318606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660524768525607:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:28.318624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660524768525618:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:28.318630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:28.319077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.319118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.319129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.319145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.319147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:28.319153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.319161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:28.319206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T09:22:28.319254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.319263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:28.319924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:28.319995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320144Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:28.320192Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320232Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320243Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:28.320344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2024-11-21T09:22:28.320355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.320359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:28.320392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.320397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.320398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:28.320476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.320483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.320484Z node 1 :FLA ... ableExistsActor;event=undelivered;self_id=[10:7439660539062245319:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.192845Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004116/r3tmp/tmptGrUuo/pdisk_1.dat 2024-11-21T09:22:31.206110Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25940, node 10 2024-11-21T09:22:31.218212Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.218225Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.218227Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.218263Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:31.292943Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.292975Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.294491Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.296757Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.296859Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.296882Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.297399Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.297438Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.297441Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:31.297753Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.297760Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.297947Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.298033Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.298948Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951347, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.298963Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.299027Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.299556Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.299626Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.299648Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.299664Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.299679Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.299695Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.299879Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:31.299903Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.299907Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.299919Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:31.308253Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jd70fg8c75a1v4jzjhkgnzg8, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51634, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998963s 2024-11-21T09:22:31.309116Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jd70fg8d7951d9xt3y5pxcz1, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51634, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:31.522048Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd70fgf1b4t22p3kc7fb678t, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51634, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:31.522465Z node 10 :TX_PROXY ERROR: [ReadTable [10:7439660539062246233:2296] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2024-11-21T09:22:31.522524Z node 10 :TX_PROXY ERROR: [ReadTable [10:7439660539062246233:2296] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2024-11-21T09:22:31.522677Z node 10 :READ_TABLE_API NOTICE: [10:7439660539062246232:2296] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2024-11-21T09:22:31.523409Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jd70fgf3azmj69x2x35sxdpa, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51634, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.008808s 2024-11-21T09:22:31.523724Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6e3000] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523754Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6e4900] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523786Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d0e00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523817Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d8b00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523824Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6e3500] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523854Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6cfa00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523858Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d4a00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523896Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d0400] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523896Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d7c00] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523930Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6c2300] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523937Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d2200] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523957Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6ea800] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523969Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6dbd00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523992Z node 10 :GRPC_SERVER DEBUG: [0x50a7f68de00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.523998Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d5e00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524027Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d3b00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524037Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d1d00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524085Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d0900] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524130Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6d4500] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524137Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6e1c00] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524169Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6cf500] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524171Z 
node 10 :GRPC_SERVER DEBUG: [0x50a7f6d4f00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524224Z node 10 :GRPC_SERVER DEBUG: [0x50a7ec80b00] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:31.524228Z node 10 :GRPC_SERVER DEBUG: [0x50a7f6e3f00] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.011940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.011958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.011962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.011971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.025782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.025800Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: 
[1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.027764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.027859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.027879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.029977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.030034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.030928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.049896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.050705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.050800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.050848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.050873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.051845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.051866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.051871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.051876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.052439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.052847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.052863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.052869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.053486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.053898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.054129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.054321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.054423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.054455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.054467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.054917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.054962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.055040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.055056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.055061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.055072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.055081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.055091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.055096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.055101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
1T09:22:31.634810Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:1, at tablet 72057594046678944 2024-11-21T09:22:31.634830Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T09:22:31.634850Z node 146 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[146:445:2404], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:31.635204Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:31.635210Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:31.635239Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:31.635243Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:31.635306Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:22:31.635313Z node 146 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:1, ProgressState, NeedSyncHive: 0 2024-11-21T09:22:31.635315Z node 146 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 240 -> 240 2024-11-21T09:22:31.635408Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:31.635419Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:31.635424Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:31.635429Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:22:31.635434Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 11 2024-11-21T09:22:31.635448Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/2, is published: true 2024-11-21T09:22:31.635888Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T09:22:31.635899Z node 146 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 
2024-11-21T09:22:31.635911Z node 146 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 2/2 2024-11-21T09:22:31.635915Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2024-11-21T09:22:31.635920Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/2, is published: true 2024-11-21T09:22:31.635925Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2024-11-21T09:22:31.635932Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:31.635936Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:31.635957Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T09:22:31.635962Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T09:22:31.635965Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T09:22:31.635981Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T09:22:31.636081Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:22:31.636504Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:31.636515Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:22:31.636581Z node 146 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:31.636596Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:31.636601Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [146:690:2592] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:31.636676Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:31.636720Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 55us result status StatusSuccess 2024-11-21T09:22:31.636793Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 
72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:31.636862Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2024-11-21T09:22:31.636893Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 30us result status StatusSuccess 2024-11-21T09:22:31.636929Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2024-11-21T09:22:31.636975Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:31.636991Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 18us result status StatusSuccess 2024-11-21T09:22:31.637041Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2024-11-21T09:22:23.251096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660504412419658:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:23.251117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00413c/r3tmp/tmpB75KVG/pdisk_1.dat 2024-11-21T09:22:23.306952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29100, node 1 2024-11-21T09:22:23.326518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:23.326531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:23.326534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:23.326576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13287 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:23.351368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:23.351395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:23.352942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:23.380813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.381949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.381963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.382440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:23.382481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.382488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:23.382828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.382839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:23.383059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:23.383185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.383910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943430, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.383919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:23.383988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:23.384339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.384370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.384380Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:23.384392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:23.384401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:23.384408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:23.384857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:23.384882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:23.384886Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:23.384905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:13287 2024-11-21T09:22:23.410372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.410493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:23.410504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.410980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:23.411019Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.411067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.411086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T09:22:23.411090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 waiting... 2024-11-21T09:22:23.411292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:23.411307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:23.411311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:23.411355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:23.411380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:23.411386Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:23.411530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:23.411612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:23.411618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:23.411940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:23.412623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180943458, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:23.412632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:23.412670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:23.413018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:23.413056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:23.413068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:23.413076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:23.413087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:23.413094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 
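The repeated "Change state for txid ...: 2 -> 3", "3 -> 128" and "128 -> 240" messages above trace a schemeshard suboperation through its progress states, matching the TCreateParts, NSubDomainState::TConfigureParts, NSubDomainState::TPropose and TDone handlers that log each transition. A minimal sketch of that mapping; the numeric values are copied from the log lines above, while the enumerator names are only inferred from the adjacent messages, not taken from the YDB sources:

// Illustrative only: values observed in the surrounding log, names inferred
// from the handlers that report each transition.
enum class ESubOperationState : int {
    CreateParts    = 2,    // "TCreateParts opId# ... ProgressState"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128,  // "NSubDomainState::TPropose HandleReply TEvOperationPlan"
    Done           = 240,  // "TDone opId# ... ProgressState"
};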
2024-11-21T09:22:23.413347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:23.413358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:23.413361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:23.413386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:23.413396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:23.413397Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:23.413402Z node 1 :FLAT_TX_S ... 1T09:22:31.040426Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951088, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.040442Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:31.040494Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:31.040916Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.040968Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.040984Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:31.040997Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:31.041012Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:31.041028Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:31.041381Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.041398Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.041402Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:31.041437Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.041445Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.041446Z node 13 :FLAT_TX_SCHEMESHARD 
INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:31.041452Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:31.042595Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.042737Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.042751Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.043328Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:31.043396Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T09:22:31.043696Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:31.045430Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.045511Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.045518Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T09:22:31.045965Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.546598Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.546623Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.547811Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T09:22:31.548044Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.544295Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439660536223877991:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.544366Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TClient is connected to server localhost:16402 2024-11-21T09:22:31.567932Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" 
ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } 
ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T09:22:31.576988Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.577101Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.577830Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/table-1 2024-11-21T09:22:31.579293Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T09:22:31.579421Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2024-11-21T09:22:26.402852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660514357908546:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.402874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004133/r3tmp/tmpuiroDZ/pdisk_1.dat 2024-11-21T09:22:26.478010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5076, node 1 2024-11-21T09:22:26.498252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2024-11-21T09:22:26.498265Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.498267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.498309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:26.503377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.503412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.508785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.546628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.547777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.547797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.549739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.549796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.549806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
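The StatusSchemeError recorded above for TTableProfileTests::ExplicitPartitionsWrongKeyType ("Error at split boundary 0: Value of type Uint64 expected in tuple at position 1") shows the schemeshard rejecting a CREATE TABLE whose explicit split boundary does not match the key column types. A minimal sketch of that kind of check, assuming types are compared by name; it is not the YDB implementation:

#include <optional>
#include <string>
#include <vector>

// First mismatch between a split-boundary tuple and the key column types,
// in the spirit of the logged reason; std::nullopt means all boundaries
// are well-typed. A result of {0, 1, "Uint64"} corresponds to the message
// quoted above.
struct TBoundaryMismatch {
    size_t BoundaryIndex;
    size_t TuplePosition;
    std::string ExpectedType;
};

std::optional<TBoundaryMismatch> CheckSplitBoundaries(
        const std::vector<std::string>& keyColumnTypes,
        const std::vector<std::vector<std::string>>& boundaries) {
    for (size_t b = 0; b < boundaries.size(); ++b) {
        const auto& tuple = boundaries[b];
        for (size_t pos = 0; pos < tuple.size() && pos < keyColumnTypes.size(); ++pos) {
            if (tuple[pos] != keyColumnTypes[pos]) {
                return TBoundaryMismatch{b, pos, keyColumnTypes[pos]};
            }
        }
    }
    return std::nullopt;
}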
2024-11-21T09:22:26.551849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.552306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.552318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.552925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.554275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946601, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.554289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.554363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.554752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.554790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.554806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.554822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.554835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.554844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.555244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:26.555268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.555272Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.555284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:26.575823Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:40636) has now valid token of root@builtin 2024-11-21T09:22:26.590174Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T09:22:27.252707Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660522114561792:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.252772Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004133/r3tmp/tmpgK6o2s/pdisk_1.dat 2024-11-21T09:22:27.268961Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24937, 
node 4 2024-11-21T09:22:27.284647Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.284657Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.284658Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.284690Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.353277Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.353310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:27.354743Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:27.354803Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.354910Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.354920Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.355285Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.355323Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.355331Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.355612Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.355621Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.355762Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.355875Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.356540Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947406, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.356551Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.356606Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.356951Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.357001Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.357017Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.357029Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.357043Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 28147497 ... xId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.858435Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.858470Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.860002Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.865343Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.865457Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.865466Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.865924Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.865970Z node 19 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.865977Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:30.866455Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.866464Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.866501Z node 19 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.866899Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.867917Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950913, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.867930Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.868007Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.868556Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.868652Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.868683Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.868707Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.868729Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.868791Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.868928Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.868951Z node 19 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.868957Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.868975Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1732188150775797 Nodes { NodeId: 1024 Host: "localhost" Port: 12136 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1732188150775797 } Nodes { NodeId: 19 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 20 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 21 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 
2024-11-21T09:22:31.415265Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439660537433726827:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.415320Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004133/r3tmp/tmpRThVll/pdisk_1.dat 2024-11-21T09:22:31.432101Z node 22 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61135, node 22 2024-11-21T09:22:31.450565Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.450578Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.450580Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.450625Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:31.515852Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.515881Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.517346Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.518986Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.519097Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.519111Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.519552Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.519601Z node 22 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.519609Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:31.519979Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.519990Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.520088Z node 22 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.520458Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.521396Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951564, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.521409Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.521474Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.521926Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.521971Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.521984Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.521995Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.522005Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.522017Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.522215Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:31.522236Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.522245Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.522258Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node 2024-11-21T09:22:31.610326Z node 22 :TICKET_PARSER ERROR: Ticket BEBE821C2D42E8D2A8C43979538F036C1F9FE848: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. 
Client certificate failed verification" } >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogPagingBetween >> YdbYqlClient::TestDescribeDirectory [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbYqlClient::TestDoubleKey >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> TYqlDecimalTests::NegativeValues [GOOD] >> TYqlDecimalTests::DecimalKey >> TYqlDateTimeTests::SimpleOperations [GOOD] >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationAsync >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:14.626441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:14.626484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:14.626495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:14.626502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:14.626529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:14.626533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:14.626544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:14.626664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:14.672722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:14.672747Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: 
[1:15:2062] 2024-11-21T09:21:14.675251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:14.675363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:14.675394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:14.690269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:14.690362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:14.699419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.711038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:14.717852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.734606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:14.734616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:14.734623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:14.734672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:14.736669Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:14.768438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:14.777646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.777768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:14.777830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:14.777842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:14.778771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.778789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:14.778793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:14.778798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:14.779273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:14.779697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.779713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.779722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:14.780408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:14.780786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:14.785595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:14.785936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:14.785981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:14.785992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.786078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:14.786090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:14.786127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:14.786142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:14.786824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:14.786886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:14.786978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:14.786987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:14.786998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:14.787002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:14.787007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:14.787013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:14.787017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:14.787022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:14.787038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:14.787044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:14.787048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ode 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChanged 2024-11-21T09:22:31.056604Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 364 RawX2: 1245540518182 } TabletId: 72075186233409548 State: 4 2024-11-21T09:22:31.056617Z node 290 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:22:31.056625Z node 290 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:31.056671Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [290:637:2591], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:31.056675Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:31.056678Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:22:31.056700Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551621, Sender [290:361:2340], Recipient [290:129:2152]: NKikimrTxDataShard.TEvStateChanged Source { RawX1: 361 RawX2: 1245540518180 } TabletId: 72075186233409547 State: 4 2024-11-21T09:22:31.056704Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChanged 2024-11-21T09:22:31.056710Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 361 RawX2: 1245540518180 } TabletId: 72075186233409547 State: 4 2024-11-21T09:22:31.056715Z node 290 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T09:22:31.056719Z node 290 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:31.057379Z node 290 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:31.057398Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [290:364:2342] msg type: 269552133 msg: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 at schemeshard: 72057594046678944 2024-11-21T09:22:31.057422Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:31.057499Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [290:636:2590], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:31.057505Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:31.057508Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:22:31.057524Z node 290 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:31.057532Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [290:361:2340] msg type: 269552133 msg: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 at schemeshard: 72057594046678944 2024-11-21T09:22:31.057539Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:31.057572Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 269877764, Sender [290:637:2591], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:31.057576Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:31.057579Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:22:31.057586Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [290:644:2598], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T09:22:31.057590Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T09:22:31.057597Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2024-11-21T09:22:31.057614Z node 290 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T09:22:31.058002Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [290:214:2214], Recipient [290:129:2152]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 2024-11-21T09:22:31.058009Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T09:22:31.058013Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:31.058060Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:22:31.058104Z node 290 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:22:31.058210Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [290:214:2214], Recipient [290:129:2152]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 2024-11-21T09:22:31.058213Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T09:22:31.058217Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:22:31.058238Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2024-11-21T09:22:31.058603Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [290:129:2152], Recipient [290:129:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T09:22:31.058611Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T09:22:31.058616Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:31.058619Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:31.058630Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T09:22:31.058634Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T09:22:31.058637Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T09:22:31.058640Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T09:22:31.058644Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:31.058709Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [290:392:2366], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.058714Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.058718Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186233409548, from:72057594046678944 is reset 2024-11-21T09:22:31.058739Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [290:391:2365], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.058743Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.058746Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186233409547, from:72057594046678944 is reset 2024-11-21T09:22:31.059255Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:22:31.059266Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2024-11-21T09:22:31.059533Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:22:31.059542Z node 290 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:22:31.059564Z node 290 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:31.059580Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [290:644:2598], Recipient [290:129:2152]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.059584Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:31.059598Z node 290 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:31.059670Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [290:653:2607], Recipient [290:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:22:31.059677Z node 290 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:22:31.059688Z node 290 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:31.059728Z node 290 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 34us result status StatusPathDoesNotExist 2024-11-21T09:22:31.059762Z node 290 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::SimpleOperations [GOOD] Test command err: 2024-11-21T09:22:27.715014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660520755135343:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:27.715035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004118/r3tmp/tmpBloclo/pdisk_1.dat 2024-11-21T09:22:27.779854Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26596, node 1 2024-11-21T09:22:27.799238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:27.799248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:27.799250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:27.799272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64690 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T09:22:27.815468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:27.815487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:27.816894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:27.852606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:27.853442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.853893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:27.853931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:27.853937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:27.854272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:27.854284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:27.854337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:27.854559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:27.855456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180947903, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:27.855468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:27.855539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:27.856014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:27.856070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:27.856086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:27.856101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:27.856115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:27.856132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:27.856548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:27.856569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:27.856573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:27.856585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:28.001326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.001459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:28.001679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.001691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.002328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T09:22:28.002388Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.002447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.002472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:28.002535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:28.002618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.002627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.002631Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:28.002667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.002675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.002677Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:28.004674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:28.004715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:28.005139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:28.057127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:28.057138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:28.057162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:28.057690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:28.058484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948106, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.058496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180948106 2024-11-21T09:22:28.058521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:28.058895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.058992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.059021Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:28.059243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.059262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.059266Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:28.059303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:28.059310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:28.059311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281 ... Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180952383, at schemeshard: 72057594046644480 2024-11-21T09:22:32.333701Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2024-11-21T09:22:32.333710Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180952383, at schemeshard: 72057594046644480 2024-11-21T09:22:32.333717Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2024-11-21T09:22:32.333727Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1732180952383 2024-11-21T09:22:32.333733Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2024-11-21T09:22:32.334139Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.334224Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.334239Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T09:22:32.334254Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2024-11-21T09:22:32.334285Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2024-11-21T09:22:32.334297Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2024-11-21T09:22:32.334307Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:3 ProgressState 2024-11-21T09:22:32.334312Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2024-11-21T09:22:32.334321Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2024-11-21T09:22:32.334325Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2024-11-21T09:22:32.334329Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 
2024-11-21T09:22:32.334335Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T09:22:32.334338Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T09:22:32.334341Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2024-11-21T09:22:32.334345Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2024-11-21T09:22:32.334619Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:32.334628Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:32.334631Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T09:22:32.334654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:32.334661Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:32.334662Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:32.334671Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:32.334676Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:32.334677Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:32.334687Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:32.334693Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:32.334694Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T09:22:32.334702Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T09:22:32.334704Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T09:22:32.334705Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 
2024-11-21T09:22:32.334708Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T09:22:32.335134Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660540317355153:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T09:22:32.427873Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:32.427912Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:32.428445Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:32.439958Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70fh8aawdrzyq85jspp6d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.453335Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fhbtf9x9akpvfkhsvm0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.492606Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70fhc74a71fmdxqtjbpmad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.494041Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70fhc74a71fmdxqtjbpmad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.537573Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70fhdgdhea9vq0tt0h15e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.540634Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70fhdgdhea9vq0tt0h15e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.553930Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. 
Ctx: { TraceId: 01jd70fhez98cm3e1g8f1wwbvt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.566833Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70fhfb099dcgk6va0dx4pp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.579093Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd70fhfr10b5byxkhyzjkhey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.590287Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd70fhg487c4ax17qrs13j1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.602973Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd70fhgf3d7tn6xhgkh85qqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.634936Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70fhgw30tywapvh6jerfbh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.635814Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd70fhgw30tywapvh6jerfbh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWJhMTUyYWEtYTJjMWMzYy1kNGY3MzFjNC1jNDQ3Yzk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> BindQueue::Basic >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:40.101109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:40.101128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.101133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:40.101136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:40.101145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:40.101148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:40.101163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:40.101228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:40.110867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:40.110888Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.112751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:40.112833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:40.112853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:40.115217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:40.115275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:40.115360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.115525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.116167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.116415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:40.116420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:40.116424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:40.116468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:40.117742Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:40.129133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:40.130085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:40.130090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:40.130680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.130685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:40.130688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:40.130691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2024-11-21T09:20:40.131023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:40.131295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.131305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.131309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.131693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:40.131995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:40.132798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:40.132956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:40.132977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:40.133027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:40.133051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:40.133061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:40.133413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T09:20:40.133438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:40.133487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:40.133490Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:40.133498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:40.133500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:40.133506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:40.133509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:40.133511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:40.133518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:40.133522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:40.133524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... t schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2024-11-21T09:22:31.462232Z node 266 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.462252Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.462257Z node 266 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:31.462263Z node 266 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T09:22:31.462268Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:22:31.462466Z node 266 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.462477Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.462481Z node 266 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:31.462485Z node 266 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T09:22:31.462489Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:31.462499Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T09:22:31.462574Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462581Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T09:22:31.462584Z node 266 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462595Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T09:22:31.462610Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T09:22:31.462659Z node 266 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462671Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 1142461302889 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462676Z node 266 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462691Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T09:22:31.462696Z node 266 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T09:22:31.462699Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:22:31.462706Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:31.462711Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:31.462714Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T09:22:31.462718Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T09:22:31.462721Z node 266 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T09:22:31.462723Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T09:22:31.462729Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:22:31.462733Z node 266 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T09:22:31.462736Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T09:22:31.462738Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T09:22:31.463279Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463305Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463372Z node 266 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:31.463379Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:31.463403Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:31.463418Z node 266 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:31.463422Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [266:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T09:22:31.463426Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [266:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T09:22:31.463553Z node 266 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463561Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463564Z node 266 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:31.463567Z node 266 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T09:22:31.463570Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T09:22:31.463647Z node 266 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463652Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 
PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.463655Z node 266 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T09:22:31.463659Z node 266 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T09:22:31.463662Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T09:22:31.463672Z node 266 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T09:22:31.463677Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [266:122:2148] 2024-11-21T09:22:31.463702Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:31.463706Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T09:22:31.463714Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:31.464476Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.464543Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T09:22:31.464556Z node 266 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T09:22:31.464566Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T09:22:31.464622Z node 266 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T09:22:31.464945Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:22:31.464952Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:22:31.465014Z node 266 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:22:31.465031Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:22:31.465035Z node 266 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [266:974:2913] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] Test command err: 2024-11-21T09:22:29.262976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660530764374190:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.262997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004110/r3tmp/tmpbpFzQH/pdisk_1.dat 2024-11-21T09:22:29.325691Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30445, node 1 2024-11-21T09:22:29.343321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.343334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:29.343336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.343371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:29.363014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.363039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:29.364506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.367373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.368363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.368378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.368836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.368897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.368905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
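The trace just above follows one pattern repeatedly: an operation announces "Publication still in progress, tx: ..., publications: N, subscribers: 1", each TEvUpdateAck from the scheme board drops the "Publication in-flight, count" by one, and at zero the schemeshard logs "Publication complete, notify & remove" and sends TEvNotifyTxCompletionResult to the waiting subscriber. The following is a minimal, self-contained sketch of that bookkeeping; the type and method names are illustrative and are not the real NSchemeShard code.

#include <cstdint>
#include <functional>
#include <iostream>
#include <unordered_map>
#include <vector>

struct TPublicationTracker {
    struct TTxPublication {
        std::uint32_t InFlight = 0;                      // "Publication in-flight, count: N"
        std::vector<std::function<void()>> Subscribers;  // waiters for TEvNotifyTxCompletionResult
    };
    std::unordered_map<std::uint64_t, TTxPublication> Txs;

    void StartPublication(std::uint64_t txId, std::uint32_t paths,
                          std::function<void()> subscriber) {
        auto& tx = Txs[txId];
        tx.InFlight = paths;                             // "publications: N"
        tx.Subscribers.push_back(std::move(subscriber));
    }

    // Called for every ack coming back from the scheme board populator.
    void OnUpdateAck(std::uint64_t txId) {
        auto it = Txs.find(txId);
        if (it == Txs.end() || it->second.InFlight == 0)
            return;
        if (--it->second.InFlight == 0) {
            for (auto& notify : it->second.Subscribers)
                notify();                                // "Publication complete, notify & remove"
            Txs.erase(it);
        }
    }
};

int main() {
    TPublicationTracker tracker;
    tracker.StartPublication(281474976710763ULL, /*paths=*/2,
                             [] { std::cout << "tx complete\n"; });
    tracker.OnUpdateAck(281474976710763ULL);  // count: 2 -> 1
    tracker.OnUpdateAck(281474976710763ULL);  // count: 1 -> 0, subscriber notified
}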
2024-11-21T09:22:29.369250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.369291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.369308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:29.369638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.370394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949415, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.370403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:29.370455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:29.370795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.370831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.370845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:29.370858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:29.370865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:29.370876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:29.371132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:29.371150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:29.371153Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.371174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:29.570643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.570689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.570815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 2024-11-21T09:22:30.123594Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660535093965059:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.123810Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004110/r3tmp/tmpZWLcbC/pdisk_1.dat 2024-11-21T09:22:30.137663Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3493, node 4 2024-11-21T09:22:30.160391Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.160404Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.160406Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.160461Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.224784Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.224812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.228682Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.228762Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.228856Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.228879Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.232525Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.232585Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.232589Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
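The "Change state for txid ...:0 2 -> 3", "3 -> 128", "128 -> 240" lines in the AlterSubDomain traces above describe a simple per-suboperation state machine. A rough sketch of that progression is below; the enum labels are readability guesses, only the numeric codes and their order come from the log, and other operation types insert extra states (the drop-table trace later shows 128 -> 136 -> 137 -> 129 -> 240).

#include <cstdint>
#include <iostream>

enum class EOpState : std::uint32_t {
    CreateParts    = 2,    // sub-operation created, waiting for its parts
    ConfigureParts = 3,    // configuring participating tablets
    Propose        = 128,  // proposed to the coordinator, waiting for a plan step
    Done           = 240,  // operation finished
};

EOpState Next(EOpState s) {
    switch (s) {
        case EOpState::CreateParts:    return EOpState::ConfigureParts;
        case EOpState::ConfigureParts: return EOpState::Propose;
        case EOpState::Propose:        return EOpState::Done;
        case EOpState::Done:           return EOpState::Done;
    }
    return s;
}

int main() {
    EOpState s = EOpState::CreateParts;
    while (s != EOpState::Done) {
        EOpState n = Next(s);
        std::cout << "Change state " << static_cast<std::uint32_t>(s)
                  << " -> " << static_cast<std::uint32_t>(n) << "\n";
        s = n;
    }
}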
2024-11-21T09:22:30.233112Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.236447Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.236472Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.240592Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.244610Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950290, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.244633Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.244716Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.245282Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.245336Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.245350Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 Progress ... UpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T09:22:32.748091Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T09:22:32.748093Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:32.748109Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T09:22:32.748117Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T09:22:32.748119Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T09:22:32.748132Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T09:22:32.748140Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T09:22:32.748141Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T09:22:32.748147Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2024-11-21T09:22:32.748597Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660540835864856:2319], DatabaseId: /Root, PoolId: default, 
Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T09:22:32.824923Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715662:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:32.824964Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:32.825672Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:32.853853Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fhn1fvasjtnev9qzkhaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ODVhZDE2YWYtNDJiMzU2YTEtNTZiMjYxODgtNzY1Mzk4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:32.858729Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: /Root/TheDir/FooTable, pathId: 0, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.858761Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.859358Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715664, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/TheDir/FooTable 2024-11-21T09:22:32.859413Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.859458Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.859481Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.859534Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715664, at schemeshard: 72057594046644480 2024-11-21T09:22:32.859629Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:32.859644Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:32.859648Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2024-11-21T09:22:32.859687Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:32.859695Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:32.859696Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T09:22:32.861191Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:32.861199Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:32.861210Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 4 -> 128 2024-11-21T09:22:32.861481Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.862047Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952908, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.862059Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715664:0 HandleReply TEvOperationPlan, step: 1732180952908, at schemeshard: 72057594046644480 2024-11-21T09:22:32.862066Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 136 2024-11-21T09:22:32.862296Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715664:0 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T09:22:32.862308Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715664:0 ProgressState, no renaming has been detected for this operation 2024-11-21T09:22:32.862310Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 136 -> 137 2024-11-21T09:22:32.862520Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715664:0 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T09:22:32.862534Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715664, done: 0, blocked: 1 2024-11-21T09:22:32.862551Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715664:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715664 Name: RenamePathBarrier }, at tablet72057594046644480 2024-11-21T09:22:32.862585Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 137 -> 129 2024-11-21T09:22:32.862692Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715664 Step: 1732180952908 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 170 } } 2024-11-21T09:22:32.862736Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:32.862847Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
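The drop-table trace above pauses at a named barrier: "TDeleteTableBarrier ... All parts have reached barrier, tx: 281474976715664, done: 0, blocked: 1" followed by a TEvCompleteBarrier for "RenamePathBarrier", after which the blocked part resumes (137 -> 129). The sketch below models that rendezvous under stated assumptions: one counter of finished parts plus a list of blocked callbacks, released once every part has reached the barrier. Names and structure are illustrative, not the real implementation.

#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

class TTxBarrier {
public:
    TTxBarrier(std::string name, std::size_t totalParts)
        : Name(std::move(name)), Total(totalParts) {}

    void PartDone() { ++Done; TryRelease(); }

    void PartBlocked(std::function<void()> onComplete) {
        Blocked.push_back(std::move(onComplete));
        TryRelease();
    }

private:
    void TryRelease() {
        if (Done + Blocked.size() < Total)
            return;
        std::cout << "All parts have reached barrier " << Name
                  << ", done: " << Done << ", blocked: " << Blocked.size() << "\n";
        for (auto& cb : Blocked)
            cb();                  // analogous to delivering TEvCompleteBarrier
        Blocked.clear();
    }

    std::string Name;
    std::size_t Total;
    std::size_t Done = 0;
    std::vector<std::function<void()>> Blocked;
};

int main() {
    TTxBarrier barrier("RenamePathBarrier", /*totalParts=*/1);
    barrier.PartBlocked([] { std::cout << "part resumes, state 137 -> 129\n"; });
}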
2024-11-21T09:22:32.862889Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:32.862905Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.862909Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 129 -> 240 2024-11-21T09:22:32.862972Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.863089Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:32.863100Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:32.863102Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T09:22:32.863128Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:32.863134Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:32.863135Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T09:22:32.863157Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.863225Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2024-11-21T09:22:32.863236Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T09:22:32.864657Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2024-11-21T09:22:32.866409Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037888 not found 2024-11-21T09:22:32.866659Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> 
TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> YdbIndexTable::CreateTableAddIndex [GOOD] >> YdbIndexTable::AlterTableAddIndex >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TNebiusAccessServiceTest::Authenticate [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] Test command err: 2024-11-21T09:22:30.329474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660532720664741:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.329490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004108/r3tmp/tmpxFqLfG/pdisk_1.dat 2024-11-21T09:22:30.382503Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3086, node 1 2024-11-21T09:22:30.402693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.402708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.402710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.402748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:30.429862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.429906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.431246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.461055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.461985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.461998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.462701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.462763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.462774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:30.463232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.463323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.463336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.463754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.464759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950514, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.464773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.464865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.465326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.465377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.465395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.465408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.465416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.465429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.465831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:30.465855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.465861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.465875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.603797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.603907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:30.604018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.604026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.604069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/Test/uid, operationId: 281474976715658:1, transaction: WorkingDir: "/Root/Test" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "uid" KeyColumnNames: "uid" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T09:22:30.604102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.604113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test/uid/indexImplTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:30.604156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 1 -> 2 2024-11-21T09:22:30.604199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.605048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/Test 2024-11-21T09:22:30.605127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.605202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.605226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.605279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.605317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:30.605345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.605414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.605428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.605432Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.605467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.605473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.605474Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:30.605484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.605486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.605487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T09:22:30.605495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.605506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.605507Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T09:22:30.607090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:30.607110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:30.607151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024- ... :0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:33.172235Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:33.172625Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:33.172643Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:33.172754Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:33.172996Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.173889Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953223, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.173902Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:33.173968Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:33.174406Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.174448Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.174462Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:33.174478Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:33.174493Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:33.174519Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:33.174709Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:33.174726Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:33.174731Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:33.174744Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:33.355816Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.355963Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:33.356145Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:33.356158Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.356881Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T09:22:33.356940Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.357000Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.357022Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:33.357092Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:33.357192Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.357206Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.357211Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:33.357251Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.357261Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.357263Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:33.358650Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:33.358701Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:33.358723Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:33.358728Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:33.359250Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:33.411622Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:33.411633Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:33.411693Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:33.411698Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:33.411710Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:33.411716Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:33.411720Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:33.412021Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:33.412750Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953461, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.412763Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180953461 2024-11-21T09:22:33.412792Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:33.413253Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.413331Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.413347Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:33.413533Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.413546Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.413550Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:33.413588Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.413597Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.413599Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:33.414039Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732180953461 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 219 } } 2024-11-21T09:22:33.414071Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715658 Step: 1732180953461 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 248 } } 2024-11-21T09:22:33.414098Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180953461 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 159 } } 2024-11-21T09:22:33.414115Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:33.414131Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:33.414142Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:33.414146Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.414150Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:33.414483Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:33.414502Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:33.414515Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> TYqlDecimalTests::DecimalKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] Test command err: 2024-11-21T09:22:29.818807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660529992761891:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.818982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410d/r3tmp/tmpiMAEMM/pdisk_1.dat 2024-11-21T09:22:29.872290Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1186, node 1 2024-11-21T09:22:29.895264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.895281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:29.895283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.895320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:29.919387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.919425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.920954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:29.949126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.950009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.950022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.950532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.950571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.950581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
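The "WaitRootIsUp 'Root'... TClient::Ls request ... StatusCode: SUCCESS ... WaitRootIsUp 'Root' success." sequence above is the test harness polling the root path until the freshly started server can describe it. A generic sketch of that wait loop follows; the describeRoot callback is a placeholder standing in for the real TClient::Ls call, and the attempt count and delay are assumptions.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

bool WaitRootIsUp(const std::function<bool()>& describeRoot,
                  int attempts = 50,
                  std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    for (int i = 0; i < attempts; ++i) {
        if (describeRoot()) {                    // Ls response reports StatusCode: SUCCESS
            std::cout << "WaitRootIsUp 'Root' success.\n";
            return true;
        }
        std::this_thread::sleep_for(delay);      // root not ready yet, retry
    }
    return false;
}

int main() {
    int calls = 0;
    // Simulated server: reports SUCCESS on the third describe attempt.
    WaitRootIsUp([&] { return ++calls >= 3; });
}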
2024-11-21T09:22:29.950966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.951034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.951042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:29.951414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.953911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950003, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.953923Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:29.954005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:29.954462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.954520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.954536Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:29.954552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:29.954564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:29.954579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:29.955086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.955106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.955110Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.955141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:29.965417Z node 1 :TX_PROXY ERROR: Access denied for bad@builtin with access CreateTable to path Root 2024-11-21T09:22:30.669549Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660533452283132:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.669721Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410d/r3tmp/tmp8XVBqx/pdisk_1.dat 2024-11-21T09:22:30.683917Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9566, node 4 2024-11-21T09:22:30.702039Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2024-11-21T09:22:30.702053Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.702055Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.702091Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.769762Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.769792Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.771273Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.771752Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.771859Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.771872Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.772281Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.772322Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.772331Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:30.772733Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.772745Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.772962Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.773152Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.774043Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950822, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.774057Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.774114Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.774519Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.774556Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.774564Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.774574Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.774588Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.774612Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscr ... 024-11-21T09:22:33.063370Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410d/r3tmp/tmpQY8tO1/pdisk_1.dat 2024-11-21T09:22:33.077063Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30552, node 13 2024-11-21T09:22:33.098369Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:33.098383Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:33.098386Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:33.098429Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:33.163334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:33.163369Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:33.165004Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:33.166357Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.166473Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:33.166484Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.166979Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:33.167021Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:33.167027Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:33.167414Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:33.167426Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:33.167512Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:33.167825Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.168923Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953216, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.168944Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:33.169017Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:33.169537Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.169597Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.169617Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:33.169631Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:33.169647Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:33.169660Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:33.169800Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:33.169817Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:33.169820Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:33.169836Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:33.180643Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.180684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:33.180785Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:33.180790Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.181407Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 
2024-11-21T09:22:33.181453Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.181491Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.181500Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T09:22:33.181547Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:33.181647Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.181650Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.181653Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:33.181680Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.181683Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.181686Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:33.183681Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:33.183716Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:33.236389Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:33.236925Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:33.237691Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953286, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.237704Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T09:22:33.237731Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:33.238189Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.238244Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.238259Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:33.238271Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:33.238283Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:33.238320Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:33.238462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.238475Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.238479Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:33.238507Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:33.238511Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:33.238512Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:33.238518Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2024-11-21T09:22:33.598423Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:22:33.599649Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004973/r3tmp/tmpKDDz0P/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.600531Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/004973/r3tmp/tmpKDDz0P/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:22:33.605149Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T09:22:33.605275Z node 1 :BS_NODE DEBUG: 
{NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605562Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:22:33.605576Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605710Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:22:33.605717Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605800Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:22:33.605807Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605887Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:22:33.605894Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T09:22:33.606149Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T09:22:33.606161Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:22:33.606188Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:22:33.606299Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:22:33.610899Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:22:33.611871Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:22:33.611961Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.611968Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.618108Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.618132Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:22:33.618799Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:22:33.622909Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:22:33.623006Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 
2024-11-21T09:22:33.634481Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004973/r3tmp/tmpKDDz0P/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.634556Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:22:33.634704Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:22:33.634710Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.634730Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\004,\277\242,E}\322\367\0300c\275\317_\362\032\223\024)" } 2024-11-21T09:22:33.634788Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T09:22:33.634796Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:33.634803Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.016622s 2024-11-21T09:22:33.634860Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T09:22:33.634868Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:33.639733Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.641263Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.641895Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.644638Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.644773Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.645080Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.645404Z node 1 
:BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.645451Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.645853Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.645875Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.646119Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.646366Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.646771Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.646791Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.654256Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:33.665969Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:33.668251Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:33.668416Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:22:33.671338Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:33.671358Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:33.671399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:22:33.675661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:22:33.675735Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:22:33.675768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:22:33.675794Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsCo ... 
a# } cookie# 0 2024-11-21T09:22:33.930790Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:2] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:33.930798Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:51:2095] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:1] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:33.931572Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2024-11-21T09:22:33.931622Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T09:22:33.931644Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T09:22:33.931657Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T09:22:33.931664Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:22:33.931729Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK08@propose_group_key.cpp:96} TTxProposeGroupKey Complete 2024-11-21T09:22:33.931796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:335:2344] Cookie# 0 Recipient# [1:335:2344] RecipientRewrite# [1:335:2344] Request# {NodeID: 1 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T09:22:33.931811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2181038082 } 2024-11-21T09:22:33.931871Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 3321094396947703617 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004973/r3tmp/tmpKDDz0P//key.txt" EncryptedGroupKey: "w\227P\036\271JV\256\036H\360\371\0025\214\212\222\361\221\304uz\251\301\204\366\261\2514%\365\347/#?|" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T09:22:33.931889Z node 1 :BS_NODE 
DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 3321094396947703617 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004973/r3tmp/tmpKDDz0P//key.txt" EncryptedGroupKey: "w\227P\036\271JV\256\036H\360\371\0025\214\212\222\361\221\304uz\251\301\204\366\261\2514%\365\347/#?|" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T09:22:33.932068Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T09:22:33.932074Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T09:22:33.932363Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:520:2496] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932386Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:521:2497] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932405Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:522:2498] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932424Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:523:2499] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932443Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:524:2500] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932459Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:525:2501] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932479Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:526:2502] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.932483Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T09:22:33.932613Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932633Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932657Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932686Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932708Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932723Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932732Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.932736Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T09:22:33.932740Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T09:22:33.932760Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] bootstrap ActorId# [1:529:2503] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T09:22:33.932767Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T09:22:33.932813Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 5073528086512678961 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T09:22:33.933322Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T09:22:33.933336Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T09:22:33.933406Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2024-11-21T09:22:33.933726Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T09:22:33.933807Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] bootstrap ActorId# [1:531:2505] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T09:22:33.933813Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T09:22:33.933839Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 8231738257748327547 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T09:22:33.934042Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T09:22:33.934051Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T09:22:33.934114Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [1:532:2506] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T09:22:33.934120Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T09:22:33.934140Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 14240985945856451133 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T09:22:33.934360Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T09:22:33.934368Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 >> TBlobStorageWardenTest::TestSendToInvalidGroupId |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2024-11-21T09:22:34.179471Z node 1 :GRPC_CLIENT DEBUG: [63b7f040710] Connect to grpc://localhost:21648 2024-11-21T09:22:34.181377Z node 1 :GRPC_CLIENT DEBUG: [63b7f040710] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2024-11-21T09:22:34.184759Z node 1 :GRPC_CLIENT DEBUG: [63b7f040710] Status 7 Permission Denied 2024-11-21T09:22:34.184917Z node 1 :GRPC_CLIENT DEBUG: [63b7f040710] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2024-11-21T09:22:34.185656Z node 1 :GRPC_CLIENT DEBUG: [63b7f040710] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2024-11-21T09:22:29.947972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660529941823124:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.948112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410c/r3tmp/tmphEgRKg/pdisk_1.dat 2024-11-21T09:22:30.007312Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6606, node 1 2024-11-21T09:22:30.024288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.024302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.024304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.024340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22255 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:30.048444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.048472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.050004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.077408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.078520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.078537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.079075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.079140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.079149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:30.079495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.079506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.079672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.079822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.080636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950129, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.080649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.080731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.081094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.081130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.081143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.081158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.081171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.081186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.081560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.081580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.081583Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.081609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410c/r3tmp/tmpJGjaVa/pdisk_1.dat 2024-11-21T09:22:30.806725Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:30.815960Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16585, node 4 2024-11-21T09:22:30.832457Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.832470Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.832472Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.832507Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:26020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.903481Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.903512Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.904932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.906127Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.906224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.906235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.906803Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.906845Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.906850Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:30.907171Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.907177Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T09:22:30.907479Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.907615Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.908475Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950955, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.908485Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.908532Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.909064Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.909150Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.909173Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.909190Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.909206Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.909231Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.909343Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.909358Z node 4 :FLAT_TX_ ... ated, will use file: (empty maybe) 2024-11-21T09:22:32.569008Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:32.569010Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:32.569063Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:32.631505Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:32.631540Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:32.632971Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:32.634568Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.634661Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.634674Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.635113Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:32.635157Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:32.635168Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:32.635495Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.635507Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:32.635698Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:32.635863Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.636768Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952684, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.636783Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:32.636844Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:32.637286Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.637322Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.637337Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:32.637355Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:32.637367Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:32.637382Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:32.637515Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T09:22:32.637533Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:32.637537Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:32.637549Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:33.393810Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660544229735990:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:33.394137Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410c/r3tmp/tmpIaGRcv/pdisk_1.dat 2024-11-21T09:22:33.406847Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11056, node 13 2024-11-21T09:22:33.424744Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:33.424761Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:33.424763Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:33.424808Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:33.494192Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:33.494217Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:33.495704Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:33.497264Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.497367Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:33.497379Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.497778Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:33.497821Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:33.497830Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:33.498187Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:33.498197Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:33.498341Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:33.498513Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:33.499426Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953545, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.499439Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:33.499496Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:33.499925Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.499969Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.499983Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:33.499997Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:33.500011Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:33.500022Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:33.500174Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T09:22:33.500189Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:33.500193Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:33.500219Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2024-11-21T09:22:33.601202Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:22:33.602149Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.602241Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:22:33.605146Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T09:22:33.605279Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605629Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:22:33.605642Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605753Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# 
[2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:22:33.605760Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605850Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:22:33.605855Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.605940Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:22:33.605947Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T09:22:33.606141Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:44:2074] ControllerId# 72057594037932033 2024-11-21T09:22:33.606146Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:22:33.606167Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:22:33.606232Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:22:33.611005Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:22:33.611757Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:22:33.612429Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.612472Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T09:22:33.612634Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [2:91:2068] ControllerId# 72057594037932033 2024-11-21T09:22:33.612640Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:22:33.612658Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:22:33.612700Z node 2 :BS_NODE DEBUG: 
{NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:22:33.613627Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:22:33.613758Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:22:33.613884Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:22:33.619942Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.619963Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.620089Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.620109Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:22:33.620772Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:22:33.620910Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.620921Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.622959Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:22:33.623992Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.625421Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.625436Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:22:33.625481Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:22:33.625659Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.625698Z node 2 
:BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.625713Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:22:33.626119Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:22:33.634256Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.634336Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:22:33.634541Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:22:3 ... 
: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.961447Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.961460Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.961493Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.961497Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T09:22:33.961501Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T09:22:33.961507Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T09:22:33.961672Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:593:2505] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:22:33.961702Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:33.961708Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:22:33.961716Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2024-11-21T09:22:33.961721Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2024-11-21T09:22:33.961741Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:33.963583Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T09:22:33.963617Z node 1 
:BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2024-11-21T09:22:33.963625Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:22:33.963699Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2181038082 EnableProxyMock# false NoGroup# false 2024-11-21T09:22:33.963707Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T09:22:33.963711Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T09:22:33.963776Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T09:22:33.963782Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T09:22:33.963807Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T09:22:33.963822Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2024-11-21T09:22:33.963891Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:29:2059] Cookie# 0 Recipient# [1:439:2377] RecipientRewrite# [1:396:2345] Request# {NodeID: 2 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T09:22:33.963909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 2181038082 } 2024-11-21T09:22:33.964002Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 6166497795482103553 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H//key.txt" EncryptedGroupKey: ",\344\247\372\371Lf\313\2238\223e\032z-\264\352oW\205\322\303Bu.\231\241\204t\314,\177\271\262@\250" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T09:22:33.964016Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 6166497795482103553 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H//key.txt" EncryptedGroupKey: ",\344\247\372\371Lf\313\2238\223e\032z-\264\352oW\205\322\303Bu.\231\241\204t\314,\177\271\262@\250" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T09:22:33.964036Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 
Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/jptk/00496d/r3tmp/tmpsFoj3H//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T09:22:33.964192Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T09:22:33.964265Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T09:22:33.964271Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T09:22:33.964485Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:597:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964498Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:598:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964510Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:599:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964527Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:600:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964541Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:601:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964560Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:602:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964575Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:603:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T09:22:33.964579Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T09:22:33.964932Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.964956Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.964986Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.965039Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.965049Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.965056Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.965112Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:33.965117Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T09:22:33.965121Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T09:22:33.965155Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2024-11-21T09:22:28.682916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660522770227275:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:28.683243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004114/r3tmp/tmpGQxTa7/pdisk_1.dat 2024-11-21T09:22:28.750077Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12186, node 1 2024-11-21T09:22:28.771733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:28.771749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:28.771751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:28.771792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:28.783129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:28.783163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:28.784730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:28.800717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.801736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:28.801764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.802283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:28.802351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:28.802363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:28.802698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:28.802709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:28.802858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:28.803026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:28.804092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180948848, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:28.804107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:28.804174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:28.804549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:28.804599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:28.804617Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:28.804628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:28.804643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:28.804654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:28.805043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:28.805061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:28.805065Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:28.805078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:29.004602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.004766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:29.004960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.004972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.005690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table 2024-11-21T09:22:29.005742Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.005793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.005819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:29.005876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.005983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.006005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.006014Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.006059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.006066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.006067Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:29.007494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.007514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:29.007860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:29.060662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:29.060678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:29.060705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T09:22:29.061307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:29.062342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949107, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.062359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180949107 2024-11-21T09:22:29.062388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T09:22:29.062905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.063008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.063043Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:29.063315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.063333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.063337Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:29.063372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.063380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.063381Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 28147497 ... 594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.448495Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.448496Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T09:22:33.454180Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180953503, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:33.454203Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180953503, at schemeshard: 72057594046644480 2024-11-21T09:22:33.454260Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T09:22:33.454285Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180953503, at schemeshard: 72057594046644480 2024-11-21T09:22:33.454304Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T09:22:33.454313Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180953503, at schemeshard: 72057594046644480 2024-11-21T09:22:33.454329Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T09:22:33.454343Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732180953503 2024-11-21T09:22:33.454357Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T09:22:33.454883Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:33.455011Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:33.455033Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 
2024-11-21T09:22:33.455047Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T09:22:33.455087Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T09:22:33.455101Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T09:22:33.455114Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:22:33.455119Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T09:22:33.455130Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T09:22:33.455137Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:22:33.455150Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:33.455159Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:22:33.455169Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:22:33.455172Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:22:33.455177Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:22:33.455564Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.455586Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.455591Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:33.455643Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.455654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.455655Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:33.455676Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.455686Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.455687Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:33.455703Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 
72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.455712Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.455714Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:33.455729Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:33.455737Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:33.455738Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:22:33.455743Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:22:33.456344Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439660547125120012:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:33.539898Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:33.539937Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:33.540682Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:33.551710Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70fjb642h7qcfgvdr30rb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.566290Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70fjejb0w7vgzjqvr034n6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.579296Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fjez06ev5v0wbxkk7fe7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.589819Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70fjfceb27c6svh7dvb2km, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.600772Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70fjfqdnkk64mgv7ac3say, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.612523Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70fjg2est6prrs7d0avq7q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.624978Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd70fjgdesrwv6t3xveg26cq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.645918Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd70fjgt8jbhtcgr4f4yznsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.667297Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70fjhfd7rd33nn5qp7t5vq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:33.692855Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd70fjj4a6gytg7be90ag0y9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTM2NzAzM2QtZDdmNDEyNDMtZjBhZDU0YTgtNzhmMTViNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> YdbIndexTable::AlterTableAddIndex [GOOD] >> YdbLogStore::AlterLogStore >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Pick Disable nodeId# 25 Delete nodeId# 53 Pick Add nodeId# 101 Delete nodeId# 48 Delete nodeId# 8 Delete nodeId# 20 Disable nodeId# 66 Pick Add nodeId# 102 Enable nodeId# 25 Pick Delete nodeId# 64 Delete nodeId# 40 Add nodeId# 103 Delete nodeId# 97 Delete nodeId# 47 Delete nodeId# 78 Disable nodeId# 12 Enable nodeId# 12 Add nodeId# 104 Enable nodeId# 66 Add nodeId# 105 Add nodeId# 106 Delete nodeId# 65 Disable nodeId# 33 Delete nodeId# 18 Disable nodeId# 41 Disable nodeId# 91 Delete nodeId# 46 Disable nodeId# 32 Delete nodeId# 59 Pick Pick Add nodeId# 107 Delete nodeId# 32 Enable nodeId# 41 Disable nodeId# 102 Delete nodeId# 41 Disable nodeId# 60 Enable nodeId# 60 Delete nodeId# 54 Pick Delete nodeId# 28 Delete nodeId# 100 Pick Disable nodeId# 103 Delete nodeId# 67 Disable nodeId# 16 Pick Delete nodeId# 42 Delete nodeId# 103 Enable nodeId# 16 Delete nodeId# 1 Enable nodeId# 91 Disable nodeId# 19 Disable nodeId# 21 Delete nodeId# 31 Add nodeId# 108 Disable nodeId# 68 Disable nodeId# 86 Enable nodeId# 86 Enable nodeId# 68 Disable nodeId# 6 Disable nodeId# 36 Disable nodeId# 17 Add nodeId# 109 Add nodeId# 110 Disable nodeId# 61 Enable nodeId# 19 Pick Add nodeId# 111 Enable nodeId# 36 Pick Disable nodeId# 72 Add nodeId# 112 Pick Add nodeId# 113 Pick Pick Disable nodeId# 91 Delete nodeId# 81 Pick Delete nodeId# 29 Pick Enable nodeId# 102 Disable nodeId# 51 Enable nodeId# 61 Enable nodeId# 6 Pick Pick Add nodeId# 114 Pick Delete nodeId# 87 Pick Add nodeId# 115 Add nodeId# 116 Add nodeId# 117 Add nodeId# 118 Enable nodeId# 21 
Delete nodeId# 6 Disable nodeId# 57 Enable nodeId# 33 Add nodeId# 119 Disable nodeId# 70 Pick Add nodeId# 120 Enable nodeId# 57 Add nodeId# 121 Delete nodeId# 107 Delete nodeId# 10 Delete nodeId# 90 Enable nodeId# 51 Pick Pick Enable nodeId# 17 Pick Delete nodeId# 16 Delete nodeId# 14 Pick Disable nodeId# 24 Delete nodeId# 116 Enable nodeId# 70 Enable nodeId# 24 Add nodeId# 122 Delete nodeId# 75 Delete nodeId# 39 Delete nodeId# 21 Enable nodeId# 72 Pick Add nodeId# 123 Delete nodeId# 5 Enable nodeId# 91 Add nodeId# 124 Add nodeId# 125 Pick Disable nodeId# 71 Disable nodeId# 50 Delete nodeId# 27 Delete nodeId# 77 Add nodeId# 126 Add nodeId# 127 Disable nodeId# 15 Enable nodeId# 71 Pick Enable nodeId# 50 Enable nodeId# 15 Delete nodeId# 93 Add nodeId# 128 Pick Pick Pick Delete nodeId# 71 Pick Pick Add nodeId# 129 Pick Delete nodeId# 89 Pick Delete nodeId# 84 Disable nodeId# 124 Disable nodeId# 56 Disable nodeId# 23 Add nodeId# 130 Add nodeId# 131 Delete nodeId# 4 Pick Enable nodeId# 124 Enable nodeId# 23 Pick Pick Add nodeId# 132 Enable nodeId# 56 Pick Disable nodeId# 43 Disable nodeId# 60 Enable nodeId# 60 Add nodeId# 133 Disable nodeId# 122 Pick Pick Disable nodeId# 131 Enable nodeId# 43 Enable nodeId# 131 Add nodeId# 134 Pick Enable nodeId# 122 Disable nodeId# 50 Pick Delete nodeId# 51 Enable nodeId# 50 Pick Disable nodeId# 73 Pick Add nodeId# 135 Delete nodeId# 104 Disable nodeId# 118 Enable nodeId# 118 Add nodeId# 136 Disable nodeId# 66 Disable nodeId# 114 Enable nodeId# 66 Enable nodeId# 114 Delete nodeId# 132 Enable nodeId# 73 Disable nodeId# 22 Delete nodeId# 94 Disable nodeId# 52 Delete nodeId# 136 Disable nodeId# 19 Add nodeId# 137 Delete nodeId# 129 Add nodeId# 138 Enable nodeId# 52 Add nodeId# 139 Pick Pick Disable nodeId# 114 Enable nodeId# 22 Enable nodeId# 114 Pick Disable nodeId# 80 Delete nodeId# 35 Pick Enable nodeId# 19 Pick Disable nodeId# 13 Add nodeId# 140 Pick Add nodeId# 141 Pick Delete nodeId# 101 Add nodeId# 142 Disable nodeId# 133 Enable nodeId# 133 Delete nodeId# 124 Pick Delete nodeId# 106 Add nodeId# 143 Pick Add nodeId# 144 Pick Pick Pick Add nodeId# 145 Pick Add nodeId# 146 Delete nodeId# 61 Enable nodeId# 13 Enable nodeId# 80 Delete nodeId# 137 Pick Pick Delete nodeId# 130 Add nodeId# 147 Disable nodeId# 43 Disable nodeId# 50 Delete nodeId# 69 Disable nodeId# 62 Add nodeId# 148 Add nodeId# 149 Enable nodeId# 43 Disable nodeId# 91 Delete nodeId# 12 Disable nodeId# 83 Add nodeId# 150 Add nodeId# 151 Add nodeId# 152 Add nodeId# 153 Delete nodeId# 133 Disable nodeId# 92 Pick Disable nodeId# 80 Delete nodeId# 73 Add nodeId# 154 Pick Pick Pick Enable nodeId# 92 Add nodeId# 155 Delete nodeId# 17 Delete nodeId# 152 Enable nodeId# 91 Pick Enable nodeId# 83 Enable nodeId# 62 Disable nodeId# 82 Delete nodeId# 23 Pick Add nodeId# 156 Disable nodeId# 98 Delete nodeId# 121 Enable nodeId# 50 Enable nodeId# 98 Enable nodeId# 82 Enable nodeId# 80 Delete nodeId# 102 Delete nodeId# 95 Disable nodeId# 138 Disable nodeId# 50 Pick Add nodeId# 157 Pick Add nodeId# 158 Disable nodeId# 154 Enable nodeId# 138 Enable nodeId# 50 Disable nodeId# 99 Pick Delete nodeId# 68 Disable nodeId# 144 Disable nodeId# 50 Pick Pick Disable nodeId# 49 Delete nodeId# 111 Add nodeId# 159 Add nodeId# 160 Pick Add nodeId# 161 Disable nodeId# 11 Enable nodeId# 50 Enable nodeId# 154 Disable nodeId# 79 Add nodeId# 162 Pick Enable nodeId# 11 Disable nodeId# 50 Pick Disable nodeId# 2 Enable nodeId# 2 Add nodeId# 163 Delete nodeId# 56 Disable nodeId# 98 Disable nodeId# 26 Enable nodeId# 98 Delete nodeId# 155 
Enable nodeId# 26 Pick Add nodeId# 164 Delete nodeId# 38 Disable nodeId# 120 Pick Delete nodeId# 158 Add nodeId# 165 Delete nodeId# 92 Enable nodeId# 49 Pick Enable nodeId# 144 Enable nodeId# 99 Pick Enable nodeId# 79 Disable nodeId# 24 Add nodeId# 166 Enable nodeId# 50 Delete nodeId# 145 Pick Disable nodeId# 98 Add nodeId# 167 Add nodeId# 168 Disable nodeId# 168 Disable nodeId# 72 Add nodeId# 169 Disable nodeId# 157 Delete nodeId# 140 Delete nodeId# 125 Delete nodeId# 164 Disable nodeId# 126 Delete nodeId# 150 Add nodeId# 170 Pick Enable nodeId# 126 Pick Pick Disable nodeId# 63 Disable nodeId# 122 Add nodeId# 171 Pick Pick Delete nodeId# 161 Enable nodeId# 157 Add nodeId# 172 Pick Delete nodeId# 15 Pick Pick Delete nodeId# 86 Enable nodeId# 168 Pick Add nodeId# 173 Delete nodeId# 170 Delete nodeId# 167 Delete nodeId# 131 Delete nodeId# 160 Pick Pick Add nodeId# 174 Pick Enable nodeId# 72 Delete nodeId# 70 Delete nodeId# 138 Pick Disable nodeId# 83 Pick Delete nodeId# 98 Disable nodeId# 171 Enable nodeId# 63 Delete nodeId# 26 Add nodeId# 175 Delete nodeId# 173 Add nodeId# 176 Enable nodeId# 83 Pick Delete nodeId# 50 Delete nodeId# 13 Pick Delete nodeId# 166 Delete nodeId# 34 Add nodeId# 177 Delete nodeId# 119 Delete nodeId# 82 Pick Delete nodeId# 74 Enable nodeId# 120 Pick Enable nodeId# 24 Enable nodeId# 122 Enable nodeId# 171 Delete nodeId# 7 Delete nodeId# 153 Delete nodeId# 154 Pick Add nodeId# 178 Add nodeId# 179 Delete nodeId# 113 Pick Delete nodeId# 135 Delete nodeId# 2 Disable nodeId# 162 Disable nodeId# 37 Enable nodeId# 162 Enable nodeId# 37 Disable nodeId# 45 Disable nodeId# 11 Disable nodeId# 159 Enable nodeId# 45 Delete nodeId# 118 Delete nodeId# 115 Disable nodeId# 33 Delete nodeId# 3 Pick Disable nodeId# 142 Delete nodeId# 60 Add nodeId# 180 Pick Enable nodeId# 33 Enable nodeId# 142 Add nodeId# 181 Delete nodeId# 146 Enable nodeId# 11 Add nodeId# 182 Add nodeId# 183 Add nodeId# 184 Enable nodeId# 159 Add nodeId# 185 Delete nodeId# 91 Add nodeId# 186 Add nodeId# 187 Delete nodeId# 127 Disable nodeId# 128 Delete nodeId# 49 Disable nodeId# 159 Pick Add nodeId# 188 Add nodeId# 189 Pick Add nodeId# 190 Add nodeId# 191 Disable nodeId# 174 Delete nodeId# 179 Pick Pick Enable nodeId# 128 Disable nodeId# 109 Enable nodeId# 174 Enable nodeId# 159 Disable nodeId# 96 Delete nodeId# 108 Pick Enable nodeId# 96 Pick Enable nodeId# 109 Add nodeId# 192 Disable nodeId# 44 Pick Disable nodeId# 52 Delete nodeId# 58 Enable nodeId# 52 Disable nodeId# 62 Add nodeId# 193 Add nodeId# 194 Delete nodeId# 159 Delete nodeId# 36 Disable nodeId# 43 Pick Enable nodeId# 44 Disable nodeId# 99 Pick Disable nodeId# 171 Disable nodeId# 189 Pick Pick Add nodeId# 195 Enable nodeId# 62 Enable nodeId# 171 Add nodeId# 196 Add nodeId# 197 Add nodeId# 198 Add nodeId# 199 Add nodeId# 200 Delete nodeId# 117 Enable nodeId# 99 Delete nodeId# 149 Delete nodeId# 109 Enable nodeId# 189 Disable nodeId# 184 Enable nodeId# 43 Pick Enable nodeId# 184 Disable nodeId# 198 Add nodeId# 201 Disable nodeId# 197 Delete nodeId# 181 Enable nodeId# 198 Delete nodeId# 174 Add nodeId# 202 Delete nodeId# 80 Enable nodeId# 197 Pick Pick Pick Pick Pick Disable nodeId# 11 Disable nodeId# 165 Delete nodeId# 142 Add nodeId# 203 Pick Delete nodeId# 180 Delete nodeId# 19 Add nodeId# 204 Pick Delete nodeId# 110 Delete nodeId# 144 Delete nodeId# 168 Disable nodeId# 76 Pick Enable nodeId# 11 Pick Add nodeId# 205 Add nodeId# 206 Add nodeId# 207 Add nodeId# 208 Add nodeId# 209 Add nodeId# 210 Delete nodeId# 151 Add nodeId# 211 Pick Pick Add nodeId# 212 
Pick Enable nodeId# 76 Enable nodeId# 165 Delete nodeId# 33 Disable nodeId# 88 Disable nodeId# 203 Disable nodeId# 199 Pick Pick Pick Enable nodeId# 199 Delete nodeId# 199 Pick Pick Delete nodeId# 192 Enable nodeId# 88 Pick Delete nodeId# 63 Pick Delete nodeId# 206 Add nodeId# 213 Delete nodeId# 88 Add nodeId# 214 Enable nodeId# 203 Disable nodeId# 172 Add nodeId# 215 Enable nodeId# 172 Add nodeId# 216 Disable nodeId# 177 Disable nodeId# 76 Enable nodeId# 76 Disable nodeId# 213 Disable nodeId# 163 Disable nodeId# 66 Add nodeId# 217 Enable nodeId# 163 Disable nodeId# 163 Disable nodeId# 134 Enable nodeId# 163 Pick Delete nodeId# 207 Disable nodeId# 62 Add nodeId# 218 Add nodeId# 219 Pick Disable nodeId# 208 Disable nodeId# 139 Delete nodeId# 105 Delete nodeId# 190 Enable nodeId# 213 Disable nodeId# 187 Delete nodeId# 163 Pick Enable nodeId# 208 Disable nodeId# 114 Add nodeId# 220 Pick Enable nodeId# 139 Disable nodeId# 148 Add nodeId# 221 Delete nodeId# 72 Enable nodeId# 66 Disable nodeId# 184 Disable nodeId# 162 Delete nodeId# 221 Enable nodeId# 187 Disable nodeId# 204 Disable nodeId# 122 Disable nodeId# 143 Enable nodeId# 143 Pick Enable nodeId# 162 Disable nodeId# 44 Disable nodeId# 212 Delete nodeId# 147 Delete nodeId# 122 Add nodeId# 222 Pick Enable nodeId# 134 Pick Enable nodeId# 114 Add nodeId# 223 Disable nodeId# 171 Add nodeId# 224 Add nodeId# 225 Add nodeId# 226 Delete nodeId# 224 Enable nodeId# 212 Delete nodeId# 25 Add nodeId# 227 Enable nodeId# 62 Add nodeId# 228 Pick Enable nodeId# 184 Delete nodeId# 66 Add nodeId# 229 Disable nodeId# 128 Disable nodeId# 205 Disable nodeId# 162 Enable nodeId# 148 Delete nodeId# 178 Disable nodeId# 228 Enable nodeId# 162 Disable nodeId# 162 Add nodeId# 230 Disable nodeId# 43 Disable nodeId# 195 Enable nodeId# 195 Add nodeId# 231 Pick Disable nodeId# 148 Disable nodeId# 213 Enable nodeId# 128 Disable nodeId# 123 Disable nodeId# 208 Delete nodeId# 184 Delete nodeId# 169 Pick Enable nodeId# 162 Delete nodeId# 62 Delete nodeId# 185 Disable nodeId# 45 Enable nodeId# 171 Pick Add nodeId# 232 Add nodeId# 233 Enable nodeId# 44 Add nodeId# 234 Delete nodeId# 172 Delete nodeId# 202 Disable nodeId# 182 Delete nodeId# 216 Enable nodeId# 208 Add nodeId# 235 Delete nodeId# 141 Disable nodeId# 9 Add nodeId# 236 Disable nodeId# 44 Add nodeId# 237 Enable nodeId# 44 Enable nodeId# 182 Disable nodeId# 215 Delete nodeId# 123 Delete nodeId# 236 Delete nodeId# 230 Pick Disable nodeId# 189 Add nodeId# 238 Disable nodeId# 187 Delete nodeId# 85 Add nodeId# 239 Pick Disable nodeId# 195 Add nodeId# 240 Pick Disable nodeId# 139 Disable nodeId# 227 Delete nodeId# 215 Add nodeId# 241 Add nodeId# 242 Disable nodeId# 165 Pick Disable nodeId# 211 Disable nodeId# 212 Pick Add nodeId# 243 Delete nodeId# 232 Add nodeId# 244 Delete nodeId# 194 Pick Delete nodeId# 219 Disable nodeId# 235 Pick Add nodeId# 245 Pick Enable nodeId# 195 Enable nodeId# 228 Disable nodeId# 134 Delete nodeId# 22 Pick Add nodeId# 246 Add nodeId# 247 Disable nodeId# 128 Add nodeId# 248 Add nodeId# 249 Pick Delete nodeId# 83 Delete nodeId# 241 Enable nodeId# 45 Disable nodeId# 186 Disable nodeId# 208 Disable nodeId# 162 Delete nodeId# 128 Pick Pick Pick Delete nodeId# 112 Add nodeId# 250 Pick Add nodeId# 251 Disable nodeId# 214 Pick Disable nodeId# 11 Add nodeId# 252 Pick Pick Delete nodeId# 205 Pick Delete ... 
deId# 20199 Delete nodeId# 20301 Enable nodeId# 20035 Add nodeId# 20321 Add nodeId# 20322 Add nodeId# 20323 Enable nodeId# 20298 Pick Add nodeId# 20324 Delete nodeId# 20275 Pick Pick Add nodeId# 20325 Add nodeId# 20326 Add nodeId# 20327 Add nodeId# 20328 Enable nodeId# 20075 Disable nodeId# 20303 Disable nodeId# 20320 Disable nodeId# 20121 Delete nodeId# 20199 Enable nodeId# 20278 Add nodeId# 20329 Enable nodeId# 20286 Disable nodeId# 20242 Disable nodeId# 20328 Pick Pick Pick Disable nodeId# 20261 Enable nodeId# 20303 Enable nodeId# 20293 Pick Delete nodeId# 20207 Enable nodeId# 20288 Disable nodeId# 20251 Enable nodeId# 20302 Add nodeId# 20330 Enable nodeId# 20314 Enable nodeId# 20261 Disable nodeId# 20330 Delete nodeId# 20268 Disable nodeId# 20303 Delete nodeId# 20314 Disable nodeId# 20307 Add nodeId# 20331 Enable nodeId# 20142 Add nodeId# 20332 Pick Enable nodeId# 20280 Add nodeId# 20333 Disable nodeId# 20310 Add nodeId# 20334 Enable nodeId# 20266 Delete nodeId# 20313 Enable nodeId# 20212 Enable nodeId# 20328 Delete nodeId# 20160 Delete nodeId# 20217 Add nodeId# 20335 Enable nodeId# 20233 Enable nodeId# 20330 Add nodeId# 20336 Add nodeId# 20337 Delete nodeId# 20336 Disable nodeId# 20309 Enable nodeId# 20216 Pick Disable nodeId# 20321 Enable nodeId# 20251 Disable nodeId# 20333 Enable nodeId# 20018 Delete nodeId# 20003 Disable nodeId# 20308 Delete nodeId# 20196 Add nodeId# 20338 Delete nodeId# 20289 Enable nodeId# 20320 Pick Pick Pick Add nodeId# 20339 Add nodeId# 20340 Delete nodeId# 20337 Enable nodeId# 20208 Delete nodeId# 20319 Delete nodeId# 20284 Pick Add nodeId# 20341 Disable nodeId# 20329 Disable nodeId# 20018 Add nodeId# 20342 Enable nodeId# 20309 Pick Pick Add nodeId# 20343 Delete nodeId# 20266 Disable nodeId# 20323 Enable nodeId# 20150 Delete nodeId# 20281 Pick Pick Enable nodeId# 20323 Pick Pick Pick Enable nodeId# 20333 Enable nodeId# 20018 Pick Delete nodeId# 20283 Add nodeId# 20344 Delete nodeId# 20018 Delete nodeId# 20322 Enable nodeId# 20321 Enable nodeId# 20242 Disable nodeId# 20150 Disable nodeId# 20212 Enable nodeId# 20212 Add nodeId# 20345 Pick Disable nodeId# 20238 Enable nodeId# 20238 Pick Delete nodeId# 20328 Delete nodeId# 20253 Add nodeId# 20346 Delete nodeId# 20205 Disable nodeId# 20293 Add nodeId# 20347 Delete nodeId# 20308 Pick Add nodeId# 20348 Enable nodeId# 20023 Pick Enable nodeId# 20176 Disable nodeId# 20318 Delete nodeId# 20212 Disable nodeId# 20278 Add nodeId# 20349 Add nodeId# 20350 Enable nodeId# 20121 Add nodeId# 20351 Pick Delete nodeId# 20192 Enable nodeId# 20278 Pick Disable nodeId# 20304 Add nodeId# 20352 Disable nodeId# 20176 Add nodeId# 20353 Delete nodeId# 20349 Delete nodeId# 20176 Add nodeId# 20354 Disable nodeId# 20298 Delete nodeId# 20346 Pick Disable nodeId# 20339 Add nodeId# 20355 Enable nodeId# 20339 Delete nodeId# 20272 Enable nodeId# 20307 Add nodeId# 20356 Add nodeId# 20357 Add nodeId# 20358 Disable nodeId# 20354 Pick Delete nodeId# 20278 Disable nodeId# 20340 Enable nodeId# 20340 Add nodeId# 20359 Disable nodeId# 20208 Delete nodeId# 20320 Pick Add nodeId# 20360 Delete nodeId# 20352 Delete nodeId# 20354 Disable nodeId# 20204 Disable nodeId# 20309 Pick Pick Disable nodeId# 20023 Pick Delete nodeId# 20286 Pick Delete nodeId# 20316 Enable nodeId# 20303 Enable nodeId# 20309 Delete nodeId# 20323 Disable nodeId# 20317 Delete nodeId# 20280 Enable nodeId# 20293 Disable nodeId# 20288 Disable nodeId# 20134 Enable nodeId# 20304 Add nodeId# 20361 Disable nodeId# 20358 Disable nodeId# 20133 Enable nodeId# 20150 Disable nodeId# 20262 Delete 
nodeId# 20298 Delete nodeId# 20304 Enable nodeId# 20358 Delete nodeId# 20150 Disable nodeId# 20142 Disable nodeId# 20216 Enable nodeId# 20208 Pick Delete nodeId# 20309 Add nodeId# 20362 Disable nodeId# 20332 Add nodeId# 20363 Delete nodeId# 20350 Delete nodeId# 20344 Delete nodeId# 20310 Delete nodeId# 20355 Pick Delete nodeId# 20347 Pick Enable nodeId# 20329 Enable nodeId# 20134 Delete nodeId# 20357 Enable nodeId# 20317 Enable nodeId# 20262 Disable nodeId# 20333 Delete nodeId# 20261 Pick Delete nodeId# 20303 Enable nodeId# 20216 Disable nodeId# 20325 Pick Pick Enable nodeId# 20204 Disable nodeId# 20312 Delete nodeId# 20342 Delete nodeId# 20174 Add nodeId# 20364 Add nodeId# 20365 Delete nodeId# 20260 Delete nodeId# 20264 Delete nodeId# 20325 Enable nodeId# 20333 Add nodeId# 20366 Add nodeId# 20367 Enable nodeId# 20332 Delete nodeId# 20226 Pick Pick Enable nodeId# 20288 Add nodeId# 20368 Pick Pick Disable nodeId# 20363 Enable nodeId# 20363 Delete nodeId# 20273 Add nodeId# 20369 Pick Add nodeId# 20370 Disable nodeId# 20363 Pick Disable nodeId# 20327 Delete nodeId# 20262 Pick Pick Add nodeId# 20371 Disable nodeId# 20335 Enable nodeId# 20327 Pick Enable nodeId# 20133 Pick Delete nodeId# 20181 Add nodeId# 20372 Pick Add nodeId# 20373 Delete nodeId# 20315 Pick Pick Enable nodeId# 20335 Delete nodeId# 20256 Pick Enable nodeId# 20023 Add nodeId# 20374 Disable nodeId# 20373 Enable nodeId# 20318 Enable nodeId# 20312 Delete nodeId# 20366 Disable nodeId# 20374 Disable nodeId# 20353 Add nodeId# 20375 Pick Delete nodeId# 20334 Delete nodeId# 20311 Enable nodeId# 20373 Add nodeId# 20376 Delete nodeId# 20361 Delete nodeId# 20312 Pick Disable nodeId# 20251 Add nodeId# 20377 Pick Enable nodeId# 20374 Enable nodeId# 20251 Delete nodeId# 20270 Add nodeId# 20378 Add nodeId# 20379 Disable nodeId# 20365 Disable nodeId# 20098 Pick Disable nodeId# 20242 Delete nodeId# 20240 Add nodeId# 20380 Enable nodeId# 20242 Delete nodeId# 20362 Pick Delete nodeId# 20269 Disable nodeId# 20380 Pick Add nodeId# 20381 Pick Disable nodeId# 20338 Pick Delete nodeId# 20250 Pick Delete nodeId# 20075 Enable nodeId# 20353 Disable nodeId# 20372 Delete nodeId# 20300 Disable nodeId# 20341 Disable nodeId# 20231 Enable nodeId# 20372 Add nodeId# 20382 Disable nodeId# 20293 Delete nodeId# 20372 Pick Pick Delete nodeId# 20195 Add nodeId# 20383 Pick Pick Pick Add nodeId# 20384 Pick Enable nodeId# 20341 Add nodeId# 20385 Pick Enable nodeId# 20363 Pick Disable nodeId# 20339 Pick Add nodeId# 20386 Delete nodeId# 20373 Add nodeId# 20387 Disable nodeId# 20353 Delete nodeId# 20204 Enable nodeId# 20380 Delete nodeId# 20305 Pick Add nodeId# 20388 Delete nodeId# 20351 Disable nodeId# 20380 Disable nodeId# 20376 Disable nodeId# 20134 Disable nodeId# 20121 Delete nodeId# 20238 Delete nodeId# 20367 Pick Pick Add nodeId# 20389 Disable nodeId# 20364 Add nodeId# 20390 Delete nodeId# 20142 Pick Delete nodeId# 20371 Enable nodeId# 20338 Pick Pick Disable nodeId# 20299 Enable nodeId# 20293 Delete nodeId# 20324 Pick Add nodeId# 20391 Add nodeId# 20392 Enable nodeId# 20134 Pick Pick Disable nodeId# 20338 Disable nodeId# 20242 Pick Delete nodeId# 20339 Disable nodeId# 20343 Pick Enable nodeId# 20380 Enable nodeId# 20376 Add nodeId# 20393 Add nodeId# 20394 Pick Add nodeId# 20395 Add nodeId# 20396 Add nodeId# 20397 Enable nodeId# 20121 Add nodeId# 20398 Pick Pick Delete nodeId# 20338 Enable nodeId# 20343 Add nodeId# 20399 Add nodeId# 20400 Pick Enable nodeId# 20231 Add nodeId# 20401 Delete nodeId# 20391 Disable nodeId# 20317 Disable nodeId# 20360 Pick Add nodeId# 
20402 Pick Add nodeId# 20403 Pick Enable nodeId# 20242 Disable nodeId# 20382 Enable nodeId# 20364 Delete nodeId# 20335 Pick Enable nodeId# 20382 Enable nodeId# 20098 Enable nodeId# 20299 Enable nodeId# 20360 Disable nodeId# 20360 Add nodeId# 20404 Enable nodeId# 20360 Pick Add nodeId# 20405 Enable nodeId# 20353 Pick Add nodeId# 20406 Add nodeId# 20407 Pick Add nodeId# 20408 Pick Disable nodeId# 20379 Add nodeId# 20409 Disable nodeId# 20216 Add nodeId# 20410 Add nodeId# 20411 Pick Delete nodeId# 20399 Disable nodeId# 20397 Disable nodeId# 20360 Disable nodeId# 20345 Disable nodeId# 20398 Disable nodeId# 20377 Enable nodeId# 20379 Delete nodeId# 20364 Pick Pick Pick Disable nodeId# 20293 Pick Add nodeId# 20412 Add nodeId# 20413 Pick Pick Disable nodeId# 20393 Pick Enable nodeId# 20397 Disable nodeId# 20330 Enable nodeId# 20365 Disable nodeId# 20392 Disable nodeId# 20233 Pick Pick Delete nodeId# 20403 Disable nodeId# 20395 Disable nodeId# 20134 Disable nodeId# 20332 Delete nodeId# 20408 Delete nodeId# 20208 Disable nodeId# 20343 Delete nodeId# 20329 Enable nodeId# 20233 Enable nodeId# 20216 Enable nodeId# 20392 Delete nodeId# 20368 Delete nodeId# 20134 Pick Pick Add nodeId# 20414 Pick Delete nodeId# 20333 Pick Disable nodeId# 20327 Pick Disable nodeId# 20242 Disable nodeId# 20321 Enable nodeId# 20327 Pick Pick Pick Delete nodeId# 20317 Disable nodeId# 20307 Add nodeId# 20415 Add nodeId# 20416 Add nodeId# 20417 Enable nodeId# 20321 Delete nodeId# 20397 Enable nodeId# 20377 Disable nodeId# 20410 Pick Pick Disable nodeId# 20318 Delete nodeId# 20394 Enable nodeId# 20410 Enable nodeId# 20360 Pick Add nodeId# 20418 Pick Delete nodeId# 20133 Delete nodeId# 20356 Delete nodeId# 20331 Pick Disable nodeId# 20340 Disable nodeId# 20407 Disable nodeId# 20125 Disable nodeId# 20389 Add nodeId# 20419 Pick Disable nodeId# 20392 Delete nodeId# 20345 Add nodeId# 20420 Pick Enable nodeId# 20398 Add nodeId# 20421 Add nodeId# 20422 Disable nodeId# 20358 Pick Pick Enable nodeId# 20358 Enable nodeId# 20242 Disable nodeId# 20384 Delete nodeId# 20296 Enable nodeId# 20332 Add nodeId# 20423 Delete nodeId# 20293 Disable nodeId# 20387 Delete nodeId# 20216 Pick Enable nodeId# 20407 Disable nodeId# 20381 Delete nodeId# 20422 Disable nodeId# 20035 Disable nodeId# 20360 Add nodeId# 20424 Enable nodeId# 20035 Pick Delete nodeId# 20035 Add nodeId# 20425 Delete nodeId# 20419 Enable nodeId# 20125 Disable nodeId# 20383 Delete nodeId# 20410 Enable nodeId# 20330 Enable nodeId# 20307 Disable nodeId# 20242 Enable nodeId# 20383 Pick Delete nodeId# 20383 Pick Enable nodeId# 20242 Add nodeId# 20426 Enable nodeId# 20392 Pick Enable nodeId# 20343 Delete nodeId# 20251 Add nodeId# 20427 Pick Disable nodeId# 20098 Disable nodeId# 20054 Add nodeId# 20428 Enable nodeId# 20387 Pick Delete nodeId# 20326 Pick Add nodeId# 20429 Delete nodeId# 20156 Pick Disable nodeId# 20411 Delete nodeId# 20340 Enable nodeId# 20098 Add nodeId# 20430 Pick Pick Enable nodeId# 20411 Add nodeId# 20431 Disable nodeId# 20402 Enable nodeId# 20360 Enable nodeId# 20384 Pick Add nodeId# 20432 Delete nodeId# 20370 Enable nodeId# 20389 Disable nodeId# 20376 Disable nodeId# 20401 Add nodeId# 20433 Pick Pick Disable nodeId# 20388 Pick Add nodeId# 20434 Enable nodeId# 20395 Delete nodeId# 20187 Add nodeId# 20435 Enable nodeId# 20381 Disable nodeId# 20374 Add nodeId# 20436 Add nodeId# 20437 Delete nodeId# 20400 Delete nodeId# 20369 Pick Pick Delete nodeId# 20384 Add nodeId# 20438 Enable nodeId# 20401 Pick Pick Delete nodeId# 20388 Pick Delete nodeId# 20412 Pick Add nodeId# 20439 
Disable nodeId# 20360 Add nodeId# 20440 Add nodeId# 20441 Enable nodeId# 20360 Enable nodeId# 20374 Pick Enable nodeId# 20402 Disable nodeId# 20424 Add nodeId# 20442 Delete nodeId# 20231 Add nodeId# 20443 Enable nodeId# 20318 Disable nodeId# 20327 Disable nodeId# 20233 Delete nodeId# 20341 Enable nodeId# 20054 Delete nodeId# 20439 Add nodeId# 20444 Add nodeId# 20445 Disable nodeId# 20392 Add nodeId# 20446 Pick Enable nodeId# 20376 Pick Delete nodeId# 20382 Delete nodeId# 20427 Add nodeId# 20447 Add nodeId# 20448 Enable nodeId# 20327 Enable nodeId# 20424 Disable nodeId# 20353 Delete nodeId# 20428 Enable nodeId# 20392 Enable nodeId# 20353 Disable nodeId# 20121 Add nodeId# 20449 Add nodeId# 20450 Add nodeId# 20451 Disable nodeId# 20353 Enable nodeId# 20121 Enable nodeId# 20233 Delete nodeId# 20429 Delete nodeId# 20451 Delete nodeId# 20302 Pick Add nodeId# 20452 Pick Enable nodeId# 20353 Enable nodeId# 20393 Add nodeId# 20453 Delete nodeId# 20363 Delete nodeId# 20407 Delete nodeId# 20446 Pick Add nodeId# 20454 Add nodeId# 20455 Pick Add nodeId# 20456 Delete nodeId# 20233 Add nodeId# 20457 Pick Add nodeId# 20458 Delete nodeId# 20375 Delete nodeId# 20414 Disable nodeId# 20249 Delete nodeId# 20172 Delete nodeId# 20433 Pick Delete nodeId# 20360 Add nodeId# 20459 Pick Disable nodeId# 20288 Disable nodeId# 20450 Delete nodeId# 20395 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2024-11-21T09:22:33.616338Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:22:33.617333Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmp2UB18A/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.617446Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmp2UB18A/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:22:33.617800Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T09:22:33.617879Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# 
[2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.618090Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:22:33.618101Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.618223Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:22:33.618231Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.618320Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:22:33.618327Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:33.618414Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:22:33.618422Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T09:22:33.618604Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T09:22:33.618609Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:22:33.618626Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:22:33.618680Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:22:33.622235Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:22:33.622557Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:22:33.622631Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.622637Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.628309Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.628327Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:22:33.628888Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:22:33.629612Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:22:33.629672Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:33.632902Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} 
ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmp2UB18A/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:33.633641Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:22:33.633809Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:22:33.633815Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:33.633833Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\252Q)\017\234\310-\027\3040\252\256\250\261\210\000\210;\244L" } 2024-11-21T09:22:33.633864Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T09:22:33.633871Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:33.633879Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.005524s 2024-11-21T09:22:33.634630Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T09:22:33.634642Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:33.641537Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.642918Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.643645Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.644544Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.644633Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.644989Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.645293Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 
2024-11-21T09:22:33.645354Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.645646Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.645662Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.645902Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:33.646238Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.646736Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.646753Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:33.654256Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:33.665853Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:33.668251Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:33.668415Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:22:33.671190Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:33.671209Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:33.671259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:22:33.676546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:22:33.676612Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:22:33.676633Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:22:33.676656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr: ... 
ock# false NoGroup# false 2024-11-21T09:22:34.622593Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T09:22:34.622599Z node 3 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T09:22:34.622659Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T09:22:34.622667Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T09:22:34.622686Z node 3 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T09:22:34.622702Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T09:22:34.622790Z node 2 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [3:29:2059] Cookie# 0 Recipient# [2:404:2342] RecipientRewrite# [2:362:2311] Request# {NodeID: 3 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T09:22:34.622809Z node 2 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 3 GroupIDs: 2181038082 } 2024-11-21T09:22:34.622920Z node 3 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 3 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 11427004671863376647 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmpllHrve//key.txt" EncryptedGroupKey: "l<8\217|9\205;Wc\262\266?\272\361\344\341\315\367\026\230D\214\000D\220\023Q\257\215vR\035\220a\232" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T09:22:34.622941Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 11427004671863376647 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmpllHrve//key.txt" EncryptedGroupKey: "l<8\217|9\205;Wc\262\266?\272\361\344\341\315\367\026\230D\214\000D\220\023Q\257\215vR\035\220a\232" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T09:22:34.622962Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/jptk/004970/r3tmp/tmpllHrve//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T09:22:34.623161Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T09:22:34.623169Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T09:22:34.623443Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:597:2104] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623469Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:598:2105] targetNodeId# 
2 Marker# DSP01 2024-11-21T09:22:34.623493Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:599:2106] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623518Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:600:2107] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623540Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:601:2108] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623562Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:602:2109] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623582Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:603:2110] targetNodeId# 2 Marker# DSP01 2024-11-21T09:22:34.623588Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T09:22:34.623661Z node 3 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T09:22:34.623920Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.623984Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.623995Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.624022Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.624034Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.624117Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.624146Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 
WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T09:22:34.624150Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T09:22:34.624155Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T09:22:34.624178Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [3:606:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T09:22:34.624185Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2024-11-21T09:22:34.624244Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 5177974499536755625 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T09:22:34.624668Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2024-11-21T09:22:34.624682Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T09:22:34.624746Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T09:22:34.624772Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T09:22:34.624824Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [2:607:2506] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T09:22:34.624847Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.624853Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:22:34.624861Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T09:22:34.624878Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T09:22:34.624897Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:582:2496] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:34.624937Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T09:22:34.624977Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T09:22:34.624986Z node 2 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T09:22:34.624993Z node 2 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:22:34.625053Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::Overload >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative >> ControlImplementationTests::TestControlWrapperAsI64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2024-11-21T09:22:34.497604Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T09:22:34.498404Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 
Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/00495c/r3tmp/tmpJ86jZQ/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:34.498473Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/jptk/00495c/r3tmp/tmpJ86jZQ/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T09:22:34.498805Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T09:22:34.498876Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:34.499040Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T09:22:34.499049Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:34.499161Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T09:22:34.499168Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:34.499230Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T09:22:34.499234Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:34.499286Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T09:22:34.499291Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T09:22:34.499417Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 
72057594037932033 2024-11-21T09:22:34.499420Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T09:22:34.499434Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T09:22:34.499472Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T09:22:34.502721Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T09:22:34.503033Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T09:22:34.503106Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:34.503112Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:34.508999Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:34.509025Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T09:22:34.509718Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T09:22:34.510479Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T09:22:34.510540Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T09:22:34.513800Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/jptk/00495c/r3tmp/tmpJ86jZQ/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T09:22:34.513888Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T09:22:34.514044Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T09:22:34.514049Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T09:22:34.514068Z node 1 :BS_NODE DEBUG: 
{NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "h\214\276\230O\325\250\022\367\265\315\340l\375\266{itd\302" } 2024-11-21T09:22:34.514109Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T09:22:34.514118Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:34.514127Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.005062s 2024-11-21T09:22:34.514182Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T09:22:34.514188Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T09:22:34.519030Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.520182Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.520903Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.521698Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.521813Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.521989Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.522254Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.522306Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.522661Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.522676Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.522809Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.523011Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.523301Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.523315Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T09:22:34.527019Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:34.532288Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:34.532620Z node 1 :BS_CONTROLLER 
DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:34.532715Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:22:34.535132Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:34.535148Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:34.535182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:22:34.537621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:22:34.537669Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:22:34.537700Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:22:34.537717Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NB ... GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 15804151571750595893 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.709598Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.709615Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.709639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 15804151571750595893 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.709673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 15804151571750595893 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.709874Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "test_storage" ErasureSpecies: "none" VDiskKind: "Default" Kind: "pool-kind-1" NumGroups: 1 PDiskFilter { Property { Type: ROT } } EncryptionMode: 1 } } } 2024-11-21T09:22:34.730812Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T09:22:34.730877Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730882Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730884Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730887Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730889Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# 
[72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730891Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T09:22:34.730896Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T09:22:34.730905Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG33 2024-11-21T09:22:34.730908Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG32 2024-11-21T09:22:34.730911Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG33 2024-11-21T09:22:34.730914Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG32 2024-11-21T09:22:34.730917Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG33 2024-11-21T09:22:34.730919Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG32 2024-11-21T09:22:34.730968Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:34.730976Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:34.730980Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T09:22:34.731822Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T09:22:34.731890Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T09:22:34.731910Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 
2024-11-21T09:22:34.731929Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T09:22:34.731938Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T09:22:34.732126Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15804151571750595893 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "test_storage" } Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15804151571750595893 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038082 MainKeyVersion: 0 StoragePoolName: "test_storage" DeviceType: ROT } } InstanceId: "622888b7-ed8f41e0-c6509464-4e10f846" AvailDomain: 1 } 2024-11-21T09:22:34.732147Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {VDisks { VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15804151571750595893 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "test_storage" } Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 15804151571750595893 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038082 MainKeyVersion: 0 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T09:22:34.732191Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000002:1:0:0:0] VSlotId# 1:1000:1002 PDiskGuid# 15804151571750595893 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T09:22:34.732353Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000002:1:0:0:0] VSlotId# 1:1000:1002 PDiskGuid# 15804151571750595893 2024-11-21T09:22:34.733553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 15804151571750595893 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.733824Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1002 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:22:34.734700Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1002 } } 2024-11-21T09:22:34.735499Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} 
Handle(TEvStatusUpdate) 2024-11-21T09:22:34.735560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 15804151571750595893 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.735831Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T09:22:34.735884Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 15804151571750595893 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:22:34.735960Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2024-11-21T09:22:34.736329Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2024-11-21T09:22:34.736405Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2024-11-21T09:22:34.736433Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> ControlImplementationTests::TestControlWrapperBounds [GOOD] >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] >> ControlImplementationTests::TestTControl [GOOD] >> ControlImplementationTests::TestRegisterSharedControl [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> YdbLogStore::AlterLogStore [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperBounds [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestTControl [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestRegisterSharedControl [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer |97.2%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::TestReadTableSnapshot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2024-11-21T09:22:31.466495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660537990878492:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.466716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004101/r3tmp/tmp8cvftl/pdisk_1.dat 2024-11-21T09:22:31.517765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21362, node 1 2024-11-21T09:22:31.536982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.536997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.536999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.537048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8453 WaitRootIsUp 'Root'... 
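Editor's note (illustrative, not part of the captured log): the BPP13/BPP01/BPP12 markers above trace a blob put fanned out into three parts and acknowledged by three VDisks before a single TEvPutResult OK is returned, and the DSP31 entries show requests to Group# 4294967295 being rejected up front because that value is the "unconfigured" sentinel. The sketch below is a minimal standalone C++ model of those two checks; it is an assumption-laden simplification, not YDB's actual BS_PROXY_PUT implementation.

// Illustrative sketch only: models the DSP31 rejection (Group# 4294967295 treated as
// an invalid/unconfigured group) and the BPP01-style ack counting for a 3-part put.
#include <cstdint>
#include <string>
#include <iostream>

constexpr uint32_t InvalidGroupId = 0xFFFFFFFFu;   // matches Group# 4294967295 in the log

struct PutResult {
    bool ok;
    std::string reason;
};

// Reject requests addressed to the invalid group before any VPut is sent;
// otherwise reply OK only once every part has been acknowledged.
PutResult HandlePut(uint32_t groupId, int expectedParts, int receivedOkParts) {
    if (groupId == InvalidGroupId) {
        return {false, "Created as unconfigured in error state (DSPE11)"};
    }
    if (receivedOkParts == expectedParts) {
        return {true, ""};
    }
    return {false, "still waiting for part acknowledgements"};
}

int main() {
    std::cout << HandlePut(33554432u, 3, 3).ok << "\n";          // 1: all three parts acked, as in the log
    std::cout << HandlePut(InvalidGroupId, 3, 0).reason << "\n"; // DSPE11-style rejection
}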
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:31.567451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.567498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.568997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.596475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.597606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.597625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.598356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.598413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.598422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:31.599004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.600695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.600708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.601158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.602064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951648, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.602075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.602166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.602610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.602652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.602671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.602681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.602691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.602702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.603120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:31.603127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.603132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.603143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:8453 2024-11-21T09:22:31.623908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.624577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:31.624642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.624654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.625296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: root@builtin, status: StatusAccepted, 
operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T09:22:31.625348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.625411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.625437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 waiting... 2024-11-21T09:22:31.625612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.625626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.625630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:31.625664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.625666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.625668Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:31.625845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:31.628276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.628359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.628383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.628399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.628409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:31.628878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:31.636087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T09:22:31.639099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T09:22:31.639157Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T09:22:31.639851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:22:31.639909Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:22:31.639961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:22:31.639989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:22:31.640007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:22:31.640030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439660537990879495:2286];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... 0 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:22:34.987731Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:34.987742Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:22:34.987749Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:22:34.987751Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:22:34.987756Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:22:34.987942Z node 10 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:34.988032Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:34.988047Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:34.988054Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:22:34.988094Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:34.988101Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:34.988103Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:34.988117Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 
PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:34.988125Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:34.988126Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:34.988139Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:34.988146Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:34.988147Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:22:34.988162Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:22:34.988170Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:22:34.988171Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:22:34.988177Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:22:34.988617Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660551175869813:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:35.045348Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:35.045391Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:35.046058Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:35.058086Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:35.058126Z node 10 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2024-11-21T09:22:35.059739Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:35.060629Z node 10 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1732180955106 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 7439660551175869276 RawX2: 42949675318 } } Step: 1732180955106 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:35.060639Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:35.060661Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:35.060665Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:35.060671Z node 10 :TX_DATASHARD DEBUG: Found ready operation [1732180955106:281474976715661] in PlanQueue unit at 72075186224037888 2024-11-21T09:22:35.060706Z node 10 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1732180955106:281474976715661 keys extracted: 0 2024-11-21T09:22:35.060771Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:35.061544Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1732180955106 txid# 281474976715661} 2024-11-21T09:22:35.061556Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1732180955106} 2024-11-21T09:22:35.061567Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:35.061580Z node 10 :TX_DATASHARD DEBUG: Complete [1732180955106 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [10:7439660555470837205:2782], exec latency: 0 ms, propose latency: 
0 ms 2024-11-21T09:22:35.061590Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:35.061754Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70fkv6bv75crfsm5bxa6r5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGI1YTQ4MTItNGU0YjE5ZmEtZjU1NDE1MWYtZGY2MThiNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:35.062428Z node 10 :TX_DATASHARD INFO: Start scan, at: [10:7439660555470837234:2074], tablet: [10:7439660551175869688:2298], scanId: 3, table: /Root/LogsX, gen: 1, deadline: 2024-11-21T09:32:35.062305Z 2024-11-21T09:22:35.062495Z node 10 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [10:7439660555470837234:2074], scanId: 3, table: /Root/LogsX, gen: 1, tablet: [10:7439660551175869688:2298], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2024-11-21T09:22:35.062505Z node 10 :TX_DATASHARD DEBUG: Wakeup driver at: [10:7439660555470837234:2074] 2024-11-21T09:22:35.062638Z node 10 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2024-11-21T09:22:35.062649Z node 10 :TX_DATASHARD DEBUG: TableRanges is over, at: [10:7439660555470837234:2074], scanId: 3, table: /Root/LogsX 2024-11-21T09:22:35.062654Z node 10 :TX_DATASHARD DEBUG: Finish scan, at: [10:7439660555470837234:2074], scanId: 3, table: /Root/LogsX, reason: 0, abortEvent: 2024-11-21T09:22:35.062660Z node 10 :TX_DATASHARD DEBUG: Send ScanData, from: [10:7439660555470837234:2074], to: [10:7439660555470837229:2317], scanId: 3, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2024-11-21T09:22:35.062698Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T09:22:35.062724Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:35.062734Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:35.062739Z node 10 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:35.062746Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:35.064578Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180955106, txId: 281474976715661] shutting down 2024-11-21T09:22:35.106392Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70fkxt7jc9d5bkn48mms6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YmEyMDYyNWEtODY3OGYwM2QtNmVlZmE1MDctYTgwOWY1ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8016;columns=9; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8016;columns=9; 2024-11-21T09:22:35.111076Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8984;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8984;columns=10; 2024-11-21T09:22:35.112031Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogStore [GOOD] Test command err: 2024-11-21T09:22:31.190541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660537432925013:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.190610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004103/r3tmp/tmp7ujLep/pdisk_1.dat 2024-11-21T09:22:31.249971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6883, node 1 2024-11-21T09:22:31.261162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.261193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.261197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.261239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
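Editor's note (illustrative, not part of the captured log): the ARROW_HELPER errors in the UpsertArrowBatch_DataShard output above show uploads being refused with BAD_REQUEST when the Arrow batch carries fewer columns than the table schema or the columns arrive in a different order. The sketch below is a simplified, hypothetical validation in plain C++ (it deliberately avoids the Arrow API); the real check lives in arrow_helpers.cpp against the table's schema.

// Illustrative sketch only: why "less columns" and "reordered columns" map to BAD_REQUEST.
#include <string>
#include <utility>
#include <vector>
#include <iostream>

// Expected column order, taken from the schema_columns list in the log above.
const std::vector<std::string> kExpected = {
    "timestamp", "resource_type", "resource_id", "uid", "level",
    "message", "json_payload", "ingested_at", "saved_at", "request_id"};

// Returns an empty string on success, otherwise a human-readable rejection reason.
std::string ValidateColumns(const std::vector<std::string>& batchColumns) {
    if (batchColumns.size() < kExpected.size()) {
        return "batch has fewer columns than the schema";              // -> BAD_REQUEST
    }
    for (size_t i = 0; i < kExpected.size(); ++i) {
        if (batchColumns[i] != kExpected[i]) {
            return "column name/order mismatch at position " + std::to_string(i); // -> BAD_REQUEST
        }
    }
    return "";
}

int main() {
    std::vector<std::string> reordered = kExpected;
    std::swap(reordered[0], reordered[1]);           // simulate the "reordered columns" case
    std::cout << ValidateColumns(reordered) << "\n";
}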
2024-11-21T09:22:31.290935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.290961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.292614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.318361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.319495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.319513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.319971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.320012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.320020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:31.320383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.320393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.320524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.320676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.321535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951368, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.321554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.321636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.322004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.322053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.322065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.322077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.322089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.322101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.322628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
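Editor's note (illustrative, not part of the captured log): the schemeshard entries above follow a recurring pattern — a transaction announces "publications: N, subscribers: 1" (N=5 for txId 281474976715659 earlier, N=1 here), each TEvUpdateAck drops the in-flight count, and "Publication complete, notify & remove" fires when it reaches zero. The sketch below is a minimal, assumed model of that counter, not the schemeshard code itself.

// Illustrative sketch only: the "Publication in-flight, count: N" bookkeeping.
#include <cstdint>
#include <iostream>

struct TxPublication {
    uint32_t inFlight;   // "Publication in-flight, count: N"

    // Returns true when the last ack arrives and subscribers can be notified.
    bool Ack() {
        if (inFlight > 0) {
            --inFlight;
        }
        return inFlight == 0;
    }
};

int main() {
    TxPublication pub{5};                // e.g. five path updates published by one transaction
    for (int i = 0; i < 5; ++i) {
        if (pub.Ack()) {
            std::cout << "Publication complete, notify & remove\n";
        }
    }
}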
2024-11-21T09:22:31.322647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.322653Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.322670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:31.476397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.476545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:31.476735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.476755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.477441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T09:22:31.477502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.477551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.477575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:31.477632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:31.477777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.477811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.477823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:31.477892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.477902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.477903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:31.479619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.479650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:31.480025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 
ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:31.531942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:31.531954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:31.531976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:31.532478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:31.533283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951578, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.533304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180951578 2024-11-21T09:22:31.533333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:31.533727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.533812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.533834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:31.534043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.534058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.534063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:31.534096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.534103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.534105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 2814749767 ... 
72075186224037892 CpuTimeUsec: 83 } } 2024-11-21T09:22:34.388201Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388226Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.388232Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T09:22:34.388274Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1732180954434 OrderId: 281474976710759 ExecLatency: 2 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 79 } } 2024-11-21T09:22:34.388295Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388307Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388318Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388329Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388463Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976710759 Step: 1732180954434 OrderId: 281474976710759 ExecLatency: 2 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 197 } } 2024-11-21T09:22:34.388521Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:34.388529Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2024-11-21T09:22:34.388532Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2024-11-21T09:22:34.388943Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2024-11-21T09:22:34.388960Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2024-11-21T09:22:34.389033Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2024-11-21T09:22:34.389041Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2024-11-21T09:22:34.389047Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T09:22:34.389076Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2024-11-21T09:22:34.389085Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:2 2024-11-21T09:22:34.389410Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T09:22:34.390078Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:34.390460Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/TheTable 2024-11-21T09:22:34.390508Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:34.390821Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/TheTable 2024-11-21T09:22:34.390847Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T09:22:34.390895Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T09:22:34.391209Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2024-11-21T09:22:34.391848Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180954441, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:34.391874Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1732180954441 2024-11-21T09:22:34.391877Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T09:22:34.392178Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2024-11-21T09:22:34.392192Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T09:22:34.392198Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T09:22:34.392465Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T09:22:35.042206Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439660553207592311:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:35.042391Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004103/r3tmp/tmpMn5DmH/pdisk_1.dat 2024-11-21T09:22:35.059013Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17760, node 13 2024-11-21T09:22:35.080502Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:35.080534Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:35.080536Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:35.080591Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:35.142752Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:35.142790Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:35.144588Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:35.145476Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:35.145600Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:35.145615Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:35.145980Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:35.146053Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:35.146065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:35.146472Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:35.146488Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:35.146539Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:35.146863Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:35.147623Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180955197, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:35.147637Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:35.147684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:35.148318Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:35.148377Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:35.148401Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:35.148429Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:35.148447Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:35.148461Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:35.148614Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:35.148654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:35.148665Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:35.148679Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> BSCRestartPDisk::RestartOneByOneWithReconnects >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> GrpcConnectionStringParserTest::NoDatabaseFlag |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2024-11-21T09:22:31.779427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660537606333751:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.779745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004100/r3tmp/tmpM1ETBF/pdisk_1.dat 2024-11-21T09:22:31.833952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61931, node 1 2024-11-21T09:22:31.855741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.855756Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.855758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.855816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:31.879424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:31.879452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.880942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.909182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.910376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.910396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.910928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.910976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.910984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:31.911353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.911363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.911478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.911688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.912487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951956, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.912501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.912576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.913079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.913124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.913141Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.913153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:31.913160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:31.913172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:31.913660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:31.913680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:31.913684Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:31.913697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:32.105308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660541901301971:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.105334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.140241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.140359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:32.140477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.140490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.141072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:32.141122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.141170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.141188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:32.141251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:32.141392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.141407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.141411Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:32.141452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.141460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.141461Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:32.143237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:32.143261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:32.143732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:32.195710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:32.195724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:32.195745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:22:32.196319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:32.197199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952243, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.197211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180952243 2024-11-21T09:22:32.197233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:32.197639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.197727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.197747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:32.197970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.197987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.197990Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [ ... node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:35.409348Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:35.409404Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:35.409471Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:35.409495Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:35.409500Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:35.409500Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:35.409503Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:35.409506Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:35.409520Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:35.409533Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:35.409540Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:35.409541Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, 
txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:35.411310Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:35.411329Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:35.411581Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:35.463722Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:35.463735Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:35.463771Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:35.464228Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:35.465009Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180955512, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:35.465023Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180955512 2024-11-21T09:22:35.465064Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:35.465500Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:35.465580Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:35.465596Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:35.465766Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:35.465774Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T09:22:35.465793Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:35.465794Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T09:22:35.465903Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:35.465918Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:35.465921Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:35.465959Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:35.465967Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:35.465968Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: 
[OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:35.466315Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180955512 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 338 } } 2024-11-21T09:22:35.466348Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:35.466357Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:35.466362Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:35.466582Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:35.466597Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:35.466606Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:35.468611Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd70fmacccj34kcnf7f547z3, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49902, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T09:22:35.478841Z node 10 :READ_TABLE_API DEBUG: [10:7439660552914884651:2305] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2024-11-21T09:22:35.478869Z node 10 :READ_TABLE_API DEBUG: [10:7439660552914884651:2305] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2024-11-21T09:22:35.479152Z node 10 :READ_TABLE_API DEBUG: [10:7439660552914884651:2305] got stream part, size: 35, RU required: 128 rate limiter absent 2024-11-21T09:22:35.479313Z node 10 :READ_TABLE_API DEBUG: [10:7439660552914884651:2305] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T09:22:35.479325Z node 10 :READ_TABLE_API NOTICE: [10:7439660552914884651:2305] Finish grpc stream, status: 400000 2024-11-21T09:22:35.480109Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jd70fmarey2skvnk6aenr3sy, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# unknown, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.009104s 2024-11-21T09:22:35.480286Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6cf000] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480305Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6dae00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480348Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6dd100] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480355Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e1700] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480380Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6df900] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480391Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6dc200] received request Name# PersQueueRequest ok# false data# peer# current 
inflight# 0 2024-11-21T09:22:35.480409Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6cf500] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480419Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6de500] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480437Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e8a00] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480450Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6d8600] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480476Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e5800] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480485Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6ea800] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480505Z node 10 :GRPC_SERVER DEBUG: [0x1469dc64ae00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480516Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e3f00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480532Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6d9500] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480542Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e7100] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480565Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6d6800] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480567Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6d5e00] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480585Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e3a00] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480589Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e8500] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480610Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6e1200] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480611Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6d4a00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480629Z node 10 :GRPC_SERVER DEBUG: [0x1469fec89480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T09:22:35.480634Z node 10 :GRPC_SERVER DEBUG: [0x1469ff6db800] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> TBSV::ShardsNotLeftInShardsToDelete >> TBSV::ShouldLimitBlockStoreVolumeDropRate |97.2%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> TBSV::CleanupDroppedVolumesOnRestart |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |97.2%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> YdbOlapStore::LogGrepExisting [GOOD] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> YdbOlapStore::LogExistingRequest >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> YdbOlapStore::LogNonExistingUserId [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> YdbOlapStore::LogPagingBefore >> LocalityOperation::LocksFromAnotherTenants >> TKeyValueTest::TestCopyRangeToLongKey >> TReplicationWithRebootsTests::CreateInParallelWithInitialController >> TReplicationWithRebootsTests::Alter >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController >> TReplicationWithRebootsTests::Create >> TReplicationWithRebootsTests::CreateDropRecreate >> TReplicationWithRebootsTests::AlterReplicationConfig >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::SplitTableResolvedTimestamps[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:20:39.680674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:20:39.680693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:20:39.680701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:20:39.680717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:20:39.680719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:20:39.680726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:20:39.680790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:20:39.690153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:20:39.690171Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.692044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:20:39.692113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:20:39.692135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:20:39.694213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:20:39.694263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:20:39.695270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.696117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T09:20:39.697427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:20:39.699549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:20:39.699556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.699561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:20:39.699588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:20:39.700577Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:20:39.713460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.715933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:20:39.715969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:20:39.715975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:20:39.716652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.716659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:20:39.716661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:20:39.716665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:20:39.716984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:20:39.717234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.717243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.717247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.717658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:20:39.717953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:20:39.718823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:20:39.719005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:20:39.719088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:20:39.719108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:20:39.719117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:20:39.719493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:20:39.719540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T09:20:39.719545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:20:39.719620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:20:39.719627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:20:39.719636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:20:39.719640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:20:39.719650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:20:39.719654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:20:39.719658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:20:39.719669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:20:39.719674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:20:39.719678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... sion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:36.074166Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:36.074212Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 52us result status StatusSuccess 2024-11-21T09:22:36.074321Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false 
IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:36.074370Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T09:22:36.074391Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream/streamImpl" took 22us result status StatusSuccess 2024-11-21T09:22:36.074449Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/MyRoot/Table/Stream/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 2 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.2%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:147:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:147:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:156:2057] recipient: [7:155:2175] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:158:2057] recipient: [7:155:2175] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:157:2176] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:227:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:160:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:159:2179] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:163:2057] recipient: [10:159:2179] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:162:2180] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:232:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 4037927937 is [13:167:2184] sender: [13:168:2057] recipient: [13:164:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:167:2184] Leader for TabletID 72057594037927937 is [13:167:2184] sender: [13:237:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:142:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:145:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:146:2057] recipient: [18:144:2166] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:148:2057] recipient: [18:144:2166] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:147:2167] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:217:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:147:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:151:2057] recipient: [19:149:2171] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:153:2057] recipient: [19:149:2171] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:152:2172] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:222:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:147:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:151:2057] recipient: [20:149:2171] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:153:2057] recipient: [20:149:2171] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:152:2172] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:222:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:148:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:152:2057] recipient: [21:150:2171] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:154:2057] recipient: [21:150:2171] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:153:2172] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:223:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:153:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:156:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:157:2057] recipient: [22:155:2176] Leader for TabletID 72057594037927937 is [22:158:2177] sender: [22:159:2057] recipient: [22:155:2176] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:158:2177] Leader for TabletID 72057594037927937 is [22:158:2177] sender: [22:228:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:153:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:156:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:157:2057] recipient: [23:155:2176] Leader for TabletID 72057594037927937 is [23:158:2177] sender: [23:159:2057] recipient: [23:155:2176] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! 
new actor is[23:158:2177] Leader for TabletID 72057594037927937 is [23:158:2177] sender: [23:228:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:157:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:158:2057] recipient: [24:156:2176] Leader for TabletID 72057594037927937 is [24:159:2177] sender: [24:160:2057] recipient: [24:156:2176] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:159:2177] Leader for TabletID 72057594037927937 is [24:159:2177] sender: [24:229:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2024-11-21T09:22:29.478403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660528907117483:2188];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410f/r3tmp/tmpChL4zy/pdisk_1.dat 2024-11-21T09:22:29.522829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:29.537116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17342, node 1 2024-11-21T09:22:29.556350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.556362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:29.556364Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.556388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6147 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.578180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.578205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:29.579667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:29.614661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.615776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.615800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.616294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.616338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.616347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:29.616676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.616687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:29.616818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.616995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.617855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949667, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.617866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:29.617933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:29.618296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.618341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.618356Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:29.618371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:29.618378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:29.618390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:29.618845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.618870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.618875Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.618919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:29.766161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660528907118267:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:29.766182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660528907118275:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:29.766188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:29.766743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.766812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.766823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.766836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.766839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T09:22:29.766848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.766858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:29.766911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T09:22:29.766983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.766996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:29.768128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T09:22:29.768325Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768343Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768357Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.768687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2024-11-21T09:22:29.768704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.768709Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.768765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.768776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.768778Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:29.768796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.768804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.768805Z node 1 :FLAT ... UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:34.954199Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:34.954228Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:34.955612Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:34.957639Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.957744Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:34.957755Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.958088Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:34.958131Z node 19 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:34.958140Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:34.958404Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:34.958414Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:34.958684Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.959368Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180955008, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:34.959381Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:34.959457Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:34.959793Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:34.959830Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:34.959843Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:34.959854Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:34.959865Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:34.959872Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T09:22:34.959979Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:34.960000Z node 19 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 
2024-11-21T09:22:34.960008Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:34.960018Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T09:22:34.960342Z node 19 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:34.994902Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:51488) has now valid token of root@builtin 2024-11-21T09:22:35.003374Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T09:22:35.922689Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439660552968069202:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:35.923008Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410f/r3tmp/tmpris0lQ/pdisk_1.dat 2024-11-21T09:22:35.940116Z node 22 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23477, node 22 2024-11-21T09:22:35.954941Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:35.954955Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:35.954957Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:35.955012Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:36.022804Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:36.022856Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:36.024430Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:36.026539Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.026650Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:36.026662Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.027062Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:36.027141Z node 22 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:36.027152Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:36.027557Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:36.027581Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:36.027641Z node 22 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:36.028078Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.029050Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180956072, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:36.029064Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:36.029198Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:36.029595Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:36.029639Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:36.029654Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:36.029670Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:36.029684Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:36.029700Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:36.029803Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:36.029817Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:36.029821Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:36.029837Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:36.047534Z node 22 :TICKET_PARSER DEBUG: Ticket 07DC5886F8DE3808BA8FCFE23CB26F2F8B3CECB1 (ipv6:[::1]:52848) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T09:22:36.066810Z node 22 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2024-11-21T09:22:36.077981Z node 22 :TICKET_PARSER DEBUG: Ticket 0F84D03B1E2D0F3363221C6C20B30775ED7457EA (ipv6:[::1]:52876) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T09:22:36.078039Z node 22 :TICKET_PARSER ERROR: Ticket 0F84D03B1E2D0F3363221C6C20B30775ED7457EA: Cannot create token from certificate. Client certificate failed verification |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:227:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:158:2178] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:162:2057] recipient: [11:158:2178] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:161:2179] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:231:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:142:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:145:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:146:2057] recipient: [18:144:2166] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:148:2057] recipient: [18:144:2166] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:147:2167] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:217:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:147:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:151:2057] recipient: [19:149:2171] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:153:2057] recipient: [19:149:2171] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:152:2172] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:222:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:147:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:151:2057] recipient: [20:149:2171] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:153:2057] recipient: [20:149:2171] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:152:2172] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:222:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:148:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:152:2057] recipient: [21:150:2171] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:154:2057] recipient: [21:150:2171] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:153:2172] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:201:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:150:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:153:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:154:2057] recipient: [22:152:2173] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:156:2057] recipient: [22:152:2173] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:155:2174] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:225:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:152:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:154:2057] recipient: [23:153:2173] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:156:2057] recipient: [23:153:2173] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:155:2174] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:225:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:155:2057] recipient: [24:153:2173] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:157:2057] recipient: [24:153:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:156:2174] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:226:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:156:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:159:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:160:2057] recipient: [25:158:2178] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:162:2057] recipient: [25:158:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:161:2179] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:231:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:156:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:159:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:160:2057] recipient: [26:158:2178] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:162:2057] recipient: [26:158:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:161:2179] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:231:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:22:37.045145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:37.045170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:37.045177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:37.045189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T09:22:37.045203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:37.045210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:37.056025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:37.056045Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:37.058517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:37.059234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:37.059258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:37.060718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:37.060894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:37.062315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.062824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:37.064523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:37.066932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.066939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:37.066953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.068370Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:22:37.087670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.089573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:37.089629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:37.090638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:37.090666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:37.090686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:37.091128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:37.091531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.091555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.092275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:37.092707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:37.093502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:37.093723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T09:22:37.093825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:37.093832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.093863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.093877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:37.094379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.094422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:22:37.094483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:37.094508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:37.094511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:37.094518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:37.094524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:37.094534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:37.094538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:37.094541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:22:37.094823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:22:37.094842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:22:37.094846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.094869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... HARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:37.315427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2024-11-21T09:22:37.315458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:37.315866Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2024-11-21T09:22:37.316059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T09:22:37.316127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2024-11-21T09:22:37.316192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T09:22:37.316302Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2024-11-21T09:22:37.316421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T09:22:37.316461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 Forgetting tablet 72075186233409568 2024-11-21T09:22:37.316607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T09:22:37.316928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2024-11-21T09:22:37.316965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2024-11-21T09:22:37.317042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.317064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:37.317073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 
72057594046678944 2024-11-21T09:22:37.317099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T09:22:37.317126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2024-11-21T09:22:37.317132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T09:22:37.317143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.317153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T09:22:37.317159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2024-11-21T09:22:37.317166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T09:22:37.317171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2024-11-21T09:22:37.317176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2024-11-21T09:22:37.317201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T09:22:37.317207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2024-11-21T09:22:37.317211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2024-11-21T09:22:37.317215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2024-11-21T09:22:37.317971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T09:22:37.317989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T09:22:37.318015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T09:22:37.318021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T09:22:37.318129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:37.318136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:37.318167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.318173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.318212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2024-11-21T09:22:37.318236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.318241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T09:22:37.318247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2024-11-21T09:22:37.318362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T09:22:37.318375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T09:22:37.318380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2024-11-21T09:22:37.318385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2024-11-21T09:22:37.318389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T09:22:37.318445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:37.318451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2024-11-21T09:22:37.318461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:37.318508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T09:22:37.318517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T09:22:37.318521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2024-11-21T09:22:37.318525Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2024-11-21T09:22:37.318529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.318538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2024-11-21T09:22:37.318571Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2024-11-21T09:22:37.318602Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2024-11-21T09:22:37.318649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T09:22:37.318729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T09:22:37.319047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T09:22:37.319724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:37.319953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T09:22:37.320048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T09:22:37.320106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2024-11-21T09:22:37.320265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2024-11-21T09:22:37.320273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2024-11-21T09:22:37.320374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2024-11-21T09:22:37.320395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2024-11-21T09:22:37.320400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1676:3547] TestWaitNotification: OK eventTxId 129 |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:22:37.045145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:37.045171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:37.045180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:37.045190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:37.045203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:37.045212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:37.055492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:37.055513Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:37.057934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:37.058507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:37.058540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:37.060740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:37.060926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:37.062315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.062864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:37.064745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:37.066927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.066935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:37.066951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.068438Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:22:37.087593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.089573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:37.089629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:37.090638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:37.090666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:37.090672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:37.091170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:37.091588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.091611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.092274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:37.092672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:37.093492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:37.093722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.093828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:37.093836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.093863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.093877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:37.094350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.094388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:22:37.094471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094479Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:37.094491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:37.094495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:37.094507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:37.094517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:37.094528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:37.094534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:37.094538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:22:37.094832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, 
at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:22:37.094853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:22:37.094857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.094869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... xInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.129770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.129793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.129807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.129813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.129819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.131330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:22:37.131721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.131781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:37.131790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.131808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.131844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:37.131848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:22:37.131859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.132357Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.132367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.132374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:37.132429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:465:2058] recipient: [1:15:2062] 2024-11-21T09:22:37.173340Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:37.173389Z node 1 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 68us result status StatusPathDoesNotExist 2024-11-21T09:22:37.173422Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T09:22:37.173542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:466:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:469:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:470:2058] recipient: [1:468:2430] Leader for TabletID 72057594046678944 is [1:471:2431] sender: [1:472:2058] recipient: [1:468:2430] 2024-11-21T09:22:37.178315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:37.178336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.178340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:37.178344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:37.178348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:37.178350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:37.178357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.178401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:37.179130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:37.179366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:37.179398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:37.179419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:37.179423Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:37.179439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:37.179533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.179807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.181514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.181532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.181580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:37.181587Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.181591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:37.181797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:471:2431] sender: [1:531:2058] recipient: [1:15:2062] 2024-11-21T09:22:37.212570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:37.212618Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 63us result status StatusPathDoesNotExist 2024-11-21T09:22:37.212657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:22:37.045143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:37.045170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:37.045177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:37.045189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:37.045200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:37.045207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:37.045295Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:37.055529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:37.055550Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:37.058114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:37.058901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:37.058927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:37.060660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:37.060839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:37.062313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.062828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:37.064708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.066923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:37.066932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.066939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:37.066953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.068382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:22:37.084651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.089573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:37.089630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:37.089642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090589Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:37.090638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.090661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:37.090666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:37.090674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:37.091242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:37.091706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.091724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.091730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.092386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:37.092750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:37.093492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:37.093722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:37.093758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.093828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:37.093837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:37.093863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.093877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:37.094315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.094369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:22:37.094472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:37.094480Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:37.094491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:37.094496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:37.094507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:37.094514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:37.094518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:37.094528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:37.094535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:37.094539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:22:37.094863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:37.094882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T09:22:37.094887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:22:37.094892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.094904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
erId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T09:22:37.116826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2024-11-21T09:22:37.116878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.116958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T09:22:37.117010Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T09:22:37.117896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:22:37.117940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T09:22:37.118471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T09:22:37.118496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T09:22:37.118717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.118740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:37.118750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T09:22:37.118772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:22:37.118798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T09:22:37.118804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:22:37.118814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.118822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:22:37.118829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T09:22:37.118836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T09:22:37.118841Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T09:22:37.118848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T09:22:37.118868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T09:22:37.118874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T09:22:37.118878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:22:37.118882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T09:22:37.119254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:22:37.119265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T09:22:37.119296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T09:22:37.119302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T09:22:37.119401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:37.119408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T09:22:37.119430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:37.119435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:37.119464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T09:22:37.119486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:37.119492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T09:22:37.119497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T09:22:37.119585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:22:37.119594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:22:37.119599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:22:37.119604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T09:22:37.119608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T09:22:37.119664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T09:22:37.119671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T09:22:37.119679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:37.119735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:22:37.119742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T09:22:37.119746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T09:22:37.119750Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:22:37.119754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:37.119761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T09:22:37.119790Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T09:22:37.119829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:37.119878Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T09:22:37.119904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T09:22:37.120260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:22:37.120507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T09:22:37.120576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T09:22:37.120838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T09:22:37.120854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T09:22:37.120930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T09:22:37.120936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T09:22:37.121023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T09:22:37.121037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T09:22:37.121042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:389:2370] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T09:22:37.121089Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T09:22:37.121114Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] Test command err: 2024-11-21T09:22:30.206495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660532994020296:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.206766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00410a/r3tmp/tmpuPO0dz/pdisk_1.dat 2024-11-21T09:22:30.259296Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29438, node 1 2024-11-21T09:22:30.277673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.277687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.277689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.277722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.306782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.306823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.308265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.337610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.338719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.338739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.339185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.339227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.339234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:30.339576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.339586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.339633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.339895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.340668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950388, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.340679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.340733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.341138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.341172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.341186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.341199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.341210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.341220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.341615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.341628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.341631Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.341641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:2051 2024-11-21T09:22:30.363016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.363153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.363163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.363621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:30.363653Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.363700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.363713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.363720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 waiting... 2024-11-21T09:22:30.363914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.363924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.363927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:30.363951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.363957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.363958Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:30.364107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:30.364202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.364243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:30.364769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.365696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950409, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.365709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:30.365770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:30.366213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.366259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.366273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:30.366287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:30.366300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:30.366309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 
2024-11-21T09:22:30.366468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.366485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.366488Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:30.366529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:30.366537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:30.366538Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:30.366544Z node 1 :FLAT_TX_SCH ... Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:36.581326Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.582264Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180956625, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:36.582284Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T09:22:36.582346Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:36.582704Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:36.582752Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:36.582770Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:36.582782Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:36.582795Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:36.582812Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:36.582930Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:36.582944Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:36.582953Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:36.582985Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 
281474976715658 2024-11-21T09:22:36.582995Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:36.582997Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:36.583003Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:36.583907Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.584026Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:36.584036Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:36.584435Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T09:22:36.584474Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T09:22:36.584809Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:22:36.586118Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:36.586196Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:36.586207Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T09:22:36.586589Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:37.085529Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439660564764578934:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:37.085545Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:37.087650Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:37.087682Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:37.088897Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T09:22:37.089180Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5595 2024-11-21T09:22:37.109621Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: 
"default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" 
PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T09:22:37.120713Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T09:22:37.120808Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:37.397979Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:37.398332Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439660564764579265:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogExistingUserId >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup >> DataShardScan::ScanFollowedByUpdate >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardOutOfOrder::UncommittedReadSetAck >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup >> DataShardOutOfOrder::TestImmediateQueueThenSplit >> DataShardTxOrder::RandomPointsAndRanges >> DataShardTxOrder::RandomPoints_DelayRS >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite >> TFileStoreWithReboots::CreateAlterNoVersion >> LocalityOperation::LocksFromAnotherTenants [GOOD] |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants [GOOD] Test command err: 2024-11-21T09:22:24.869405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:24.869447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:24.869456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004134/r3tmp/tmp0Umcmu/pdisk_1.dat 2024-11-21T09:22:25.041421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.106491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:25.106520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:25.117715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:25.438785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:942:2768], Recipient [1:501:2435]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:25.438800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:25.438803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T09:22:25.438817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:939:2766], Recipient [1:501:2435]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T09:22:25.438820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T09:22:25.445014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:22:25.445066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.445086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T09:22:25.445119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T09:22:25.445144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T09:22:25.445162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:25.445166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2024-11-21T09:22:25.445173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:25.445181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:22:25.445187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T09:22:25.445799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T09:22:25.445825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2024-11-21T09:22:25.445831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T09:22:25.445837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2024-11-21T09:22:25.445901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:501:2435], Recipient [1:501:2435]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:25.445906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:22:25.445934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:25.445939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:22:25.445971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T09:22:25.445988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:25.445992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:714:2584], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T09:22:25.445997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:714:2584], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2024-11-21T09:22:25.446014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.446021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T09:22:25.446025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T09:22:25.446029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:25.446043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:25.446128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:942:2768], Recipient [1:501:2435]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:25.446134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:25.446137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T09:22:25.446477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:714:2584], Recipient [1:501:2435]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 4 } 2024-11-21T09:22:25.446487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:22:25.446499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.446510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.446514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:25.446519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:25.446524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T09:22:25.446537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:25.446738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:714:2584], Recipient [1:501:2435]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 2 } 2024-11-21T09:22:25.446747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:22:25.446756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.465273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.465293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:25.465300Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:25.465306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T09:22:25.465325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T09:22:25.465329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:25.465980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2024-11-21T09:22:25.465996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2024-11-21T09:22:25.466252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:501:2435], Recipient [1:501:2435]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:25.466262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:22:25.466272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:25.466279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:25.466286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:25.466305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T09:22:25.466485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:25.466492Z node 1 :FLAT_TX_SCHEMESHA ... ion: 3 2024-11-21T09:22:39.470870Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.470873Z node 9 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.470875Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 1 2024-11-21T09:22:39.471882Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180959516, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:39.471887Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180959516, at schemeshard: 72057594046644480 2024-11-21T09:22:39.471910Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 240 2024-11-21T09:22:39.471926Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180959516, at schemeshard: 72057594046644480 2024-11-21T09:22:39.471934Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:1 128 -> 240 2024-11-21T09:22:39.471943Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180959516, at schemeshard: 72057594046644480 2024-11-21T09:22:39.471949Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:2 128 -> 240 2024-11-21T09:22:39.471957Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715664:3, HandleReply TEvOperationPlan: step# 1732180959516 2024-11-21T09:22:39.471963Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:3 128 -> 240 2024-11-21T09:22:39.472428Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:39.472494Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:39.472503Z node 9 
:FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:3 ProgressState 2024-11-21T09:22:39.472511Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:3 progress is 1/4 2024-11-21T09:22:39.472532Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:1 ProgressState 2024-11-21T09:22:39.472536Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:1 progress is 2/4 2024-11-21T09:22:39.472546Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:0 ProgressState 2024-11-21T09:22:39.472550Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 3/4 2024-11-21T09:22:39.472559Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:2 ProgressState 2024-11-21T09:22:39.472562Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:2 progress is 4/4 2024-11-21T09:22:39.472567Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T09:22:39.472573Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2024-11-21T09:22:39.472576Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:2 2024-11-21T09:22:39.472578Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:3 2024-11-21T09:22:39.472582Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 5, subscribers: 1 2024-11-21T09:22:39.472834Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.472839Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.472842Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T09:22:39.472881Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.472883Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.472885Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T09:22:39.472899Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.472901Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.472903Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 5 2024-11-21T09:22:39.472915Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.472917Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.472919Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 5 2024-11-21T09:22:39.472931Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T09:22:39.472933Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T09:22:39.472935Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 2 2024-11-21T09:22:39.472939Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2024-11-21T09:22:39.473552Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439660572906198685:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2024-11-21T09:22:39.544703Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715665:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:39.544744Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:39.545380Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:22:39.555681Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70fr7c44035s9nkmjpk0w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:39.622503Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70frbw71qx6tj9s0fs63m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:39.640973Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd70frccc36qferysyn3w18h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:39.642710Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70frccc36qferysyn3w18h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:39.643193Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7439660572906198864:2310] TxId: 281474976715669. Ctx: { TraceId: 01jd70frccc36qferysyn3w18h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. 
Tx canceled, actorId: [9:7439660572906198864:2310], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2024-11-21T09:22:39.643285Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, ActorId: [9:7439660572906198495:2310], ActorState: ExecuteState, TraceId: 01jd70frccc36qferysyn3w18h, Create QueryResponse for error on request, msg: 2024-11-21T09:22:39.643378Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd70frccc36qferysyn3w18h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWFkMzdjZWMtNDg1YzY0ZjUtMWE4NzFlZmUtMjFkZTM2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:39.644749Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T09:22:39.644859Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:39.645024Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T09:22:39.645064Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T09:22:40.030935Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> YdbOlapStore::LogPagingBefore [GOOD] |97.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbOlapStore::LogPagingAfter >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::DelayData |97.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbTableBulkUpsert::Overload [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2024-11-21T09:22:40.072325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.072342Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.072357Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.075358Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.075499Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.075562Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.076509Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.084650Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.084803Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.084987Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.085002Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.085010Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.085050Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.088997Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.089068Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.089111Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.089117Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.089122Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T09:22:40.089128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.089223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.089231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.089259Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.089282Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.089338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.089345Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.089352Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.089358Z node 1 :TX_DATASHARD 
TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.089362Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.089367Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.089373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.101705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.101729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.101738Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.102231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.102247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.102276Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.102306Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.102316Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.102325Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.102333Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.102338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.102343Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.102348Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.102418Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.102424Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.102428Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T09:22:40.102432Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.102443Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.102447Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.102451Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.102454Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.102459Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.125146Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 
2024-11-21T09:22:40.125173Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.125180Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.125192Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.125210Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.125340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.125348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.125357Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.125379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.125384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.125430Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.125441Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.125445Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.125450Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.126180Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.126200Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.126270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.126278Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.126287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.126296Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.126301Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.126309Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.126314Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.126321Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.126326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:40.126330Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:40.126334Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.126377Z node 1 
:TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.126382Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.126386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.126389Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.126393Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.126406Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.126410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.126413Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.126417Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.126429Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.126432Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.126436Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.126441Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.126444Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.126448Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
2024-11-21T09:22:40.637869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.637888Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.637903Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.637914Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T09:22:40.637920Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.637926Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.637929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.637931Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.637934Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.637970Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.637972Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.637977Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.637981Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T09:22:40.637983Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.637986Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.637988Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.637991Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.638012Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638016Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638021Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638027Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T09:22:40.638033Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638051Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638055Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638061Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638067Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T09:22:40.638071Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638095Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638101Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638107Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T09:22:40.638111Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638119Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.638126Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T09:22:40.638145Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638171Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638174Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638178Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638182Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T09:22:40.638185Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638198Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638203Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638206Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T09:22:40.638209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638222Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638225Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638229Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:22:40.638233Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638236Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T09:22:40.638245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.638248Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2024-11-21T09:22:40.638252Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:40.638256Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T09:22:40.638258Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.638288Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T09:22:40.638291Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638295Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2024-11-21T09:22:40.638304Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T09:22:40.638306Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638308Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T09:22:40.638314Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T09:22:40.638316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638318Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T09:22:40.638324Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T09:22:40.638326Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638328Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T09:22:40.638333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T09:22:40.638335Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638337Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T09:22:40.638342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T09:22:40.638344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T09:22:40.638346Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T09:22:40.638353Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T09:22:40.638355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638357Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T09:22:40.638362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T09:22:40.638364Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:40.638366Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit [GOOD] >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite [GOOD] >> YdbTableBulkUpsert::RetryOperation |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 13847664237467179990 |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbOlapStore::LogExistingUserId [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] >> YdbOlapStore::LogPagingAfter [GOOD] >> YdbYqlClient::RetryOperationAsync [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbTableBulkUpsert::RetryOperation [GOOD] >> YdbYqlClient::RetryOperationSync |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> YdbOlapStore::BulkUpsert >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardOutOfOrder::UncommittedReads >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2024-11-21T09:22:40.023880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.023899Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.023913Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.026506Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.026635Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.026698Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.027556Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.035643Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.035780Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.035933Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.035948Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.035955Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.035994Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.038637Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.038687Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.038719Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.038725Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.038729Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T09:22:40.038735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.038802Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 
2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.038808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.038826Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.038840Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.038881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.038885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.038891Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.038895Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.038898Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.038902Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.038906Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.045451Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.045469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.045476Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.045781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.045790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.045807Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.045842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.045851Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.045859Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.045867Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.045872Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.045878Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.045882Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.045940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.045945Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.045949Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 
2024-11-21T09:22:40.045951Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.045960Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.045962Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.045965Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.045967Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.045970Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.066966Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T09:22:40.066994Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.067002Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.067014Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.067028Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.067143Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.067148Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.067154Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.067169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.067173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.067204Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.067211Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.067214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.067218Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.067695Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.067705Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.067744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.067748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.067754Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.067759Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.067762Z node 1 :TX_DATASHARD 
TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.067768Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.067771Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.067776Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.067779Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:40.067781Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:40.067785Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.067823Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.067826Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.067828Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.067830Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.067833Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.067840Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.067842Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.067844Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.067846Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.067856Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.067859Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.067861Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.067865Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.067867Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.067869Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T09:22:42.670263Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:22:42.670267Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T09:22:42.670272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T09:22:42.670339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T09:22:42.670344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670346Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T09:22:42.670352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:433:2383]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T09:22:42.670355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T09:22:42.670357Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T09:22:42.670364Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T09:22:42.670372Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T09:22:42.670381Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T09:22:42.670393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T09:22:42.670395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670397Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T09:22:42.670407Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T09:22:42.670409Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670411Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T09:22:42.670415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T09:22:42.670417Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670419Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T09:22:42.670428Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [1:433:2383], Recipient [1:433:2383]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:42.670430Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:42.670434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T09:22:42.670438Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2024-11-21T09:22:42.670442Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T09:22:42.670446Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T09:22:42.670450Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T09:22:42.670454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T09:22:42.670458Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T09:22:42.670460Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T09:22:42.670581Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T09:22:42.670591Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T09:22:42.670600Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T09:22:42.670604Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T09:22:42.670607Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T09:22:42.670609Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T09:22:42.670656Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T09:22:42.670659Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2024-11-21T09:22:42.670662Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T09:22:42.670664Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T09:22:42.670668Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T09:22:42.670670Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T09:22:42.670672Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T09:22:42.670675Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:42.670677Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T09:22:42.670680Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T09:22:42.670682Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no 
ready operations at 9437186 2024-11-21T09:22:42.670730Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T09:22:42.670736Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670740Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T09:22:42.670750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T09:22:42.670754Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670757Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T09:22:42.670773Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T09:22:42.670775Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670779Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T09:22:42.670787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T09:22:42.670789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670791Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T09:22:42.670801Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T09:22:42.670803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670805Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T09:22:42.670812Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T09:22:42.670814Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670816Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T09:22:42.670827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T09:22:42.670831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.670834Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T09:22:42.682076Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T09:22:42.682095Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T09:22:42.682111Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T09:22:42.682124Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T09:22:42.682129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T09:22:42.682198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T09:22:42.682205Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.682211Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2024-11-21T09:22:40.363151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.363767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.363794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004330/r3tmp/tmprXLYEc/pdisk_1.dat 2024-11-21T09:22:40.496396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.515713Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.558749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:40.559482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:40.570483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:40.677280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.896695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:41.150475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:41.150503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2704], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:41.150514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:41.151416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:22:41.327130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:41.391782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70fsvy5amn71djwh865eyr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU4YmIzMDctNmZhYjU4NzgtOTY1NjAzM2YtZWIwMDZlZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:41.418646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70ft436g17ptv81awktb3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU0YWEyODktZDg1NmVkYWQtZWQ2MGE0ZjYtM2ZjODBhNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:41.498017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70ft4t53e90n2knxzrqt42, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ0NTJlN2EtMzU3MzM0MjktZGU1NWM5ZjUtZmY1OWZjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2024-11-21T09:22:41.534001Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70ft7j54qekympb84jfmc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ0NTJlN2EtMzU3MzM0MjktZGU1NWM5ZjUtZmY1OWZjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2024-11-21T09:22:41.555495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70ft89439ym9e2j88qhgrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0ZTNmNjQtZDI1YjdkMDUtZWU3ZTY3NjYtMTFiODU5Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2024-11-21T09:22:41.556061Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1128:2804] TxId: 281474976715665. Ctx: { TraceId: 01jd70ft89439ym9e2j88qhgrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0ZTNmNjQtZDI1YjdkMDUtZWU3ZTY3NjYtMTFiODU5Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2024-11-21T09:22:41.558280Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTg0ZTNmNjQtZDI1YjdkMDUtZWU3ZTY3NjYtMTFiODU5Njc=, ActorId: [1:1028:2804], ActorState: ExecuteState, TraceId: 01jd70ft89439ym9e2j88qhgrf, Create QueryResponse for error on request, msg: 2024-11-21T09:22:41.559072Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ0NTJlN2EtMzU3MzM0MjktZGU1NWM5ZjUtZmY1OWZjNTY=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jd70ft7j54qekympb84jfmc0, Create QueryResponse for error on request, msg: 2024-11-21T09:22:41.559287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd70ft89439ym9e2j88qhgrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0ZTNmNjQtZDI1YjdkMDUtZWU3ZTY3NjYtMTFiODU5Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:41.569903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd70ft7j54qekympb84jfmc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ0NTJlN2EtMzU3MzM0MjktZGU1NWM5ZjUtZmY1OWZjNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:41.726697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd70ftdc7rmp2x4gfn3f740x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc5Nzg0ZGQtYTA4NGM0ZWUtNDIwZDY3NjEtOTdlNDUyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T09:22:42.124697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:42.124726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:42.124745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004330/r3tmp/tmp3b82tp/pdisk_1.dat 2024-11-21T09:22:42.203965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.219441Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:42.261566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:42.261603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:42.272086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:42.375747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.582999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.836302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:42.836321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:42.836329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:42.836903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:22:43.012590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:43.051918Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70fvgm2feae1xxnj0xjfv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA5MGFjOWUtNjNiODUzMzUtZmQ5YjU1MWEtOTQ3MTQyYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:43.069728Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70fvqqcg3cafm9n9m557k3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTIzMWMyOGItOGUzMjM0MTMtZWU1YWY2OTctYWYxOTQwY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2024-11-21T09:22:43.472500Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:964:2776]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2024-11-21T09:22:43.472570Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWExZTdmYTMtODliNzYzMjQtNjJlYzY2NTEtYjQyMTk3MTY=, ActorId: [2:921:2738], ActorState: ExecuteState, TraceId: 01jd70fvr98w2zzpcvsjkemw8t, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2024-11-21T09:22:40.017574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.017597Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.017615Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.020330Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.020454Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.020515Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.021486Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.030433Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.030565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.030702Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.030717Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.030725Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.030764Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.034286Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.034349Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.034387Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.034393Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.034399Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: 
WaitScheme 2024-11-21T09:22:40.034405Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.034492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.034499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.034523Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.034543Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.034592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.034599Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.034606Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.034610Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.034614Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.034619Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.034624Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.041551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.041570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.041578Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.041870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.041879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.041896Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.041918Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.041926Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.041933Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.041938Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.041941Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.041945Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.041947Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.041997Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.041999Z node 
1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.042002Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T09:22:40.042004Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.042012Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.042014Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.042017Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.042019Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.042022Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.062995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T09:22:40.063030Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.063038Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.063052Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.063071Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.063193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.063201Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.063210Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.063232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.063237Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.063279Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.063288Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.063293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.063298Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.063963Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.063980Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.064030Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.064036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.064043Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.064052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.064056Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.064064Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.064070Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.064077Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.064081Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:40.064085Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:40.064089Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.064131Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.064135Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.064138Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.064142Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.064146Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.064157Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.064160Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.064163Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.064166Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.064178Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.064182Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.064185Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.064191Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.064194Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.064198Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:22:42.033885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T09:22:42.033921Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T09:22:42.033925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.033931Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T09:22:42.033950Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T09:22:42.033954Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.033957Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T09:22:42.033971Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T09:22:42.033973Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.033976Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T09:22:42.033983Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T09:22:42.033985Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.033987Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T09:22:42.033995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T09:22:42.033997Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.033999Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T09:22:42.034009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T09:22:42.034011Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034013Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T09:22:42.034019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T09:22:42.034021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034023Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T09:22:42.034033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T09:22:42.034037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034039Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T09:22:42.034044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T09:22:42.034046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034048Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T09:22:42.034058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T09:22:42.034060Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034062Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T09:22:42.034071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T09:22:42.034073Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034075Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T09:22:42.034081Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T09:22:42.034083Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034085Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T09:22:42.034094Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T09:22:42.034096Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034098Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T09:22:42.034107Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:42.034110Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T09:22:42.034117Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T09:22:42.034120Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T09:22:42.034123Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:42.034136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:42.034139Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T09:22:42.034144Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T09:22:42.034149Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T09:22:42.034151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:42.034164Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:42.034166Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T09:22:42.034171Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T09:22:42.034175Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T09:22:42.034177Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:42.034188Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:42.034191Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T09:22:42.034197Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T09:22:42.034203Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T09:22:42.034206Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:42.034228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T09:22:42.034231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034234Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T09:22:42.034247Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T09:22:42.034249Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034251Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T09:22:42.034260Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T09:22:42.034262Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034264Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T09:22:42.034272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T09:22:42.034275Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:22:42.034277Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T09:22:41.425286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:41.425785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:41.425810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004309/r3tmp/tmpr4dVDi/pdisk_1.dat 2024-11-21T09:22:41.532483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:41.550378Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:41.592631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:41.592656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:41.603120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:41.706008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:41.719155Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:41.719292Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:41.719365Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T09:22:41.719422Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:41.724894Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:41.725013Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:41.725033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:41.725142Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:22:41.725158Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:41.725165Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:41.725191Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:41.727583Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:41.727615Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:41.727627Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T09:22:41.727630Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:41.727633Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T09:22:41.727635Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:41.727705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.727711Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.727795Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:41.727804Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:41.727812Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.727814Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.727818Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T09:22:41.727823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:41.727826Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:41.727830Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:41.727832Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:41.727835Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:41.727838Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:41.727840Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:41.727854Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T09:22:41.727857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:41.727869Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:41.727898Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:41.727905Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:41.727914Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:41.727918Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:41.727921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:41.727924Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:41.727926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:41.727953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:41.727956Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:41.727958Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:41.727960Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:41.727965Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:41.727967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:41.727970Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:41.727972Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:41.727975Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:41.728137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:41.728142Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:41.738327Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:41.738343Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:41.738349Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:41.738359Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T09:22:41.738370Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:41.911474Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.911489Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.911494Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T09:22:41.911506Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T09:22:41.911509Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:41.911542Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:41.911548Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:41.911550Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:41.911554Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T09:22:41.912087Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:41.912094Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:41.912160Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.912163Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.912168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:41.912172Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:41.912175Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T09:22:41.912179Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 48 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 11 FinishTimeMs: 1732180963895 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 7 WaitInputTimeUs: 860 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895737Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2024-11-21T09:22:43.895742Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T09:22:43.895746Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T09:22:43.895817Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 156 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 127 FinishTimeMs: 1732180963895 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 109 BuildCpuTimeUs: 18 WaitInputTimeUs: 777 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895821Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2024-11-21T09:22:43.895824Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T09:22:43.895828Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T09:22:43.895857Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1010:2809], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 122 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 97 FinishTimeMs: 1732180963895 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 90 BuildCpuTimeUs: 7 WaitInputTimeUs: 732 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895860Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1010:2809] 2024-11-21T09:22:43.895863Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T09:22:43.895866Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T09:22:43.895918Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 101 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 76 FinishTimeMs: 1732180963895 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 53 BuildCpuTimeUs: 23 WaitInputTimeUs: 916 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895921Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2024-11-21T09:22:43.895924Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T09:22:43.895927Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T09:22:43.895948Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 43 DurationUs: 1000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 25 FinishTimeMs: 1732180963895 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 17 WaitInputTimeUs: 1052 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895951Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2024-11-21T09:22:43.895954Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1013:2812], 2024-11-21T09:22:43.895957Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1013:2812], 2024-11-21T09:22:43.895969Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 54 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 22 FinishTimeMs: 1732180963895 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 12 WaitInputTimeUs: 1089 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963894 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.895971Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2024-11-21T09:22:43.896000Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:43.896006Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd70fwgfd8s6tctsd7136zq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWViM2YyZTItMThmNTUxM2YtMTJhZmU0NmQtOGE2MTc2Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000716s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] Test command err: 2024-11-21T09:22:40.486038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.486589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.486620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004312/r3tmp/tmpaiono5/pdisk_1.dat 2024-11-21T09:22:40.586205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.604047Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.646082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:40.646113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:40.656597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:40.759716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.773736Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.773870Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.773923Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T09:22:40.773957Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.780277Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.780395Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.780410Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.780498Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:22:40.780511Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:40.780515Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:40.780540Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.782848Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:40.782887Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.782900Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T09:22:40.782904Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:40.782907Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T09:22:40.782910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.782988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.782992Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.783074Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:40.783083Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:40.783092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.783095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.783098Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T09:22:40.783103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.783106Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.783110Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.783113Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:40.783115Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:40.783118Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:40.783121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:40.783134Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T09:22:40.783137Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.783150Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:40.783179Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:40.783186Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:40.783195Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:40.783199Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.783201Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:40.783204Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:40.783207Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.783236Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.783241Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:40.783243Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:40.783245Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.783250Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:40.783252Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:40.783254Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:40.783256Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.783259Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.783426Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:40.783431Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:40.793646Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:40.793663Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.793668Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.793675Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T09:22:40.793683Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.967098Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.967116Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.967123Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T09:22:40.967141Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T09:22:40.967146Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.967188Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.967195Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:40.967199Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:40.967203Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T09:22:40.967894Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:40.967912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.968035Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.968041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.968047Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.968052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.968055Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.968061Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... }; 2024-11-21T09:22:43.306318Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2024-11-21T09:22:43.306364Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:851:2683], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 146449 Tasks { TaskId: 1 CpuTimeUs: 145837 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 145829 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180962921 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.306372Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:851:2683] 2024-11-21T09:22:43.306393Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T09:22:43.306398Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:852:2684], task: 2 2024-11-21T09:22:43.306638Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 147551 DurationUs: 1732180961355957 Tables { TablePath: "/Root/table-1" } ExecuterCpuTimeUs: 1102 StartTimeMs: 1950 FinishTimeMs: 1732180963306 Stages { StageGuid: "90df7164-134917fd-4c2e824a-557d9f1f" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/table-1\" \'\"72057594046644480:2\" \'\"\" \'1))\n (let $2 (KqpWideReadTableRanges $1 (Void) \'(\'\"value2\") \'() \'()))\n (let $3 (DataType \'Uint64))\n (let $4 (lambda \'($9) (Just (SafeCast $9 $3))))\n (let $5 (Nothing (OptionalType $3)))\n (let $6 (Condense1 (NarrowMap $2 (lambda \'($7) (AsStruct \'(\'\"value2\" $7)))) (lambda \'($8) (IfPresent (Member $8 \'\"value2\") $4 $5)) (lambda \'($10 $11) (Bool \'false)) (lambda \'($12 $13) (block \'(\n (let $14 (IfPresent (Member $12 \'\"value2\") $4 $5))\n (return (AggrAdd $14 $13))\n )))))\n (return (FromFlow (ExpandMap $6 (lambda \'($15) $15))))\n))))\n)\n" ComputeActors { CpuTimeUs: 146449 Tasks { TaskId: 1 CpuTimeUs: 145837 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 145829 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180962921 } MaxMemoryUsage: 1048576 } } Stages { StageId: 1 StageGuid: "315bf0bd-d2dbb2f2-b4ebf31a-ab26a82c" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'false))\n (let $3 (WideCondense1 (ToFlow $1) (lambda \'($5) $5) (lambda \'($6 $7) $2) (lambda \'($8 $9) (AggrAdd $8 $9))))\n (let $4 (Condense (NarrowMap (Take $3 (Uint64 \'1)) (lambda \'($10) (AsStruct \'(\'Sum0 $10)))) (Nothing (OptionalType (StructType \'(\'Sum0 (OptionalType (DataType \'Uint64)))))) (lambda \'($11 $12) $2) (lambda \'($13 $14) (Just $13))))\n (return (FromFlow (Map $4 (lambda \'($15) (AsList (AsStruct \'(\'\"column0\" (Member $15 \'Sum0))))))))\n))))\n)\n" } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeId\":4,\"PlanNodeType\":\"Materialize\",\"Plans\":[{\"Node Type\":\"Aggregate-Limit\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Aggregate-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"ReadColumns\":[\"value2\"],\"ReadRanges\":[\"key (-∞, 
+∞)\"],\"Scan\":\"Parallel\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"90df7164-134917fd-4c2e824a-557d9f1f\",\"Stats\":{\"ComputeNodes\":[{\"CpuTimeUs\":146449,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-qcxhsi27zq\",\"NodeId\":2,\"StartTimeMs\":1732180962921,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"315bf0bd-d2dbb2f2-b4ebf31a-ab26a82c\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\",\"Subplan Name\":\"CTE precompute_0_0\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\016\010\221\370\010\020\221\370\010\030\221\370\010 \001" } } 2024-11-21T09:22:43.306650Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:43.306664Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:852:2684], TxId: 281474976715662, task: 2. Ctx: { TraceId : 01jd70fvcs2etg2bqfdysqhyh1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2024-11-21T09:22:43.306673Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:852:2684], TxId: 281474976715662, task: 2. Ctx: { TraceId : 01jd70fvcs2etg2bqfdysqhyh1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [2:846:2656], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2024-11-21T09:22:43.306687Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2024-11-21T09:22:43.306696Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2024-11-21T09:22:43.307333Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T09:22:43.307378Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, ActorId: [2:819:2656], ActorState: ExecuteState, TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Create QueryResponse for error on request, msg: 2024-11-21T09:22:43.307465Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T09:22:43.307471Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2024-11-21T09:22:43.307563Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T09:22:43.307574Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2024-11-21T09:22:43.307578Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 0 ProcessProposeTransaction 2024-11-21T09:22:43.307590Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 0 reqId# [2:888:2716] SnapshotReq marker# P0 2024-11-21T09:22:43.307663Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2716] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2024-11-21T09:22:43.307693Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2024-11-21T09:22:43.307698Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2716] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2024-11-21T09:22:43.307721Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:43.307725Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T09:22:43.307731Z node 2 :KQP_EXECUTER INFO: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T09:22:43.307746Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:43.307752Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd70fvcs2etg2bqfdysqhyh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T09:22:43.308775Z node 2 :TX_PROXY DEBUG: Actor# [2:888:2716] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2024-11-21T09:22:43.308885Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:558:2485], selfId: [2:50:2097], source: [2:819:2656] 2024-11-21T09:22:43.308922Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:888:2716], Recipient [2:632:2537]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2024-11-21T09:22:43.309102Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmFhMmQwNzAtY2IwOTlkNjYtYmI4MTA2NDYtNDI2ZGY0YTk=, workerId: [2:819:2656], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 131 >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] Test command err: 2024-11-21T09:22:40.467302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.467824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.467853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004326/r3tmp/tmpPUBi7u/pdisk_1.dat 2024-11-21T09:22:40.569622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.585684Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.628053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:40.628084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:40.638631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:40.743350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.757264Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.757429Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.757505Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T09:22:40.757544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.764482Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.764611Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.764627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.764731Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:22:40.764747Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:40.764752Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:40.764779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.767334Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:40.767389Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.767429Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T09:22:40.767434Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:40.767439Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T09:22:40.767444Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.767552Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.767557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.767654Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:40.767666Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:40.767674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.767677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.767681Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T09:22:40.767686Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.767690Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.767695Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.767698Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:40.767701Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:40.767704Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:40.767708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:40.767723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T09:22:40.767726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.767741Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:40.767773Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:40.767779Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:40.767790Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:40.767795Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.767797Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:40.767800Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:40.767803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.767833Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.767836Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:40.767838Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:40.767840Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.767846Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:40.767848Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:40.767850Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:40.767852Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.767855Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.768058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:40.768064Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:40.778330Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:40.778351Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.778356Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.778367Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T09:22:40.778377Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.952565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.952586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.952594Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T09:22:40.952613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T09:22:40.952618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.952640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.952647Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:40.952652Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:40.952657Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T09:22:40.953427Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:40.953444Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.953557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.953563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.953569Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.953575Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.953579Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.953586Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 76715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2843], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 37 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 7 FinishTimeMs: 1732180963948 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 2 BuildCpuTimeUs: 5 WaitInputTimeUs: 631 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948652Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2843] 2024-11-21T09:22:43.948657Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1047:2844], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T09:22:43.948661Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1047:2844], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T09:22:43.948703Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. 
Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1047:2844], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 139 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 107 FinishTimeMs: 1732180963948 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 97 BuildCpuTimeUs: 10 WaitInputTimeUs: 604 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948707Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2844] 2024-11-21T09:22:43.948711Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T09:22:43.948714Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T09:22:43.948751Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2845], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 60 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 41 FinishTimeMs: 1732180963948 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 35 BuildCpuTimeUs: 6 WaitInputTimeUs: 611 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948754Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2845] 2024-11-21T09:22:43.948757Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T09:22:43.948760Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. 
Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T09:22:43.948800Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2846], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 99 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 72 FinishTimeMs: 1732180963948 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 16 WaitInputTimeUs: 734 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948805Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2846] 2024-11-21T09:22:43.948822Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T09:22:43.948825Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T09:22:43.948851Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2847], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 46 DurationUs: 1000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 28 FinishTimeMs: 1732180963948 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 15 WaitInputTimeUs: 968 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948854Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2847] 2024-11-21T09:22:43.948857Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. 
Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2848], 2024-11-21T09:22:43.948859Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2848], 2024-11-21T09:22:43.948882Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2848], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 43 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 28 FinishTimeMs: 1732180963948 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 9 WaitInputTimeUs: 1025 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963947 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.948886Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2848] 2024-11-21T09:22:43.948918Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:43.948924Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd70fwj06z3wr0qqzefhsr9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmZiODQ1NmMtOTc2OWZiYmItMjdlMzExZDgtY2VkMTM5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000624s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T09:22:40.489260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.489628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.489646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00430b/r3tmp/tmp30cAY8/pdisk_1.dat 2024-11-21T09:22:40.586699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.604529Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.646812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:40.646853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:40.657379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:40.760699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.774444Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.774602Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.774657Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T09:22:40.774702Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.781210Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.781342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.781362Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.781461Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:22:40.781478Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:40.781484Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:40.781519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.784052Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:40.784093Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.784112Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T09:22:40.784116Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:40.784119Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T09:22:40.784122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.784200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.784224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.784327Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:40.784338Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:40.784347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.784350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.784355Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T09:22:40.784360Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.784364Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.784369Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.784372Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:40.784374Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:40.784378Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:40.784381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:40.784395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T09:22:40.784398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.784412Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:40.784445Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:40.784454Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:40.784469Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:40.784475Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.784479Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:40.784484Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:40.784488Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.784524Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.784528Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:40.784531Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:40.784535Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.784543Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:40.784547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:40.784551Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:40.784554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.784558Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.784765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:40.784771Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:40.795029Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:40.795050Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.795056Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.795066Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T09:22:40.795078Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.968760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.968781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.968789Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T09:22:40.968807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T09:22:40.968812Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.968844Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.968852Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:40.968857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:40.968862Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T09:22:40.969639Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:40.969660Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.969771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.969778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.969784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.969791Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.969795Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.969803Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 1474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2841], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 32 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 8 FinishTimeMs: 1732180963185 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 3 BuildCpuTimeUs: 5 WaitInputTimeUs: 589 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185515Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2841] 2024-11-21T09:22:43.185519Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T09:22:43.185523Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T09:22:43.185562Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 83 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 58 FinishTimeMs: 1732180963185 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 8 WaitInputTimeUs: 571 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185565Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2024-11-21T09:22:43.185569Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T09:22:43.185572Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T09:22:43.185602Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 50 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 34 FinishTimeMs: 1732180963185 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 30 BuildCpuTimeUs: 4 WaitInputTimeUs: 559 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185605Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2024-11-21T09:22:43.185608Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T09:22:43.185611Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T09:22:43.185650Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 78 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 62 FinishTimeMs: 1732180963185 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 10 WaitInputTimeUs: 676 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185654Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2024-11-21T09:22:43.185657Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T09:22:43.185660Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T09:22:43.185681Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 32 DurationUs: 1000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 18 FinishTimeMs: 1732180963185 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 11 WaitInputTimeUs: 876 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185685Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2024-11-21T09:22:43.185687Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2846], 2024-11-21T09:22:43.185692Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2846], 2024-11-21T09:22:43.185704Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 38 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 24 FinishTimeMs: 1732180963185 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 6 WaitInputTimeUs: 919 HostName: "ghrun-qcxhsi27zq" NodeId: 2 StartTimeMs: 1732180963184 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:43.185706Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2024-11-21T09:22:43.185734Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:43.185741Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd70fvt206s9w0k7tywp8r1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMGZmYTMtNzZjNDBmOTgtOTk2OGZiY2EtMzk2NmMyNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000506s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] Test command err: 2024-11-21T09:22:40.464479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.464915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.464937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00433e/r3tmp/tmp12foQn/pdisk_1.dat 2024-11-21T09:22:40.564360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.580171Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.622225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:40.622258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:40.632698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:40.735969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:40.750244Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.750441Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.750517Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T09:22:40.750570Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.757870Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.758003Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.758023Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.758114Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T09:22:40.758127Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:40.758132Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:40.758159Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.760812Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:40.760862Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.760894Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T09:22:40.760900Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:40.760905Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T09:22:40.760910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.761008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.761015Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.761113Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:40.761131Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:40.761140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.761144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.761148Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T09:22:40.761153Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.761157Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.761162Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.761165Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:40.761168Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:40.761171Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:40.761175Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:40.761186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T09:22:40.761189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.761202Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:40.761233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:40.761240Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:40.761251Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:40.761255Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.761258Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:40.761264Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:40.761266Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.761293Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.761295Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:40.761298Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:40.761300Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.761306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:40.761308Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:40.761311Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:40.761313Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.761316Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.761498Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:40.761521Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:40.771760Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:40.771796Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:40.771802Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:40.771814Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T09:22:40.771827Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.945443Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.945459Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.945465Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T09:22:40.945479Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T09:22:40.945483Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.945502Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:40.945509Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:40.945514Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:40.945519Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T09:22:40.946130Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T09:22:40.946146Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:40.946240Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.946245Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.946250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:40.946254Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.946257Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T09:22:40.946263Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... ASHARD INFO: Change sender killed: at tablet: 72075186224037892 2024-11-21T09:22:43.760150Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1138:2897] 2024-11-21T09:22:43.760153Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2024-11-21T09:22:43.760157Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2024-11-21T09:22:43.760160Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T09:22:43.760200Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:988:2787], Recipient [2:988:2787]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:43.760234Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:43.760270Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:988:2787], Recipient [2:718:2598]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2024-11-21T09:22:43.760276Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2024-11-21T09:22:43.760322Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1133:2892], Recipient [2:718:2598]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:43.760325Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:43.760340Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:988:2787]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2024-11-21T09:22:43.760342Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T09:22:43.760345Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2024-11-21T09:22:43.760348Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T09:22:43.760366Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T09:22:43.760370Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037892 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:43.760375Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2024-11-21T09:22:43.760379Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2024-11-21T09:22:43.760381Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T09:22:43.760384Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2024-11-21T09:22:43.760387Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T09:22:43.760426Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1135:2894], Recipient [2:988:2787]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:43.760429Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:43.760433Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1133:2892], serverId# [2:1135:2894], sessionId# [0:0:0] 2024-11-21T09:22:43.760508Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:988:2787]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2024-11-21T09:22:43.760512Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T09:22:43.760515Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2024-11-21T09:22:43.760518Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T09:22:43.760522Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T09:22:43.770802Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2024-11-21T09:22:43.770827Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2024-11-21T09:22:43.770852Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T09:22:43.770866Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2024-11-21T09:22:43.770875Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1142:2901] 2024-11-21T09:22:43.770878Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2024-11-21T09:22:43.770882Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2024-11-21T09:22:43.770886Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T09:22:43.770961Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:990:2789], Recipient [2:718:2598]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2024-11-21T09:22:43.770972Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2024-11-21T09:22:43.771041Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:990:2789], Recipient 
[2:990:2789]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:43.771045Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:43.771082Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1134:2893], Recipient [2:718:2598]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:43.771085Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T09:22:43.771117Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:990:2789]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2024-11-21T09:22:43.771119Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T09:22:43.771123Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2024-11-21T09:22:43.771126Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T09:22:43.771144Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T09:22:43.771148Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:43.771153Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2024-11-21T09:22:43.771157Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2024-11-21T09:22:43.771162Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2024-11-21T09:22:43.771165Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2024-11-21T09:22:43.771168Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T09:22:43.771209Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1136:2895], Recipient [2:990:2789]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:43.771213Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:43.771219Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1134:2893], serverId# [2:1136:2895], sessionId# [0:0:0] 2024-11-21T09:22:43.771288Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:990:2789]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2024-11-21T09:22:43.771291Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T09:22:43.771294Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2024-11-21T09:22:43.771299Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T09:22:43.771303Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T09:22:43.781592Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2024-11-21T09:22:43.782325Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:379:2374], Recipient [2:725:2602] 
2024-11-21T09:22:43.782361Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2024-11-21T09:22:43.782633Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2024-11-21T09:22:43.782643Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T09:22:43.782731Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:710:2593], Recipient [2:718:2598]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T09:22:44.336253Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:938:2644], Recipient [2:630:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 938 RawX2: 8589937236 } TxBody: " \0008\000`\200\200\200\005j\322\006\010\001\022\223\006\010\001\022\024\n\022\t\252\003\000\000\000\000\000\000\021T\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\00 2024-11-21T09:22:44.336273Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:44.336302Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2024-11-21T09:22:44.336418Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2024-11-21T09:22:44.336481Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2024-11-21T09:22:29.278074Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660529979376462:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.278656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004111/r3tmp/tmpSWC2dP/pdisk_1.dat 2024-11-21T09:22:29.345419Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3858, node 1 2024-11-21T09:22:29.359856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.359870Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2024-11-21T09:22:29.359872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.359924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4957 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T09:22:29.377770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.377798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T09:22:29.379392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.389488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.390288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.390299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.390842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.390890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.390898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:29.391232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.391295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.391299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:29.391630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.392594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949436, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.392604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:29.392675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:29.393131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.393174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.393185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:29.393193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:29.393203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:29.393212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:29.393611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:29.393634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:29.393637Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.393650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:4957 2024-11-21T09:22:29.411987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.412629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:29.412709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.412715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.413403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
COLUMN STORE, path: /Root/OlapStore 2024-11-21T09:22:29.413443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.413506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.413523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 waiting... 2024-11-21T09:22:29.413770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.413792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.413797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.413850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T09:22:29.413856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T09:22:29.413857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:29.413944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.416970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.417030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.417050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.417064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.417072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T09:22:29.418484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:29.431453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:22:29.431508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:22:29.431544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:22:29.431569Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:22:29.431588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:22:29.431610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:22:29.431631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:22:29.431721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439660529979377343:2291];tablet_id=72075186224037890;process=TTxInitSche ... jVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660586247094136:3579], task: 33, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 88 DurationUs: 10000 Tasks { TaskId: 33 CpuTimeUs: 46 FinishTimeMs: 1732180962929 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 38 WaitInputTimeUs: 4081 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180962919 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:42.930191Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660586247094136:3579] 2024-11-21T09:22:42.930194Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7439660586247094180:3612], CA [28:7439660586247094161:3599], CA [28:7439660586247094179:3611], 2024-11-21T09:22:42.930201Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660586247094161:3599], task: 53, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 137 DurationUs: 8000 Tasks { TaskId: 53 CpuTimeUs: 75 FinishTimeMs: 1732180962929 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 9 BuildCpuTimeUs: 66 WaitInputTimeUs: 5495 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180962921 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:42.930202Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. 
Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660586247094161:3599] 2024-11-21T09:22:42.930205Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7439660586247094180:3612], CA [28:7439660586247094179:3611], 2024-11-21T09:22:42.930232Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [28:7439660586247094072:3541], seqNo: 1, nRows: 0 2024-11-21T09:22:42.930244Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660586247094179:3611], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 951 DurationUs: 7000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 66 FinishTimeMs: 1732180962929 ComputeCpuTimeUs: 30 BuildCpuTimeUs: 36 WaitInputTimeUs: 4751 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180962922 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:42.930247Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660586247094179:3611] 2024-11-21T09:22:42.930249Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7439660586247094180:3612], 2024-11-21T09:22:42.930255Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Forwarded TEvStreamData to [28:7439660586247094070:3540] 2024-11-21T09:22:42.930399Z node 28 :KQP_EXECUTER DEBUG: TxId: 281474976715770, send ack to channelId: 66, seqNo: 1, enough: 0, freeSpace: 8388490, to: [28:7439660586247094230:3612] 2024-11-21T09:22:42.930407Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439660586247094180:3612], TxId: 281474976715770, task: 66. Ctx: { TraceId : 01jd70fvhd24kcq8z8jsm38e4p. SessionId : ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. 
}. CA StateFunc 271646922 2024-11-21T09:22:42.930414Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439660586247094180:3612], TxId: 281474976715770, task: 66. Ctx: { TraceId : 01jd70fvhd24kcq8z8jsm38e4p. SessionId : ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T09:22:42.930418Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 66. Tasks execution finished, don't wait for ack delivery in input channelId: 65, seqNo: [1] 2024-11-21T09:22:42.930419Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 66. Tasks execution finished 2024-11-21T09:22:42.930421Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439660586247094180:3612], TxId: 281474976715770, task: 66. Ctx: { TraceId : 01jd70fvhd24kcq8z8jsm38e4p. SessionId : ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T09:22:42.930434Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 66. pass away 2024-11-21T09:22:42.930446Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:22:42.930471Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660586247094180:3612], task: 66, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 117 DurationUs: 8000 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 27 FinishTimeMs: 1732180962930 ComputeCpuTimeUs: 11 BuildCpuTimeUs: 16 WaitInputTimeUs: 6673 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180962922 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:42.930477Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660586247094180:3612] 2024-11-21T09:22:42.930502Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:42.930512Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660586247094093:3541] TxId: 281474976715770. Ctx: { TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.009728s ReadRows: 0 ReadBytes: 0 ru: 6 rate limiter was not found force flag: 1 2024-11-21T09:22:42.930530Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T09:22:42.931337Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 18.768 QueriesCount: 1 2024-11-21T09:22:42.931354Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T09:22:42.931374Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T09:22:42.931376Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, EndCleanup, isFinal: 1 2024-11-21T09:22:42.931392Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: ExecuteState, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7439660586247090234:2252] 2024-11-21T09:22:42.931394Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: unknown state, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Cleanup temp tables: 0 2024-11-21T09:22:42.931779Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180962911, txId: 18446744073709551615] shutting down 2024-11-21T09:22:42.931807Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZmZmOWE1YjItZjVlYWQwNjEtNmZjOTQ1MTUtMjNiMjQ3NTU=, ActorId: [28:7439660586247094072:3541], ActorState: unknown state, TraceId: 01jd70fvhd24kcq8z8jsm38e4p, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2024-11-21T09:22:29.113081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660529616471684:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:29.113319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004113/r3tmp/tmpJnT1wB/pdisk_1.dat 2024-11-21T09:22:29.164383Z node 1 :IMPORT WARN: Table profiles 
were not loaded TServer::EnableGrpc on GrpcPort 1222, node 1 2024-11-21T09:22:29.185066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:29.185078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:29.185079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:29.185107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:29.213201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:29.213225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:29.214701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:29.248157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.249305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.249336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.249780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:29.249837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:29.249847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:29.250274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:29.250283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:29.250348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:29.250676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.251412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180949296, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:29.251421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:29.251479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:29.251776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.251808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.251822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:29.251836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:29.251847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:29.251857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:29.252135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:29.252147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:29.252150Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:29.252189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:13670 2024-11-21T09:22:29.271712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.272186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:29.272246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:29.272255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:29.272948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: 
CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T09:22:29.272997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:29.273072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:29.273096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 waiting... 2024-11-21T09:22:29.273276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.273296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.273300Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:29.273343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:29.273354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:29.273355Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:29.273487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:29.276342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.276391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.276407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.276421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:29.276447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:29.276832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:29.286427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T09:22:29.286467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T09:22:29.286497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T09:22:29.286521Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T09:22:29.286543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T09:22:29.286566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T09:22:29.286594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T09:22:29.286616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439660529616472680:2288];tablet_id=72075186224037889;process=TTxInitSc ... Id: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 29, seqNo: [1] 2024-11-21T09:22:42.418322Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 30, seqNo: [1] 2024-11-21T09:22:42.418324Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 31, seqNo: [1] 2024-11-21T09:22:42.418326Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 32, seqNo: [1] 2024-11-21T09:22:42.418329Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 33, seqNo: [1] 2024-11-21T09:22:42.418332Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 34, seqNo: [1] 2024-11-21T09:22:42.418334Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 35, seqNo: [1] 2024-11-21T09:22:42.418336Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 36, seqNo: [1] 2024-11-21T09:22:42.418338Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 37, seqNo: [1] 2024-11-21T09:22:42.418340Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2024-11-21T09:22:42.418342Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2024-11-21T09:22:42.418344Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2024-11-21T09:22:42.418346Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2024-11-21T09:22:42.418348Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2024-11-21T09:22:42.418351Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2024-11-21T09:22:42.418353Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2024-11-21T09:22:42.418355Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2024-11-21T09:22:42.418358Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2024-11-21T09:22:42.418360Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2024-11-21T09:22:42.418362Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2024-11-21T09:22:42.418364Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2024-11-21T09:22:42.418366Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2024-11-21T09:22:42.418368Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2024-11-21T09:22:42.418371Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2024-11-21T09:22:42.418373Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2024-11-21T09:22:42.418374Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2024-11-21T09:22:42.418377Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2024-11-21T09:22:42.418378Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2024-11-21T09:22:42.418380Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2024-11-21T09:22:42.418383Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2024-11-21T09:22:42.418384Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2024-11-21T09:22:42.418387Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2024-11-21T09:22:42.418389Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2024-11-21T09:22:42.418391Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2024-11-21T09:22:42.418393Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2024-11-21T09:22:42.418395Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2024-11-21T09:22:42.418398Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished 2024-11-21T09:22:42.418402Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439660585293130273:3604], TxId: 281474976715770, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=. TraceId : 01jd70ftzy1qms5th4s11zktwf. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T09:22:42.418441Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. pass away 2024-11-21T09:22:42.418471Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:22:42.418481Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660585293130192:3535] TxId: 281474976715770. Ctx: { TraceId: 01jd70ftzy1qms5th4s11zktwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660585293130273:3604], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1397 DurationUs: 66000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 147 FinishTimeMs: 1732180962418 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 88 BuildCpuTimeUs: 59 WaitInputTimeUs: 57425 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180962352 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:42.418489Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd70ftzy1qms5th4s11zktwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660585293130273:3604] 2024-11-21T09:22:42.418512Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660585293130192:3535] TxId: 281474976715770. Ctx: { TraceId: 01jd70ftzy1qms5th4s11zktwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:42.418522Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660585293130192:3535] TxId: 281474976715770. Ctx: { TraceId: 01jd70ftzy1qms5th4s11zktwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.020603s ReadRows: 50 ReadBytes: 16000 ru: 50 rate limiter was not found force flag: 1 2024-11-21T09:22:42.418537Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T09:22:42.418618Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 80.002 QueriesCount: 1 2024-11-21T09:22:42.418637Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T09:22:42.418663Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T09:22:42.418666Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, EndCleanup, isFinal: 1 2024-11-21T09:22:42.418680Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: ExecuteState, TraceId: 01jd70ftzy1qms5th4s11zktwf, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7439660580998159094:2256] 2024-11-21T09:22:42.418683Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: unknown state, TraceId: 01jd70ftzy1qms5th4s11zktwf, Cleanup temp tables: 0 2024-11-21T09:22:42.419100Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180962351, txId: 18446744073709551615] shutting down 2024-11-21T09:22:42.419149Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NzZlYjg3NTktZjQ2OGNmZjMtZWEyMTdhZjQtZmFiNzM2YjA=, ActorId: [28:7439660585293130172:3535], ActorState: unknown state, TraceId: 01jd70ftzy1qms5th4s11zktwf, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2024-11-21T09:22:30.833016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660532152528237:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.833262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004104/r3tmp/tmp69xAmB/pdisk_1.dat 2024-11-21T09:22:30.891309Z node 1 :IMPORT WARN: Table 
profiles were not loaded TServer::EnableGrpc on GrpcPort 1800, node 1 2024-11-21T09:22:30.907977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.907989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.907991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.908017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.933354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.933380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.934799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:30.968724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.969802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.969812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.970341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.970383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.970386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T09:22:30.970858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.970867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.971259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:30.972546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951018, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.972557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.972619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.973048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.973087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.973097Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.973105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.973112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.973124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T09:22:30.973191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.973564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.973578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.973581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.973595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:31.135420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNulls_0x0006, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.135553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:31.135727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.135739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.136361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNulls_0x0006 2024-11-21T09:22:31.136415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.136467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.136489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:31.136544Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:31.136638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.136648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.136651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:31.136680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.136688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.136690Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:31.138269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:31.138294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:31.138630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:31.190826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:31.190840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:31.190876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:31.191347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:31.192123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951235, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.192134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180951235 2024-11-21T09:22:31.192152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:31.192572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.192647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.192667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:31.192854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.192864Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.192879Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:31.192910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:31.192917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:31.192919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046 ... 657:0 2 -> 3 waiting... 2024-11-21T09:22:42.602648Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:42.602658Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:42.602710Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:42.602995Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.603716Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180962652, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:42.603726Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:42.603785Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:42.604139Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:42.604177Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:42.604191Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:42.604200Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:42.604230Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:42.604239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:42.604358Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:42.604370Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:42.604373Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:42.604384Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:42.771250Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, 
path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.771367Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:42.771523Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:42.771535Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.771961Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:42.772006Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:42.772053Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:42.772076Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:42.772130Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:42.772248Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:42.772270Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:42.772291Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:42.772334Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:42.772343Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:42.772344Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:42.774056Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:42.774080Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:42.774497Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:42.826503Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:42.826513Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:42.826531Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T09:22:42.826894Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply 
ProgressState at tablet: 72057594046644480 2024-11-21T09:22:42.827891Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180962876, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:42.827907Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180962876 2024-11-21T09:22:42.827938Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:42.828321Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:42.828410Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:42.828428Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:42.828710Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:42.828722Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:42.828725Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:42.828764Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:42.828770Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:42.828771Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:42.829036Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732180962876 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 273 } } 2024-11-21T09:22:42.829093Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T09:22:42.829103Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:42.829112Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T09:22:42.829295Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:42.829311Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:42.829323Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS 
Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:149:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:149:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:151:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:151:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! 
new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:225:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:226:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! 
new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:156:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:157:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:157:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! 
new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:161:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:162:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:162:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! 
new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:234:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:149:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:149:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:151:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:151:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:225:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:225:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:156:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:157:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:157:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:161:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:162:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:162:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:234:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] >> test.py::test[params-param_in_json_api--ForceBlocks] |97.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> HugeBlobOnlineSizeChange::Compaction >> YdbLogStore::LogTable [GOOD] >> YdbLogStore::AlterLogTable |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> YdbLogStore::AlterLogTable [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2024-11-21T09:22:40.874895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T09:22:40.877188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:40.877288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:40.877318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:40.877591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:40.877602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004314/r3tmp/tmpvMYcnR/pdisk_1.dat 2024-11-21T09:22:40.960577Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:41.039734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:41.126415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:41.126443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:41.127343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:41.127360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:41.138359Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:22:41.138493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:41.138588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:41.469909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:41.499860Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:41.502111Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:41.502240Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1214:2356] 2024-11-21T09:22:41.502291Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:41.510849Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:41.512133Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:41.512357Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:41.512486Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at 
tablet: 72075186224037888 2024-11-21T09:22:41.512509Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T09:22:41.512516Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T09:22:41.512552Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:41.516197Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T09:22:41.516268Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:41.516289Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1238:2371] 2024-11-21T09:22:41.516294Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T09:22:41.516298Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T09:22:41.516302Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:41.516460Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1214:2356], Recipient [2:1214:2356]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.516468Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:41.516531Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T09:22:41.516546Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T09:22:41.516582Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T09:22:41.516588Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:41.516592Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T09:22:41.516596Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T09:22:41.516600Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T09:22:41.516604Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T09:22:41.516609Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T09:22:41.578817Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1242:2372], Recipient [2:1214:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.578831Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.578838Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1196:2735], serverId# [2:1242:2372], sessionId# [0:0:0] 2024-11-21T09:22:41.578906Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:776:2433], Recipient [2:1242:2372] 2024-11-21T09:22:41.578911Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:41.578938Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:41.579002Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T09:22:41.579012Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at 
tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T09:22:41.579035Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T09:22:41.579042Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:41.579046Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T09:22:41.579051Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T09:22:41.579056Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:41.579096Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:41.579099Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T09:22:41.579101Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:41.579103Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:41.579112Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T09:22:41.579115Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:41.579117Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T09:22:41.579119Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:41.579122Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:41.580070Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1243:2373], Recipient [2:1214:2356]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T09:22:41.580084Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T09:22:41.580107Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:41.580112Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T09:22:41.580115Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:41.580122Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:41.580130Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:41.911601Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1274:2381], Recipient [2:1214:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.911619Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:41.911626Z node 2 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:1271:2757], serverId# [2:1274:2381], sessionId# [0:0:0] 2024-11-21T09:22:41.911781Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:992:2584], Recipient [2:1274:2381] 2024-11-21T09:22:41.911787Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:41.911814Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T09:22:41.911821Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T09:22:41.911826Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T09:22:41.911830Z node 2 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T09:22:41.912610Z node 2 :TX_DATASHARD DEBUG: Plann ... \004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\00 2024-11-21T09:22:45.672515Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:45.672553Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T09:22:45.672558Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T09:22:45.672575Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T09:22:45.672655Z node 3 :TX_DATASHARD TRACE: TxId: 281474976715664, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2024-11-21T09:22:45.672666Z node 3 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2024-11-21T09:22:45.672673Z node 3 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T09:22:45.672725Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2024-11-21T09:22:45.672739Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T09:22:45.672744Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T09:22:45.672749Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:45.672753Z node 3 :TX_DATASHARD TRACE: Trying to 
execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T09:22:45.672762Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T09:22:45.672779Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2024-11-21T09:22:45.672785Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T09:22:45.672788Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:45.672793Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T09:22:45.672796Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T09:22:45.672803Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T09:22:45.672815Z node 3 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T09:22:45.672882Z node 3 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T09:22:45.672893Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T09:22:45.672896Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T09:22:45.672900Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2024-11-21T09:22:45.672904Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:45.672911Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2024-11-21T09:22:45.672915Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2024-11-21T09:22:45.672918Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T09:22:45.672921Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2024-11-21T09:22:45.672932Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T09:22:45.672935Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T09:22:45.672939Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2024-11-21T09:22:45.687143Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70fy9915qgjbxvmhp2c75w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmVmZDQxOWQtYjY3ZDk5OTEtZjgzZTYwMzAtOWEyYTczOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:45.687425Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:926:2742], Recipient [3:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T09:22:45.687451Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T09:22:45.687459Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T09:22:45.687467Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2024-11-21T09:22:45.687475Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T09:22:45.687486Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T09:22:45.687489Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T09:22:45.687492Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:45.687495Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T09:22:45.687504Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T09:22:45.687508Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T09:22:45.687510Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:45.687512Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T09:22:45.687515Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T09:22:45.687524Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T09:22:45.687557Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2024-11-21T09:22:45.687560Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T09:22:45.687562Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T09:22:45.687564Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T09:22:45.687572Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T09:22:45.687574Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T09:22:45.687576Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T09:22:45.687579Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2024-11-21T09:22:45.800267Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2024-11-21T09:22:45.800360Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2024-11-21T09:22:45.800378Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2024-11-21T09:22:45.974641Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T09:22:45.974664Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T09:22:45.974674Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1000 ms, status: COMPLETE 2024-11-21T09:22:45.974697Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T09:22:45.974705Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T09:22:45.974709Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T09:22:45.974717Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:926:2742], 0} after executionsCount# 1 2024-11-21T09:22:45.974734Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:926:2742], 0} sends rowCount# 4, bytes# 96, quota rows left# 997, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T09:22:45.974756Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:926:2742], 0} finished in read 2024-11-21T09:22:45.975327Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:926:2742], Recipient [3:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T09:22:45.975352Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> TReplicationWithRebootsTests::Alter [GOOD] >> test.py::test[params-param_in_json_api--ForceBlocks] [GOOD] >> test.py::test[params-param_in_json_api--Plan] [GOOD] >> test.py::test[params-param_in_json_api--Results] |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> Secret::Validation |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/metadata/secret/ut/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Alter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.163601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.163649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.163660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.163680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.163685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.163695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.176907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.176935Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.179372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.179490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.179516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.182958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.183035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.184269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T09:22:38.185165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.187153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.188686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.188692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.188733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.190195Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.204002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.204793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.204862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.204916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.204923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.205753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.205764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T09:22:38.205768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.206122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.206508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.206534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.207052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.207469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.208094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.208307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.208398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.208435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.208879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.208924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.209004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.209011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.209021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.209025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.209031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.209036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.209041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.209044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.209055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.209060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.209064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... lags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:46.703340Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:46.703348Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T09:22:46.703361Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T09:22:46.703410Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [35:118:2144], Recipient [35:256:2248] 2024-11-21T09:22:46.703413Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:46.703420Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:46.703434Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 150323857504 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:46.703439Z node 35 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T09:22:46.703458Z node 35 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T09:22:46.703479Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:46.703489Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:46.703501Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 2024-11-21T09:22:46.703780Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:46.703786Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#1003 countTxs#1 2024-11-21T09:22:46.703790Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2024-11-21T09:22:46.703794Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1003:0 2024-11-21T09:22:46.703817Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [35:128:2152], Recipient [35:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:46.703820Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:46.703832Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:46.703835Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:46.703865Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:46.703868Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:46.703947Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:46.703954Z node 35 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:46.703961Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:46.703964Z node 35 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:46.703967Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:46.703971Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T09:22:46.703974Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:46.703977Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:46.703981Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:46.704005Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:22:46.704010Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 1, subscribers: 0 2024-11-21T09:22:46.704015Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 3 
2024-11-21T09:22:46.704068Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [35:205:2208], Recipient [35:128:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 3 } 2024-11-21T09:22:46.704072Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:22:46.704082Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:46.704088Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:46.704091Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:46.704095Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:22:46.704097Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:46.704106Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T09:22:46.704108Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:46.704559Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:46.704751Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:46.704756Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:22:46.704789Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:22:46.704793Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:22:46.704832Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [35:449:2406], Recipient [35:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:46.704838Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:46.704840Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:22:46.704855Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [35:415:2372], Recipient [35:128:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1003 2024-11-21T09:22:46.704858Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:22:46.704877Z node 35 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:22:46.704891Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:46.704894Z node 35 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:447:2404] 2024-11-21T09:22:46.704907Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [35:449:2406], Recipient [35:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:46.704910Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:46.704913Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:46.704948Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [35:450:2407], Recipient [35:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:22:46.704951Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:22:46.704957Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:46.704984Z node 35 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 24us result status StatusSuccess 2024-11-21T09:22:46.705044Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 ControllerId: 72075186233409546 State { Done { FailoverMode: FAILOVER_MODE_FORCE } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[params-param_in_json_api--Results] [GOOD] >> test.py::test[params-param_type_mismatch_fail--Analyze] [SKIPPED] >> test.py::test[params-param_type_mismatch_fail--Debug] [SKIPPED] >> test.py::test[params-param_type_mismatch_fail--ForceBlocks] [SKIPPED] >> test.py::test[params-param_type_mismatch_fail--Plan] [SKIPPED] >> 
test.py::test[params-param_type_mismatch_fail--Results] >> Secret::SimpleQueryService >> DataShardTxOrder::DelayData [GOOD] |97.4%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2024-11-21T09:22:40.083596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.083622Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.083636Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.086004Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.086118Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.086166Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.086848Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.093654Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.093773Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.093913Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.093926Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.093933Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.093985Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.096333Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.096371Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.096399Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.096404Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.096408Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T09:22:40.096412Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.096475Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.096481Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.096499Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.096513Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.096546Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.096551Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 
0 immediate 0 planned 0 2024-11-21T09:22:40.096555Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.096558Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.096561Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.096564Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.096567Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.104382Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.104403Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.104411Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.104734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.104744Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.104767Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.104795Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.104818Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.104826Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.104832Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.104835Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.104838Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.104841Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.104915Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.104918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.104921Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T09:22:40.104923Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.104933Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.104935Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.104937Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.104939Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.104943Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not 
ready to execute on unit WaitForPlan 2024-11-21T09:22:40.128766Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T09:22:40.128790Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.128798Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.128808Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.128823Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.128962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.128968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.128974Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.128988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.128992Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.129019Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.129027Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.129030Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.129034Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.129683Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.129698Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.129749Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.129758Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.129765Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.129772Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.129777Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.129784Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.129789Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.129795Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.129800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:40.129804Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 
2024-11-21T09:22:40.129808Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.129859Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.129863Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.129867Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.129871Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.129875Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.129885Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.129888Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.129892Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.129895Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.129908Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.129911Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.129913Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.129917Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.129919Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.129921Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
e 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T09:22:47.690937Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T09:22:47.690941Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2024-11-21T09:22:47.690944Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T09:22:47.690948Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T09:22:47.690951Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2024-11-21T09:22:47.690956Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2024-11-21T09:22:47.691049Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2024-11-21T09:22:47.691060Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T09:22:47.691071Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T09:22:47.691075Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2024-11-21T09:22:47.691081Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2024-11-21T09:22:47.691085Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T09:22:47.691135Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2024-11-21T09:22:47.691139Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2024-11-21T09:22:47.691143Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2024-11-21T09:22:47.691146Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2024-11-21T09:22:47.691151Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T09:22:47.691155Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2024-11-21T09:22:47.691159Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2024-11-21T09:22:47.691163Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:47.691166Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:47.691171Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2024-11-21T09:22:47.691245Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:231:2226], Recipient [2:231:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:47.691252Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:47.691259Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:47.691263Z node 2 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:47.691268Z node 2 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2024-11-21T09:22:47.691272Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2024-11-21T09:22:47.691276Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691280Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:47.691283Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:47.691286Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:47.691386Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2024-11-21T09:22:47.691393Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691396Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:47.691400Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T09:22:47.691404Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T09:22:47.691407Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691409Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T09:22:47.691411Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:47.691414Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:47.691421Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2024-11-21T09:22:47.691426Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2024-11-21T09:22:47.691428Z node 2 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2024-11-21T09:22:47.691432Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691434Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:47.691436Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T09:22:47.691438Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2024-11-21T09:22:47.691443Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691445Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T09:22:47.691447Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T09:22:47.691449Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2024-11-21T09:22:47.691451Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691453Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2024-11-21T09:22:47.691455Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T09:22:47.691457Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2024-11-21T09:22:47.691460Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691462Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T09:22:47.691464Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T09:22:47.691466Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2024-11-21T09:22:47.691469Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691472Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T09:22:47.691476Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2024-11-21T09:22:47.691479Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2024-11-21T09:22:47.691531Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2024-11-21T09:22:47.691538Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T09:22:47.691545Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:47.691548Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2024-11-21T09:22:47.691552Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2024-11-21T09:22:47.691555Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T09:22:47.691581Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2024-11-21T09:22:47.691584Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2024-11-21T09:22:47.691590Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2024-11-21T09:22:47.691593Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2024-11-21T09:22:47.691597Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T09:22:47.691600Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2024-11-21T09:22:47.691603Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2024-11-21T09:22:47.691607Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:47.691610Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:47.691614Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:47.691617Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2024-11-21T09:22:47.702956Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2024-11-21T09:22:47.702981Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2024-11-21T09:22:47.702993Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:47.703001Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T09:22:47.703020Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:22:47.703031Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:47.703077Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:47.703082Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T09:22:47.703089Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:22:47.703094Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TFileStoreWithReboots::CreateWithIntermediateDirs |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TFileStoreWithReboots::CheckFileStoreHDDLimits >> test.py::test[params-param_type_mismatch_fail--Results] [GOOD] >> test.py::test[params-primitives--Analyze] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T09:22:48.498414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:48.498440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:48.498446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:48.498451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:48.498457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:48.498462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:48.498471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:48.498546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:48.506563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:48.506583Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:48.508559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:48.509104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:48.509137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:48.510342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:48.510513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:48.510591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.510658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:48.511418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.511634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:48.511642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.511672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:48.511678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:48.511684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:48.511696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.512689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T09:22:48.524028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:48.524081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.524123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
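Every entry in these traces has the same shape: an ISO-8601 timestamp, "node <id>", a component tag such as :TX_DATASHARD or :FLAT_TX_SCHEMESHARD, a severity (TRACE, DEBUG, INFO, WARN, NOTICE, ERROR), and a free-form message. When a run like this has to be dissected, it usually helps to split the captured output back into those fields before following one transaction through its execution units. The sketch below is only an illustration of that split written against the format shown here; the struct and function names are invented for the example and are not part of the YDB codebase.

#include <iostream>
#include <optional>
#include <regex>
#include <string>

// One parsed entry of the "<timestamp> node <id> :<COMPONENT> <LEVEL>: <message>" form.
struct TTraceLine {
    std::string Timestamp;
    int Node = 0;
    std::string Component;   // e.g. TX_DATASHARD, FLAT_TX_SCHEMESHARD
    std::string Level;       // TRACE, DEBUG, INFO, WARN, NOTICE, ERROR
    std::string Message;
};

std::optional<TTraceLine> ParseTraceLine(const std::string& line) {
    static const std::regex pattern(R"(^(\S+) node (\d+) :(\w+) (\w+): (.*)$)");
    std::smatch match;
    if (!std::regex_match(line, match, pattern)) {
        return std::nullopt;   // not a trace entry (e.g. a "waiting..." or test result line)
    }
    return TTraceLine{match[1], std::stoi(match[2]), match[3], match[4], match[5]};
}

int main() {
    const std::string sample =
        "2024-11-21T09:22:47.691085Z node 2 :TX_DATASHARD TRACE: "
        "Trying to execute [1000005:506] at 9437184 on unit CompleteOperation";
    if (auto parsed = ParseTraceLine(sample)) {
        std::cout << parsed->Component << " " << parsed->Level << ": "
                  << parsed->Message << "\n";
    }
    return 0;
}

Feeding the whole captured output through this one entry at a time makes it easy to filter on Component == "TX_DATASHARD" and grep the message for a single operation id such as [1000005:506].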
2024-11-21T09:22:48.524156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:48.524161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.524630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.524651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:48.524691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.524699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:48.524704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:48.524708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:48.525032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.525040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:48.525042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:48.525309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.525315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.525319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.525323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.525748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:48.526088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:48.526119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:48.526243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.526260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:48.526272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.526306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:48.526310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.526331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:48.526339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:48.526655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:48.526660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:48.526684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.526689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T09:22:48.526755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.526760Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:48.526767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:48.526770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.526773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:48.526776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.526779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:48.526781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:48.526789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:48.526793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:48.526795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T09:22:48.526987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:48.526997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T09:22:48.527001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
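The AlterSubDomain operation above advances through numbered states that the schemeshard prints as "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240", with TCreateParts, NSubDomainState::TConfigureParts, NSubDomainState::TPropose and TDone progress messages around each hop. The toy state machine below only mirrors what the trace shows; the numeric values are copied from the log, but the enum names are my reading of those progress messages, not schemeshard's actual state codes.

#include <cstdio>

// State numbers copied from the trace; names are my reading of the surrounding
// TCreateParts / TConfigureParts / TPropose / TDone progress messages.
enum class EOpState : int {
    CreateParts    = 2,
    ConfigureParts = 3,
    Propose        = 128,
    Done           = 240,
};

EOpState Next(EOpState state) {
    switch (state) {
        case EOpState::CreateParts:    return EOpState::ConfigureParts;
        case EOpState::ConfigureParts: return EOpState::Propose;
        case EOpState::Propose:        return EOpState::Done;
        case EOpState::Done:           return EOpState::Done;
    }
    return EOpState::Done;   // unreachable, keeps compilers quiet
}

int main() {
    EOpState state = EOpState::CreateParts;
    while (state != EOpState::Done) {
        EOpState next = Next(state);
        std::printf("Change state %d -> %d\n",
                    static_cast<int>(state), static_cast<int>(next));
        state = next;
    }
    return 0;
}

Running it prints the same three transitions, which is a convenient mental model when scanning a reboot test: in this reading, an operation that never reaches 240 never produced its "Operation and all the parts is done" line.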
2024-11-21T09:22:48.527006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T09:22:48.527010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:48.527021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... T09:22:48.797741Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:22:48.797786Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type FileStore, boot OK, tablet id 72075186233409549 2024-11-21T09:22:48.797803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T09:22:48.797807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 110, shardIdx: 72057594046678944:4, partId: 0 2024-11-21T09:22:48.797820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T09:22:48.797828Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:22:48.797834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T09:22:48.797849Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 2 -> 3 2024-11-21T09:22:48.798051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T09:22:48.798375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T09:22:48.798787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.798810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.798815Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:48.799383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2024-11-21T09:22:48.799408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2024-11-21T09:22:48.800249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2024-11-21T09:22:48.800277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 
Status: OK 2024-11-21T09:22:48.800283Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T09:22:48.800289Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 3 -> 128 2024-11-21T09:22:48.800844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.800989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.800996Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2024-11-21T09:22:48.801028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:48.801303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2024-11-21T09:22:48.801324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2024-11-21T09:22:48.801383Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801406Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801425Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 128 -> 240 2024-11-21T09:22:48.801452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:48.801462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2024-11-21T09:22:48.801815Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:48.801855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T09:22:48.801871Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T09:22:48.801876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 1 2024-11-21T09:22:48.801883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 5 2024-11-21T09:22:48.801949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.801955Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 110:0 ProgressState 2024-11-21T09:22:48.801966Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2024-11-21T09:22:48.801970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T09:22:48.801975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2024-11-21T09:22:48.801980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T09:22:48.801985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 110:0 2024-11-21T09:22:48.801989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 110:0 2024-11-21T09:22:48.802008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T09:22:48.802014Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2024-11-21T09:22:48.802019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T09:22:48.802022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2024-11-21T09:22:48.802115Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T09:22:48.802124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T09:22:48.802128Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2024-11-21T09:22:48.802133Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T09:22:48.802136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T09:22:48.802227Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T09:22:48.802236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T09:22:48.802239Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2024-11-21T09:22:48.802243Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T09:22:48.802246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:22:48.802254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 110, subscribers: 0 2024-11-21T09:22:48.802875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T09:22:48.802927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2024-11-21T09:22:48.802985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2024-11-21T09:22:48.802991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2024-11-21T09:22:48.803057Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2024-11-21T09:22:48.803075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2024-11-21T09:22:48.803080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:669:2620] TestWaitNotification: OK eventTxId 110 |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[params-primitives--Analyze] [GOOD] >> test.py::test[params-primitives--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 28925, MsgBus: 17171 2024-11-21T09:22:47.204647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660604227323278:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:47.204905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d82/r3tmp/tmpcIp1AY/pdisk_1.dat 2024-11-21T09:22:47.250722Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28925, node 1 2024-11-21T09:22:47.262471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:47.262484Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:47.262485Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:47.262511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17171 TClient is connected to server localhost:17171 WaitRootIsUp 'Root'... 
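The CheckFileStoreHDDLimits trace above counts txId 110's publications down: "Publication still in progress, tx: 110, publications: 2", then "Publication in-flight, count: 2" and "count: 1" as each TEvUpdateAck arrives, and finally "Publication complete, notify & remove". A minimal sketch of that ack-counting pattern, with invented names and no actor machinery, looks like this; it illustrates the countdown seen in the log and is not schemeshard code.

#include <cstdio>
#include <map>

// Toy version of the countdown in the trace: a publication of N paths completes
// only after N acks. Invented names; no actors, scheme board or persistence.
struct TPublicationTracker {
    std::map<long long, int> InFlight;   // txId -> acks still expected

    void StartPublication(long long txId, int pathCount) {
        InFlight[txId] = pathCount;
        std::printf("Publication still in progress, tx: %lld, publications: %d\n",
                    txId, pathCount);
    }

    // Returns true when the last expected ack for txId arrives.
    bool OnAck(long long txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end()) {
            return false;   // unknown tx or already completed
        }
        std::printf("Publication in-flight, count: %d, txId: %lld\n", it->second, txId);
        if (--it->second == 0) {
            std::printf("Publication complete, notify & remove, txId: %lld\n", txId);
            InFlight.erase(it);
            return true;
        }
        return false;
    }
};

int main() {
    TPublicationTracker tracker;
    tracker.StartPublication(110, 2);   // two paths published, as for txId 110 above
    tracker.OnAck(110);                 // prints count: 2
    tracker.OnAck(110);                 // prints count: 1, then completes
    return 0;
}

In this model, a transaction whose last trace line stays at "Publication in-flight" is simply one whose remaining acks never arrived.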
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:47.306675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:47.306729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:47.307804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:47.331359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:47.338855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:47.356716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:47.375638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:47.386074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:47.497569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660604227324829:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:47.497599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:47.523640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.531155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.539041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.546746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.560539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.567440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:47.583150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660604227325344:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:47.583172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:47.583176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660604227325349:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:47.583889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:47.587880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660604227325351:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:47.844682Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTg1OWMzY2ItNmMxZmRiOS1lOTdiMWNkNS00MTczYTUyOA==, ActorId: [1:7439660604227325690:2453], ActorState: ExecuteState, TraceId: 01jd70g0cx5hg1zw446ysh9sh9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 23713, MsgBus: 8361 2024-11-21T09:22:48.067177Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660610585722646:2055];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/002d82/r3tmp/tmpI9O6lL/pdisk_1.dat 2024-11-21T09:22:48.073376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 23713, node 2 2024-11-21T09:22:48.081309Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:48.082687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:48.082695Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:48.082696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:48.082735Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8361 TClient is connected to server localhost:8361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:48.167208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:48.167235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:48.168486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:48.169103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:48.180564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
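The test name KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3 and the "tx has deferred effects, but locks are broken" responses in this output (the node 1 session above, and the node 2 session further below) suggest the pattern being exercised: a transaction reads under a snapshot, keeps its writes deferred until commit, and a concurrent write invalidates its read locks, so the commit is rejected rather than applying stale effects. The snippet below is a heavily simplified illustration of that optimistic check using my own in-memory maps; it is not the KQP or DataShard implementation, and the version-counter scheme is an assumption made for the example.

#include <cstdio>
#include <map>
#include <string>

// Shared "table": value plus a version counter per key. All names invented.
std::map<std::string, std::string> Data;
std::map<std::string, int> Version;

struct TToyTx {
    std::map<std::string, int> ReadSet;          // key -> version seen at read time
    std::map<std::string, std::string> Deferred; // writes buffered until commit

    void Read(const std::string& key) { ReadSet[key] = Version[key]; }
    void Write(const std::string& key, const std::string& value) { Deferred[key] = value; }

    bool Commit() {
        for (const auto& [key, seen] : ReadSet) {
            if (Version[key] != seen) {
                std::printf("tx has deferred effects, but locks are broken\n");
                return false;                    // abort; deferred writes are dropped
            }
        }
        for (const auto& [key, value] : Deferred) {
            Data[key] = value;
            ++Version[key];
        }
        return true;
    }
};

int main() {
    TToyTx tx;
    tx.Read("row1");
    tx.Write("row1", "value written inside the tx");
    ++Version["row1"];                           // concurrent writer breaks the read lock
    std::printf("commit %s\n", tx.Commit() ? "succeeded" : "aborted");
    return 0;
}

Here the concurrent ++Version["row1"] plays the role of the competing write in the test, so Commit() reports the same kind of failure and the deferred write is discarded.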
2024-11-21T09:22:48.189038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:48.209093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:48.219806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:48.395141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660610585724187:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:48.395170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:48.401344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.408329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.464226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.477397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.484201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.491453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.499847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660610585724689:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:48.499881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439660610585724694:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:48.499885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:48.500604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T09:22:48.504378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439660610585724696:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T09:22:48.814051Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjNlN2NkYWYtNzQ5ZDY1YWEtOGZhMDkxNGItMjhmYzZlZmI=, ActorId: [2:7439660610585724994:2454], ActorState: ExecuteState, TraceId: 01jd70g1b25k2r8tc2b1xt8thh, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Debug] >> test.py::test[action-eval_each_input_table-default.txt-Analyze] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[library-package_override--ForceBlocks] [SKIPPED] >> test.py::test[library-package_override--Plan] [SKIPPED] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Analyze] [SKIPPED] >> test.py::test[sampling-read-dynamic-Debug] >> test.py::test[limit-yql-8046_empty_sorted_desc--Debug] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--ForceBlocks] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Plan] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Analyze] >> Secret::Simple >> TFileStoreWithReboots::CreateAlter |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[params-primitives--Debug] [GOOD] >> test.py::test[params-primitives--ForceBlocks] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Debug] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] >> TReplicationWithRebootsTests::Create [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2024-11-21T09:22:24.371782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660509361130353:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:24.371981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004135/r3tmp/tmp7npNKn/pdisk_1.dat 2024-11-21T09:22:24.428949Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16843, node 1 2024-11-21T09:22:24.445169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:24.445182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:24.445185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:24.445240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:24.469298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.470073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.470088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.471297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:24.471351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:24.471361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
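The YdbOlapStore::LogTsRangeDescending output continues below with a CreateColumnStore request whose key columns are timestamp, resource_type, resource_id and uid. With rows clustered by that key, a "log ts range, descending" read is conceptually a reverse walk over an ordered index restricted to [from, to]. The sketch below shows only that idea with standard containers; the types and the microsecond convention are assumptions for the example, not the OLAP engine's actual data layout.

#include <cstdint>
#include <iostream>
#include <iterator>
#include <map>
#include <string>
#include <vector>

using TTimestamp = std::uint64_t;   // assumed microseconds, matching the Timestamp column

struct TLogRow {
    std::string ResourceType;
    std::string ResourceId;
    std::string Uid;
    std::string Message;
};

// timestamp -> row; multimap because several rows may share a timestamp.
using TLogIndex = std::multimap<TTimestamp, TLogRow>;

std::vector<TLogRow> ReadRangeDescending(const TLogIndex& index, TTimestamp from, TTimestamp to) {
    std::vector<TLogRow> result;
    auto first = index.lower_bound(from);   // first row with ts >= from
    auto last = index.upper_bound(to);      // first row with ts > to
    for (auto it = std::make_reverse_iterator(last),
              rend = std::make_reverse_iterator(first);
         it != rend; ++it) {
        result.push_back(it->second);       // newest timestamps come out first
    }
    return result;
}

int main() {
    TLogIndex index;
    for (TTimestamp ts : {100, 200, 300, 400}) {
        index.emplace(ts, TLogRow{"app", "resource-1", "uid" + std::to_string(ts), "message"});
    }
    for (const TLogRow& row : ReadRangeDescending(index, 150, 350)) {
        std::cout << row.Uid << "\n";       // prints uid300, then uid200
    }
    return 0;
}

ReadRangeDescending(index, 150, 350) returns the rows for timestamps 300 and 200 in that order, which is the newest-first ordering the test name refers to.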
2024-11-21T09:22:24.471769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:24.471779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:24.471976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:24.472254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.472394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:24.472415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:24.473150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180944522, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:24.473162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:24.473225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:24.473763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.473797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.473809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:24.473824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:24.473836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:24.473846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:24.474158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:24.474663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:24.474676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:24.474680Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:24.474692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:23324 2024-11-21T09:22:24.504805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" 
NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T09:22:24.504895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.505041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T09:22:24.505058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T09:22:24.505073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T09:22:24.505090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T09:22:24.505098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T09:22:24.505107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T09:22:24.505172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T09:22:24.505497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T09:22:24.505547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:24.505551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.505570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:22:24.505577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T09:22:24.506221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T09:22:24.506248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T09:22:24.506324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:24.506332Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:22:24.506356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T09:22:24.506377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:24.506385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660509361130930:2373], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2024-11-21T09:22:24.506392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660509361130930:2373], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2024-11-21T09:22:24.506399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:24.506409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 waiting... 2024-11-21T09:22:24.506561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChann ... TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2024-11-21T09:22:49.076444Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2024-11-21T09:22:49.076446Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2024-11-21T09:22:49.076448Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2024-11-21T09:22:49.076450Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2024-11-21T09:22:49.076453Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2024-11-21T09:22:49.076455Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2024-11-21T09:22:49.076457Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2024-11-21T09:22:49.076459Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2024-11-21T09:22:49.076461Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2024-11-21T09:22:49.076463Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2024-11-21T09:22:49.076465Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2024-11-21T09:22:49.076467Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2024-11-21T09:22:49.076469Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2024-11-21T09:22:49.076471Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2024-11-21T09:22:49.076473Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2024-11-21T09:22:49.076474Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2024-11-21T09:22:49.076477Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2024-11-21T09:22:49.076479Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2024-11-21T09:22:49.076480Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2024-11-21T09:22:49.076482Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2024-11-21T09:22:49.076484Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2024-11-21T09:22:49.076486Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2024-11-21T09:22:49.076488Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2024-11-21T09:22:49.076490Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2024-11-21T09:22:49.076492Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2024-11-21T09:22:49.076494Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2024-11-21T09:22:49.076496Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished 2024-11-21T09:22:49.076498Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439660613252698549:3616], TxId: 281474976715770, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=. TraceId : 01jd70g1h9f3k7vkngzdh5m49d. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2024-11-21T09:22:49.076537Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. pass away 2024-11-21T09:22:49.076564Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T09:22:49.076584Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660613252698475:3544] TxId: 281474976715770. Ctx: { TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439660613252698549:3616], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1522 DurationUs: 14000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 110 FinishTimeMs: 1732180969076 ComputeCpuTimeUs: 36 BuildCpuTimeUs: 74 WaitInputTimeUs: 8563 HostName: "ghrun-qcxhsi27zq" NodeId: 28 StartTimeMs: 1732180969062 } MaxMemoryUsage: 1048576 } 2024-11-21T09:22:49.076591Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439660613252698549:3616] 2024-11-21T09:22:49.076619Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660613252698475:3544] TxId: 281474976715770. Ctx: { TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:49.076631Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439660613252698475:3544] TxId: 281474976715770. Ctx: { TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.017212s ReadRows: 0 ReadBytes: 0 ru: 11 rate limiter was not found force flag: 1 2024-11-21T09:22:49.076646Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T09:22:49.076733Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 25.849 QueriesCount: 1 2024-11-21T09:22:49.076751Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T09:22:49.076779Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T09:22:49.076782Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, EndCleanup, isFinal: 1 2024-11-21T09:22:49.076806Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: ExecuteState, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7439660608957727235:2256] 2024-11-21T09:22:49.076814Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: unknown state, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Cleanup temp tables: 0 2024-11-21T09:22:49.077592Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180969000, txId: 18446744073709551615] shutting down 2024-11-21T09:22:49.077645Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=Y2ZmYmQ4NGYtOTlkNjM2ZmQtYzYxZjE4MDgtN2NjZmUzMjE=, ActorId: [28:7439660613252698451:3544], ActorState: unknown state, TraceId: 01jd70g1h9f3k7vkngzdh5m49d, Session actor destroyed 2024-11-21T09:22:49.219313Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[28:7439660608957727995:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T09:22:49.219315Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[28:7439660608957727994:2288];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T09:22:49.219787Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[28:7439660608957727994:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:22:49.220257Z node 28 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037890;self_id=[28:7439660608957728009:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T09:22:49.220276Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[28:7439660608957727996:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T09:22:49.221856Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7439660608957727995:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:22:49.223459Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[28:7439660608957727996:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T09:22:49.223496Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[28:7439660608957728009:2291];fline=actor.cpp:33;event=skip_flush_writing; >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Debug] |97.4%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.360010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.360028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.360032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.360036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.360049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.360051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.360058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.360136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.369923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.369943Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.371978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.372080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.372111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.374268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.374331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.374425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.374575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.375126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.375418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.375427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.375440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.375447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.375452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.375490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.376511Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.390411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.390467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.390505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.390533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.390539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.391088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.391096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:38.391100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.391388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.391666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.391674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.391678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.392071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.392340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.392377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.392500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.392521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.392525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.392556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.392560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.392576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.392583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.392844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.392852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.392907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.392911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.392956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.392960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.392967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.392969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.392974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.392979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.392983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.392987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.392996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.393001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.393005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 264 } } 2024-11-21T09:22:50.869359Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:22:50.869376Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 264 } } 2024-11-21T09:22:50.869388Z node 51 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 264 } } 2024-11-21T09:22:50.869393Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.869472Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [51:437:2412], Recipient [51:120:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:50.869478Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:50.869482Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:22:50.869526Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [51:327:2315], Recipient [51:120:2146]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:22:50.869532Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2024-11-21T09:22:50.869539Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:22:50.869543Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T09:22:50.869554Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:22:50.869559Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T09:22:50.869565Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T09:22:50.869578Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 
72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:50.869581Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.869586Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T09:22:50.869591Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T09:22:50.869612Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.870110Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.870281Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:50.870286Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.870384Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.870388Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.870406Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.870409Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.870413Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1003:0 2024-11-21T09:22:50.870428Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [51:327:2315] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1003 at schemeshard: 72057594046678944 2024-11-21T09:22:50.870444Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [51:120:2146], Recipient [51:120:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:50.870448Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:22:50.870455Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.870460Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:50.870472Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.870476Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:50.870480Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:50.870485Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:22:50.870494Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [51:399:2374] message: TxId: 1003 2024-11-21T09:22:50.870500Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:50.870505Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:50.870509Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 
2024-11-21T09:22:50.870537Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:50.870973Z node 51 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.870993Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [51:399:2374] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2024-11-21T09:22:50.871032Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:50.871038Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:404:2379] 2024-11-21T09:22:50.871073Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [51:407:2382], Recipient [51:120:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.871078Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.871082Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:22:50.871102Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [51:437:2412], Recipient [51:120:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.871106Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.871109Z node 51 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:50.871182Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [51:447:2422], Recipient [51:120:2146]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:22:50.871187Z node 51 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:22:50.871200Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:50.871247Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 42us result status StatusSuccess 2024-11-21T09:22:50.871372Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false 
ReplicationConfig { Mode: REPLICATION_MODE_NONE } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[sampling-read-dynamic-Debug] [GOOD] >> test.py::test[sampling-read-dynamic-ForceBlocks] >> test.py::test[expr-cast_utf8-default.txt-ForceBlocks] [GOOD] >> test.py::test[params-primitives--ForceBlocks] [GOOD] >> test.py::test[params-primitives--Plan] [GOOD] >> test.py::test[params-primitives--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.163601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.163648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.163658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.163694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.163699Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.163709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.176891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.176918Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.179412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.179544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.179575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.182909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.182980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.184260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.185138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.186915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.188677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.188681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.188717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.190008Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.203988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.204776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.204859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.204913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.204919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.205665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.205679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:38.205684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.206037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.206345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.206362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.206861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.207221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.208075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.208263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208283Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.208339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.208368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.208726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.208764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.208827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.208841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.208845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.208850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.208855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.208860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.208876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.208886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.208891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.208895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
imr::TEvDiscovery::TEvError 2024-11-21T09:22:50.945845Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.945851Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000003 first txId#1002 countTxs#1 2024-11-21T09:22:50.945858Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000003 2024-11-21T09:22:50.945864Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1002:0 2024-11-21T09:22:50.945928Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [52:121:2147], Recipient [52:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:22:50.945933Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:22:50.945949Z node 52 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:50.945954Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:50.945982Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:50.946014Z node 52 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:50.946019Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [52:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T09:22:50.946025Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [52:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T09:22:50.946099Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.946107Z node 52 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T09:22:50.946118Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.946123Z node 52 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:22:50.946127Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:22:50.946133Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T09:22:50.946138Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:22:50.946144Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:22:50.946148Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:22:50.946173Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:22:50.946178Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 0 2024-11-21T09:22:50.946183Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:22:50.946187Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 
2024-11-21T09:22:50.946340Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [52:203:2206], Recipient [52:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2024-11-21T09:22:50.946347Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:22:50.946359Z node 52 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.946369Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.946374Z node 52 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:22:50.946379Z node 52 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:22:50.946384Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:22:50.946398Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.946543Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [52:203:2206], Recipient [52:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2024-11-21T09:22:50.946550Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:22:50.946562Z node 52 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.946571Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.946575Z node 52 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:22:50.946580Z node 52 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T09:22:50.946584Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:50.946595Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T09:22:50.946599Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:22:50.947101Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.947225Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.947230Z node 52 
:FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:22:50.947466Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:22:50.947473Z node 52 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T09:22:50.947520Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:22:50.947525Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T09:22:50.947582Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [52:420:2377], Recipient [52:121:2147]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:50.947588Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:50.947593Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:22:50.947616Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [52:417:2374], Recipient [52:121:2147]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1002 2024-11-21T09:22:50.947620Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:22:50.947631Z node 52 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:22:50.947649Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:22:50.947654Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [52:418:2375] 2024-11-21T09:22:50.947675Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [52:420:2377], Recipient [52:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.947684Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:22:50.947688Z node 52 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2024-11-21T09:22:50.947734Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [52:421:2378], Recipient [52:121:2147]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:22:50.947738Z node 52 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:22:50.947748Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:50.947783Z node 52 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 34us result status StatusSuccess 2024-11-21T09:22:50.947855Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.011884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.011923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.011928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.011934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.011955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.011959Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.011969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.026460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.026479Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.028554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.028647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.028667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.030963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.031017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.031124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036440Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.053441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.053492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.053581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.053588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.054259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.054276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.054280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.054733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:21:56.055142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.055163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.056106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.056143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.056311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056335Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.056392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.056398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.056424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.056434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.056869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.056922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.056985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.057000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.057004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.057014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.057021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.057031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.057035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.057038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
594046678944 for txId: 1003 at step: 5000004 2024-11-21T09:22:50.692470Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:50.692493Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 957777709160 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:50.692501Z node 223 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T09:22:50.692589Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T09:22:50.692599Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T09:22:50.692628Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T09:22:50.692656Z node 223 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[223:398:2369], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:50.693118Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:50.693128Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:50.693182Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:50.693188Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [223:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:50.693284Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.693294Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T09:22:50.693298Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T09:22:50.693405Z node 223 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:50.693438Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:50.693443Z node 223 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046678944, txId: 1003 2024-11-21T09:22:50.693448Z node 223 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:22:50.693453Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T09:22:50.693469Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:22:50.694029Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.694042Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:50.694056Z node 223 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:50.694060Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:50.694067Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:22:50.694080Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [223:321:2313] message: TxId: 1003 2024-11-21T09:22:50.694088Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:50.694095Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:50.694100Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:50.694157Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T09:22:50.694327Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:50.694620Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:50.694629Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [223:598:2512] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:22:50.694764Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:50.694805Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 50us result status StatusSuccess 2024-11-21T09:22:50.694901Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 
Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:50.694983Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T09:22:50.695004Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 19us result status StatusSuccess 2024-11-21T09:22:50.695040Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T09:22:50.695084Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:50.695100Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2024-11-21T09:22:50.695153Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[expr-cast_utf8-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Results] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[action-eval_each_input_table-default.txt-Debug] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Analyze] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsFields >> test.py::test[expr-cast_utf8-default.txt-Results] [GOOD] >> test.py::test[expr-distinct_from_opt-default.txt-Analyze] >> test.py::test[params-primitives--Results] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Analyze] >> test.py::test[count-count_all_grouped--Analyze] >> test.py::test[sampling-read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-read-dynamic-Plan] [GOOD] >> test.py::test[sampling-read-dynamic-Results] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Debug] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] >> test.py::test[library-library_alias--Analyze] >> Secret::DeactivatedQueryService >> KqpRm::DisonnectNodes |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[limit-limit_over_sort_desc_in_subquery--Analyze] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Debug] >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> VDiskBalancing::TestRandom_Mirror3dc >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Plan] >> test.py::test[expr-distinct_from_opt-default.txt-Analyze] [GOOD] >> test.py::test[expr-distinct_from_opt-default.txt-Debug] >> test.py::test[pg-pg_corr_count-default.txt-Analyze] [GOOD] >> 
test.py::test[pg-pg_corr_count-default.txt-Debug] >> test.py::test[order_by-assume_with_filter--Debug] >> test.py::test[action-eval_each_input_table-default.txt-Plan] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Results] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[join-left_all--Analyze] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Analyze] >> test.py::test[library-library_alias--Analyze] [GOOD] >> test.py::test[library-library_alias--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:156:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:159:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:160:2057] recipient: [4:158:2180] Leader for TabletID 72057594037927937 is [4:161:2181] sender: [4:162:2057] recipient: [4:158:2180] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:161:2181] Leader for TabletID 72057594037927937 is [4:161:2181] sender: [4:231:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:161:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:164:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:165:2057] recipient: [5:163:2185] Leader for TabletID 72057594037927937 is [5:166:2186] sender: [5:167:2057] recipient: [5:163:2185] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:166:2186] Leader for TabletID 72057594037927937 is [5:166:2186] sender: [5:236:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:161:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:164:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:165:2057] recipient: [6:163:2185] Leader for TabletID 72057594037927937 is [6:166:2186] sender: [6:167:2057] recipient: [6:163:2185] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:166:2186] Leader for TabletID 72057594037927937 is [6:166:2186] sender: [6:236:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:162:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:165:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:166:2057] recipient: [7:164:2185] Leader for TabletID 72057594037927937 is [7:167:2186] sender: [7:168:2057] recipient: [7:164:2185] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:167:2186] Leader for TabletID 72057594037927937 is [7:167:2186] sender: [7:237:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:167:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:170:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:171:2057] recipient: [8:169:2190] Leader for TabletID 72057594037927937 is [8:172:2191] sender: [8:173:2057] recipient: [8:169:2190] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:172:2191] Leader for TabletID 72057594037927937 is [8:172:2191] sender: [8:242:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:167:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:170:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:171:2057] recipient: [9:169:2190] Leader for TabletID 72057594037927937 is [9:172:2191] sender: [9:173:2057] recipient: [9:169:2190] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:172:2191] Leader for TabletID 72057594037927937 is [9:172:2191] sender: [9:242:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:169:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:172:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:173:2057] recipient: [10:171:2191] Leader for TabletID 72057594037927937 is [10:174:2192] sender: [10:175:2057] recipient: [10:171:2191] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:174:2192] Leader for TabletID 72057594037927937 is [10:174:2192] sender: [10:244:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:174:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:177:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:178:2057] recipient: [11:176:2196] Leader for TabletID 72057594037927937 is [11:179:2197] sender: [11:180:2057] recipient: [11:176:2196] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:179:2197] Leader for TabletID 72057594037927937 is [11:179:2197] sender: [11:249:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... sender: [27:181:2057] recipient: [27:180:2198] Leader for TabletID 72057594037927937 is [27:182:2199] sender: [27:183:2057] recipient: [27:180:2198] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! 
new actor is[27:182:2199] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:141:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:144:2057] recipient: [30:143:2166] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:146:2167] sender: [30:147:2057] recipient: [30:143:2166] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:146:2167] Leader for TabletID 72057594037927937 is [30:146:2167] sender: [30:216:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:141:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:144:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:145:2057] recipient: [31:143:2166] Leader for TabletID 72057594037927937 is [31:146:2167] sender: [31:147:2057] recipient: [31:143:2166] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! 
new actor is[31:146:2167] Leader for TabletID 72057594037927937 is [31:146:2167] sender: [31:216:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:142:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:145:2057] recipient: [32:144:2166] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:146:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:147:2167] sender: [32:148:2057] recipient: [32:144:2166] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:147:2167] Leader for TabletID 72057594037927937 is [32:147:2167] sender: [32:217:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:147:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:149:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:151:2057] recipient: [33:150:2171] Leader for TabletID 72057594037927937 is [33:152:2172] sender: [33:153:2057] recipient: [33:150:2171] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:152:2172] Leader for TabletID 72057594037927937 is [33:152:2172] sender: [33:222:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:147:2057] recipient: [34:97:2132] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:150:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:151:2057] recipient: [34:149:2171] Leader for TabletID 72057594037927937 is [34:152:2172] sender: [34:153:2057] recipient: [34:149:2171] !Reboot 72057594037927937 (actor [34:105:2137]) rebooted! !Reboot 72057594037927937 (actor [34:105:2137]) tablet resolver refreshed! 
new actor is[34:152:2172] Leader for TabletID 72057594037927937 is [34:152:2172] sender: [34:222:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:148:2057] recipient: [35:97:2132] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:150:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:152:2057] recipient: [35:151:2171] Leader for TabletID 72057594037927937 is [35:153:2172] sender: [35:154:2057] recipient: [35:151:2171] !Reboot 72057594037927937 (actor [35:105:2137]) rebooted! !Reboot 72057594037927937 (actor [35:105:2137]) tablet resolver refreshed! new actor is[35:153:2172] Leader for TabletID 72057594037927937 is [35:153:2172] sender: [35:223:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:153:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:156:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:157:2057] recipient: [36:155:2176] Leader for TabletID 72057594037927937 is [36:158:2177] sender: [36:159:2057] recipient: [36:155:2176] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! new actor is[36:158:2177] Leader for TabletID 72057594037927937 is [36:158:2177] sender: [36:228:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:153:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:155:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:157:2057] recipient: [37:156:2176] Leader for TabletID 72057594037927937 is [37:158:2177] sender: [37:159:2057] recipient: [37:156:2176] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! 
new actor is[37:158:2177] Leader for TabletID 72057594037927937 is [37:158:2177] sender: [37:228:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:154:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:157:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:158:2057] recipient: [38:156:2176] Leader for TabletID 72057594037927937 is [38:159:2177] sender: [38:160:2057] recipient: [38:156:2176] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! new actor is[38:159:2177] Leader for TabletID 72057594037927937 is [38:159:2177] sender: [38:229:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] >> test.py::test[count-count_all_grouped--Analyze] [GOOD] >> test.py::test[count-count_all_grouped--Debug] >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Analyze] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Plan] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Analyze] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Debug] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[expr-distinct_from_opt-default.txt-Debug] [GOOD] >> test.py::test[expr-distinct_from_opt-default.txt-ForceBlocks] >> test.py::test[join-left_all--Analyze] [GOOD] >> test.py::test[join-left_all--Debug] >> test.py::test[sampling-take_with_sampling-default.txt-Analyze] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Debug] >> test.py::test[order_by-assume_with_filter--Debug] [GOOD] >> test.py::test[order_by-assume_with_filter--Plan] [GOOD] >> test.py::test[order_by-assume_with_filter--Results] >> test.py::test[library-library_alias--Debug] [GOOD] >> test.py::test[library-library_alias--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:21:56.012428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:21:56.012446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:21:56.012454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:21:56.012463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:21:56.012465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:21:56.012470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:21:56.012529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:21:56.024627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:21:56.024648Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.026449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:21:56.026529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:21:56.026550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:21:56.029498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:21:56.029564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:21:56.030945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.031816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.033210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035138Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.035149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:21:56.035156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.035163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:21:56.035207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:21:56.036480Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:21:56.053126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:21:56.053183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:21:56.053281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:21:56.053288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.053999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:21:56.054036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:21:56.054055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:21:56.054060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:21:56.054547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:21:56.054576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 
-> 128 2024-11-21T09:21:56.055211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.055238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.055245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.055921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:21:56.056516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:21:56.056561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:21:56.056736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:21:56.056772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.056837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:21:56.056846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:21:56.056889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:21:56.056903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:21:56.057376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:21:56.057417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:21:56.057492Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:21:56.057500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:21:56.057510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:21:56.057514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:21:56.057523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:21:56.057528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:21:56.057531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:21:56.057544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:21:56.057548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:21:56.057552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 2024-11-21T09:22:53.387461Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:53.387483Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 1000727382120 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:53.387490Z node 233 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T09:22:53.387571Z node 233 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T09:22:53.387581Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T09:22:53.387605Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T09:22:53.387631Z node 233 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[233:417:2380], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T09:22:53.388131Z node 233 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:53.388139Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:53.388182Z node 233 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:53.388188Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [233:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T09:22:53.388291Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:53.388301Z node 233 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T09:22:53.388305Z node 233 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T09:22:53.388417Z node 233 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:53.388430Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:22:53.388434Z node 233 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:22:53.388439Z node 233 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T09:22:53.388444Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T09:22:53.388460Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T09:22:53.389117Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T09:22:53.389131Z node 233 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T09:22:53.389144Z node 233 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T09:22:53.389149Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:53.389155Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T09:22:53.389169Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [233:324:2316] message: TxId: 1003 2024-11-21T09:22:53.389175Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T09:22:53.389181Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T09:22:53.389186Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T09:22:53.389227Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T09:22:53.389318Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:22:53.389645Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:22:53.389657Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [233:635:2538] TestWaitNotification: OK eventTxId 
1003 2024-11-21T09:22:53.389784Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:53.389827Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 51us result status StatusSuccess 2024-11-21T09:22:53.389956Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:53.390049Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409547 2024-11-21T09:22:53.390072Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409547 describe path "/MyRoot/USER_0" took 23us result status StatusSuccess 2024-11-21T09:22:53.390115Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409547 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409547, at schemeshard: 72075186233409547 2024-11-21T09:22:53.390170Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:53.390188Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T09:22:53.390241Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[count-count_all_grouped--Debug] [GOOD] >> test.py::test[count-count_all_grouped--ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Analyze] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Debug] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Analyze] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[action-subquery_orderby2-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Debug] >> test.py::test[pg-pg_corr_count-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-ForceBlocks] >> KqpRm::DisonnectNodes [GOOD] |97.4%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_join--Debug] >> test.py::test[expr-distinct_from_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-distinct_from_opt-default.txt-Plan] [GOOD] >> test.py::test[expr-distinct_from_opt-default.txt-Results] >> test.py::test[order_by-order_by_missing_project_column_join--Debug] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_join--Plan] [SKIPPED] >> test.py::test[order_by-order_by_missing_project_column_join--Results] [SKIPPED] >> test.py::test[order_by-order_by_tablerecord_column--Debug] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] [GOOD] >> test.py::test[join-left_all--Debug] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Plan] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] >> test.py::test[join-left_all--ForceBlocks] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2024-11-21T09:22:52.965191Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T09:22:52.994537Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T09:22:52.994682Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T09:22:52.995638Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T09:22:53.013648Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:53.015535Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:53.016993Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:53.017398Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T09:22:53.017510Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:53.017514Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T09:22:53.017535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T09:22:53.020304Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T09:22:53.020346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T09:22:53.020355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T09:22:53.020410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:22:53.020420Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T09:22:53.020521Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:22:53.043492Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T09:22:53.043542Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:22:53.054348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T09:22:53.054393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:22:53.054410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T09:22:53.054423Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:22:53.054449Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T09:22:53.054458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:22:53.054463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T09:22:53.054472Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:22:53.065208Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T09:22:53.065249Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T09:22:53.065451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T09:22:53.065459Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T09:22:53.067014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T09:22:53.068431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T09:22:53.068905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 
2024-11-21T09:22:53.069705Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat 2024-11-21T09:22:53.069717Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat 2024-11-21T09:22:53.070266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T09:22:53.070359Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T09:22:53.070379Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:22:53.070389Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:22:53.070422Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T09:22:53.070456Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T09:22:53.070826Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T09:22:53.070862Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:22:53.081871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T09:22:53.082097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T09:22:53.083902Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T09:22:53.084039Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T09:22:53.084180Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/jptk/0015fa/r3tmp/tmpj9PHeX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3150830260591397321 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T09:22:53.085514Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:22:53.085573Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:22:53.085579Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T09:22:53.085605Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T09:22:53.085613Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:22:53.085676Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:22:53.085694Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:22:53.085699Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T09:22:53.085705Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T09:22:53.085716Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T09:22:53.085790Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:22:53.085801Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:22:53.085805Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:22:53.085830Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T09:22:53.085879Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T09:22:53.085885Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T09:22:53.085889Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:22:53.085895Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T09:22:53.086511Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T09:22:53.086520Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T09:22:53.086545Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T09:22:53.086548Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T09:22:53.086727Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T09:22:53.086755Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:22:53.086762Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:22:53.086765Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:22:53.086875Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T09:22:53.086916Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:22:53.086921Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:22:53.086924Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:22:53.106939Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T09:22:53.106964Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T09:22:53.106968Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T09:22:53.106973Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T09:22:53.130773Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:22:53.131542Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T09:22:53.131700Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T09:22:53.132165Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T09:22:53.164818Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T09:22:53.216499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T09:22:53.258510Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T09:22:53.513855Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T09:22:53.514456Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T09:22:53.514522Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T09:22:54.048141Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2024-11-21T09:22:54.048254Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2024-11-21T09:22:54.048414Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2024-11-21T09:22:54.048554Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:54:2071] ServerId# [1:348:2268] TabletId# 72057594037932033 PipeClientId# [2:54:2071] 2024-11-21T09:22:54.048592Z node 2 :TX_PROXY WARN: actor# [2:142:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2024-11-21T09:22:54.048608Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientDestroyed {TabletId=72057594046578946 ClientId=[2:371:2096] 2024-11-21T09:22:54.048613Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar HandlePipeDestroyed - DISCONNECTED 2024-11-21T09:22:54.048620Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T09:22:54.048635Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:508:2096] 2024-11-21T09:22:54.049205Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:508:2096] 2024-11-21T09:22:54.049247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T09:22:54.049317Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T09:22:54.049337Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T09:22:54.049346Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T09:22:54.049348Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T09:22:54.060235Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] >> test.py::test[count-count_all_grouped--ForceBlocks] [GOOD] >> test.py::test[library-library_alias--ForceBlocks] [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Debug] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Analyze] [GOOD] >> Secret::Deactivated >> EscapingBasics::HideSecretsShouldWork [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Debug] >> test.py::test[action-subquery_orderby2-default.txt-ForceBlocks] >> test.py::test[count-count_all_grouped--Plan] [GOOD] >> test.py::test[library-library_alias--Plan] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] >> test.py::test[expr-distinct_from_opt-default.txt-Results] [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> test.py::test[count-count_all_grouped--Results] >> test.py::test[library-library_alias--Results] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> test.py::test[library-library_alias--Results] [GOOD] >> test.py::test[expr-expr_yql_function-default.txt-Analyze] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> 
test.py::test[limit-insert_with_limit--Analyze] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [GOOD] >> test.py::test[join-left_all--ForceBlocks] [GOOD] >> test.py::test[join-left_all--Plan] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> test.py::test[lineage-flatten_by--Analyze] >> test.py::test[join-left_all--Plan] [GOOD] >> test.py::test[join-left_all--Results] >> test.py::test[count-count_all_grouped--Results] [GOOD] >> test.py::test[csee-expr-default.txt-Analyze] >> KqpErrors::ProposeResultLost_RwTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] Leader for TabletID 9437184 is [1:161:2139] sender: [1:162:2058] recipient: [1:153:2135] Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] Leader for TabletID 9437185 is [2:177:2096] sender: [2:180:2049] recipient: [2:156:2093] Leader for TabletID 9437184 is [1:161:2139] sender: [1:205:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [1:207:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:209:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:210:2049] recipient: [2:150:2092] Leader for TabletID 9437185 is [2:177:2096] sender: [1:213:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:215:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:216:2049] recipient: [2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [2:218:2049] recipient: [2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [1:247:2058] recipient: [1:15:2062] >> test.py::test[expr-expr_yql_function-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Debug] [GOOD] >> EntityId::Distinct [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--Debug] [GOOD] >> test.py::test[limit-insert_with_limit--Analyze] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Results] >> test.py::test[order_by-order_by_tablerecord_column--Plan] [GOOD] >> test.py::test[expr-expr_yql_function-default.txt-Debug] >> test.py::test[limit-insert_with_limit--Debug] >> EntityId::MaxId [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Results] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] >> test.py::test[action-subquery_orderby2-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_by--Analyze] [GOOD] >> test.py::test[csee-expr-default.txt-Analyze] [GOOD] >> 
test.py::test[aggregate-group_compact_sorted_with_diff_order--Debug] [GOOD] >> test.py::test[insert-insert_null-default.txt-Analyze] >> test.py::test[order_by-order_by_tablerecord_column--Results] >> test.py::test[sampling-take_with_sampling-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Analyze] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] >> YdbOlapStore::BulkUpsert [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[lineage-flatten_by--Debug] >> test.py::test[csee-expr-default.txt-Debug] >> YdbOlapStore::DuplicateRows >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |97.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[expr-expr_yql_function-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_yql_function-default.txt-ForceBlocks] >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] >> test.py::test[action-discard-default.txt-Analyze] >> test.py::test[join-left_all--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Analyze] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[schema-def_values--Analyze] >> test.py::test[params-param_type_mismatch_fail--Debug] [SKIPPED] >> test.py::test[limit-insert_with_limit--Debug] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[params-param_type_mismatch_fail--Plan] [SKIPPED] >> test.py::test[pg-pg_corr_count-default.txt-Results] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Plan] [GOOD] >> test.py::test[limit-insert_with_limit--ForceBlocks] >> test.py::test[params-param_type_mismatch_fail--Results] >> test.py::test[pg-pg_types_array_literal-default.txt-Analyze] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[insert-insert_null-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Debug] >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] >> test.py::test[insert-insert_null-default.txt-Debug] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[csee-expr-default.txt-Debug] [GOOD] >> test.py::test[csee-expr-default.txt-ForceBlocks] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2024-11-21T09:22:26.700500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660516847605829:2096];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:26.700564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004123/r3tmp/tmpwxPUVY/pdisk_1.dat 2024-11-21T09:22:26.773652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3943, node 1 2024-11-21T09:22:26.792120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:26.792132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:26.792134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:26.792172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:26.800111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:26.800139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:26.801647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:26.846625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.847706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.847723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.850423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:26.850475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:26.850484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:26.851239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:26.851252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:26.851310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:26.851702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.852751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180946902, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:26.852761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:26.852832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:26.853222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.853274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.853289Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:26.853306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:26.853314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:26.853334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:26.853720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:26.853739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:26.853743Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:26.853756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:26.876672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" 
DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:48566" , at schemeshard: 72057594046644480 2024-11-21T09:22:26.876764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.876943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T09:22:26.876963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T09:22:26.876985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T09:22:26.876996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T09:22:26.877009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T09:22:26.877017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T09:22:26.877074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T09:22:26.877331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:26.877380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:26.877391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.877408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T09:22:26.877418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T09:22:26.877946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T09:22:26.877973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2024-11-21T09:22:26.878028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:26.878036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T09:22:26.878071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2024-11-21T09:22:26.878090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:26.878098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660516847606352:2373], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2024-11-21T09:22:26.878109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439660516847606352:2373], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2024-11-21T09:22:26.878123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:26.878129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 2024-11-21T09:22:26.878240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { ... RD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037888 TxId: 281474976715659 2024-11-21T09:22:45.940585Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037888, partId: 0 2024-11-21T09:22:45.940596Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037888 TxId: 281474976715659 2024-11-21T09:22:45.940637Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.940644Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2024-11-21T09:22:45.940655Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.940666Z node 67 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.940681Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2024-11-21T09:22:45.940689Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2024-11-21T09:22:45.940697Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2024-11-21T09:22:45.940901Z node 67 :TX_COLUMNSHARD DEBUG: TxPlanStep[4] complete at tablet 72075186224037890 2024-11-21T09:22:45.940941Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 
2024-11-21T09:22:45.940957Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037890 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.940967Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037890, partId: 0 2024-11-21T09:22:45.940980Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037890 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.940989Z node 67 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037890 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.941015Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037890 TxId: 281474976715659 2024-11-21T09:22:45.941022Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037890, partId: 0 2024-11-21T09:22:45.941031Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037890 TxId: 281474976715659 2024-11-21T09:22:45.941051Z node 67 :TX_COLUMNSHARD DEBUG: TxPlanStep[4] complete at tablet 72075186224037891 2024-11-21T09:22:45.941072Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:739;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T09:22:45.941077Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.941079Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037891, partId: 0 2024-11-21T09:22:45.941088Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.941093Z node 67 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732180965984 2024-11-21T09:22:45.941111Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037891 TxId: 281474976715659 2024-11-21T09:22:45.941118Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037891, partId: 0 2024-11-21T09:22:45.941124Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037891 TxId: 281474976715659 2024-11-21T09:22:45.941198Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941254Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941325Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941348Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941652Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941670Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941677Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941683Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941691Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.941703Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:22:45.941719Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T09:22:45.941727Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2024-11-21T09:22:45.941731Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715659, ready parts: 1/1, is published: true 2024-11-21T09:22:45.941743Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [67:7439660596939796217:2311] message: TxId: 281474976715659 2024-11-21T09:22:45.941753Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2024-11-21T09:22:45.941757Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:22:45.941767Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715659:0 2024-11-21T09:22:45.941808Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T09:22:45.945336Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/LogStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "log2" TtlSettings { Enabled { ColumnName: "saved_at" ExpireAfterSeconds: 2000 } } SchemaPresetName: "default" ColumnShardCount: 4 Sharding { HashSharding { Function: HASH_FUNCTION_CLOUD_LOGS Columns: "timestamp" Columns: "uid" } } } } TxId: 281474976715660 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:57030" , at schemeshard: 72057594046644480 2024-11-21T09:22:45.945402Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /Root/LogStore/log2, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T09:22:45.945442Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Haven't MAX-index for TTL column and TTL column is not first column in primary key, at schemeshard: 72057594046644480 2024-11-21T09:22:45.945985Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusSchemeError Reason: "Haven\'t MAX-index for TTL column and TTL column is not first column in primary key" TxId: 281474976715660 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T09:22:45.946020Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusSchemeError, reason: Haven't MAX-index for TTL column and TTL column is not first column in primary key, operation: CREATE COLUMN TABLE, path: /Root/LogStore/ assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:458, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (SCHEME_ERROR != SUCCESS)
: Error: Haven't MAX-index for TTL column and TTL column is not first column in primary key , with diff: S(|UC)C(H|)E(ME_ERROR|SS) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x15A32CB9) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+10200 (0x1576A4C8) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15779437) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x15A34C6E) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x15778DFD) NUnitTest::TTestFactory::Execute()+803 (0x15A353E3) NUnitTest::RunMain(int, char**)+3005 (0x15A485FD) ??+0 (0x7FAE99F0FD90) __libc_start_main+128 (0x7FAE99F0FE40) _start+41 (0x1477E029) >> test.py::test[expr-expr_yql_function-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-def_values--Analyze] [GOOD] >> test.py::test[expr-expr_yql_function-default.txt-Plan] [GOOD] >> test.py::test[action-discard-default.txt-Analyze] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Analyze] [GOOD] >> test.py::test[params-param_type_mismatch_fail--Results] [GOOD] >> test.py::test[lineage-flatten_by--Debug] [GOOD] >> test.py::test[expr-expr_yql_function-default.txt-Results] >> test.py::test[schema-def_values--Debug] >> LabeledDbCounters::OneTablet [GOOD] >> test.py::test[action-discard-default.txt-Debug] >> test.py::test[lineage-flatten_by--ForceBlocks] >> test.py::test[join-lookupjoin_not_selected-off-Debug] >> test.py::test[params-tuple--Debug] >> LabeledDbCounters::OneTabletRemoveCounters >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Analyze] >> test.py::test[expr-expr_yql_function-default.txt-Results] [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-Analyze] >> test.py::test[pg-pg_types_array_literal-default.txt-Analyze] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-Debug] >> test.py::test[agg_apply-opt_len_count-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-ForceBlocks] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:40.241206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:40.241228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:40.241233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:40.241237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:40.241241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:40.241243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:40.241250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:40.241307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.250804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.250828Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:40.252934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.253027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:40.253071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:40.255496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:40.255552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:40.255646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:40.255833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T09:22:40.256327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:40.256550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:40.256556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:40.256566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:40.256570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:40.256574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:40.256605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:40.257693Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:40.269543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:40.269616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.269677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:40.269716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:40.269722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.270421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:40.270445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:40.270496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.270503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:40.270506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:40.270510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:40.270832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.270840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:40.270843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:40.271275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.271292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.271298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:40.271305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:40.271770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:40.272130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:40.272171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:40.272345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:40.272368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:40.272384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:40.272423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:40.272427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:40.272453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:40.272463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:40.272797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:40.272804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:40.272836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T09:22:40.272840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:40.272925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:40.272930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:40.272939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:40.272941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:40.272946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:40.272950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:40.272953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:40.272956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:40.272963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:40.272967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:40.272970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72057594037968897 2024-11-21T09:22:57.727946Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:22:57.728037Z node 72 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:22:57.728112Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:22:57.728119Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:22:57.728134Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:22:57.728139Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:22:57.728145Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:22:57.728160Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1002:0 2 -> 3 2024-11-21T09:22:57.728257Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:22:57.728557Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:22:57.728589Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:22:57.728595Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:57.728929Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T09:22:57.728952Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T09:22:57.728986Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:22:57.729008Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T09:22:57.729015Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729020Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T09:22:57.729356Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729385Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729394Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729400Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T09:22:57.729424Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:57.729808Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T09:22:57.729830Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T09:22:57.729887Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729907Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729915Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:22:57.729940Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:22:57.729945Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:22:57.729953Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:57.729960Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T09:22:57.729966Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:22:57.729970Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:22:57.729973Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:22:57.729992Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:22:57.729997Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T09:22:57.730001Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T09:22:57.730362Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:57.730375Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:22:57.730396Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:57.730400Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:22:57.730470Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:57.730483Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:22:57.730488Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:22:57.730492Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:22:57.730497Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:22:57.730509Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T09:22:57.730798Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T09:22:57.730848Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:22:57.730855Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T09:22:57.730908Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:22:57.730921Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:22:57.730925Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:22:57.730985Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:22:57.731007Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 30us result status StatusSuccess 2024-11-21T09:22:57.731068Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SystemView::PartitionStatsFields [GOOD] >> test.py::test[insert-insert_null-default.txt-Debug] [GOOD] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] >> test.py::test[csee-expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-expr-default.txt-Plan] [GOOD] >> test.py::test[csee-expr-default.txt-Results] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> test.py::test[limit-insert_with_limit--ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit--Plan] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor |97.5%| [TA] 
$(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[schema-def_values--Debug] [GOOD] >> test.py::test[schema-def_values--ForceBlocks] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Debug] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] >> test.py::test[params-tuple--Debug] [GOOD] >> test.py::test[params-tuple--Plan] [GOOD] >> test.py::test[params-tuple--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0034fc/r3tmp/tmpSJiWAg/pdisk_1.dat 2024-11-21T09:21:36.369052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:21:36.408496Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65517, node 1 2024-11-21T09:21:36.438802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:21:36.438815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:21:36.438825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:21:36.438860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:21:36.458819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.458846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.528502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T09:21:36.544782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.553007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.564098Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660302978209592:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.564196Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:36.564415Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439660302707404142:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.565608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.566585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.566601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.567173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.567187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.568402Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T09:21:36.568414Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T09:21:36.568704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:36.568769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:36.572981Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:36.633699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:21:36.638475Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439660302568629792:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.638634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:36.640297Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439660299659302512:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:21:36.640437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:21:36.642014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.642034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.642568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:21:36.642583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:21:36.646782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:21:36.644018Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T09:21:36.660261Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:21:36.660414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:36.660511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:21:36.723412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T09:21:36.805674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299409797406:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.805701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660299409797414:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.805709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:21:36.806394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T09:21:36.810473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439660299409797420:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T09:21:36.941666Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70dv14640b3782m3kpw3ap, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhMzhjZTUtN2M5MjhiZDItNTlhYTdjNWYtYzFiZDllM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:36.946588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.028058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70dv7r1s4zwy8ntc77krsm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhMzhjZTUtN2M5MjhiZDItNTlhYTdjNWYtYzFiZDllM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:37.033077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T09:21:37.117257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70dvag5qe9qe966hnb5yzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhMzhjZTUtN2M5MjhiZDItNTlhYTdjNWYtYzFiZDllM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:37.135881Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439660303704765045:2337] TxId: 281474976715671. Ctx: { TraceId: 01jd70dvb49mg3cyqcrwy249ba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I0ZDYzY2EtNTAyYjM5ZmMtZGU5ZjkxODktYjQzZTk0NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T09:21:37.135960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd70dvb49mg3cyqcrwy249ba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I0ZDYzY2EtNTAyYjM5ZmMtZGU5ZjkxODktYjQzZTk0NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:21:37.136766Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439660303704765052:2342], owner: [1:7439660303704765048:2340], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:21:37.136930Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439660303704765052:2342], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:21:37.137045Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439660303704765052:2342], row count: 1, finished: 1 2024-11-21T09:21:37.137064Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439660303704765052:2342], owner: [1:7439660303704765048:2340 ... T09:22:52.475623Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T09:22:52.480097Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T09:22:52.624178Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439660625585880202:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:52.624199Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:52.624224Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439660625585880213:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:52.624982Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T09:22:52.629156Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439660625585880216:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:22:52.716555Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70g52f1n8vh9ftj8v21y4b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=OWVjYTIzZWUtZjg1MTMwNjUtMzFiMTVlMmEtNDViMTIxMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:52.729509Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd70g55f1xk7k1bmgvv7gqxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MjQwZTE3NmEtZDYxZTFhNTEtYzEzYmM1YjEtOGM5YWEyNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:52.730084Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660625585880368:2322], owner: [16:7439660625585880364:2320], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:52.730222Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660625585880368:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:52.730303Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660625585880368:2322], row count: 1, finished: 1 2024-11-21T09:22:52.730311Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660625585880368:2322], owner: [16:7439660625585880364:2320], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:52.730983Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180972729, txId: 281474976715662] shutting down 2024-11-21T09:22:53.744260Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd70g6540rv1ybstw8e21878, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZGE2MDhmYzAtNmQzYzk3NDUtN2E4NGY4My01Mzc2ZDFlOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:53.744788Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660629880847738:2333], owner: [16:7439660629880847734:2331], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:53.744978Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660629880847738:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:53.745078Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660629880847738:2333], row count: 1, finished: 1 2024-11-21T09:22:53.745090Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660629880847738:2333], owner: [16:7439660629880847734:2331], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:53.745645Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180973743, txId: 281474976715664] shutting down 2024-11-21T09:22:54.757673Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd70g74tbfbdsbt9ewn0t0qd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZDI0MWJiMGEtYjZiY2VmNzMtMzdhYzIyOTQtZmU2ZmQxNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:54.758325Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660634175815079:2344], owner: [16:7439660634175815075:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:54.758489Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660634175815079:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:54.758595Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660634175815079:2344], row count: 1, finished: 1 2024-11-21T09:22:54.758617Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660634175815079:2344], owner: [16:7439660634175815075:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:54.759269Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180974757, txId: 281474976715666] shutting down 2024-11-21T09:22:55.770517Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd70g84g6gf16fd00wwrpx8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MTcyNmI1OTQtM2Y1MzZiYzYtZmMxYWRiYTYtZTQ2MTYwNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:55.771080Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660638470782420:2355], owner: [16:7439660638470782416:2353], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:55.771267Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660638470782420:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:55.771359Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660638470782420:2355], row count: 1, finished: 1 2024-11-21T09:22:55.771375Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660638470782420:2355], owner: [16:7439660638470782416:2353], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:55.771956Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180975769, txId: 281474976715668] shutting down 2024-11-21T09:22:56.782642Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd70g945anpwa97ghy38jwjy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MWNiMzVlNTItZmFkZWM4NTctYTcyNGVhMjUtZmU0ODdlMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:56.783148Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660642765749761:2366], owner: [16:7439660642765749757:2364], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:56.783320Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660642765749761:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:56.783495Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660642765749761:2366], row count: 1, finished: 1 2024-11-21T09:22:56.783507Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660642765749761:2366], owner: [16:7439660642765749757:2364], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:56.784020Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180976782, txId: 281474976715670] shutting down 2024-11-21T09:22:57.371605Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7439660625585879304:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:57.371653Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:22:57.796366Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd70ga3s6a9khn22t3ae0xkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=OTM3OWI2OWQtMzU3MjMxODYtNzA2ZTY4NGEtMmQyYmI2NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:57.796824Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660647060717112:2380], owner: [16:7439660647060717108:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:57.796970Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660647060717112:2380], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:57.797059Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660647060717112:2380], row count: 1, finished: 1 2024-11-21T09:22:57.797078Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660647060717112:2380], owner: [16:7439660647060717108:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:57.797612Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180977795, txId: 281474976715672] shutting down 2024-11-21T09:22:57.813384Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd70ga466xjnjh7bkr0yhz84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Y2U1OGRjYWEtNWU1NjBlYTItNjVjODQzZTAtODkzZTY0N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T09:22:57.814040Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439660647060717144:2389], owner: [16:7439660647060717140:2387], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:57.814234Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439660647060717144:2389], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T09:22:57.814319Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439660647060717144:2389], row count: 1, finished: 1 2024-11-21T09:22:57.814337Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439660647060717144:2389], owner: [16:7439660647060717140:2387], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T09:22:57.815157Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732180977812, txId: 281474976715674] shutting down >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Plan] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected-off-Plan] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Results] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2024-11-21T09:22:30.597492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660531151869190:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:30.597506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004107/r3tmp/tmpqYN9xY/pdisk_1.dat 2024-11-21T09:22:30.649237Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29726, node 1 2024-11-21T09:22:30.673096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:30.673112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:30.673114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:30.673153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21667 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T09:22:30.697676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:30.697714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:30.699098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:30.727155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.728118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:30.728133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.728605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:30.728656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:30.728664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:30.729095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:30.729106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:30.729176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:30.729434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:30.730200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180950773, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:30.730210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:30.730295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:30.730762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:30.730805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:30.730820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:30.730834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:30.730845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:30.730856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:30.731242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:30.731262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:30.731265Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:30.731278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:30.746645Z node 1 :TICKET_PARSER DEBUG: Ticket 5C648993FDA29C26831C11745378E453F3A211EF (ipv6:[::1]:49460) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T09:22:30.760126Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:49470) has now valid token of root@builtin 2024-11-21T09:22:30.770733Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T09:22:31.444537Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660536916124399:2208];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:31.444565Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/jptk/004107/r3tmp/tmpZbPgEK/pdisk_1.dat 2024-11-21T09:22:31.458861Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30624, node 4 2024-11-21T09:22:31.478524Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:31.478537Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:31.478539Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:31.478582Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:31.544483Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:31.544526Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:31.546089Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:31.548170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.548298Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:31.548310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.548795Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:31.548842Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:31.548851Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:31.549304Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:31.549317Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:31.551718Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:31.552406Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:31.552762Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180951599, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:31.552777Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:31.552832Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:31.553462Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:31.553499Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:31.553532Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:31.553549Z node 4 :FLAT_TX_SC ... ion { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:37.682388Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:37.682431Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:37.683908Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:37.685166Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:37.685268Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:37.685283Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:37.685752Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:37.685806Z node 25 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:37.685808Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:37.686206Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:37.686221Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:37.686363Z node 25 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:37.686620Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:37.687545Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180957731, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:37.687561Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:37.687619Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:37.688035Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:37.688094Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:37.688113Z node 25 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:37.688135Z node 25 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:37.688149Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:37.688167Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:37.688398Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:37.688413Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:37.688417Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:37.688429Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:42.582037Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7439660564470357828:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:42.582075Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:22:47.712503Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:36962) has now valid token of root@builtin 2024-11-21T09:22:47.721825Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T09:22:48.281792Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7439660608595270691:2052];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T09:22:48.281972Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/004107/r3tmp/tmp9xYZcU/pdisk_1.dat 2024-11-21T09:22:48.298015Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16032, node 28 2024-11-21T09:22:48.318205Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:48.318219Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:48.318221Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:48.318278Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:48.382131Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:48.382178Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:48.383724Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:48.385217Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.385325Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:48.385338Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.385805Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:48.385842Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:48.385845Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:48.386258Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:48.386271Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:48.386405Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:48.386621Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:48.387495Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180968434, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:48.387514Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:48.387587Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:48.388034Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:48.388081Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:48.388091Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:48.388110Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:48.388123Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:48.388135Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:48.388327Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:48.388349Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:48.388353Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:48.388367Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:53.282346Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7439660608595270691:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:53.282388Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T09:22:58.414523Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54212) has now valid token of root@builtin 2024-11-21T09:22:58.424600Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork 
[GOOD] >> test.py::test[csee-expr-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Analyze] >> test.py::test[pg-pg_types_array_literal-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Analyze] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Debug] >> test.py::test[action-discard-default.txt-Debug] [GOOD] >> test.py::test[action-discard-default.txt-ForceBlocks] >> test.py::test[expr-fallback_filternullelements-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:119:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:158:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:117:2145] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[lineage-flatten_by--ForceBlocks] [GOOD] >> test.py::test[lineage-flatten_by--Plan] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> test.py::test[expr-fallback_filternullelements-default.txt-Debug] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[params-tuple--Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Debug] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[lineage-error_type--Analyze] [SKIPPED] >> test.py::test[lineage-error_type--Debug] [SKIPPED] >> test.py::test[lineage-error_type--ForceBlocks] [SKIPPED] >> test.py::test[lineage-error_type--Plan] [SKIPPED] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> 
test.py::test[lineage-select_field-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Debug] [SKIPPED] >> test.py::test[insert-insert_null-default.txt-Plan] [GOOD] >> test.py::test[insert-insert_null-default.txt-Results] >> test.py::test[lineage-select_nested_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-coalesce_propagate-default.txt-Analyze] >> KqpErrors::ProposeResultLost_RwTx [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Plan] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test.py::test[pg-select_unionall_scalar-default.txt-Plan] >> test.py::test[schema-def_values--ForceBlocks] [GOOD] >> test.py::test[schema-def_values--Plan] [GOOD] >> test.py::test[schema-def_values--Results] >> test.py::test[agg_apply-opt_len_count-default.txt-Results] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Analyze] >> test.py::test[join-mergejoin_force_align3-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Debug] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx [GOOD] Test command err: 2024-11-21T09:22:57.125432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T09:22:57.134282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T09:22:57.134422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:57.134476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:57.134992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T09:22:57.135011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/001f90/r3tmp/tmpoWRLdh/pdisk_1.dat 2024-11-21T09:22:57.270746Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:57.363332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:57.452987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:57.453030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:57.454458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:57.454485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:57.465943Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T09:22:57.466104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:57.466207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:57.821949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:58.466503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1515:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:58.466535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:58.466608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:58.468380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T09:22:59.049139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1529:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:59.360958Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T09:22:59.360986Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T09:22:59.362028Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T09:22:59.362049Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T09:22:59.362733Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T09:22:59.371877Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T09:22:59.375716Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.418486s, cancelAfter: (empty maybe) 2024-11-21T09:22:59.375743Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T09:22:59.375754Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T09:22:59.375762Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T09:22:59.375772Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T09:22:59.375926Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T09:22:59.376148Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 0. 
Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T09:22:59.376172Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T09:22:59.376187Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T09:22:59.376240Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T09:22:59.376313Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T09:22:59.376355Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T09:22:59.376380Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T09:22:59.376596Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T09:22:59.376615Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T09:22:59.377274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:59.377305Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd70gas18b1brcetq73y8gkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhNTcwYzMtYjU5NDNlZDktMTA2MDI4NzYtMzAxYzAzZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T09:22:5 ... : { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T09:22:59.413639Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Preparing 2024-11-21T09:22:59.413642Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037889 not finished yet: Preparing 2024-11-21T09:22:59.413646Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037890 not finished yet: Preparing 2024-11-21T09:22:59.413649Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037891 not finished yet: Preparing 2024-11-21T09:22:59.413658Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037888 (Preparing), DS 72075186224037889 (Preparing), DS 72075186224037890 (Preparing), DS 72075186224037891 (Preparing), 2024-11-21T09:22:59.413664Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, not immediate tx, become PrepareState 2024-11-21T09:22:59.414632Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shard 72075186224037889 propose error, notDelivered: 0, notPrepared: 0, wasRestart: 0 2024-11-21T09:22:59.414646Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037888 2024-11-21T09:22:59.414653Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037889 2024-11-21T09:22:59.414660Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037890 2024-11-21T09:22:59.414665Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037891 2024-11-21T09:22:59.416109Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T09:22:59.416125Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 2, does not have the CA id yet or is already complete 2024-11-21T09:22:59.416130Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. 
Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 3, does not have the CA id yet or is already complete 2024-11-21T09:22:59.416134Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 4, does not have the CA id yet or is already complete 2024-11-21T09:22:59.417199Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: UNDETERMINED Issues { message: "State of operation is unknown." issue_code: 2026 severity: 1 issues { message: "Tx state unknown for shard 72075186224037889, txid 281474976715661" issue_code: 200506 severity: 1 } } Result { Stats { } } , to ActorId: [1:1713:3033] 2024-11-21T09:22:59.417242Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T09:22:59.417270Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:59.417276Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T09:22:59.417731Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, ActorId: [1:1713:3033], ActorState: ExecuteState, TraceId: 01jd70gbp7973t932z5nh260jn, Create QueryResponse for error on request, msg: 2024-11-21T09:22:59.418169Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 0. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T09:22:59.418287Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T09:22:59.418379Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Resolved key sets: 0 2024-11-21T09:22:59.418421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:22:59.418428Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T09:22:59.418437Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T09:22:59.418441Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T09:22:59.418457Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T09:22:59.418463Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T09:22:59.418472Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd70gbp7973t932z5nh260jn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJlYTRiYzMtNzg0YzFjZmUtMTIwMTFhMjUtZGI5M2YyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> test.py::test[distinct-distinct_list_after_group-default.txt-Analyze] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Debug] >> test.py::test[pg-pg_types_array_literal-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-Results] >> test.py::test[insert-insert_null-default.txt-Results] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Analyze] >> RunLengthCodec::Random32 >> SemiSortedDeltaAndVarLengthCodec::Random32 >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-grouping_sets--Analyze] >> RunLengthCodec::Random32 [GOOD] >> RunLengthCodec::Random64 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Debug] >> SemiSortedDeltaAndVarLengthCodec::Random32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Debug] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] |97.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> SemiSortedDeltaAndVarLengthCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::BasicTest32 [GOOD] >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> test.py::test[pg-select_unionall_scalar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_unionall_scalar-default.txt-Results] >> test.py::test[expr-fallback_filternullelements-default.txt-Debug] [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-ForceBlocks] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Analyze] >> test.py::test[pg-pg_types_array_literal-default.txt-Results] [GOOD] >> test.py::test[pg-pg_types_orderby--Analyze] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--Analyze] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> test.py::test[action-discard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-discard-default.txt-Plan] [GOOD] >> test.py::test[action-discard-default.txt-Results] >> Cache::Test5 >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Analyze] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Debug] >> YdbYqlClient::TestExplicitPartitioning [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::BasicTest64 [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[insert-insert_relabeled-default.txt-Analyze] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Debug] >> test.py::test[pg-aggregate_minus_zero--Debug] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Plan] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> test.py::test[pg-select_unionall_scalar-default.txt-Results] [GOOD] 
>> test.py::test[pg-select_win_partition_sort-default.txt-Analyze] >> test.py::test[lineage-grouping_sets--Analyze] [GOOD] >> test.py::test[lineage-grouping_sets--Debug] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2024-11-21T09:22:32.033276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660541188009898:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:32.033575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040fe/r3tmp/tmpufRPVG/pdisk_1.dat 2024-11-21T09:22:32.078575Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18610, node 1 2024-11-21T09:22:32.098315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:32.098327Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:32.098329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:32.098361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:32.133831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:32.133863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:32.135596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:32.157239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.158313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.158329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.158839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:32.158891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:32.158900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:32.159198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.159205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:32.159422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:32.159514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.160435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952208, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.160470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:32.160577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:32.161051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.161099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.161113Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:32.161125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:32.161138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:32.161159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:32.161621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:32.161645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:32.161649Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:32.161664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:32.344340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/Foo, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.344397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.345095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/Foo 2024-11-21T09:22:32.345143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.345198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.345216Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.345265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:32.345350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.345360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.345364Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:32.345420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.345437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.345457Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T09:22:32.346236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952390, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.346250Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180952390, at schemeshard: 72057594046644480 2024-11-21T09:22:32.346272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T09:22:32.346607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.346667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T09:22:32.346681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T09:22:32.346698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T09:22:32.346711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T09:22:32.346725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T09:22:32.346806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.346819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.346822Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T09:22:32.346851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.346859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.346860Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T09:22:32.346866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:32.348801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660541188010854:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.348829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.383994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo/Test, opId: 281474976715659:0, at schemeshard: 72057594046644480 2 ... e operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:23:00.409605Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T09:23:00.410001Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.410012Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.410016Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T09:23:00.410056Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.410062Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.410064Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T09:23:00.410080Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.410086Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.410088Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T09:23:00.410103Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.410109Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.410110Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T09:23:00.410127Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.410129Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.410131Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T09:23:00.411174Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180980460, transactions 
count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411186Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180980460, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411213Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T09:23:00.411237Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180980460, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411246Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T09:23:00.411261Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732180980460, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411273Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T09:23:00.411283Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732180980460 2024-11-21T09:23:00.411293Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T09:23:00.411736Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411816Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:23:00.411831Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T09:23:00.411840Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T09:23:00.411867Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T09:23:00.411877Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T09:23:00.411888Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T09:23:00.411896Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T09:23:00.411905Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T09:23:00.411908Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T09:23:00.411913Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T09:23:00.411919Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T09:23:00.411921Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T09:23:00.411924Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T09:23:00.411928Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T09:23:00.412388Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.412413Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.412419Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T09:23:00.412490Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.412511Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.412513Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:23:00.412528Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.412535Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.412536Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:23:00.412548Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.412549Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.412551Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T09:23:00.412562Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T09:23:00.412570Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T09:23:00.412571Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T09:23:00.412576Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T09:23:00.413070Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439660663144672393:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T09:23:00.513346Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:23:00.513381Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:23:00.514210Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T09:23:00.525811Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd70gcnq1nxssbh9ej52mj0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDE2OTMwNTgtM2QwMzViZjItNTYwZDA0YWQtMjdhNjNjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T09:23:00.562627Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd70gcshdc73e22fzyewdp7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDE2OTMwNTgtM2QwMzViZjItNTYwZDA0YWQtMjdhNjNjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TKeyValueTest::TestConcatToLongKey [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Debug] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-Plan] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_align3-off-Results] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-Analyze] >> test.py::test[produce-process_with_udf-default.txt-Analyze] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Debug] >> test.py::test[optimizers-coalesce_propagate-default.txt-Debug] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-ForceBlocks] >> test.py::test[schema-diffrerent_schemas--Analyze] [GOOD] >> test.py::test[schema-diffrerent_schemas--Debug] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Plan] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-Plan] [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-Results] >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:146:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:146:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:152:2173] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:156:2057] recipient: [7:152:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:155:2174] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:225:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:159:2057] recipient: [8:157:2178] Leader for TabletID 72057594037927937 is [8:160:2179] sender: [8:161:2057] recipient: [8:157:2178] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:160:2179] Leader for TabletID 72057594037927937 is [8:160:2179] sender: [8:230:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:159:2057] recipient: [9:157:2178] Leader for TabletID 72057594037927937 is [9:160:2179] sender: [9:161:2057] recipient: [9:157:2178] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:160:2179] Leader for TabletID 72057594037927937 is [9:160:2179] sender: [9:230:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:162:2057] recipient: [10:160:2180] Leader for TabletID 72057594037927937 is [10:163:2181] sender: [10:164:2057] recipient: [10:160:2180] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:163:2181] Leader for TabletID 72057594037927937 is [10:163:2181] sender: [10:233:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:164:2057] recipient: [11:162:2182] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:166:2057] recipient: [11:162:2182] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:165:2183] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:235:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... sender: [33:173:2057] recipient: [33:172:2189] Leader for TabletID 72057594037927937 is [33:174:2190] sender: [33:175:2057] recipient: [33:172:2189] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:174:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:141:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:145:2057] recipient: [36:143:2166] Leader for TabletID 72057594037927937 is [36:146:2167] sender: [36:147:2057] recipient: [36:143:2166] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! new actor is[36:146:2167] Leader for TabletID 72057594037927937 is [36:146:2167] sender: [36:216:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:141:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:145:2057] recipient: [37:144:2166] Leader for TabletID 72057594037927937 is [37:146:2167] sender: [37:147:2057] recipient: [37:144:2166] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! new actor is[37:146:2167] Leader for TabletID 72057594037927937 is [37:146:2167] sender: [37:216:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:142:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:145:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:146:2057] recipient: [38:144:2166] Leader for TabletID 72057594037927937 is [38:147:2167] sender: [38:148:2057] recipient: [38:144:2166] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! new actor is[38:147:2167] Leader for TabletID 72057594037927937 is [38:147:2167] sender: [38:217:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:147:2057] recipient: [39:97:2132] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:150:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:151:2057] recipient: [39:149:2171] Leader for TabletID 72057594037927937 is [39:152:2172] sender: [39:153:2057] recipient: [39:149:2171] !Reboot 72057594037927937 (actor [39:105:2137]) rebooted! !Reboot 72057594037927937 (actor [39:105:2137]) tablet resolver refreshed! new actor is[39:152:2172] Leader for TabletID 72057594037927937 is [39:152:2172] sender: [39:222:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:106:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:139:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:147:2057] recipient: [40:97:2132] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:150:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:151:2057] recipient: [40:149:2171] Leader for TabletID 72057594037927937 is [40:152:2172] sender: [40:153:2057] recipient: [40:149:2171] !Reboot 72057594037927937 (actor [40:105:2137]) rebooted! !Reboot 72057594037927937 (actor [40:105:2137]) tablet resolver refreshed! new actor is[40:152:2172] Leader for TabletID 72057594037927937 is [40:152:2172] sender: [40:222:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:101:2057] recipient: [41:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:101:2057] recipient: [41:99:2133] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:106:2057] recipient: [41:99:2133] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:139:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:150:2057] recipient: [41:97:2132] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:153:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:105:2137] sender: [41:154:2057] recipient: [41:152:2173] Leader for TabletID 72057594037927937 is [41:155:2174] sender: [41:156:2057] recipient: [41:152:2173] !Reboot 72057594037927937 (actor [41:105:2137]) rebooted! !Reboot 72057594037927937 (actor [41:105:2137]) tablet resolver refreshed! new actor is[41:155:2174] Leader for TabletID 72057594037927937 is [41:155:2174] sender: [41:225:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:101:2057] recipient: [42:99:2133] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:106:2057] recipient: [42:99:2133] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:139:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:155:2057] recipient: [42:97:2132] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:158:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:105:2137] sender: [42:159:2057] recipient: [42:157:2178] Leader for TabletID 72057594037927937 is [42:160:2179] sender: [42:161:2057] recipient: [42:157:2178] !Reboot 72057594037927937 (actor [42:105:2137]) rebooted! !Reboot 72057594037927937 (actor [42:105:2137]) tablet resolver refreshed! new actor is[42:160:2179] Leader for TabletID 72057594037927937 is [42:160:2179] sender: [42:230:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:101:2057] recipient: [43:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:101:2057] recipient: [43:99:2133] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:106:2057] recipient: [43:99:2133] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:139:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:155:2057] recipient: [43:97:2132] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:158:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:105:2137] sender: [43:159:2057] recipient: [43:157:2178] Leader for TabletID 72057594037927937 is [43:160:2179] sender: [43:161:2057] recipient: [43:157:2178] !Reboot 72057594037927937 (actor [43:105:2137]) rebooted! !Reboot 72057594037927937 (actor [43:105:2137]) tablet resolver refreshed! new actor is[43:160:2179] Leader for TabletID 72057594037927937 is [43:160:2179] sender: [43:230:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:101:2057] recipient: [44:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:101:2057] recipient: [44:99:2133] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:106:2057] recipient: [44:99:2133] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:139:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:156:2057] recipient: [44:97:2132] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:159:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:105:2137] sender: [44:160:2057] recipient: [44:158:2178] Leader for TabletID 72057594037927937 is [44:161:2179] sender: [44:162:2057] recipient: [44:158:2178] !Reboot 72057594037927937 (actor [44:105:2137]) rebooted! !Reboot 72057594037927937 (actor [44:105:2137]) tablet resolver refreshed! new actor is[44:161:2179] Leader for TabletID 72057594037927937 is [44:161:2179] sender: [44:231:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:106:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:139:2057] recipient: [45:14:2061] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Analyze] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-ForceBlocks] >> test.py::test[pg-pg_types_orderby--Analyze] [GOOD] >> test.py::test[pg-pg_types_orderby--Debug] >> test.py::test[action-discard-default.txt-Results] [GOOD] >> test.py::test[action-eval_asatom-default.txt-Analyze] >> test.py::test[pg-select_win_partition_sort-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-Debug] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2024-11-21T09:22:33.896847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660545353028349:2053];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T09:22:33.897115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040eb/r3tmp/tmpw0eRIA/pdisk_1.dat 2024-11-21T09:22:33.961554Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5329, node 1 2024-11-21T09:22:33.984356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:33.984367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:33.984369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:33.984405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:33.997362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:33.997385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:33.998871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:34.014435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.015388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:34.015405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.015977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:34.016040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:34.016050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:34.016450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:34.016460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:34.016512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:34.016787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.017546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180954063, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:34.017557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:34.017624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:34.017970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:34.018018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:34.018033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:34.018048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:34.018061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:34.018074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:34.018478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:34.018497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:34.018501Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:34.018513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:34.038776Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:42704) has now valid token of root@builtin 2024-11-21T09:22:34.049202Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T09:22:34.740749Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660549037856292:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:34.740840Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040eb/r3tmp/tmptVy15B/pdisk_1.dat 2024-11-21T09:22:34.754247Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12511, 
node 4 2024-11-21T09:22:34.771939Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:34.771950Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:34.771951Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:34.771989Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:34.841146Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:34.841192Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:34.842732Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:34.844219Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.844324Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:34.844334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.844750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:34.844806Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:34.844826Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:34.845178Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:34.845187Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:34.845317Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:34.845471Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:34.846446Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180954889, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:34.846458Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:34.846519Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:34.847024Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:34.847093Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:34.847110Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:34.847124Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:34.847132Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 2814749 ... 762515]; 2024-11-21T09:22:45.258176Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E1121 09:22:50.379119217 2194115 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.384333543 2194116 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.401400492 2117091 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.405697678 2117095 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.412734046 2194334 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.417288132 2117095 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.423090144 2117095 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.427530615 2117444 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E1121 09:22:50.433823596 2117091 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.439132598 2117091 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.445495366 2117091 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:22:50.450844668 2117445 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2024-11-21T09:22:50.980661Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7439660618724558766:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:50.980999Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040eb/r3tmp/tmpevqXMD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8459, node 28 2024-11-21T09:22:51.018065Z node 28 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T09:22:51.018083Z node 28 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:51.018481Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:51.018490Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:51.018492Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:51.018557Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:51.080789Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:51.080827Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:51.082314Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:51.093812Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:51.093947Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:51.093960Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:51.094415Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:51.094454Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:51.094461Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:51.094782Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:51.094792Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:51.095061Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:51.095244Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:51.096058Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180971143, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:51.096073Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:51.096142Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:51.096678Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:51.096726Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:51.096740Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:51.096754Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:51.096765Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:51.096774Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:51.096949Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T09:22:51.096969Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:51.096972Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:51.096982Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:55.980954Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7439660618724558766:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:55.980992Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E1121 09:23:01.112279425 2198711 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.119502225 2198515 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.127397762 2198516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.133755353 2198516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.142019848 2198516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.147921118 2198711 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.157884697 2198711 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.164439226 2198516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.173166984 2198710 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.179229290 2198516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.188257140 2198710 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 09:23:01.194423532 2198711 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
|97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:99:2133] ... blocking block result NO_GROUP for [1:102:2133] ... blocking block result NO_GROUP for [1:100:2133] ... blocking block result NO_GROUP for [1:101:2133] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using5-default.txt-Debug] >> test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_per_link-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-Debug] >> test.py::test[produce-process_with_udf-default.txt-Debug] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> EntityId::Order >> test.py::test[optimizers-coalesce_propagate-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Plan] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Results] >> test.py::test[lineage-grouping_sets--Debug] [GOOD] >> test.py::test[lineage-grouping_sets--ForceBlocks] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> test.py::test[expr-fallback_filternullelements-default.txt-Results] [GOOD] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Analyze] >> test.py::test[schema-diffrerent_schemas--Debug] [GOOD] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Results] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-Analyze] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[pg-join_groups-default.txt-Analyze] >> test.py::test[action-eval_asatom-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_asatom-default.txt-Debug] >> test.py::test[pg-join_using5-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using5-default.txt-Plan] >> test.py::test[pg-join_using5-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using5-default.txt-Results] >> test.py::test[bigdate-input_timestamp64-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Debug] >> test.py::test[optimizers-coalesce_propagate-default.txt-Results] [GOOD] >> 
test.py::test[optimizers-constant_fold_minmax-default.txt-Analyze] >> Secret::ValidationQueryService >> test.py::test[pg-select_win_partition_sort-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-ForceBlocks] >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min-default.txt-Analyze] >> test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Plan] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Results] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Debug] >> test.py::test[join-mergejoin_force_per_link-off-Debug] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Plan] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-Results] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-Analyze] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> test.py::test[produce-process_with_udf-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Plan] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] >> test.py::test[pg-join_using5-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Debug] >> test.py::test[pg-join_groups-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_asatom-default.txt-Debug] [GOOD] >> test.py::test[action-eval_asatom-default.txt-ForceBlocks] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] [GOOD] >> test.py::test[schema-diffrerent_schemas--Plan] [GOOD] >> test.py::test[schema-diffrerent_schemas--Results] >> test.py::test[pg-join_groups-default.txt-Debug] >> test.py::test[pg-pg_types_orderby--Debug] [GOOD] >> test.py::test[pg-pg_types_orderby--ForceBlocks] [SKIPPED] >> test.py::test[pg-pg_types_orderby--Plan] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] [GOOD] >> test.py::test[pg-select_having-default.txt-Analyze] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: 
[1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:48.474916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:48.474941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:48.474947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:48.474952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:48.474958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:48.474962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:48.474971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:48.475056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:48.487034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:48.487060Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:48.489498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:48.489599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:48.489645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:48.492316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:48.492397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:48.492512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.492778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:48.493840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.494116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:48.494124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.494135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:48.494140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:48.494145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:48.494198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] 
recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:48.495323Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:48.506105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:48.506181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.506241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:48.506282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:48.506289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.507031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.507060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:48.507125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.507135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:48.507140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:48.507144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:48.507669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.507691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:48.507698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:48.508073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.508081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.508085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.508091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.508652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { 
AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:48.509028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:48.509082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:48.509219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:48.509241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:48.509253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.509294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:48.509299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:48.509322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:48.509331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:48.509638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:48.509645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:48.509700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:48.509706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:48.509776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:48.509781Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:48.509790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:48.509793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.509797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:48.509801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:48.509804Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:48.509807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:48.509816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:48.509820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:48.509823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... txid 1003:1 2024-11-21T09:23:03.436534Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:23:03.436537Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T09:23:03.436540Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T09:23:03.436544Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T09:23:03.436547Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T09:23:03.436550Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T09:23:03.436565Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T09:23:03.436570Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T09:23:03.436574Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T09:23:03.436577Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T09:23:03.436581Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T09:23:03.436584Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T09:23:03.436587Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2024-11-21T09:23:03.437033Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437052Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437057Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:23:03.437062Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T09:23:03.437067Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:23:03.437417Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 
Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437433Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437437Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:23:03.437444Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T09:23:03.437448Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T09:23:03.437540Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437550Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437553Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:23:03.437557Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T09:23:03.437561Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T09:23:03.437628Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437638Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437641Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:23:03.437645Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T09:23:03.437648Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T09:23:03.437716Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437727Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.437730Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T09:23:03.437733Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 
72057594046678944, LocalPathId: 6], version: 2 2024-11-21T09:23:03.437737Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T09:23:03.437745Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T09:23:03.438203Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.438678Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.438703Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.438713Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T09:23:03.438724Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T09:23:03.438782Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T09:23:03.438789Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T09:23:03.438909Z node 61 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:23:03.438932Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:23:03.438937Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:388:2369] TestWaitNotification: OK eventTxId 1003 2024-11-21T09:23:03.439007Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:03.439048Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 50us result status StatusSuccess 2024-11-21T09:23:03.439131Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "z" PathId: 6 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "Valid/x/y/z" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:03.439185Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:03.439204Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 21us result status StatusPathDoesNotExist 2024-11-21T09:23:03.439223Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] >> test.py::test[expr-iterable-default.txt-Analyze] >> test.py::test[lineage-grouping_sets--ForceBlocks] [GOOD] >> test.py::test[lineage-grouping_sets--Plan] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test.py::test[insert-insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta--Analyze] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Debug] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.5%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Analyze] >> test.py::test[sampling-bind_join_right-default.txt-Analyze] >> test.py::test[agg_phases-min-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-min-default.txt-Debug] >> BootstrapperTest::LoneBootstrapper >> test.py::test[join-mergejoin_narrows_output_sort-off-Analyze] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-Debug] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> test.py::test[bigdate-input_timestamp64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-ForceBlocks] >> test.py::test[solomon-Basic-default.txt-Debug] >> test.py::test[pg-join_groups-default.txt-Debug] [GOOD] >> test.py::test[pg-join_groups-default.txt-ForceBlocks] >> test.py::test[action-eval_asatom-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_asatom-default.txt-Plan] [GOOD] >> test.py::test[action-eval_asatom-default.txt-Results] >> test.py::test[pg-select_win_partition_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-Results] >> test.py::test[expr-iterable-default.txt-Analyze] [GOOD] >> test.py::test[expr-iterable-default.txt-Debug] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[solomon-Basic-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Results] [SKIPPED] >> test.py::test[table_range-merge_non_strict--Analyze] >> test.py::test[pg-select_having-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_having-default.txt-Debug] >> test.py::test[expr-sets-default.txt-Analyze] >> test.py::test[insert-keepmeta--Analyze] [GOOD] >> test.py::test[insert-keepmeta--Debug] >> test.py::test[json-json_value/common_syntax-default.txt-Analyze] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Debug] [GOOD] >> test.py::test[optimizers-constant_fold_minmax-default.txt-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[action-eval_asatom-default.txt-Results] [GOOD] >> test.py::test[action-eval_input_output_table--Analyze] >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Analyze] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Analyze] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Debug] >> test.py::test[schema-row_spec_with_default_values--Debug] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest >> test.py::test[expr-to_sorted_dict_list_key-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[expr-to_sorted_dict_list_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Results] >> test.py::test[pg-join_groups-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_groups-default.txt-Plan] [GOOD] >> test.py::test[pg-join_groups-default.txt-Results] >> test.py::test[join-mergejoin_narrows_output_sort-off-Debug] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-Plan] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [GOOD] >> test.py::test[join-premap_common_cross--Analyze] >> TCmsTest::ManagePermissions |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2024-11-21T09:22:32.208573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660543648652932:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:32.208828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ec/r3tmp/tmp1rUm7P/pdisk_1.dat 2024-11-21T09:22:32.262630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25360, node 1 2024-11-21T09:22:32.279046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:32.279059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:32.279061Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:32.279099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:32.308610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:32.308643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:32.310805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:32.340899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.342082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.342093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.342644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:32.342699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:32.342707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T09:22:32.343398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:32.343417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:32.343468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:32.348552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.349941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952397, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.349958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:32.350041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:32.350546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.350605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.350621Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:32.350630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:32.350656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:32.350672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:32.351352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T09:22:32.351372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:32.351377Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:32.351388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T09:22:32.521934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439660543648653852:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.521964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:22:32.555016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.555151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T09:22:32.555298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:32.555309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T09:22:32.555904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T09:22:32.555957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.555994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.556026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T09:22:32.556082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T09:22:32.556233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.556249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.556253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T09:22:32.556302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.556310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.556311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T09:22:32.557647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T09:22:32.557668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T09:22:32.558060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T09:22:32.610050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T09:22:32.610061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T09:22:32.610082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T09:22:32.610546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T09:22:32.611320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180952656, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:32.611332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732180952656 2024-11-21T09:22:32.611356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T09:22:32.611903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:32.612010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:32.612033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T09:22:32.612365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:32.612380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:32.612385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: ... 046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T09:22:46.045387Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:46.045389Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:46.045390Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T09:22:46.045400Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T09:22:46.045405Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T09:22:46.045407Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T09:22:46.045410Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T09:22:46.045889Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439660603652185673:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T09:22:46.114128Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T09:22:46.114193Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T09:22:46.115071Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2024-11-21T09:22:49.030769Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439660615867631625:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:49.030830Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/0040ec/r3tmp/tmpNxveBC/pdisk_1.dat 2024-11-21T09:22:49.043429Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16103, node 10 2024-11-21T09:22:49.063787Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:49.063800Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:49.063802Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2024-11-21T09:22:49.063856Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:49.130927Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:49.130964Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:49.132598Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:49.133998Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:49.134100Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:49.134111Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:49.134597Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:49.134654Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:49.134664Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T09:22:49.135134Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:49.135147Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T09:22:49.135196Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T09:22:49.135692Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:49.136702Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180969183, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:49.136716Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T09:22:49.136773Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T09:22:49.137213Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:49.137268Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:49.137286Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T09:22:49.137300Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T09:22:49.137309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T09:22:49.137322Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T09:22:49.137503Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T09:22:49.137515Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T09:22:49.137520Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:49.137531Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2024-11-21T09:22:54.031124Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439660615867631625:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:54.031164Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: 
Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2024-11-21T09:23:04.038642Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T09:23:04.038660Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> test.py::test[table_range-merge_non_strict--Analyze] [GOOD] >> test.py::test[table_range-merge_non_strict--Debug] >> test.py::test[pg-select_win_partition_sort-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Analyze] >> test.py::test[expr-iterable-default.txt-Debug] [GOOD] >> test.py::test[expr-iterable-default.txt-ForceBlocks] >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[lineage-union_all_tablerow-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Debug] [SKIPPED] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[expr-sets-default.txt-Analyze] [GOOD] >> test.py::test[expr-sets-default.txt-Debug] >> test.py::test[lineage-union_all_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--Analyze] >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogCountByResource >> test.py::test[pg-select_having-default.txt-Debug] [GOOD] >> test.py::test[pg-select_having-default.txt-ForceBlocks] >> test.py::test[json-json_value/common_syntax-default.txt-Analyze] [GOOD] >> test.py::test[json-json_value/common_syntax-default.txt-Debug] >> test.py::test[optimizers-constant_fold_minmax-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Plan] [GOOD] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Results] >> test.py::test[insert-keepmeta--Debug] [GOOD] >> test.py::test[insert-keepmeta--ForceBlocks] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> Secret::DeactivatedQueryService [GOOD] >> test.py::test[expr-to_sorted_dict_list_key-default.txt-Results] [GOOD] >> test.py::test[expr-type_as_key-default.txt-Analyze] >> test.py::test[schema-row_spec_with_default_values--Debug] [GOOD] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] >> test.py::test[bigdate-input_timestamp64-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Results] >> test.py::test[pg-join_groups-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_multiple2--Analyze] >> test.py::test[action-eval_input_output_table--Analyze] [GOOD] >> test.py::test[action-eval_input_output_table--Debug] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[sampling-bind_join_right-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] >> test.py::test[optimizers-constant_fold_minmax-default.txt-Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--Analyze] [SKIPPED] >> 
test.py::test[optimizers-sorted_scalar_content--Debug] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Plan] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Analyze] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Debug] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Plan] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2024-11-21T09:22:53.147561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:294:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/000d96/r3tmp/tmpYsa1Rx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17870, node 1 TClient is connected to server localhost:29996 2024-11-21T09:22:53.268266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:53.286010Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:53.286646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:53.286656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:53.286659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:53.286736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:53.328770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:53.328809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:53.339317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T09:23:05.856395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:648:2541], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:23:05.856425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:662:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:23:05.856499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:23:05.857436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T09:23:05.860491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:666:2553], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T09:23:05.890127Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:727:2594], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2024-11-21T09:23:05.890466Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJmY2E2YTAtOGU3OWRhNjUtNjdlNWU0MDMtZjQxZmIzOTQ=, ActorId: [1:646:2539], ActorState: ExecuteState, TraceId: 01jd70ghzx5fd0rt6zjp5jz12a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> test.py::test[pg-select_win_sum_null-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Debug] >> test.py::test[join-premap_common_cross--Analyze] [GOOD] >> test.py::test[join-premap_common_cross--Debug] >> test.py::test[expr-iterable-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Analyze] >> test.py::test[expr-iterable-default.txt-Plan] [GOOD] >> test.py::test[expr-iterable-default.txt-Results] >> test.py::test[json-json_value/common_syntax-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/common_syntax-default.txt-ForceBlocks] >> test.py::test[table_range-merge_non_strict--Debug] [GOOD] >> test.py::test[table_range-merge_non_strict--ForceBlocks] >> test.py::test[optimizers-aggregate_over_aggregate--Analyze] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--Debug] >> test.py::test[pg-select_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_having-default.txt-Plan] [GOOD] >> test.py::test[pg-select_having-default.txt-Results] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest >> test.py::test[expr-type_as_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-type_as_key-default.txt-Debug] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Plan] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[pg-join_using_multiple2--Analyze] [GOOD] >> test.py::test[pg-join_using_multiple2--Debug] >> test.py::test[insert-keepmeta--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta--Plan] [GOOD] >> test.py::test[insert-keepmeta--Results] >> test.py::test[bigdate-input_timestamp64-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_fill--Analyze] >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> test.py::test[expr-iterable-default.txt-Results] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Analyze] >> test.py::test[action-eval_input_output_table--Debug] [GOOD] >> test.py::test[action-eval_input_output_table--ForceBlocks] >> TCmsTest::TestForceRestartMode >> test.py::test[order_by-presort_order_by_table-default.txt-Analyze] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Debug] >> test.py::test[udf-same_udf_modules--Analyze] >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Debug] >> TCmsTest::StateRequest >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] >> test.py::test[json-json_value/common_syntax-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_value/common_syntax-default.txt-Plan] [GOOD] >> test.py::test[json-json_value/common_syntax-default.txt-Results] >> 
BootstrapperTest::MultipleBootstrappers [GOOD] >> test.py::test[join-premap_common_cross--Debug] [GOOD] >> test.py::test[join-premap_common_cross--ForceBlocks] >> test.py::test[pg-select_having-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_full_equi_and_const-default.txt-Analyze] >> test.py::test[agg_phases-min-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min-default.txt-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Analyze] >> test.py::test[insert-keepmeta--Results] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Analyze] >> test.py::test[optimizers-aggregate_over_aggregate--Debug] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] >> test.py::test[expr-sets-default.txt-Debug] [GOOD] >> test.py::test[expr-sets-default.txt-ForceBlocks] >> test.py::test[table_range-merge_non_strict--ForceBlocks] [GOOD] >> test.py::test[table_range-merge_non_strict--Plan] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> TBSVWithReboots::CreateAssignWithVersion >> test.py::test[expr-type_as_key-default.txt-Debug] [GOOD] >> test.py::test[expr-type_as_key-default.txt-ForceBlocks] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2024-11-21T09:23:04.849740Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:04.849765Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:04.849776Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:04.849982Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T09:23:04.849991Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 7090319362426798975 2024-11-21T09:23:04.850018Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T09:23:04.850022Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16229357239031646724 2024-11-21T09:23:04.850066Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T09:23:04.850070Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 11763690323019591983 2024-11-21T09:23:04.850308Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2024-11-21T09:23:04.850358Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T09:23:04.850374Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T09:23:04.850384Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T09:23:04.850398Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T09:23:04.850403Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, 
lost round, wait for 0.126260s 2024-11-21T09:23:04.850414Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T09:23:04.850419Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T09:23:04.850459Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T09:23:04.850463Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.186178s 2024-11-21T09:23:04.981876Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:04.982103Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:04.982232Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:04.982239Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T09:23:05.033800Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:05.034023Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:05.034142Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:05.034148Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2024-11-21T09:23:05.616271Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2024-11-21T09:23:05.616294Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T09:23:05.616500Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T09:23:05.616511Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:05.616546Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T09:23:05.616552Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:05.616906Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:05.616979Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:05.617267Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:05.617276Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T09:23:05.617314Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:05.617318Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... 
sleeping for 2 seconds (tablet expected to survive) 2024-11-21T09:23:06.108847Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2024-11-21T09:23:06.108882Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T09:23:06.108940Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T09:23:06.108947Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.108958Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T09:23:06.108964Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.109209Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:06.109238Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T09:23:06.109383Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T09:23:06.109387Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12604849125939852480 2024-11-21T09:23:06.109394Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T09:23:06.109396Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 5312993052334781800 2024-11-21T09:23:06.109458Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T09:23:06.109465Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T09:23:06.109475Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2024-11-21T09:23:06.109479Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2024-11-21T09:23:06.109486Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2024-11-21T09:23:06.109488Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2024-11-21T09:23:06.620060Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2024-11-21T09:23:06.620079Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.620093Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2024-11-21T09:23:06.620101Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.620246Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T09:23:06.620290Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T09:23:06.620366Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T09:23:06.620370Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2024-11-21T09:23:06.620381Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T09:23:06.620383Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2024-11-21T09:23:06.620471Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2024-11-21T09:23:06.620475Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2024-11-21T09:23:06.620485Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T09:23:06.620488Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.167359s 2024-11-21T09:23:06.620493Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T09:23:06.620496Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2024-11-21T09:23:06.620498Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2024-11-21T09:23:06.620501Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T09:23:06.620806Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2024-11-21T09:23:06.620815Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.621542Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:386:2094] 2024-11-21T09:23:06.624367Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:06.624383Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T09:23:06.686565Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T09:23:06.686792Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:386:2094] 2024-11-21T09:23:06.686927Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T09:23:06.686933Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 2 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> TFileStoreWithReboots::CreateAlter [GOOD] >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] >> test.py::test[expr-list_from_range-default.txt-Analyze] [GOOD] >> test.py::test[json-json_value/common_syntax-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_fill--Analyze] [GOOD] >> test.py::test[json-json_value/passing-default.txt-Analyze] >> test.py::test[bigdate-tz_table_fill--Debug] >> test.py::test[pg-join_using_multiple2--Debug] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2024-11-21T09:22:40.201612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.201634Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.201649Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.204862Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.205044Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.205111Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.206098Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.212918Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.213022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.213143Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.213157Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.213164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.213204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.215616Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.215665Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.215693Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.215697Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.215701Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T09:22:40.215704Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.215766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.215771Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.215788Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.215802Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.215837Z node 1 :TX_DATASHARD 
DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.215842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.215847Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.215851Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.215853Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.215857Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.215861Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.222550Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.222570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.222580Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.223039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.223051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.223071Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.223102Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.223113Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.223124Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.223132Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.223138Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.223143Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.223147Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.223213Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.223217Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.223221Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T09:22:40.223225Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.223235Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.223239Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.223243Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.223246Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.223252Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.244230Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T09:22:40.244256Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.244263Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.244275Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.244289Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.244418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.244425Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.244434Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.244459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.244464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.244505Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.244516Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.244521Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.244527Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.245189Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.245202Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.245251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.245257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.245265Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.245272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.245277Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.245284Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.245289Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.245296Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.245300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing 
on unit PlanQueue 2024-11-21T09:22:40.245304Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:40.245308Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.245352Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.245356Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.245360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.245364Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.245367Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.245377Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.245381Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.245384Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.245388Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.245402Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.245406Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.245409Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.245415Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.245418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.245422Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
latency: 1 ms 2024-11-21T09:23:07.415576Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415605Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415611Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415619Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415624Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415661Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415666Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415675Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415682Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415714Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415719Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415727Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415733Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415761Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415766Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415773Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415778Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415805Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415810Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415819Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415824Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415849Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415855Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415863Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415869Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415901Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415909Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit 
CompleteOperation 2024-11-21T09:23:07.415917Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415922Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.415966Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.415971Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.415980Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.415985Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.416019Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.416025Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.416034Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.416039Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.416069Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.416074Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.416083Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.416088Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.416125Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.416130Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.416139Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.416144Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.416168Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.416173Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.416182Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.416186Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.416241Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.416249Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.416257Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.416263Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
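The trace above shows the datashard driving each planned operation ([1000004:25] through [1000004:37]) through its execution-unit plan: every unit returns a status that tells the pipeline whether to advance to the next unit, stop and wait, or defer part of its work to the Complete phase (which is why "Complete execution ... on unit CompleteOperation" appears only inside TTxProgressTransaction::Complete). The following is a minimal, self-contained C++ sketch of that pattern; the type and function names (IExecutionUnit, EStatus, RunPipeline) are illustrative assumptions for this note and are not YDB's actual classes.

// Simplified, hypothetical model of the execution-unit pipeline seen in the
// trace above. Names (EStatus, IExecutionUnit, RunPipeline) are illustrative
// and do not correspond to YDB's real datashard classes.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

enum class EStatus {
    Executed,        // unit finished, advance to the next unit
    DelayComplete,   // unit finished, but part of its work runs at Complete()
    NotReady         // operation must wait (e.g. WaitForPlan until a plan step arrives)
};

struct IExecutionUnit {
    virtual ~IExecutionUnit() = default;
    virtual std::string Name() const = 0;
    virtual EStatus Execute() = 0;
};

struct TPlanQueueUnit : IExecutionUnit {
    std::string Name() const override { return "PlanQueue"; }
    EStatus Execute() override { return EStatus::Executed; }
};

struct TLoadTxDetailsUnit : IExecutionUnit {
    std::string Name() const override { return "LoadTxDetails"; }
    EStatus Execute() override { return EStatus::Executed; }
};

struct TCompleteOperationUnit : IExecutionUnit {
    std::string Name() const override { return "CompleteOperation"; }
    // Sending the result to the client is deferred to the Complete() phase,
    // mirroring the DelayComplete statuses printed in the log.
    EStatus Execute() override { return EStatus::DelayComplete; }
};

// Drives one operation through its unit plan until a unit asks to wait.
void RunPipeline(const std::vector<std::unique_ptr<IExecutionUnit>>& plan) {
    for (const auto& unit : plan) {
        EStatus st = unit->Execute();
        std::cout << "unit " << unit->Name() << " -> ";
        switch (st) {
            case EStatus::Executed:      std::cout << "Executed\n"; break;
            case EStatus::DelayComplete: std::cout << "DelayComplete\n"; break;
            case EStatus::NotReady:      std::cout << "NotReady (stop and wait)\n"; return;
        }
    }
}

int main() {
    std::vector<std::unique_ptr<IExecutionUnit>> plan;
    plan.push_back(std::make_unique<TPlanQueueUnit>());
    plan.push_back(std::make_unique<TLoadTxDetailsUnit>());
    plan.push_back(std::make_unique<TCompleteOperationUnit>());
    RunPipeline(plan);
    return 0;
}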
2024-11-21T09:23:07.416341Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T09:23:07.416349Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416358Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2024-11-21T09:23:07.416373Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T09:23:07.416377Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416381Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T09:23:07.416394Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T09:23:07.416398Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416402Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T09:23:07.416415Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T09:23:07.416419Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416423Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T09:23:07.416438Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T09:23:07.416442Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416446Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T09:23:07.416454Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T09:23:07.416458Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416461Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T09:23:07.416474Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T09:23:07.416478Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416482Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T09:23:07.416494Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T09:23:07.416517Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416521Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T09:23:07.416532Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2024-11-21T09:23:07.416536Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.416540Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 30 30 31 25 31 28 18 18 29 28 29 31 29 31 31 31 13 29 31 - 29 - 20 26 29 20 26 26 26 17 - - actual 30 30 31 25 31 28 18 18 29 28 29 31 29 31 31 31 13 29 31 - 29 - 20 26 29 20 26 26 26 17 - - interm 6 3 6 0 2 3 5 1 - - 5 - 6 1 1 - 6 - - - - - 4 4 - - - - - - - - >> test.py::test[pg-join_using_multiple2--ForceBlocks] >> test.py::test[pg-select_win_sum_null-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-ForceBlocks] >> test.py::test[udf-same_udf_modules--Analyze] [GOOD] >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] >> test.py::test[udf-same_udf_modules--Debug] >> TReplicationWithRebootsTests::CreateDropRecreate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2024-11-21T09:22:40.260726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:40.260744Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:40.260758Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T09:22:40.263373Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T09:22:40.263486Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T09:22:40.263545Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:40.264455Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T09:22:40.275390Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:40.275506Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T09:22:40.275648Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T09:22:40.275662Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T09:22:40.275669Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T09:22:40.275704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T09:22:40.279506Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T09:22:40.279563Z node 1 
:TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T09:22:40.279603Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T09:22:40.279608Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T09:22:40.279612Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T09:22:40.279617Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.279699Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.279707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.279731Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T09:22:40.279749Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T09:22:40.279793Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.279800Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T09:22:40.279806Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T09:22:40.279810Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T09:22:40.279814Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T09:22:40.279818Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T09:22:40.279825Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:22:40.290296Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.290315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.290324Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T09:22:40.290772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T09:22:40.290787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T09:22:40.290804Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T09:22:40.290831Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T09:22:40.290840Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T09:22:40.290847Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T09:22:40.290854Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.290858Z node 1 :TX_DATASHARD 
TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T09:22:40.290863Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T09:22:40.290868Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.290924Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T09:22:40.290930Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T09:22:40.290934Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T09:22:40.290938Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.290947Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T09:22:40.290950Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T09:22:40.290954Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T09:22:40.290957Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.290962Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T09:22:40.311925Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T09:22:40.311947Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T09:22:40.311953Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T09:22:40.311963Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T09:22:40.311976Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T09:22:40.312077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.312083Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:22:40.312089Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T09:22:40.312104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T09:22:40.312107Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T09:22:40.312134Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T09:22:40.312142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.312145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T09:22:40.312149Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T09:22:40.312749Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T09:22:40.312765Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:22:40.312826Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.312832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T09:22:40.312838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T09:22:40.312843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T09:22:40.312848Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T09:22:40.312853Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T09:22:40.312858Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T09:22:40.312875Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.312879Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T09:22:40.312884Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T09:22:40.312888Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T09:22:40.312919Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T09:22:40.312924Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.312927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T09:22:40.312931Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T09:22:40.312934Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T09:22:40.312945Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T09:22:40.312948Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T09:22:40.312950Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T09:22:40.312952Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T09:22:40.312958Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T09:22:40.312961Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T09:22:40.312963Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T09:22:40.312967Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T09:22:40.312969Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T09:22:40.312971Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
71311Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571334Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571338Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571344Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571348Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571368Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571372Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571378Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571382Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571407Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571413Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571419Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571422Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571449Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571453Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571459Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571463Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571483Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571486Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571492Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571496Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571528Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571533Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571539Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571543Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571564Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571568Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 
2024-11-21T09:23:07.571574Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571578Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571597Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571601Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571608Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571612Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571637Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571642Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571648Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571653Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571676Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571681Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571687Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571694Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571718Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571722Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571728Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571732Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571749Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571753Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571759Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571763Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571786Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571790Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571796Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571800Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571827Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571831Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571837Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571841Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571864Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T09:23:07.571868Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T09:23:07.571875Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T09:23:07.571879Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T09:23:07.571944Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T09:23:07.571951Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.571957Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2024-11-21T09:23:07.571970Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T09:23:07.571973Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.571977Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T09:23:07.571989Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T09:23:07.571994Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.571998Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T09:23:07.572009Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T09:23:07.572012Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.572015Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T09:23:07.572029Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T09:23:07.572032Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.572035Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T09:23:07.572042Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T09:23:07.572046Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.572049Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T09:23:07.572060Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T09:23:07.572063Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T09:23:07.572066Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 expect 12 23 31 31 27 20 27 27 29 30 8 25 22 22 31 31 11 13 30 22 12 22 29 15 9 15 22 - - 15 15 - actual 12 23 31 31 27 20 27 27 29 30 8 25 22 22 31 31 11 13 30 22 12 22 29 15 9 15 22 - - 15 15 - interm 2 1 5 6 4 6 6 - - 2 1 2 2 2 - 2 1 - - 2 - - - 2 - - - - - - - - >> test.py::test[pg-select_join_full_equi_and_const-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_join_full_equi_and_const-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:50.499573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:50.499594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:50.499600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:50.499603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:50.499607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:50.499609Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:50.499617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:50.499688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:50.507587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:50.507608Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:50.509488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:50.509569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:50.509611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:50.511437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:50.511498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:50.511606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:50.511750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:50.512293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:50.512527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:50.512534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:50.512544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:50.512549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:50.512554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:50.512590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:50.513888Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:50.526809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:50.526897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.526964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:50.527007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:50.527014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.527773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:50.527794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:50.527843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.527852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:50.527855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:50.527859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:50.528149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.528156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:50.528159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:50.528441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.528449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.528455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:50.528462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:50.528906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:50.529227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:50.529266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:50.529429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:50.529449Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:50.529461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:50.529515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:50.529521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:50.529547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:50.529556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:50.529848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:50.529854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:50.529888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:50.529892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:50.529975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:50.529981Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:50.529994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:50.529999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:50.530004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:50.530010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:50.530015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:50.530017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:50.530025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:50.530030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:50.530033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
AT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72057594037968897 2024-11-21T09:23:08.069514Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:23:08.069651Z node 72 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T09:23:08.069725Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:23:08.069732Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T09:23:08.069753Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:23:08.069763Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T09:23:08.069770Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T09:23:08.069791Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T09:23:08.069886Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T09:23:08.070313Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:23:08.070345Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:23:08.070350Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:23:08.070780Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T09:23:08.070811Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T09:23:08.070867Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T09:23:08.070893Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T09:23:08.070900Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 
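The schemeshard portion of this log walks txId 1002 (TAlterFileStore) through numeric sub-operation states as replies arrive: 2 -> 3 on TEvCreateTabletReply, 3 -> 128 on TEvUpdateConfigResponse, and 128 -> 240 once the coordinator delivers TEvOperationPlan. Below is a minimal C++ sketch of such a reply-driven state machine, assuming a simplified transition table; the enum and struct names are hypothetical and only mirror the state numbers printed in the trace, not the schemeshard's real implementation.

// Hypothetical sketch of a reply-driven operation state machine.
// State numbers are taken from the log; types and the transition table
// are illustrative only.
#include <iostream>
#include <map>
#include <string>
#include <utility>

enum class EOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

struct TOperation {
    EOpState State = EOpState::CreateParts;

    // Each expected reply advances the operation exactly one state;
    // anything unexpected for the current state is ignored.
    void HandleReply(const std::string& ev) {
        static const std::map<std::string, std::pair<EOpState, EOpState>> transitions = {
            {"TEvCreateTabletReply",    {EOpState::CreateParts,    EOpState::ConfigureParts}},
            {"TEvUpdateConfigResponse", {EOpState::ConfigureParts, EOpState::Propose}},
            {"TEvOperationPlan",        {EOpState::Propose,        EOpState::Done}},
        };
        auto it = transitions.find(ev);
        if (it == transitions.end() || it->second.first != State) {
            std::cout << "ignore " << ev << " in state " << static_cast<int>(State) << "\n";
            return;
        }
        std::cout << "Change state " << static_cast<int>(State)
                  << " -> " << static_cast<int>(it->second.second)
                  << " on " << ev << "\n";
        State = it->second.second;
    }
};

int main() {
    TOperation op;
    op.HandleReply("TEvCreateTabletReply");    // 2 -> 3
    op.HandleReply("TEvUpdateConfigResponse"); // 3 -> 128
    op.HandleReply("TEvOperationPlan");        // 128 -> 240
    return 0;
}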
2024-11-21T09:23:08.070906Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T09:23:08.071372Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:23:08.071401Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T09:23:08.071406Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:23:08.071414Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T09:23:08.071452Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:23:08.071989Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T09:23:08.072023Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T09:23:08.072098Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:23:08.072119Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:08.072125Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T09:23:08.072163Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T09:23:08.072167Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:23:08.072177Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:23:08.072185Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T09:23:08.072190Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T09:23:08.072195Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T09:23:08.072198Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T09:23:08.072243Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T09:23:08.072249Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T09:23:08.072254Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 
72057594046678944, LocalPathId: 3], 3 2024-11-21T09:23:08.073059Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:23:08.073069Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T09:23:08.073104Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:23:08.073109Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T09:23:08.073213Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:23:08.073222Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T09:23:08.073226Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T09:23:08.073230Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T09:23:08.073235Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T09:23:08.073249Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T09:23:08.073715Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T09:23:08.073794Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T09:23:08.073806Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T09:23:08.073902Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:23:08.073923Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.073929Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T09:23:08.074032Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.074068Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 46us result status StatusSuccess 2024-11-21T09:23:08.074151Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[order_by-presort_order_by_table-default.txt-Debug] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] >> test.py::test[action-eval_input_output_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table--Plan] [GOOD] >> test.py::test[action-eval_input_output_table--Results] >> Secret::Deactivated [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> test.py::test[insert-literals_to_string-default.txt-Analyze] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Debug] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--Plan] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--Results] >> test.py::test[expr-type_as_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-type_as_key-default.txt-Plan] [GOOD] >> test.py::test[expr-type_as_key-default.txt-Results] >> test.py::test[sampling-bind_join_right-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Analyze] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Analyze] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Debug] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-multi_usage_cross_join-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] 
recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.163601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.163637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.163647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.163719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.163724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.163733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.176943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.176964Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.179181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.179294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.179330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.183103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.183207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.184310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.185171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.187114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.188711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.188717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.188772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: 
[1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.190254Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.205844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.205908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.205997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.206002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.206830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.206846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:38.206851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.207321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.207337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.207341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.207614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.207619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.207622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.207644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.208075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { 
TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.208354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.208384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.208518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.208584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.208612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.209037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.209052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.209084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.209092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.209161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.209167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.209177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.209199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.209205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.209209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.209214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation 
and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.209218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.209230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.209236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.209239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 1003 2024-11-21T09:23:08.149617Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.149628Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149648Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.149653Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [119:720:2611] 2024-11-21T09:23:08.149680Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [119:725:2616], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.149684Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.149687Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:23:08.149695Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [119:726:2617], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.149698Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.149701Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:23:08.149712Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:724:2615], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149717Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149721Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149730Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [119:416:2373], Recipient [119:129:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1005 2024-11-21T09:23:08.149733Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.149738Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149753Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [119:416:2373], Recipient [119:129:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2024-11-21T09:23:08.149757Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.149761Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149768Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 
2024-11-21T09:23:08.149772Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [119:720:2611] 2024-11-21T09:23:08.149787Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:725:2616], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149790Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149794Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149798Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.149801Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [119:720:2611] 2024-11-21T09:23:08.149814Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:726:2617], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149817Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.149821Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1004 2024-11-21T09:23:08.149885Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:727:2618], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.149889Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.149900Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.149943Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 42us result status StatusSuccess 2024-11-21T09:23:08.150027Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:08.150126Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:728:2619], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.150131Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.150139Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.150156Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 18us result status StatusSuccess 2024-11-21T09:23:08.150195Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:08.150273Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:729:2620], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.150279Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.150287Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.150300Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 14us result status StatusSuccess 2024-11-21T09:23:08.150338Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 6 } Version: 1 ControllerId: 72075186233409549 State { StandBy { } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Analyze] >> TCmsTest::TestForceRestartMode [GOOD] >> test.py::test[join-premap_common_cross--ForceBlocks] [GOOD] >> TCmsTest::StateStorageTwoRings >> test.py::test[expr-list_from_range-default.txt-Debug] [GOOD] >> test.py::test[expr-list_from_range-default.txt-ForceBlocks] >> TCmsTest::TestForceRestartModeDisconnects >> test.py::test[json-json_value/passing-default.txt-Analyze] [GOOD] >> test.py::test[json-json_value/passing-default.txt-Debug] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] >> test.py::test[join-premap_common_cross--Plan] [GOOD] >> test.py::test[join-premap_common_cross--Results] >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2024-11-21T09:22:55.820200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:294:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/000d60/r3tmp/tmprzLISp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19212, node 1 TClient is connected to server localhost:64544 2024-11-21T09:22:55.957488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T09:22:55.975987Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T09:22:55.976727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:55.976738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:55.976743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:55.976824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T09:22:56.018773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:56.018805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:56.029297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T09:23:08.575820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:655:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T09:23:08.575854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.163601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.163645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.163655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.163685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.163689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.163697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.163800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.177055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.177074Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.179314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.179420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.179444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.182577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.182643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.184269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.185166Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.186820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.188659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.188668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.188674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.188711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.189947Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.205128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.205183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.205280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.205285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.205989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.205995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.205998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:38.206001Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.206396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.206681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.206690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.206693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.207130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.207467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.208093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.208304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.208402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.208424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.208437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.208843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.208897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.208967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.208973Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.208982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.208986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.208991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.208996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.209000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.209004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.209014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.209019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.209023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 1002 2024-11-21T09:23:08.499768Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.499777Z node 121 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499795Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [121:623:2536], Recipient [121:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.499797Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.499799Z node 121 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:23:08.499803Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.499806Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [121:618:2531] 2024-11-21T09:23:08.499824Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [121:617:2530], Recipient [121:130:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1003 2024-11-21T09:23:08.499827Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.499829Z node 121 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499836Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [121:624:2537], Recipient [121:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.499838Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2024-11-21T09:23:08.499843Z node 121 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:23:08.499849Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [121:622:2535], Recipient [121:130:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499853Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499855Z node 121 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499861Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [121:617:2530], Recipient [121:130:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2024-11-21T09:23:08.499863Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.499865Z node 121 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499869Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.499871Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [121:618:2531] 2024-11-21T09:23:08.499877Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.499879Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [121:618:2531] 2024-11-21T09:23:08.499887Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [121:623:2536], Recipient [121:130:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499889Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499891Z node 121 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499896Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [121:624:2537], Recipient [121:130:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499898Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.499900Z node 121 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2024-11-21T09:23:08.499940Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [121:625:2538], Recipient [121:130:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.499943Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.499950Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.499981Z node 121 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication1" 
took 29us result status StatusSuccess 2024-11-21T09:23:08.500034Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:08.500100Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [121:626:2539], Recipient [121:130:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.500103Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.500108Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.500119Z node 121 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 11us result status StatusSuccess 2024-11-21T09:23:08.500142Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { 
Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:23:08.500186Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [121:627:2540], Recipient [121:130:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.500190Z node 121 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.500195Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.500224Z node 121 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 8us result status StatusSuccess 2024-11-21T09:23:08.500253Z node 121 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbOlapStore::LogCountByResource [GOOD] >> test.py::test[udf-same_udf_modules--Debug] [GOOD] >> test.py::test[udf-same_udf_modules--ForceBlocks] >> test.py::test[expr-type_as_key-default.txt-Results] [GOOD] >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TBSVWithReboots::CreateAlterNoVersion >> test.py::test[bigdate-tz_table_fill--Debug] [GOOD] >> test.py::test[bigdate-tz_table_fill--ForceBlocks] >> 
test.py::test[action-eval_input_output_table--Results] [GOOD] >> test.py::test[action-runtime_for_select-default.txt-Analyze] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Analyze] |97.6%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateDropRecreate [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T09:22:38.391169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T09:22:38.391185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.391189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T09:22:38.391192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T09:22:38.391204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T09:22:38.391207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T09:22:38.391213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T09:22:38.391281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T09:22:38.398972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T09:22:38.398995Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.400945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T09:22:38.401035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T09:22:38.401060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T09:22:38.403466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T09:22:38.403532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T09:22:38.403638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, 
SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.403821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.404511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.404764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.404775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.404786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T09:22:38.404792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.404798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T09:22:38.404833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T09:22:38.406032Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T09:22:38.422122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T09:22:38.422199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.422271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T09:22:38.422319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T09:22:38.422326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.423151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.423180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T09:22:38.423225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.423236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T09:22:38.423241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T09:22:38.423248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T09:22:38.423654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.423666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T09:22:38.423671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T09:22:38.424055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.424064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.424069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.424075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.424631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T09:22:38.425052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T09:22:38.425098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T09:22:38.425291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T09:22:38.425316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T09:22:38.425323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.425379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T09:22:38.425386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T09:22:38.425413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T09:22:38.425425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T09:22:38.425785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:22:38.425795Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:22:38.425834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:22:38.425839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T09:22:38.425917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T09:22:38.425924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T09:22:38.425935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T09:22:38.425939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.425945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T09:22:38.425950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T09:22:38.425954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T09:22:38.425958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T09:22:38.425969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T09:22:38.425974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T09:22:38.425979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
4 2024-11-21T09:23:08.635223Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:23:08.635232Z node 122 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000005 first txId#1004 countTxs#1 2024-11-21T09:23:08.635239Z node 122 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000005 2024-11-21T09:23:08.635244Z node 122 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1004:0 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T09:23:08.635284Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [122:122:2148], Recipient [122:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T09:23:08.635289Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T09:23:08.635314Z node 122 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T09:23:08.635320Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T09:23:08.635367Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T09:23:08.635413Z node 122 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T09:23:08.635418Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [122:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T09:23:08.635424Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [122:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T09:23:08.635552Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T09:23:08.635562Z node 122 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T09:23:08.635572Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:23:08.635577Z node 122 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T09:23:08.635581Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:23:08.635587Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T09:23:08.635593Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T09:23:08.635598Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T09:23:08.635603Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T09:23:08.635635Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T09:23:08.635642Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T09:23:08.635647Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T09:23:08.635654Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 2 
2024-11-21T09:23:08.635854Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [122:203:2206], Recipient [122:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2024-11-21T09:23:08.635865Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:23:08.635879Z node 122 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:23:08.635891Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:23:08.635896Z node 122 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:23:08.635902Z node 122 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T09:23:08.635906Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T09:23:08.635922Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:23:08.636099Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [122:203:2206], Recipient [122:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 2 } 2024-11-21T09:23:08.636107Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T09:23:08.636117Z node 122 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:23:08.636128Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T09:23:08.636132Z node 122 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T09:23:08.636136Z node 122 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T09:23:08.636140Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T09:23:08.636152Z node 122 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T09:23:08.636157Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T09:23:08.636849Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:23:08.637296Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 
2024-11-21T09:23:08.637313Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T09:23:08.637366Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T09:23:08.637375Z node 122 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T09:23:08.637432Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T09:23:08.637439Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T09:23:08.637513Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [122:564:2505], Recipient [122:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.637521Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T09:23:08.637525Z node 122 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T09:23:08.637552Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [122:419:2376], Recipient [122:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2024-11-21T09:23:08.637556Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T09:23:08.637569Z node 122 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T09:23:08.637592Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T09:23:08.637596Z node 122 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [122:562:2503] 2024-11-21T09:23:08.637618Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [122:564:2505], Recipient [122:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.637623Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T09:23:08.637627Z node 122 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2024-11-21T09:23:08.637689Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [122:565:2506], Recipient [122:122:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T09:23:08.637694Z node 122 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T09:23:08.637706Z node 122 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T09:23:08.637769Z node 122 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 57us result status StatusSuccess 2024-11-21T09:23:08.637851Z node 122 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 4 
SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.6%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBSVWithReboots::CreateAssignDropIsAllowed >> test.py::test[pg-join_using_multiple2--ForceBlocks] [GOOD] >> test.py::test[pg-join_using_multiple2--Plan] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[insert-literals_to_string-default.txt-Debug] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Plan] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> TCmsTenatsTest::TestTenantRatioLimit >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] >> test.py::test[pg-multi_usage_cross_join-default.txt-Debug] [GOOD] >> test.py::test[pg-multi_usage_cross_join-default.txt-Plan] [GOOD] >> test.py::test[pg-multi_usage_cross_join-default.txt-Results] >> TCmsTenatsTest::TestTenantLimit >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Analyze] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> test.py::test[pg-select_join_full_equi_and_const-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_full_equi_and_const-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Analyze] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Debug] >> test.py::test[expr-list_from_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Plan] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Results] >> test.py::test[tpch-q13-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q13-default.txt-Debug] >> test.py::test[expr-sets-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-sets-default.txt-Plan] [GOOD] >> test.py::test[expr-sets-default.txt-Results] >> test.py::test[json-json_value/passing-default.txt-Debug] [GOOD] >> 
test.py::test[json-json_value/passing-default.txt-ForceBlocks] >> test.py::test[udf-same_udf_modules--ForceBlocks] [GOOD] >> test.py::test[udf-same_udf_modules--Plan] [GOOD] >> test.py::test[udf-same_udf_modules--Results] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Analyze] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Debug] >> TCmsTest::ManageRequestsWrong >> test.py::test[join-join_cbo_3_tables--Analyze] >> test.py::test[action-runtime_for_select-default.txt-Analyze] [GOOD] >> test.py::test[action-runtime_for_select-default.txt-Debug] >> test.py::test[pg-select_win_sum_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_sum_null-default.txt-Results] >> test.py::test[bigdate-tz_table_fill--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_fill--Plan] [GOOD] >> test.py::test[bigdate-tz_table_fill--Results] >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-Analyze] >> test.py::test[expr-list_from_range-default.txt-Results] [GOOD] >> test.py::test[expr-list_replicate_fail--Analyze] [SKIPPED] >> test.py::test[expr-list_replicate_fail--Debug] [SKIPPED] >> test.py::test[pg-multi_usage_cross_join-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_shadow_input_columns_qual-default.txt-Debug] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[params-missing_param_fail--Analyze] >> test.py::test[action-eval_type-default.txt-Analyze] >> test.py::test[expr-list_replicate_fail--ForceBlocks] [SKIPPED] >> test.py::test[expr-list_replicate_fail--Plan] [SKIPPED] >> test.py::test[expr-list_replicate_fail--Results] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] >> test.py::test[agg_phases-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-min-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min-default.txt-Results] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Plan] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Results] >> test.py::test[pg-select_win_count_all-default.txt-Analyze] >> test.py::test[params-missing_param_fail--Analyze] [SKIPPED] >> test.py::test[params-missing_param_fail--Debug] [SKIPPED] >> test.py::test[params-missing_param_fail--ForceBlocks] [SKIPPED] >> test.py::test[params-missing_param_fail--Plan] [SKIPPED] >> test.py::test[params-missing_param_fail--Results] >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2024-11-21T09:22:21.960596Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439660495194504308:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:21.960893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00415a/r3tmp/tmpKYfw5e/pdisk_1.dat 2024-11-21T09:22:22.013619Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65296, node 1 2024-11-21T09:22:22.030328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:22.030341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:22.030342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:22.030371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T09:22:22.050111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.051065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:22.051087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.051492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:22.051535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:22.051542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T09:22:22.051853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:22.051862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T09:22:22.051916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T09:22:22.052126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.052900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732180942100, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T09:22:22.052912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T09:22:22.052971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T09:22:22.053287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T09:22:22.053315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T09:22:22.053328Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T09:22:22.053340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T09:22:22.053350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T09:22:22.053363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T09:22:22.053717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T09:22:22.053740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T09:22:22.053744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T09:22:22.053756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T09:22:22.060812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.060830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.062187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD location { id: 1 host: "::1" port: 12001 } 2024-11-21T09:22:22.630959Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439660500797215704:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T09:22:22.630991Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2024-11-21T09:22:22.633382Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439660500451581971:2226];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/jptk/00415a/r3tmp/tmpYwMiBu/pdisk_1.dat 2024-11-21T09:22:22.638257Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T09:22:22.642618Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T09:22:22.648069Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20842, node 4 2024-11-21T09:22:22.663399Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T09:22:22.663411Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T09:22:22.663413Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T09:22:22.663456Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T09:22:22.731141Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.731182Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.732749Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T09:22:22.733836Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.733895Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.733911Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.733938Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T09:22:22.733950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T09:22:22.734280Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T09:22:22.734298Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T09:22:22.734317Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T09:22:22.734356Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T09:22:22.734368Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2024-11-21T09:22:22.734735Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T09:22:22.734747Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T09:22:22.734790Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T09:22:22.734800Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T09:22:22.734845Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T09:22:22.734944Z node 4 :HIV ... 2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [47:7439660698295865084:3552] 2024-11-21T09:23:08.948608Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [47:7439660698295865088:3555], CA [47:7439660698295865155:3607], CA [47:7439660698295865095:3560], CA [47:7439660698295865118:3576], CA [47:7439660698295865152:3605], CA [47:7439660698295865099:3563], CA [47:7439660698295865115:3574], CA [47:7439660698295865122:3579], CA [47:7439660698295865096:3561], CA [47:7439660698295865100:3564], CA [47:7439660698295865124:3580], CA [47:7439660698295865105:3567], CA [47:7439660698295865080:3549], CA [47:7439660698295865147:3601], CA [47:7439660698295865145:3599], CA [47:7439660698295865140:3594], CA [47:7439660698295865135:3589], CA [47:7439660698295865149:3602], CA [47:7439660698295865143:3597], CA [47:7439660698295865138:3592], CA [47:7439660698295865133:3587], CA [47:7439660698295865085:3553], CA [47:7439660698295865136:3590], CA [47:7439660698295865150:3603], CA [47:7439660698295865119:3577], CA [47:7439660698295865093:3559], CA [47:7439660698295865154:3606], CA [47:7439660698295865116:3575], CA [47:7439660698295865098:3562], CA [47:7439660698295865121:3578], CA [47:7439660698295865102:3565], CA [47:7439660698295865125:3581], CA [47:7439660698295865078:3547], CA [47:7439660698295865106:3568], CA [47:7439660698295865130:3584], CA [47:7439660698295865081:3550], CA [47:7439660698295865104:3566], CA [47:7439660698295865127:3582], CA [47:7439660698295865079:3548], CA [47:7439660698295865146:3600], CA [47:7439660698295865108:3569], CA [47:7439660698295865141:3595], CA [47:7439660698295865131:3585], CA [47:7439660698295865144:3598], CA [47:7439660698295865139:3593], CA [47:7439660698295865134:3588], CA [47:7439660698295865142:3596], CA [47:7439660698295865086:3554], CA [47:7439660698295865137:3591], CA [47:7439660698295865132:3586], CA [47:7439660698295865151:3604], CA [47:7439660698295865113:3573], 2024-11-21T09:23:08.948624Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7439660698295865085:3553], task: 11, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 156 DurationUs: 90000 Tasks { TaskId: 11 CpuTimeUs: 39 FinishTimeMs: 1732180988946 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 7 BuildCpuTimeUs: 32 WaitInputTimeUs: 85518 HostName: "ghrun-qcxhsi27zq" NodeId: 47 StartTimeMs: 1732180988856 } MaxMemoryUsage: 1048576 } 2024-11-21T09:23:08.948626Z node 47 :KQP_EXECUTER INFO: TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [47:7439660698295865085:3553] 2024-11-21T09:23:08.948647Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [47:7439660698295865088:3555], CA [47:7439660698295865155:3607], CA [47:7439660698295865095:3560], CA [47:7439660698295865118:3576], CA [47:7439660698295865152:3605], CA [47:7439660698295865099:3563], CA [47:7439660698295865115:3574], CA [47:7439660698295865122:3579], CA [47:7439660698295865096:3561], CA [47:7439660698295865100:3564], CA [47:7439660698295865124:3580], CA [47:7439660698295865105:3567], CA [47:7439660698295865080:3549], CA [47:7439660698295865147:3601], CA [47:7439660698295865145:3599], CA [47:7439660698295865140:3594], CA [47:7439660698295865135:3589], CA [47:7439660698295865149:3602], CA [47:7439660698295865143:3597], CA [47:7439660698295865138:3592], CA [47:7439660698295865133:3587], CA [47:7439660698295865136:3590], CA [47:7439660698295865150:3603], CA [47:7439660698295865119:3577], CA [47:7439660698295865093:3559], CA [47:7439660698295865154:3606], CA [47:7439660698295865116:3575], CA [47:7439660698295865098:3562], CA [47:7439660698295865121:3578], CA [47:7439660698295865102:3565], CA [47:7439660698295865125:3581], CA [47:7439660698295865078:3547], CA [47:7439660698295865106:3568], CA [47:7439660698295865130:3584], CA [47:7439660698295865081:3550], CA [47:7439660698295865104:3566], CA [47:7439660698295865127:3582], CA [47:7439660698295865079:3548], CA [47:7439660698295865146:3600], CA [47:7439660698295865108:3569], CA [47:7439660698295865141:3595], CA [47:7439660698295865131:3585], CA [47:7439660698295865144:3598], CA [47:7439660698295865139:3593], CA [47:7439660698295865134:3588], CA [47:7439660698295865142:3596], CA [47:7439660698295865086:3554], CA [47:7439660698295865137:3591], CA [47:7439660698295865132:3586], CA [47:7439660698295865151:3604], CA [47:7439660698295865113:3573], 2024-11-21T09:23:08.948662Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7439660698295865086:3554], task: 12, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 151 DurationUs: 90000 Tasks { TaskId: 12 CpuTimeUs: 39 FinishTimeMs: 1732180988946 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 6 BuildCpuTimeUs: 33 WaitInputTimeUs: 85516 HostName: "ghrun-qcxhsi27zq" NodeId: 47 StartTimeMs: 1732180988856 } MaxMemoryUsage: 1048576 } 2024-11-21T09:23:08.948664Z node 47 :KQP_EXECUTER INFO: TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [47:7439660698295865086:3554] 2024-11-21T09:23:08.948682Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [47:7439660698295865088:3555], CA [47:7439660698295865155:3607], CA [47:7439660698295865095:3560], CA [47:7439660698295865118:3576], CA [47:7439660698295865152:3605], CA [47:7439660698295865099:3563], CA [47:7439660698295865115:3574], CA [47:7439660698295865122:3579], CA [47:7439660698295865096:3561], CA [47:7439660698295865100:3564], CA [47:7439660698295865124:3580], CA [47:7439660698295865105:3567], CA [47:7439660698295865080:3549], CA [47:7439660698295865147:3601], CA [47:7439660698295865145:3599], CA [47:7439660698295865140:3594], CA [47:7439660698295865135:3589], CA [47:7439660698295865149:3602], CA [47:7439660698295865143:3597], CA [47:7439660698295865138:3592], CA [47:7439660698295865133:3587], CA [47:7439660698295865136:3590], CA [47:7439660698295865150:3603], CA [47:7439660698295865119:3577], CA [47:7439660698295865093:3559], CA [47:7439660698295865154:3606], CA [47:7439660698295865116:3575], CA [47:7439660698295865098:3562], CA [47:7439660698295865121:3578], CA [47:7439660698295865102:3565], CA [47:7439660698295865125:3581], CA [47:7439660698295865078:3547], CA [47:7439660698295865106:3568], CA [47:7439660698295865130:3584], CA [47:7439660698295865081:3550], CA [47:7439660698295865104:3566], CA [47:7439660698295865127:3582], CA [47:7439660698295865079:3548], CA [47:7439660698295865146:3600], CA [47:7439660698295865108:3569], CA [47:7439660698295865141:3595], CA [47:7439660698295865131:3585], CA [47:7439660698295865144:3598], CA [47:7439660698295865139:3593], CA [47:7439660698295865134:3588], CA [47:7439660698295865142:3596], CA [47:7439660698295865137:3591], CA [47:7439660698295865132:3586], CA [47:7439660698295865151:3604], CA [47:7439660698295865113:3573], 2024-11-21T09:23:08.948697Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7439660698295865108:3569], task: 27, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 183 DurationUs: 89000 Tasks { TaskId: 27 CpuTimeUs: 54 FinishTimeMs: 1732180988946 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 46 WaitInputTimeUs: 80171 HostName: "ghrun-qcxhsi27zq" NodeId: 47 StartTimeMs: 1732180988857 } MaxMemoryUsage: 1048576 } 2024-11-21T09:23:08.948699Z node 47 :KQP_EXECUTER INFO: TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [47:7439660698295865108:3569] 2024-11-21T09:23:08.948719Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439660698295865068:3538] TxId: 281474976715773. Ctx: { TraceId: 01jd70gmvydg15g9gsrfeagw01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OTg5MjY4NC0zOWViZGYyZC1hM2Q4Y2M3Mi1iM2Y2ZTA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [47:7439660698295865088:3555], CA [47:7439660698295865155:3607], CA [47:7439660698295865095:3560], CA [47:7439660698295865118:3576], CA [47:7439660698295865152:3605], CA [47:7439660698295865099:3563], CA [47:7439660698295865115:3574], CA [47:7439660698295865122:3579], CA [47:7439660698295865096:3561], CA [47:7439660698295865100:3564], CA [47:7439660698295865124:3580], CA [47:7439660698295865105:3567], CA [47:7439660698295865080:3549], CA [47:7439660698295865147:3601], CA [47:7439660698295865145:3599], CA [47:7439660698295865140:3594], CA [47:7439660698295865135:3589], CA [47:7439660698295865149:3602], CA [47:7439660698295865143:3597], CA [47:7439660698295865138:3592], CA [47:7439660698295865133:3587], CA [47:7439660698295865136:3590], CA [47:7439660698295865150:3603], CA [47:7439660698295865119:3577], CA [47:7439660698295865093:3559], CA [47:7439660698295865154:3606], CA [47:7439660698295865116:3575], CA [47:7439660698295865098:3562], CA [47:7439660698295865121:3578], CA [47:7439660698295865102:3565], CA [47:7439660698295865125:3581], CA [47:7439660698295865078:3547], CA [47:7439660698295865106:3568], CA [47:7439660698295865130:3584], CA [47:7439660698295865081:3550], CA [47:7439660698295865104:3566], CA [47:7439660698295865127:3582], CA [47:7439660698295865079:3548], CA [47:7439660698295865146:3600], CA [47:7439660698295865141:3595], CA [47:7439660698295865131:3585], CA [47:7439660698295865144:3598], CA [47:7439660698295865139:3593], CA [47:7439660698295865134:3588], CA [47:7439660698295865142:3596], CA [47:7439660698295865137:3591], CA [47:7439660698295865132:3586], CA [47:7439660698295865151:3604], CA [47:7439660698295865113:3573], >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> test.py::test[udf-same_udf_modules--Results] [GOOD] >> test.py::test[udf-sqlproject_grounds-default.txt-Analyze] >> test.py::test[optimizers-multi_to_empty_constraint--Analyze] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Debug]